Chromium Code Reviews

Unified Diff: src/heap.cc

Issue 2084017: Version 2.2.11... (Closed) Base URL: http://v8.googlecode.com/svn/trunk/
Patch Set: Created 10 years, 7 months ago
 // Copyright 2009 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 97 matching lines...)
 // ConfigureHeap.

 // Will be 4 * reserved_semispace_size_ to ensure that young
 // generation can be aligned to its size.
 int Heap::survived_since_last_expansion_ = 0;
 int Heap::external_allocation_limit_ = 0;

 Heap::HeapState Heap::gc_state_ = NOT_IN_GC;

 int Heap::mc_count_ = 0;
+int Heap::ms_count_ = 0;
 int Heap::gc_count_ = 0;

+GCTracer* Heap::tracer_ = NULL;
+
 int Heap::unflattened_strings_length_ = 0;

 int Heap::always_allocate_scope_depth_ = 0;
 int Heap::linear_allocation_scope_depth_ = 0;
 int Heap::contexts_disposed_ = 0;

 #ifdef DEBUG
 bool Heap::allocation_allowed_ = true;

 int Heap::allocation_timeout_ = 0;
 bool Heap::disallow_allocation_failure_ = false;
 #endif  // DEBUG

+int GCTracer::alive_after_last_gc_ = 0;
+double GCTracer::last_gc_end_timestamp_ = 0.0;
+int GCTracer::max_gc_pause_ = 0;
+int GCTracer::max_alive_after_gc_ = 0;
+int GCTracer::min_in_mutator_ = kMaxInt;

 int Heap::Capacity() {
   if (!HasBeenSetup()) return 0;

   return new_space_.Capacity() +
       old_pointer_space_->Capacity() +
       old_data_space_->Capacity() +
       code_space_->Capacity() +
       map_space_->Capacity() +
       cell_space_->Capacity();
(...skipping 420 matching lines...)
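Note: the five GCTracer statics added above back the cumulative GC statistics that this patch updates in ~GCTracer() and prints from Heap::TearDown() when FLAG_print_cumulative_gc_stat is set (both further down in this file). A minimal standalone sketch of that kind of maxima/minima bookkeeping, with kMaxInt acting as a "nothing measured yet" sentinel; the Record() helper and every other detail here are illustrative assumptions, not code from this patch:

// Sketch only: class-level cumulative GC statistics folded in after each GC.
#include <algorithm>
#include <limits>

struct CumulativeGcStats {
  static int max_gc_pause_;        // longest single GC pause observed, in ms
  static int max_alive_after_gc_;  // largest live heap size seen right after a GC
  static int min_in_mutator_;      // shortest stretch spent in the mutator between GCs, in ms

  static void Record(int pause_ms, int alive_bytes, int mutator_ms) {
    max_gc_pause_ = std::max(max_gc_pause_, pause_ms);
    max_alive_after_gc_ = std::max(max_alive_after_gc_, alive_bytes);
    min_in_mutator_ = std::min(min_in_mutator_, mutator_ms);
  }
};

int CumulativeGcStats::max_gc_pause_ = 0;
int CumulativeGcStats::max_alive_after_gc_ = 0;
// Start at the largest representable value (the patch uses kMaxInt) so the
// first real measurement always replaces the sentinel.
int CumulativeGcStats::min_in_mutator_ = std::numeric_limits<int>::max();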
   ThreadManager::IterateThreads(&visitor);
 }


 void Heap::PerformGarbageCollection(AllocationSpace space,
                                     GarbageCollector collector,
                                     GCTracer* tracer) {
   VerifySymbolTable();
   if (collector == MARK_COMPACTOR && global_gc_prologue_callback_) {
     ASSERT(!allocation_allowed_);
-    GCTracer::ExternalScope scope(tracer);
+    GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
     global_gc_prologue_callback_();
   }

   GCType gc_type =
       collector == MARK_COMPACTOR ? kGCTypeMarkSweepCompact : kGCTypeScavenge;

   for (int i = 0; i < gc_prologue_callbacks_.length(); ++i) {
     if (gc_type & gc_prologue_callbacks_[i].gc_type) {
       gc_prologue_callbacks_[i].callback(gc_type, kNoGCCallbackFlags);
     }
   }

   EnsureFromSpaceIsCommitted();

   if (collector == MARK_COMPACTOR) {
     // Perform mark-sweep with optional compaction.
     MarkCompact(tracer);

     int old_gen_size = PromotedSpaceSize();
     old_gen_promotion_limit_ =
         old_gen_size + Max(kMinimumPromotionLimit, old_gen_size / 3);
     old_gen_allocation_limit_ =
         old_gen_size + Max(kMinimumAllocationLimit, old_gen_size / 2);
     old_gen_exhausted_ = false;
   } else {
+    tracer_ = tracer;
     Scavenge();
+    tracer_ = NULL;
   }

   Counters::objs_since_last_young.Set(0);

   if (collector == MARK_COMPACTOR) {
     DisableAssertNoAllocation allow_allocation;
-    GCTracer::ExternalScope scope(tracer);
+    GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
     GlobalHandles::PostGarbageCollectionProcessing();
   }

   // Update relocatables.
   Relocatable::PostGarbageCollectionProcessing();

   if (collector == MARK_COMPACTOR) {
     // Register the amount of external allocated memory.
     amount_of_external_allocated_memory_at_last_global_gc_ =
         amount_of_external_allocated_memory_;
   }

   GCCallbackFlags callback_flags = tracer->is_compacting()
       ? kGCCallbackFlagCompacted
       : kNoGCCallbackFlags;
   for (int i = 0; i < gc_epilogue_callbacks_.length(); ++i) {
     if (gc_type & gc_epilogue_callbacks_[i].gc_type) {
       gc_epilogue_callbacks_[i].callback(gc_type, callback_flags);
     }
   }

   if (collector == MARK_COMPACTOR && global_gc_epilogue_callback_) {
     ASSERT(!allocation_allowed_);
-    GCTracer::ExternalScope scope(tracer);
+    GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
     global_gc_epilogue_callback_();
   }
   VerifySymbolTable();
 }


 void Heap::MarkCompact(GCTracer* tracer) {
   gc_state_ = MARK_COMPACT;
-  mc_count_++;
+  if (MarkCompactCollector::IsCompacting()) {
+    mc_count_++;
+  } else {
+    ms_count_++;
+  }
   tracer->set_full_gc_count(mc_count_);
   LOG(ResourceEvent("markcompact", "begin"));

   MarkCompactCollector::Prepare(tracer);

   bool is_compacting = MarkCompactCollector::IsCompacting();

   MarkCompactPrologue(is_compacting);

   MarkCompactCollector::CollectGarbage();
(...skipping 522 matching lines...)
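Note: the GCTracer::ExternalScope scope(tracer) call sites above become GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL), i.e. one generic RAII phase timer parameterized by a scope kind instead of a dedicated timer for embedder callbacks; the constructor further down zeroes a scopes_ array of Scope::kNumberOfScopes entries and the destructor prints the EXTERNAL, MC_MARK, MC_SWEEP and MC_COMPACT slots. A self-contained sketch of that pattern (the class layout and the std::chrono timing are assumptions for illustration, not the declarations from heap.h):

// Sketch of an RAII per-phase timer in the spirit of GCTracer::Scope.
// Constructing a Scope stamps the start time; its destructor adds the elapsed
// milliseconds to the tracer's accumulator for that phase.
#include <chrono>

class TracerSketch {
 public:
  enum ScopeKind { EXTERNAL, MC_MARK, MC_SWEEP, MC_COMPACT, kNumberOfScopes };

  class Scope {
   public:
    Scope(TracerSketch* tracer, ScopeKind kind)
        : tracer_(tracer), kind_(kind),
          start_(std::chrono::steady_clock::now()) {}
    ~Scope() {
      std::chrono::duration<double, std::milli> elapsed =
          std::chrono::steady_clock::now() - start_;
      tracer_->scopes_[kind_] += elapsed.count();  // attribute this block's cost
    }
   private:
    TracerSketch* tracer_;
    ScopeKind kind_;
    std::chrono::steady_clock::time_point start_;
  };

  double scopes_[kNumberOfScopes] = {0.0};  // per-phase totals in ms
};

With a shape like this, wrapping a GC prologue/epilogue callback in a Scope attributes exactly the callback's elapsed time to the EXTERNAL slot, which is what the external= field of the new trace output reports.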
         promotion_queue.insert(object, first_word.ToMap());
         object->set_map_word(MapWord::FromForwardingAddress(target));

         // Give the space allocated for the result a proper map by
         // treating it as a free list node (not linked into the free
         // list).
         FreeListNode* node = FreeListNode::FromAddress(target->address());
         node->set_size(object_size);

         *p = target;
+        tracer()->increment_promoted_objects_size(object_size);
         return;
       }
     } else {
       OldSpace* target_space = Heap::TargetSpace(object);
       ASSERT(target_space == Heap::old_pointer_space_ ||
              target_space == Heap::old_data_space_);
       result = target_space->AllocateRaw(object_size);
       if (!result->IsFailure()) {
         HeapObject* target = HeapObject::cast(result);
         if (target_space == Heap::old_pointer_space_) {
(...skipping 15 matching lines...)
           // Objects promoted to the data space can be copied immediately
           // and not revisited---we will never sweep that space for
           // pointers and the copied objects do not contain pointers to
           // new space objects.
           *p = MigrateObject(object, target, object_size);
 #ifdef DEBUG
           VerifyNonPointerSpacePointersVisitor v;
           (*p)->Iterate(&v);
 #endif
         }
+        tracer()->increment_promoted_objects_size(object_size);
         return;
       }
     }
   }
   // The object should remain in new space or the old space allocation failed.
   Object* result = new_space_.AllocateRaw(object_size);
   // Failed allocation at this point is utterly unexpected.
   ASSERT(!result->IsFailure());
   *p = MigrateObject(object, HeapObject::cast(result), object_size);
 }
(...skipping 830 matching lines...)
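Note: both successful promotion branches above now call tracer()->increment_promoted_objects_size(object_size), which feeds the promoted= field of the trace line printed by ~GCTracer() later in this patch. The accumulator is presumably nothing more elaborate than the following (assumed shape, not the declaration from this change):

// Assumed shape of the per-collection promotion accumulator on the tracer.
class PromotionAccounting {
 public:
  void increment_promoted_objects_size(int object_size) {
    promoted_objects_size_ += object_size;
  }
  int promoted_objects_size() const { return promoted_objects_size_; }

 private:
  int promoted_objects_size_ = 0;  // bytes promoted out of new space during this GC
};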
   } else if (length == 2) {
     // Optimization for 2-byte strings often used as keys in a decompression
     // dictionary. Check whether we already have the string in the symbol
     // table to prevent creation of many unneccesary strings.
     unsigned c1 = buffer->Get(start);
     unsigned c2 = buffer->Get(start + 1);
     return MakeOrFindTwoCharacterString(c1, c2);
   }

   // Make an attempt to flatten the buffer to reduce access time.
-  buffer->TryFlatten();
+  buffer = buffer->TryFlattenGetString();

   Object* result = buffer->IsAsciiRepresentation()
       ? AllocateRawAsciiString(length, pretenure)
       : AllocateRawTwoByteString(length, pretenure);
   if (result->IsFailure()) return result;
   String* string_result = String::cast(result);
   // Copy the characters into the new object.
   if (buffer->IsAsciiRepresentation()) {
     ASSERT(string_result->IsAsciiRepresentation());
     char* dest = SeqAsciiString::cast(string_result)->GetChars();
(...skipping 1675 matching lines...)
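Note on buffer = buffer->TryFlattenGetString(): the old buffer->TryFlatten() call discarded its result, so the IsAsciiRepresentation() check and the character copy below kept going through the original object (for example a ConsString wrapper) rather than the flat string produced by flattening. Assigning the result lets the rest of this function work on the flattened representation when flattening succeeds. A rough sketch of the return-the-object-to-use idiom; the types and the failure handling are illustrative assumptions, not V8's String API:

// Illustrative sketch of the idiom behind TryFlattenGetString(): perform the
// (possibly failing) flatten, then return whichever object the caller should
// use from now on, so call sites can write `s = s->TryFlattenGetString();`.
struct StringSketch {
  StringSketch* flat_form = nullptr;  // set once a flat copy exists (assumption)

  // Stand-in for the real flattening step; returning nullptr models failure.
  StringSketch* TryFlatten() { return flat_form; }

  StringSketch* TryFlattenGetString() {
    StringSketch* flat = TryFlatten();
    return flat != nullptr ? flat : this;  // fall back to the original on failure
  }
};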
   roots_[kStackLimitRootIndex] =
       reinterpret_cast<Object*>(
           (StackGuard::jslimit() & ~kSmiTagMask) | kSmiTag);
   roots_[kRealStackLimitRootIndex] =
       reinterpret_cast<Object*>(
           (StackGuard::real_jslimit() & ~kSmiTagMask) | kSmiTag);
 }


 void Heap::TearDown() {
+  if (FLAG_print_cumulative_gc_stat) {
+    PrintF("\n\n");
+    PrintF("gc_count=%d ", gc_count_);
+    PrintF("mark_sweep_count=%d ", ms_count_);
+    PrintF("mark_compact_count=%d ", mc_count_);
+    PrintF("max_gc_pause=%d ", GCTracer::get_max_gc_pause());
+    PrintF("min_in_mutator=%d ", GCTracer::get_min_in_mutator());
+    PrintF("max_alive_after_gc=%d ", GCTracer::get_max_alive_after_gc());
+    PrintF("\n\n");
+  }
+
   GlobalHandles::TearDown();

   ExternalStringTable::TearDown();

   new_space_.TearDown();

   if (old_pointer_space_ != NULL) {
     old_pointer_space_->TearDown();
     delete old_pointer_space_;
     old_pointer_space_ = NULL;
(...skipping 455 matching lines...)
 void Heap::TracePathToGlobal() {
   search_target = NULL;
   search_for_any_global = true;

   MarkRootVisitor root_visitor;
   IterateRoots(&root_visitor, VISIT_ONLY_STRONG);
 }
 #endif


+static int CountTotalHolesSize() {
+  int holes_size = 0;
+  OldSpaces spaces;
+  for (OldSpace* space = spaces.next();
+       space != NULL;
+       space = spaces.next()) {
+    holes_size += space->Waste() + space->AvailableFree();
+  }
+  return holes_size;
+}
+
+
 GCTracer::GCTracer()
     : start_time_(0.0),
-      start_size_(0.0),
-      external_time_(0.0),
+      start_size_(0),
       gc_count_(0),
       full_gc_count_(0),
       is_compacting_(false),
-      marked_count_(0) {
+      marked_count_(0),
+      allocated_since_last_gc_(0),
+      spent_in_mutator_(0),
+      promoted_objects_size_(0) {
   // These two fields reflect the state of the previous full collection.
   // Set them before they are changed by the collector.
   previous_has_compacted_ = MarkCompactCollector::HasCompacted();
   previous_marked_count_ = MarkCompactCollector::previous_marked_count();
-  if (!FLAG_trace_gc) return;
+  if (!FLAG_trace_gc && !FLAG_print_cumulative_gc_stat) return;
   start_time_ = OS::TimeCurrentMillis();
-  start_size_ = SizeOfHeapObjects();
+  start_size_ = Heap::SizeOfObjects();
+
+  for (int i = 0; i < Scope::kNumberOfScopes; i++) {
+    scopes_[i] = 0;
+  }
+
+  in_free_list_or_wasted_before_gc_ = CountTotalHolesSize();
+
+  allocated_since_last_gc_ = Heap::SizeOfObjects() - alive_after_last_gc_;
+
+  if (last_gc_end_timestamp_ > 0) {
+    spent_in_mutator_ = Max(start_time_ - last_gc_end_timestamp_, 0.0);
+  }
 }


 GCTracer::~GCTracer() {
-  if (!FLAG_trace_gc) return;
   // Printf ONE line iff flag is set.
-  int time = static_cast<int>(OS::TimeCurrentMillis() - start_time_);
-  int external_time = static_cast<int>(external_time_);
-  PrintF("%s %.1f -> %.1f MB, ",
-         CollectorString(), start_size_, SizeOfHeapObjects());
-  if (external_time > 0) PrintF("%d / ", external_time);
-  PrintF("%d ms.\n", time);
+  if (!FLAG_trace_gc && !FLAG_print_cumulative_gc_stat) return;
+
+  bool first_gc = (last_gc_end_timestamp_ == 0);
+
+  alive_after_last_gc_ = Heap::SizeOfObjects();
+  last_gc_end_timestamp_ = OS::TimeCurrentMillis();
+
+  int time = static_cast<int>(last_gc_end_timestamp_ - start_time_);
+
+  // Update cumulative GC statistics if required.
+  if (FLAG_print_cumulative_gc_stat) {
+    max_gc_pause_ = Max(max_gc_pause_, time);
+    max_alive_after_gc_ = Max(max_alive_after_gc_, alive_after_last_gc_);
+    if (!first_gc) {
+      min_in_mutator_ = Min(min_in_mutator_,
+                            static_cast<int>(spent_in_mutator_));
+    }
+  }
+
+  if (!FLAG_trace_gc_nvp) {
+    int external_time = static_cast<int>(scopes_[Scope::EXTERNAL]);
+
+    PrintF("%s %.1f -> %.1f MB, ",
+           CollectorString(),
+           static_cast<double>(start_size_) / MB,
+           SizeOfHeapObjects());
+
+    if (external_time > 0) PrintF("%d / ", external_time);
+    PrintF("%d ms.\n", time);
+  } else {
+    PrintF("pause=%d ", time);
+    PrintF("mutator=%d ",
+           static_cast<int>(spent_in_mutator_));
+
+    PrintF("gc=");
+    switch (collector_) {
+      case SCAVENGER:
+        PrintF("s");
+        break;
+      case MARK_COMPACTOR:
+        PrintF(MarkCompactCollector::HasCompacted() ? "mc" : "ms");
+        break;
+      default:
+        UNREACHABLE();
+    }
+    PrintF(" ");
+
+    PrintF("external=%d ", static_cast<int>(scopes_[Scope::EXTERNAL]));
+    PrintF("mark=%d ", static_cast<int>(scopes_[Scope::MC_MARK]));
+    PrintF("sweep=%d ", static_cast<int>(scopes_[Scope::MC_SWEEP]));
+    PrintF("compact=%d ", static_cast<int>(scopes_[Scope::MC_COMPACT]));
+
+    PrintF("total_size_before=%d ", start_size_);
+    PrintF("total_size_after=%d ", Heap::SizeOfObjects());
+    PrintF("holes_size_before=%d ", in_free_list_or_wasted_before_gc_);
+    PrintF("holes_size_after=%d ", CountTotalHolesSize());
+
+    PrintF("allocated=%d ", allocated_since_last_gc_);
+    PrintF("promoted=%d ", promoted_objects_size_);
+
+    PrintF("\n");
+  }

 #if defined(ENABLE_LOGGING_AND_PROFILING)
   Heap::PrintShortHeapStatistics();
 #endif
 }


 const char* GCTracer::CollectorString() {
   switch (collector_) {
     case SCAVENGER:
(...skipping 118 matching lines...)
 void ExternalStringTable::TearDown() {
   new_space_strings_.Free();
   old_space_strings_.Free();
 }


 List<Object*> ExternalStringTable::new_space_strings_;
 List<Object*> ExternalStringTable::old_space_strings_;

 } }  // namespace v8::internal