Chromium Code Reviews

Index: src/heap.cc
diff --git a/src/heap.cc b/src/heap.cc
index 5d1a66e2c9e4d49ffb14101190ffc61ec7d219ed..8875abc6a873189cd77a9be79deda6d39fa674f7 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -941,6 +941,8 @@ void Heap::Scavenge() {
   gc_state_ = SCAVENGE;
+  SwitchScavengingVisitorsTableIfProfilingWasEnabled();
+
   Page::FlipMeaningOfInvalidatedWatermarkFlag(this);
 #ifdef DEBUG
   VerifyPageWatermarkValidity(old_pointer_space_, ALL_VALID);
@@ -1232,6 +1234,29 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
 }
+enum LoggingAndProfiling {
+  LOGGING_AND_PROFILING_ENABLED,
+  LOGGING_AND_PROFILING_DISABLED
+};
+
+
+typedef void (*ScavengingCallback)(Map* map,
+                                   HeapObject** slot,
+                                   HeapObject* object);
+
+
+static Atomic32 scavenging_visitors_table_mode_;
+static VisitorDispatchTable<ScavengingCallback> scavenging_visitors_table_;
+
+
+static inline void DoScavengeObject(Map* map,

Vitaly Repeshko (2011/03/30 15:11:04):
> This probably needs INLINE to prevent surprises wi…

+                                    HeapObject** slot,
+                                    HeapObject* obj) {
+  scavenging_visitors_table_.GetVisitor(map)(map, slot, obj);
+}
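
The reviewer's (truncated) suggestion refers to V8's INLINE macro, which forces inlining where plain `static inline` is only a hint the compiler may ignore; DoScavengeObject sits on the scavenger's hottest path, so an out-of-line call here would be a real cost. A sketch of what such a macro typically looks like (V8's actual definition lives in src/globals.h and differs by version and toolchain):

    // Force inlining; plain `inline` alone lets the compiler decline.
    // (Sketch only; not V8's exact macro.)
    #if defined(__GNUC__)
    #define INLINE(header) inline header __attribute__((always_inline))
    #elif defined(_MSC_VER)
    #define INLINE(header) __forceinline header
    #else
    #define INLINE(header) inline header
    #endif

    // Hypothetical usage on a declaration:
    // INLINE(static void DoScavengeObject(Map* map,
    //                                     HeapObject** slot,
    //                                     HeapObject* obj));
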
+
+
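
DoScavengeObject replaces the old ScavengingVisitor::Scavenge entry point: instead of dispatching through one class's static table, it indexes a process-wide table of function pointers by the object's map and immediately invokes the callback found there. A minimal sketch of the dispatch-table pattern (the visitor-id field and table layout here are illustrative, not V8's):

    // Minimal stand-ins for the types involved (illustrative only).
    struct HeapObject;
    struct Map { int visitor_id; };  // V8 derives such an id from the map

    typedef void (*ScavengingCallback)(Map* map,
                                       HeapObject** slot,
                                       HeapObject* object);

    // A table of callbacks keyed by visitor id, in the spirit of
    // VisitorDispatchTable. Each object kind registers its evacuation
    // routine once; dispatch is one load plus an indirect call.
    template <typename Callback, int kIdCount>
    class DispatchTable {
     public:
      void Register(int id, Callback cb) { callbacks_[id] = cb; }
      Callback GetVisitor(Map* map) const {
        return callbacks_[map->visitor_id];
      }
      // Bulk-copy another table's callbacks (cf. CopyFrom below).
      void CopyFrom(const DispatchTable* other) {
        for (int i = 0; i < kIdCount; i++) callbacks_[i] = other->callbacks_[i];
      }
     private:
      Callback callbacks_[kIdCount];
    };
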
+template<LoggingAndProfiling logging_and_profiling_mode>
 class ScavengingVisitor : public StaticVisitorBase {
  public:
   static void Initialize() {
@@ -1240,23 +1265,22 @@ class ScavengingVisitor : public StaticVisitorBase {
     table_.Register(kVisitShortcutCandidate, &EvacuateShortcutCandidate);
     table_.Register(kVisitByteArray, &EvacuateByteArray);
     table_.Register(kVisitFixedArray, &EvacuateFixedArray);
+
     table_.Register(kVisitGlobalContext,
                     &ObjectEvacuationStrategy<POINTER_OBJECT>::
-                        VisitSpecialized<Context::kSize>);
-
-    typedef ObjectEvacuationStrategy<POINTER_OBJECT> PointerObject;
+                        template VisitSpecialized<Context::kSize>);
     table_.Register(kVisitConsString,
                     &ObjectEvacuationStrategy<POINTER_OBJECT>::
-                        VisitSpecialized<ConsString::kSize>);
+                        template VisitSpecialized<ConsString::kSize>);
     table_.Register(kVisitSharedFunctionInfo,
                     &ObjectEvacuationStrategy<POINTER_OBJECT>::
-                        VisitSpecialized<SharedFunctionInfo::kSize>);
+                        template VisitSpecialized<SharedFunctionInfo::kSize>);
     table_.Register(kVisitJSFunction,
                     &ObjectEvacuationStrategy<POINTER_OBJECT>::
-                        VisitSpecialized<JSFunction::kSize>);
+                        template VisitSpecialized<JSFunction::kSize>);
     table_.RegisterSpecializations<ObjectEvacuationStrategy<DATA_OBJECT>,
                                    kVisitDataObject,
@@ -1271,12 +1295,10 @@ class ScavengingVisitor : public StaticVisitorBase {
                                    kVisitStructGeneric>();
   }
-
-  static inline void Scavenge(Map* map, HeapObject** slot, HeapObject* obj) {
-    table_.GetVisitor(map)(map, slot, obj);
+  static VisitorDispatchTable<ScavengingCallback>* GetTable() {
+    return &table_;
   }
-
  private:
   enum ObjectContents { DATA_OBJECT, POINTER_OBJECT };
   enum SizeRestriction { SMALL, UNKNOWN_SIZE };
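
The `template` keyword inserted before each VisitSpecialized is required syntax, not a behavioral change: now that ScavengingVisitor is itself a class template, its nested ObjectEvacuationStrategy is a dependent type, and a member template of a dependent type must be named with the `template` disambiguator, otherwise the compiler parses the following `<` as less-than. A self-contained illustration with made-up names:

    template <typename T>
    struct Strategy {            // plays the role of ObjectEvacuationStrategy
      template <int kSize>
      static void VisitSpecialized() {}
    };

    template <typename T>
    struct Visitor {             // plays the role of ScavengingVisitor
      static void Initialize() {
        // Strategy<T> depends on T, so naming its member template needs
        // the `template` keyword; without it, `<` parses as a comparison.
        void (*callback)() = &Strategy<T>::template VisitSpecialized<8>;
        callback();
      }
    };

    int main() {
      Visitor<int>::Initialize();
      return 0;
    }
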
@@ -1313,21 +1335,24 @@ class ScavengingVisitor : public StaticVisitorBase {
     // Set the forwarding address.
     source->set_map_word(MapWord::FromForwardingAddress(target));
+    if (logging_and_profiling_mode == LOGGING_AND_PROFILING_ENABLED) {
 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
-    // Update NewSpace stats if necessary.
-    RecordCopiedObject(heap, target);
+      // Update NewSpace stats if necessary.
+      RecordCopiedObject(heap, target);
 #endif
-    HEAP_PROFILE(heap, ObjectMoveEvent(source->address(), target->address()));
+      HEAP_PROFILE(heap, ObjectMoveEvent(source->address(), target->address()));
 #if defined(ENABLE_LOGGING_AND_PROFILING)
-    Isolate* isolate = heap->isolate();
-    if (isolate->logger()->is_logging() ||
-        isolate->cpu_profiler()->is_profiling()) {
-      if (target->IsSharedFunctionInfo()) {
-        PROFILE(isolate, SharedFunctionInfoMoveEvent(
-            source->address(), target->address()));
+      Isolate* isolate = heap->isolate();
+      if (isolate->logger()->is_logging() ||
+          isolate->cpu_profiler()->is_profiling()) {
+        if (target->IsSharedFunctionInfo()) {
+          PROFILE(isolate, SharedFunctionInfoMoveEvent(
+              source->address(), target->address()));
+        }
       }
-    }
 #endif
+    }
+
     return target;
   }
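
The new `if (logging_and_profiling_mode == LOGGING_AND_PROFILING_ENABLED)` guard costs nothing at runtime: the mode is a non-type template parameter, a constant within each instantiation, so the compiler can discard the entire profiling block from the DISABLED instantiation and the fast path keeps its old shape. A standalone sketch of the technique (the function and its logging hook are made up):

    #include <cstdio>

    enum LoggingAndProfiling {
      LOGGING_AND_PROFILING_ENABLED,
      LOGGING_AND_PROFILING_DISABLED
    };

    template <LoggingAndProfiling mode>
    void EvacuateObject(const char* name) {
      // ...the actual copying work would go here...

      // `mode` is a compile-time constant in each instantiation, so the
      // compiler drops this branch entirely from the DISABLED version.
      if (mode == LOGGING_AND_PROFILING_ENABLED) {
        std::printf("object moved: %s\n", name);  // stand-in profiling hook
      }
    }

    int main() {
      EvacuateObject<LOGGING_AND_PROFILING_DISABLED>("a");  // no logging code
      EvacuateObject<LOGGING_AND_PROFILING_ENABLED>("b");   // logs the move
      return 0;
    }
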
@@ -1443,7 +1468,7 @@ class ScavengingVisitor : public StaticVisitorBase {
       return;
     }
-    Scavenge(first->map(), slot, first);
+    DoScavengeObject(first->map(), slot, first);
     object->set_map_word(MapWord::FromForwardingAddress(*slot));
     return;
   }
@@ -1470,13 +1495,51 @@ class ScavengingVisitor : public StaticVisitorBase {
     }
   };
-  typedef void (*Callback)(Map* map, HeapObject** slot, HeapObject* object);
-
-  static VisitorDispatchTable<Callback> table_;
+  static VisitorDispatchTable<ScavengingCallback> table_;
 };
-VisitorDispatchTable<ScavengingVisitor::Callback> ScavengingVisitor::table_;
+template<LoggingAndProfiling logging_and_profiling_mode>
+VisitorDispatchTable<ScavengingCallback>
+    ScavengingVisitor<logging_and_profiling_mode>::table_;
+
+
+static void InitializeScavengingVisitorsTables() {
+  ScavengingVisitor<LOGGING_AND_PROFILING_DISABLED>::Initialize();
+  ScavengingVisitor<LOGGING_AND_PROFILING_ENABLED>::Initialize();
+  scavenging_visitors_table_.CopyFrom(
+      ScavengingVisitor<LOGGING_AND_PROFILING_DISABLED>::GetTable());
+  scavenging_visitors_table_mode_ = LOGGING_AND_PROFILING_DISABLED;
+}
+
+
+void Heap::SwitchScavengingVisitorsTableIfProfilingWasEnabled() {
+  if (scavenging_visitors_table_mode_ == LOGGING_AND_PROFILING_ENABLED) {
+    // Table was already updated by some isolate.
+    return;
+  }
+
+  if (isolate()->logger()->is_logging() ||
+      isolate()->cpu_profiler()->is_profiling() ||
+      (isolate()->heap_profiler() != NULL &&
+       isolate()->heap_profiler()->is_profiling())) {
+    // If one of the isolates is doing scavenge at this moment of time
+    // it might see this table in an inconsitent state when
+    // some of the callbacks point to
+    // ScavengingVisitor<LOGGING_AND_PROFILING_ENABLED> and others
+    // to ScavengingVisitor<LOGGING_AND_PROFILING_DISABLED>.
+    // However this does not lead any bugs as such isolate does not have

Vitaly Repeshko (2011/03/30 15:11:04):
> "lead to"

+    // profiling enabled and any isolate with enabled profiling is guaranteed
+    // to see table it in the consistent state.

Vitaly Repeshko (2011/03/30 15:11:04):
> "table it" -> "the table"

+    scavenging_visitors_table_.CopyFrom(
+        ScavengingVisitor<LOGGING_AND_PROFILING_ENABLED>::GetTable());
+
+    // We use Release_Store to prevent reordering of this write before writes
+    // to the table.
+    Release_Store(&scavenging_visitors_table_mode_,
+                  LOGGING_AND_PROFILING_ENABLED);
+  }
+}
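
The comment block above describes a deliberately benign race, and the Release_Store supplies the ordering that makes it sound: the mode flag only ever flips from DISABLED to ENABLED, a racing non-profiling isolate may briefly see a mixed table (harmless for it), and any thread that later observes the ENABLED flag is guaranteed to also observe every table entry written before the release store. Roughly the same publication pattern in portable C++11 atomics (a sketch, not V8's Atomic32 interface):

    #include <atomic>

    enum Mode { DISABLED, ENABLED };

    typedef void (*Callback)();
    static Callback table[256];            // stand-in for the visitor table
    static std::atomic<int> table_mode(DISABLED);

    void SwitchTableIfProfilingWasEnabled(bool profiling, Callback profiled) {
      if (table_mode.load(std::memory_order_relaxed) == ENABLED) {
        return;  // another thread already published the profiling table
      }
      if (profiling) {
        for (int i = 0; i < 256; i++) table[i] = profiled;  // rewrite entries
        // Release ordering: the table writes above cannot sink below this
        // store, so a reader that observes ENABLED (with an acquire load)
        // also observes the fully updated table.
        table_mode.store(ENABLED, std::memory_order_release);
      }
    }
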
 void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
@@ -1484,7 +1547,7 @@ void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
   MapWord first_word = object->map_word();
   ASSERT(!first_word.IsForwardingAddress());
   Map* map = first_word.ToMap();
-  ScavengingVisitor::Scavenge(map, p, object);
+  DoScavengeObject(map, p, object);
 }
@@ -4758,7 +4821,7 @@ bool Heap::Setup(bool create_heap_objects) {
   static bool initialized_gc = false;
   if (!initialized_gc) {
     initialized_gc = true;

Vitaly Repeshko (2011/03/30 15:11:04):
> nit: Is indentation off here?

-    ScavengingVisitor::Initialize();
+    InitializeScavengingVisitorsTables();
     NewSpaceScavenger::Initialize();
     MarkCompactCollector::Initialize();
   }