Chromium Code Reviews
| Index: src/mark-compact.cc |
| diff --git a/src/mark-compact.cc b/src/mark-compact.cc |
| index 2704f51f0adf83ed3d90a1de4dcb14ce0e189c4a..75d50bf410e16387534854a59c3676be46736647 100644 |
| --- a/src/mark-compact.cc |
| +++ b/src/mark-compact.cc |
| @@ -857,133 +857,69 @@ void MarkCompactCollector::Finish() { |
| // and continue with marking. This process repeats until all reachable |
| // objects have been marked. |
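Note: the comment above describes the standard transitive-marking fixpoint. A minimal sketch of that loop, with hypothetical names for the deque and visitor (the real collector interleaves this with deque-overflow handling):

    // Drain grey objects until no reachable object is left unmarked.
    while (!marking_deque->IsEmpty()) {
      HeapObject* object = marking_deque->Pop();  // a grey object
      object->Iterate(&marking_visitor);          // marks its children grey
      // object is now black; newly greyed children keep the loop going.
    }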
| -class CodeFlusher { |
| - public: |
| - explicit CodeFlusher(Isolate* isolate) |
| - : isolate_(isolate), |
| - jsfunction_candidates_head_(NULL), |
| - shared_function_info_candidates_head_(NULL) {} |
| - |
| - void AddCandidate(SharedFunctionInfo* shared_info) { |
| - SetNextCandidate(shared_info, shared_function_info_candidates_head_); |
| - shared_function_info_candidates_head_ = shared_info; |
| - } |
| +void CodeFlusher::ProcessJSFunctionCandidates() { |
| + Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile); |
| - void AddCandidate(JSFunction* function) { |
| - ASSERT(function->code() == function->shared()->code()); |
| + JSFunction* candidate = jsfunction_candidates_head_; |
| + JSFunction* next_candidate; |
| + while (candidate != NULL) { |
| + next_candidate = GetNextCandidate(candidate); |
| - SetNextCandidate(function, jsfunction_candidates_head_); |
| - jsfunction_candidates_head_ = function; |
| - } |
| - |
| - void ProcessCandidates() { |
| - ProcessSharedFunctionInfoCandidates(); |
| - ProcessJSFunctionCandidates(); |
| - } |
| + SharedFunctionInfo* shared = candidate->shared(); |
| - private: |
| - void ProcessJSFunctionCandidates() { |
| - Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile); |
| - |
| - JSFunction* candidate = jsfunction_candidates_head_; |
| - JSFunction* next_candidate; |
| - while (candidate != NULL) { |
| - next_candidate = GetNextCandidate(candidate); |
| - |
| - SharedFunctionInfo* shared = candidate->shared(); |
| - |
| - Code* code = shared->code(); |
| - MarkBit code_mark = Marking::MarkBitFrom(code); |
| - if (!code_mark.Get()) { |
| - shared->set_code(lazy_compile); |
| - candidate->set_code(lazy_compile); |
| - } else { |
| - candidate->set_code(shared->code()); |
| - } |
| - |
| - // We are in the middle of a GC cycle so the write barrier in the code |
| - // setter did not record the slot update and we have to do that manually. |
| - Address slot = candidate->address() + JSFunction::kCodeEntryOffset; |
| - Code* target = Code::cast(Code::GetObjectFromEntryAddress(slot)); |
| - isolate_->heap()->mark_compact_collector()-> |
| - RecordCodeEntrySlot(slot, target); |
| - |
| - RecordSharedFunctionInfoCodeSlot(shared); |
| - |
| - candidate = next_candidate; |
| + Code* code = shared->code(); |
| + MarkBit code_mark = Marking::MarkBitFrom(code); |
| + if (!code_mark.Get()) { |
| + shared->set_code(lazy_compile); |
| + candidate->set_code(lazy_compile); |
| + } else { |
| + candidate->set_code(shared->code()); |
| } |
| - jsfunction_candidates_head_ = NULL; |
| - } |
| - |
| - |
| - void ProcessSharedFunctionInfoCandidates() { |
| - Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile); |
| - |
| - SharedFunctionInfo* candidate = shared_function_info_candidates_head_; |
| - SharedFunctionInfo* next_candidate; |
| - while (candidate != NULL) { |
| - next_candidate = GetNextCandidate(candidate); |
| - SetNextCandidate(candidate, NULL); |
| - |
| - Code* code = candidate->code(); |
| - MarkBit code_mark = Marking::MarkBitFrom(code); |
| - if (!code_mark.Get()) { |
| - candidate->set_code(lazy_compile); |
| - } |
| - |
| - RecordSharedFunctionInfoCodeSlot(candidate); |
| + // We are in the middle of a GC cycle, so the write barrier in the code |
| + // setter did not record the slot update; we have to do that manually. |
| + Address slot = candidate->address() + JSFunction::kCodeEntryOffset; |
| + Code* target = Code::cast(Code::GetObjectFromEntryAddress(slot)); |
| + isolate_->heap()->mark_compact_collector()-> |
| + RecordCodeEntrySlot(slot, target); |
| - candidate = next_candidate; |
| - } |
| + Object** shared_code_slot = |
| + HeapObject::RawField(shared, SharedFunctionInfo::kCodeOffset); |
| + isolate_->heap()->mark_compact_collector()-> |
| + RecordSlot(shared_code_slot, shared_code_slot, *shared_code_slot); |
| - shared_function_info_candidates_head_ = NULL; |
| + candidate = next_candidate; |
| } |
| - void RecordSharedFunctionInfoCodeSlot(SharedFunctionInfo* shared) { |
| - Object** slot = HeapObject::RawField(shared, |
| - SharedFunctionInfo::kCodeOffset); |
| - isolate_->heap()->mark_compact_collector()-> |
| - RecordSlot(slot, slot, HeapObject::cast(*slot)); |
| - } |
| + jsfunction_candidates_head_ = NULL; |
| +} |
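Note: the manual RecordCodeEntrySlot/RecordSlot calls above compensate for the write barrier being inactive mid-collection. As a hedged sketch (names and structure hypothetical, not the actual V8 barrier), an incremental write barrier normally does this recording itself:

    // Outside a GC pause, a store like candidate->set_code(...) would pass
    // through a barrier roughly shaped like this, so no manual bookkeeping
    // would be needed.
    void RecordWrite(Heap* heap, Object** slot, Object* value) {
      if (heap->incremental_marking()->IsMarking()) {
        heap->mark_compact_collector()->RecordSlot(slot, slot, value);
      }
    }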
| - static JSFunction** GetNextCandidateField(JSFunction* candidate) { |
| - return reinterpret_cast<JSFunction**>( |
| - candidate->address() + JSFunction::kCodeEntryOffset); |
| - } |
| - static JSFunction* GetNextCandidate(JSFunction* candidate) { |
| - return *GetNextCandidateField(candidate); |
| - } |
| +void CodeFlusher::ProcessSharedFunctionInfoCandidates() { |
| + Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile); |
| - static void SetNextCandidate(JSFunction* candidate, |
| - JSFunction* next_candidate) { |
| - *GetNextCandidateField(candidate) = next_candidate; |
| - } |
| + SharedFunctionInfo* candidate = shared_function_info_candidates_head_; |
| + SharedFunctionInfo* next_candidate; |
| + while (candidate != NULL) { |
| + next_candidate = GetNextCandidate(candidate); |
| + SetNextCandidate(candidate, NULL); |
| - static SharedFunctionInfo** GetNextCandidateField( |
| - SharedFunctionInfo* candidate) { |
| Code* code = candidate->code(); |
| - return reinterpret_cast<SharedFunctionInfo**>( |
| - code->address() + Code::kGCMetadataOffset); |
| - } |
| + MarkBit code_mark = Marking::MarkBitFrom(code); |
| + if (!code_mark.Get()) { |
| + candidate->set_code(lazy_compile); |
| + } |
| - static SharedFunctionInfo* GetNextCandidate(SharedFunctionInfo* candidate) { |
| - return reinterpret_cast<SharedFunctionInfo*>( |
| - candidate->code()->gc_metadata()); |
| - } |
| + Object** code_slot = |
| + HeapObject::RawField(candidate, SharedFunctionInfo::kCodeOffset); |
| + isolate_->heap()->mark_compact_collector()-> |
| + RecordSlot(code_slot, code_slot, *code_slot); |
| - static void SetNextCandidate(SharedFunctionInfo* candidate, |
| - SharedFunctionInfo* next_candidate) { |
| - candidate->code()->set_gc_metadata(next_candidate); |
| + candidate = next_candidate; |
| } |
| - Isolate* isolate_; |
| - JSFunction* jsfunction_candidates_head_; |
| - SharedFunctionInfo* shared_function_info_candidates_head_; |
| - |
| - DISALLOW_COPY_AND_ASSIGN(CodeFlusher); |
| -}; |
| + shared_function_info_candidates_head_ = NULL; |
| +} |
| MarkCompactCollector::~MarkCompactCollector() { |
| @@ -1163,6 +1099,11 @@ class MarkCompactMarkingVisitor |
| return true; |
| } |
| + INLINE(static void BeforeVisitingSharedFunctionInfo(HeapObject* object)) { |
| + SharedFunctionInfo* shared = SharedFunctionInfo::cast(object); |
| + shared->BeforeVisitingPointers(); |
| + } |
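Note: this hook is presumably invoked by the shared StaticMarkingVisitor before it walks the SharedFunctionInfo body; the dispatch-table wiring is not shown in this diff. A minimal sketch of the assumed call site, mirroring the removed VisitSharedFunctionInfoGeneric below:

    // Hypothetical caller: run the pre-pass, then visit the object body.
    static void VisitSharedFunctionInfo(Map* map, HeapObject* object) {
      StaticVisitor::BeforeVisitingSharedFunctionInfo(object);
      FixedBodyVisitor<StaticVisitor,
                       SharedFunctionInfo::BodyDescriptor,
                       void>::Visit(map, object);
    }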
| + |
| static void VisitJSWeakMap(Map* map, HeapObject* object) { |
| MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector(); |
| JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(object); |
| @@ -1206,123 +1147,8 @@ class MarkCompactMarkingVisitor |
| // Code flushing support. |
| - // How many collections a newly compiled code object will survive before |
| - // being flushed. |
| - static const int kCodeAgeThreshold = 5; |
| - |
| static const int kRegExpCodeThreshold = 5; |
| - inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) { |
| - Object* undefined = heap->undefined_value(); |
| - return (info->script() != undefined) && |
| - (reinterpret_cast<Script*>(info->script())->source() != undefined); |
| - } |
| - |
| - |
| - inline static bool IsCompiled(JSFunction* function) { |
| - return function->code() != |
| - function->GetIsolate()->builtins()->builtin(Builtins::kLazyCompile); |
| - } |
| - |
| - inline static bool IsCompiled(SharedFunctionInfo* function) { |
| - return function->code() != |
| - function->GetIsolate()->builtins()->builtin(Builtins::kLazyCompile); |
| - } |
| - |
| - inline static bool IsFlushable(Heap* heap, JSFunction* function) { |
| - SharedFunctionInfo* shared_info = function->unchecked_shared(); |
| - |
| - // Code is either on the stack, in the compilation cache, or referenced |
| - // by an optimized version of the function. |
| - MarkBit code_mark = Marking::MarkBitFrom(function->code()); |
| - if (code_mark.Get()) { |
| - if (!Marking::MarkBitFrom(shared_info).Get()) { |
| - shared_info->set_code_age(0); |
| - } |
| - return false; |
| - } |
| - |
| - // We do not flush code for optimized functions. |
| - if (function->code() != shared_info->code()) { |
| - return false; |
| - } |
| - |
| - return IsFlushable(heap, shared_info); |
| - } |
| - |
| - inline static bool IsFlushable(Heap* heap, SharedFunctionInfo* shared_info) { |
| - // Code is either on the stack, in the compilation cache, or referenced |
| - // by an optimized version of the function. |
| - MarkBit code_mark = |
| - Marking::MarkBitFrom(shared_info->code()); |
| - if (code_mark.Get()) { |
| - return false; |
| - } |
| - |
| - // The function must be compiled and have the source code available |
| - // so that we can recompile it in case we need the function again. |
| - if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) { |
| - return false; |
| - } |
| - |
| - // We never flush code for Api functions. |
| - Object* function_data = shared_info->function_data(); |
| - if (function_data->IsFunctionTemplateInfo()) { |
| - return false; |
| - } |
| - |
| - // Only flush code for functions. |
| - if (shared_info->code()->kind() != Code::FUNCTION) { |
| - return false; |
| - } |
| - |
| - // Function must be lazy compilable. |
| - if (!shared_info->allows_lazy_compilation()) { |
| - return false; |
| - } |
| - |
| - // If this is a full script wrapped in a function we do not flush the code. |
| - if (shared_info->is_toplevel()) { |
| - return false; |
| - } |
| - |
| - // Age this shared function info. |
| - if (shared_info->code_age() < kCodeAgeThreshold) { |
| - shared_info->set_code_age(shared_info->code_age() + 1); |
| - return false; |
| - } |
| - |
| - return true; |
| - } |
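Note: the tail of IsFlushable implements a simple survival counter: code must go unmarked for kCodeAgeThreshold consecutive collections before it becomes a flush candidate. A standalone sketch of that heuristic (helper name hypothetical):

    // Returns true only once the code has aged past the threshold.
    bool HasAgedOut(SharedFunctionInfo* info, int threshold) {
      if (info->code_age() < threshold) {
        info->set_code_age(info->code_age() + 1);  // survived one more GC
        return false;
      }
      return true;  // old enough; flushing back to lazy-compile is safe
    }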
| - |
| - |
| - static bool FlushCodeForFunction(Heap* heap, JSFunction* function) { |
| - if (!IsFlushable(heap, function)) return false; |
| - |
| - // This function's code looks flushable. But we have to postpone the |
| - // decision until we see all functions that point to the same |
| - // SharedFunctionInfo because some of them might be optimized. |
| - // That would make the nonoptimized version of the code nonflushable, |
| - // because it is required for bailing out from optimized code. |
| - heap->mark_compact_collector()->code_flusher()->AddCandidate(function); |
| - return true; |
| - } |
| - |
| - static inline bool IsValidNotBuiltinContext(Object* ctx) { |
| - return ctx->IsContext() && |
| - !Context::cast(ctx)->global_object()->IsJSBuiltinsObject(); |
| - } |
| - |
| - |
| - static void VisitSharedFunctionInfoGeneric(Map* map, HeapObject* object) { |
| - SharedFunctionInfo::cast(object)->BeforeVisitingPointers(); |
| - |
| - FixedBodyVisitor<MarkCompactMarkingVisitor, |
| - SharedFunctionInfo::BodyDescriptor, |
| - void>::Visit(map, object); |
| - } |
| - |
| - |
| static void UpdateRegExpCodeAgeAndFlush(Heap* heap, |
| JSRegExp* re, |
| bool is_ascii) { |
| @@ -1375,7 +1201,6 @@ class MarkCompactMarkingVisitor |
| } |
| } |
|
ulan (2012/10/12 11:48:32): This line seems to be accidentally removed.
Michael Starzinger (2012/10/12 12:16:37): Done.
|
| - |
| // Works by setting the current sweep_generation (as a smi) in the |
| // code object place in the data array of the RegExp and keeps a copy |
| // around that can be reinstated if we reuse the RegExp before flushing. |
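Note: a hedged sketch of the stash-and-restore scheme this comment describes, assuming the JSRegExp data-array accessors behave as their names suggest:

    // Replace the compiled code with the sweep generation (a tombstone),
    // but keep a copy that can be reinstated if the RegExp runs again
    // before the code is actually flushed.
    Object* code = re->DataAt(JSRegExp::code_index(is_ascii));
    re->SetDataAt(JSRegExp::code_index(is_ascii),
                  Smi::FromInt(heap->sweep_generation()));
    re->SetDataAt(JSRegExp::saved_code_index(is_ascii), code);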
| @@ -1396,143 +1221,6 @@ class MarkCompactMarkingVisitor |
| VisitJSRegExp(map, object); |
| } |
| - |
| - static void VisitSharedFunctionInfoAndFlushCode(Map* map, |
| - HeapObject* object) { |
| - Heap* heap = map->GetHeap(); |
| - SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object); |
| - if (shared->ic_age() != heap->global_ic_age()) { |
| - shared->ResetForNewContext(heap->global_ic_age()); |
| - } |
| - |
| - MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector(); |
| - if (!collector->is_code_flushing_enabled()) { |
| - VisitSharedFunctionInfoGeneric(map, object); |
| - return; |
| - } |
| - VisitSharedFunctionInfoAndFlushCodeGeneric(map, object, false); |
| - } |
| - |
| - |
| - static void VisitSharedFunctionInfoAndFlushCodeGeneric( |
| - Map* map, HeapObject* object, bool known_flush_code_candidate) { |
| - Heap* heap = map->GetHeap(); |
| - SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object); |
| - |
| - shared->BeforeVisitingPointers(); |
| - |
| - if (!known_flush_code_candidate) { |
| - known_flush_code_candidate = IsFlushable(heap, shared); |
| - if (known_flush_code_candidate) { |
| - heap->mark_compact_collector()->code_flusher()->AddCandidate(shared); |
| - } |
| - } |
| - |
| - VisitSharedFunctionInfoFields(heap, object, known_flush_code_candidate); |
| - } |
| - |
| - |
| - static void VisitJSFunctionAndFlushCode(Map* map, HeapObject* object) { |
| - Heap* heap = map->GetHeap(); |
| - MarkCompactCollector* collector = heap->mark_compact_collector(); |
| - if (!collector->is_code_flushing_enabled()) { |
| - VisitJSFunction(map, object); |
| - return; |
| - } |
| - |
| - JSFunction* jsfunction = reinterpret_cast<JSFunction*>(object); |
| - // The function must have a valid context and not be a builtin. |
| - bool flush_code_candidate = false; |
| - if (IsValidNotBuiltinContext(jsfunction->unchecked_context())) { |
| - flush_code_candidate = FlushCodeForFunction(heap, jsfunction); |
| - } |
| - |
| - if (!flush_code_candidate) { |
| - Code* code = jsfunction->shared()->code(); |
| - MarkBit code_mark = Marking::MarkBitFrom(code); |
| - collector->MarkObject(code, code_mark); |
| - |
| - if (jsfunction->code()->kind() == Code::OPTIMIZED_FUNCTION) { |
| - collector->MarkInlinedFunctionsCode(jsfunction->code()); |
| - } |
| - } |
| - |
| - VisitJSFunctionFields(map, |
| - reinterpret_cast<JSFunction*>(object), |
| - flush_code_candidate); |
| - } |
| - |
| - |
| - static void VisitJSFunction(Map* map, HeapObject* object) { |
| - VisitJSFunctionFields(map, |
| - reinterpret_cast<JSFunction*>(object), |
| - false); |
| - } |
| - |
| - |
| - static inline void VisitJSFunctionFields(Map* map, |
| - JSFunction* object, |
| - bool flush_code_candidate) { |
| - Heap* heap = map->GetHeap(); |
| - |
| - Object** start_slot = |
| - HeapObject::RawField(object, JSFunction::kPropertiesOffset); |
| - Object** end_slot = |
| - HeapObject::RawField(object, JSFunction::kCodeEntryOffset); |
| - VisitPointers(heap, start_slot, start_slot, end_slot); |
| - |
| - if (!flush_code_candidate) { |
| - VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset); |
| - } else { |
| - // Don't visit code object. |
| - |
| - // Visit the shared function info to avoid double-checking its |
| - // flushability. |
| - SharedFunctionInfo* shared_info = object->unchecked_shared(); |
| - MarkBit shared_info_mark = Marking::MarkBitFrom(shared_info); |
| - if (!shared_info_mark.Get()) { |
| - Map* shared_info_map = shared_info->map(); |
| - MarkBit shared_info_map_mark = |
| - Marking::MarkBitFrom(shared_info_map); |
| - heap->mark_compact_collector()->SetMark(shared_info, shared_info_mark); |
| - heap->mark_compact_collector()->MarkObject(shared_info_map, |
| - shared_info_map_mark); |
| - VisitSharedFunctionInfoAndFlushCodeGeneric(shared_info_map, |
| - shared_info, |
| - true); |
| - } |
| - } |
| - |
| - start_slot = |
| - HeapObject::RawField(object, |
| - JSFunction::kCodeEntryOffset + kPointerSize); |
| - end_slot = |
| - HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset); |
| - VisitPointers(heap, start_slot, start_slot, end_slot); |
| - } |
| - |
| - |
| - static void VisitSharedFunctionInfoFields(Heap* heap, |
| - HeapObject* object, |
| - bool flush_code_candidate) { |
| - VisitPointer(heap, |
| - HeapObject::RawField(object, SharedFunctionInfo::kNameOffset)); |
| - |
| - if (!flush_code_candidate) { |
| - VisitPointer(heap, |
| - HeapObject::RawField(object, |
| - SharedFunctionInfo::kCodeOffset)); |
| - } |
| - |
| - Object** start_slot = |
| - HeapObject::RawField(object, |
| - SharedFunctionInfo::kOptimizedCodeMapOffset); |
| - Object** end_slot = |
| - HeapObject::RawField(object, SharedFunctionInfo::kSize); |
| - |
| - VisitPointers(heap, start_slot, start_slot, end_slot); |
| - } |
| - |
| static VisitorDispatchTable<Callback> non_count_table_; |
| }; |
| @@ -1690,12 +1378,6 @@ class MarkCompactMarkingVisitor::ObjectStatsTracker< |
| void MarkCompactMarkingVisitor::Initialize() { |
| StaticMarkingVisitor<MarkCompactMarkingVisitor>::Initialize(); |
| - table_.Register(kVisitSharedFunctionInfo, |
| - &VisitSharedFunctionInfoAndFlushCode); |
| - |
| - table_.Register(kVisitJSFunction, |
| - &VisitJSFunctionAndFlushCode); |
| - |
| table_.Register(kVisitJSRegExp, |
| &VisitRegExpAndFlushCode); |
| @@ -1774,26 +1456,6 @@ class SharedFunctionInfoMarkingVisitor : public ObjectVisitor { |
| }; |
| -void MarkCompactCollector::MarkInlinedFunctionsCode(Code* code) { |
| - // For an optimized function we should retain both the non-optimized |
| - // version of its code and the non-optimized versions of all inlined |
| - // functions. This is required to support bailing out from inlined code. |
| - DeoptimizationInputData* data = |
| - DeoptimizationInputData::cast(code->deoptimization_data()); |
| - |
| - FixedArray* literals = data->LiteralArray(); |
| - |
| - for (int i = 0, count = data->InlinedFunctionCount()->value(); |
| - i < count; |
| - i++) { |
| - JSFunction* inlined = JSFunction::cast(literals->get(i)); |
| - Code* inlined_code = inlined->shared()->code(); |
| - MarkBit inlined_code_mark = Marking::MarkBitFrom(inlined_code); |
| - MarkObject(inlined_code, inlined_code_mark); |
| - } |
| -} |
| - |
| - |
| void MarkCompactCollector::PrepareThreadForCodeFlushing(Isolate* isolate, |
| ThreadLocalTop* top) { |
| for (StackFrameIterator it(isolate, top); !it.done(); it.Advance()) { |
| @@ -1806,7 +1468,8 @@ void MarkCompactCollector::PrepareThreadForCodeFlushing(Isolate* isolate, |
| MarkBit code_mark = Marking::MarkBitFrom(code); |
| MarkObject(code, code_mark); |
| if (frame->is_optimized()) { |
| - MarkInlinedFunctionsCode(frame->LookupCode()); |
| + MarkCompactMarkingVisitor::MarkInlinedFunctionsCode(heap(), |
| + frame->LookupCode()); |
| } |
| } |
| } |