OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 56 matching lines...)
67 marking_parity_(ODD_MARKING_PARITY), | 67 marking_parity_(ODD_MARKING_PARITY), |
68 compacting_(false), | 68 compacting_(false), |
69 was_marked_incrementally_(false), | 69 was_marked_incrementally_(false), |
70 sweeping_pending_(false), | 70 sweeping_pending_(false), |
71 sequential_sweeping_(false), | 71 sequential_sweeping_(false), |
72 tracer_(NULL), | 72 tracer_(NULL), |
73 migration_slots_buffer_(NULL), | 73 migration_slots_buffer_(NULL), |
74 heap_(NULL), | 74 heap_(NULL), |
75 code_flusher_(NULL), | 75 code_flusher_(NULL), |
76 encountered_weak_collections_(NULL), | 76 encountered_weak_collections_(NULL), |
77 code_to_deoptimize_(NULL) { } | 77 have_code_to_deoptimize_(false) { } |
78 | 78 |
79 #ifdef VERIFY_HEAP | 79 #ifdef VERIFY_HEAP |
80 class VerifyMarkingVisitor: public ObjectVisitor { | 80 class VerifyMarkingVisitor: public ObjectVisitor { |
81 public: | 81 public: |
82 void VisitPointers(Object** start, Object** end) { | 82 void VisitPointers(Object** start, Object** end) { |
83 for (Object** current = start; current < end; current++) { | 83 for (Object** current = start; current < end; current++) { |
84 if ((*current)->IsHeapObject()) { | 84 if ((*current)->IsHeapObject()) { |
85 HeapObject* object = HeapObject::cast(*current); | 85 HeapObject* object = HeapObject::cast(*current); |
86 CHECK(HEAP->mark_compact_collector()->IsMarked(object)); | 86 CHECK(HEAP->mark_compact_collector()->IsMarked(object)); |
87 } | 87 } |
(...skipping 866 matching lines...)
954 #ifdef DEBUG | 954 #ifdef DEBUG |
955 ASSERT(state_ == SWEEP_SPACES || state_ == RELOCATE_OBJECTS); | 955 ASSERT(state_ == SWEEP_SPACES || state_ == RELOCATE_OBJECTS); |
956 state_ = IDLE; | 956 state_ = IDLE; |
957 #endif | 957 #endif |
958 // The stub cache is not traversed during GC; clear the cache to | 958 // The stub cache is not traversed during GC; clear the cache to |
959 // force lazy re-initialization of it. This must be done after the | 959 // force lazy re-initialization of it. This must be done after the |
960 // GC, because it relies on the new address of certain old space | 960 // GC, because it relies on the new address of certain old space |
961 // objects (empty string, illegal builtin). | 961 // objects (empty string, illegal builtin). |
962 isolate()->stub_cache()->Clear(); | 962 isolate()->stub_cache()->Clear(); |
963 | 963 |
964 if (code_to_deoptimize_ != Smi::FromInt(0)) { | 964 if (have_code_to_deoptimize_) { |
965 // Convert the linked list of Code objects into a ZoneList. | 965 // Some code objects were marked for deoptimization during the GC. |
966 Zone zone(isolate()); | 966 Deoptimizer::DeoptimizeMarkedCode(isolate()); |
967 ZoneList<Code*> codes(4, &zone); | 967 have_code_to_deoptimize_ = false; |
968 | |
969 Object *list = code_to_deoptimize_; | |
970 while (list->IsCode()) { | |
971 Code *code = Code::cast(list); | |
972 list = code->code_to_deoptimize_link(); | |
973 codes.Add(code, &zone); | |
974 // Destroy the link and don't ever try to deoptimize this code again. | |
975 code->set_code_to_deoptimize_link(Smi::FromInt(0)); | |
976 } | |
977 code_to_deoptimize_ = Smi::FromInt(0); | |
978 | |
979 Deoptimizer::DeoptimizeCodeList(isolate(), &codes); | |
980 } | 968 } |
981 } | 969 } |
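Reviewer note on the Finish() hunk above: the CL swaps the intrusive code_to_deoptimize_ list for a single have_code_to_deoptimize_ flag and defers the actual work to Deoptimizer::DeoptimizeMarkedCode(isolate()). Below is a minimal, self-contained sketch of that flag-and-defer pattern; the Code, Collector and Deoptimizer types (and the signature taking a vector instead of an isolate) are simplified stand-ins invented for illustration, not the real V8 classes.

#include <cstdio>
#include <vector>

// Simplified stand-ins for illustration only; not the actual V8 classes.
struct Code {
  const char* name;
  bool marked_for_deoptimization = false;
};

struct Deoptimizer {
  // Scans all code objects and deoptimizes the ones carrying the mark,
  // roughly mirroring what Deoptimizer::DeoptimizeMarkedCode() does.
  static void DeoptimizeMarkedCode(std::vector<Code>& all_code) {
    for (Code& code : all_code) {
      if (!code.marked_for_deoptimization) continue;
      std::printf("deoptimizing %s\n", code.name);
      code.marked_for_deoptimization = false;
    }
  }
};

struct Collector {
  bool have_code_to_deoptimize_ = false;

  // Marking side: flag the code object and remember that a pass is needed.
  void MarkForDeoptimization(Code& code) {
    if (!code.marked_for_deoptimization) {
      code.marked_for_deoptimization = true;
      have_code_to_deoptimize_ = true;
    }
  }

  // Finish(): one deferred pass instead of walking an intrusive list.
  void Finish(std::vector<Code>& all_code) {
    if (have_code_to_deoptimize_) {
      Deoptimizer::DeoptimizeMarkedCode(all_code);
      have_code_to_deoptimize_ = false;
    }
  }
};

int main() {
  std::vector<Code> heap = {{"f"}, {"g"}, {"h"}};
  Collector collector;
  collector.MarkForDeoptimization(heap[1]);
  collector.Finish(heap);  // prints: deoptimizing g
}

The point of the pattern: marking is a one-bit write during the GC, and the scan for marked code runs once after pointers are stable, so no list of Code objects has to survive compaction.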
982 | 970 |
983 | 971 |
984 // ------------------------------------------------------------------------- | 972 // ------------------------------------------------------------------------- |
985 // Phase 1: tracing and marking live objects. | 973 // Phase 1: tracing and marking live objects. |
986 // before: all objects are in normal state. | 974 // before: all objects are in normal state. |
987 // after: a live object's map pointer is marked as '00'. | 975 // after: a live object's map pointer is marked as '00'. |
988 | 976 |
989 // Marking all live objects in the heap as part of mark-sweep or mark-compact | 977 // Marking all live objects in the heap as part of mark-sweep or mark-compact |
(...skipping 1626 matching lines...)
2616 DependentCode* entries = map->dependent_code(); | 2604 DependentCode* entries = map->dependent_code(); |
2617 DependentCode::GroupStartIndexes starts(entries); | 2605 DependentCode::GroupStartIndexes starts(entries); |
2618 int number_of_entries = starts.number_of_entries(); | 2606 int number_of_entries = starts.number_of_entries(); |
2619 if (number_of_entries == 0) return; | 2607 if (number_of_entries == 0) return; |
2620 for (int i = 0; i < number_of_entries; i++) { | 2608 for (int i = 0; i < number_of_entries; i++) { |
2621 // If the entry is compilation info then the map must be alive, | 2609 // If the entry is compilation info then the map must be alive, |
2622 // and ClearAndDeoptimizeDependentCode shouldn't be called. | 2610 // and ClearAndDeoptimizeDependentCode shouldn't be called. |
2623 ASSERT(entries->is_code_at(i)); | 2611 ASSERT(entries->is_code_at(i)); |
2624 Code* code = entries->code_at(i); | 2612 Code* code = entries->code_at(i); |
2625 | 2613 |
2626 if (IsMarked(code) && !WillBeDeoptimized(code)) { | 2614 if (IsMarked(code) && !code->marked_for_deoptimization()) { |
2627 // Insert the code into the code_to_deoptimize linked list. | 2615 code->set_marked_for_deoptimization(true); |
2628 Object* next = code_to_deoptimize_; | 2616 have_code_to_deoptimize_ = true; |
2629 if (next != Smi::FromInt(0)) { | |
2630 // Record the slot so that it is updated. | |
2631 Object** slot = code->code_to_deoptimize_link_slot(); | |
2632 RecordSlot(slot, slot, next); | |
2633 } | |
2634 code->set_code_to_deoptimize_link(next); | |
2635 code_to_deoptimize_ = code; | |
2636 } | 2617 } |
2637 entries->clear_at(i); | 2618 entries->clear_at(i); |
2638 } | 2619 } |
2639 map->set_dependent_code(DependentCode::cast(heap()->empty_fixed_array())); | 2620 map->set_dependent_code(DependentCode::cast(heap()->empty_fixed_array())); |
2640 } | 2621 } |
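For contrast with the deleted branch above (the RecordSlot/set_code_to_deoptimize_link lines), here is a rough model of the old scheme, which threaded an intrusive singly linked list through the Code objects themselves. Again, the types and names are simplified stand-ins for illustration, not the actual V8 API.

#include <cstdio>

// Rough stand-ins, not the real V8 classes. Each code object carried the
// next pointer of an intrusive "to deoptimize" list (the deleted
// code_to_deoptimize_link field).
struct Code {
  const char* name;
  Code* code_to_deoptimize_link = nullptr;
  bool ever_linked = false;  // models the old "link != undefined" check
};

struct Collector {
  Code* code_to_deoptimize_ = nullptr;  // list head held by the collector

  void LinkForDeoptimization(Code& code) {
    if (code.ever_linked) return;  // already on (or once on) the list
    code.code_to_deoptimize_link = code_to_deoptimize_;
    code.ever_linked = true;
    code_to_deoptimize_ = &code;
    // The real collector also had to RecordSlot() the link so a compacting
    // GC would rewrite it if the next Code object moved.
  }

  void Finish() {
    for (Code* code = code_to_deoptimize_; code != nullptr;) {
      Code* next = code->code_to_deoptimize_link;
      std::printf("deoptimizing %s\n", code->name);
      // The old code reset the link to a sentinel so the object was never
      // linked (and deoptimized) a second time.
      code->code_to_deoptimize_link = nullptr;
      code = next;
    }
    code_to_deoptimize_ = nullptr;
  }
};

int main() {
  Code f{"f"}, g{"g"};
  Collector collector;
  collector.LinkForDeoptimization(f);
  collector.LinkForDeoptimization(g);
  collector.Finish();  // prints "deoptimizing g" then "deoptimizing f"
}

Because the list head and every link lived in moving objects, the compactor had to record each link slot and visit the head as a root; the one-bit flag in the new version removes both obligations.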
2641 | 2622 |
2642 | 2623 |
2643 void MarkCompactCollector::ClearNonLiveDependentCode(DependentCode* entries) { | 2624 void MarkCompactCollector::ClearNonLiveDependentCode(DependentCode* entries) { |
2644 DisallowHeapAllocation no_allocation; | 2625 DisallowHeapAllocation no_allocation; |
2645 DependentCode::GroupStartIndexes starts(entries); | 2626 DependentCode::GroupStartIndexes starts(entries); |
(...skipping 630 matching lines...)
3276 MarkBit mark_bit = Marking::MarkBitFrom(code); | 3257 MarkBit mark_bit = Marking::MarkBitFrom(code); |
3277 if (Marking::IsWhite(mark_bit)) return; | 3258 if (Marking::IsWhite(mark_bit)) return; |
3278 | 3259 |
3279 invalidated_code_.Add(code); | 3260 invalidated_code_.Add(code); |
3280 } | 3261 } |
3281 } | 3262 } |
3282 | 3263 |
3283 | 3264 |
3284 // Return true if the given code is deoptimized or will be deoptimized. | 3265 // Return true if the given code is deoptimized or will be deoptimized. |
3285 bool MarkCompactCollector::WillBeDeoptimized(Code* code) { | 3266 bool MarkCompactCollector::WillBeDeoptimized(Code* code) { |
3286 // We assume the code_to_deoptimize_link is initialized to undefined. | 3267 return code->marked_for_deoptimization(); |
3287 // If it is 0, or refers to another Code object, then this code | |
3288 // is already linked, or was already linked into the list. | |
3289 return code->code_to_deoptimize_link() != heap()->undefined_value() | |
3290 || code->marked_for_deoptimization(); | |
3291 } | 3268 } |
3292 | 3269 |
3293 | 3270 |
3294 bool MarkCompactCollector::MarkInvalidatedCode() { | 3271 bool MarkCompactCollector::MarkInvalidatedCode() { |
3295 bool code_marked = false; | 3272 bool code_marked = false; |
3296 | 3273 |
3297 int length = invalidated_code_.length(); | 3274 int length = invalidated_code_.length(); |
3298 for (int i = 0; i < length; i++) { | 3275 for (int i = 0; i < length; i++) { |
3299 Code* code = invalidated_code_[i]; | 3276 Code* code = invalidated_code_[i]; |
3300 | 3277 |
(...skipping 166 matching lines...)
3467 HeapObjectIterator js_global_property_cell_iterator( | 3444 HeapObjectIterator js_global_property_cell_iterator( |
3468 heap_->property_cell_space()); | 3445 heap_->property_cell_space()); |
3469 for (HeapObject* cell = js_global_property_cell_iterator.Next(); | 3446 for (HeapObject* cell = js_global_property_cell_iterator.Next(); |
3470 cell != NULL; | 3447 cell != NULL; |
3471 cell = js_global_property_cell_iterator.Next()) { | 3448 cell = js_global_property_cell_iterator.Next()) { |
3472 if (cell->IsPropertyCell()) { | 3449 if (cell->IsPropertyCell()) { |
3473 PropertyCell::BodyDescriptor::IterateBody(cell, &updating_visitor); | 3450 PropertyCell::BodyDescriptor::IterateBody(cell, &updating_visitor); |
3474 } | 3451 } |
3475 } | 3452 } |
3476 | 3453 |
3477 // Update the heads of the native contexts list the code to deoptimize list. | 3454 // Update the head of the native contexts list in the heap. |
3478 updating_visitor.VisitPointer(heap_->native_contexts_list_address()); | 3455 updating_visitor.VisitPointer(heap_->native_contexts_list_address()); |
3479 updating_visitor.VisitPointer(&code_to_deoptimize_); | |
3480 | 3456 |
3481 heap_->string_table()->Iterate(&updating_visitor); | 3457 heap_->string_table()->Iterate(&updating_visitor); |
3482 | 3458 |
3483 // Update pointers from external string table. | 3459 // Update pointers from external string table. |
3484 heap_->UpdateReferencesInExternalStringTable( | 3460 heap_->UpdateReferencesInExternalStringTable( |
3485 &UpdateReferenceInExternalStringTableEntry); | 3461 &UpdateReferenceInExternalStringTableEntry); |
3486 | 3462 |
3487 if (!FLAG_watch_ic_patching) { | 3463 if (!FLAG_watch_ic_patching) { |
3488 // Update JSFunction pointers from the runtime profiler. | 3464 // Update JSFunction pointers from the runtime profiler. |
3489 heap()->isolate()->runtime_profiler()->UpdateSamplesAfterCompact( | 3465 heap()->isolate()->runtime_profiler()->UpdateSamplesAfterCompact( |
(...skipping 843 matching lines...)
4333 while (buffer != NULL) { | 4309 while (buffer != NULL) { |
4334 SlotsBuffer* next_buffer = buffer->next(); | 4310 SlotsBuffer* next_buffer = buffer->next(); |
4335 DeallocateBuffer(buffer); | 4311 DeallocateBuffer(buffer); |
4336 buffer = next_buffer; | 4312 buffer = next_buffer; |
4337 } | 4313 } |
4338 *buffer_address = NULL; | 4314 *buffer_address = NULL; |
4339 } | 4315 } |
4340 | 4316 |
4341 | 4317 |
4342 } } // namespace v8::internal | 4318 } } // namespace v8::internal |