OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #include "src/base/atomicops.h" | 7 #include "src/base/atomicops.h" |
8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
10 #include "src/compilation-cache.h" | 10 #include "src/compilation-cache.h" |
(...skipping 367 matching lines...) |
378 CHECK_EQ(0, Page::FromAddress(obj->address())->LiveBytes()); | 378 CHECK_EQ(0, Page::FromAddress(obj->address())->LiveBytes()); |
379 } | 379 } |
380 } | 380 } |
381 | 381 |
382 | 382 |
383 void MarkCompactCollector::VerifyWeakEmbeddedObjectsInCode() { | 383 void MarkCompactCollector::VerifyWeakEmbeddedObjectsInCode() { |
384 HeapObjectIterator code_iterator(heap()->code_space()); | 384 HeapObjectIterator code_iterator(heap()->code_space()); |
385 for (HeapObject* obj = code_iterator.Next(); obj != NULL; | 385 for (HeapObject* obj = code_iterator.Next(); obj != NULL; |
386 obj = code_iterator.Next()) { | 386 obj = code_iterator.Next()) { |
387 Code* code = Code::cast(obj); | 387 Code* code = Code::cast(obj); |
388 if (!code->is_optimized_code() && !code->is_weak_stub()) continue; | 388 if (!code->is_optimized_code()) continue; |
389 if (WillBeDeoptimized(code)) continue; | 389 if (WillBeDeoptimized(code)) continue; |
390 code->VerifyEmbeddedObjectsDependency(); | 390 code->VerifyEmbeddedObjectsDependency(); |
391 } | 391 } |
392 } | 392 } |
393 | 393 |
394 | 394 |
395 void MarkCompactCollector::VerifyOmittedMapChecks() { | 395 void MarkCompactCollector::VerifyOmittedMapChecks() { |
396 HeapObjectIterator iterator(heap()->map_space()); | 396 HeapObjectIterator iterator(heap()->map_space()); |
397 for (HeapObject* obj = iterator.Next(); obj != NULL; obj = iterator.Next()) { | 397 for (HeapObject* obj = iterator.Next(); obj != NULL; obj = iterator.Next()) { |
398 Map* map = Map::cast(obj); | 398 Map* map = Map::cast(obj); |
(...skipping 2139 matching lines...) |
2538 if (to_trim <= 0) return; | 2538 if (to_trim <= 0) return; |
2539 heap_->RightTrimFixedArray<Heap::FROM_GC>(descriptors->GetEnumCache(), | 2539 heap_->RightTrimFixedArray<Heap::FROM_GC>(descriptors->GetEnumCache(), |
2540 to_trim); | 2540 to_trim); |
2541 | 2541 |
2542 if (!descriptors->HasEnumIndicesCache()) return; | 2542 if (!descriptors->HasEnumIndicesCache()) return; |
2543 FixedArray* enum_indices_cache = descriptors->GetEnumIndicesCache(); | 2543 FixedArray* enum_indices_cache = descriptors->GetEnumIndicesCache(); |
2544 heap_->RightTrimFixedArray<Heap::FROM_GC>(enum_indices_cache, to_trim); | 2544 heap_->RightTrimFixedArray<Heap::FROM_GC>(enum_indices_cache, to_trim); |
2545 } | 2545 } |
2546 | 2546 |
2547 | 2547 |
2548 void MarkCompactCollector::ClearDependentICList(Object* head) { | |
2549 Object* current = head; | |
2550 Object* undefined = heap()->undefined_value(); | |
2551 while (current != undefined) { | |
2552 Code* code = Code::cast(current); | |
2553 if (IsMarked(code)) { | |
2554 DCHECK(code->is_weak_stub()); | |
2555 IC::InvalidateMaps(code); | |
2556 } | |
2557 current = code->next_code_link(); | |
2558 code->set_next_code_link(undefined); | |
2559 } | |
2560 } | |
2561 | |
2562 | |
2563 void MarkCompactCollector::ClearDependentCode(DependentCode* entries) { | 2548 void MarkCompactCollector::ClearDependentCode(DependentCode* entries) { |
2564 DisallowHeapAllocation no_allocation; | 2549 DisallowHeapAllocation no_allocation; |
2565 DependentCode::GroupStartIndexes starts(entries); | 2550 DependentCode::GroupStartIndexes starts(entries); |
2566 int number_of_entries = starts.number_of_entries(); | 2551 int number_of_entries = starts.number_of_entries(); |
2567 if (number_of_entries == 0) return; | 2552 if (number_of_entries == 0) return; |
2568 int g = DependentCode::kWeakICGroup; | 2553 int g = DependentCode::kWeakCodeGroup; |
2569 if (starts.at(g) != starts.at(g + 1)) { | |
2570 int i = starts.at(g); | |
2571 DCHECK(i + 1 == starts.at(g + 1)); | |
2572 Object* head = entries->object_at(i); | |
2573 ClearDependentICList(head); | |
2574 } | |
2575 g = DependentCode::kWeakCodeGroup; | |
2576 for (int i = starts.at(g); i < starts.at(g + 1); i++) { | 2554 for (int i = starts.at(g); i < starts.at(g + 1); i++) { |
2577 // If the entry is compilation info then the map must be alive, | 2555 // If the entry is compilation info then the map must be alive, |
2578 // and ClearDependentCode shouldn't be called. | 2556 // and ClearDependentCode shouldn't be called. |
2579 DCHECK(entries->is_code_at(i)); | 2557 DCHECK(entries->is_code_at(i)); |
2580 Code* code = entries->code_at(i); | 2558 Code* code = entries->code_at(i); |
2581 if (IsMarked(code) && !code->marked_for_deoptimization()) { | 2559 if (IsMarked(code) && !code->marked_for_deoptimization()) { |
2582 DependentCode::SetMarkedForDeoptimization( | 2560 DependentCode::SetMarkedForDeoptimization( |
2583 code, static_cast<DependentCode::DependencyGroup>(g)); | 2561 code, static_cast<DependentCode::DependencyGroup>(g)); |
2584 code->InvalidateEmbeddedObjects(); | 2562 code->InvalidateEmbeddedObjects(); |
2585 have_code_to_deoptimize_ = true; | 2563 have_code_to_deoptimize_ = true; |
2586 } | 2564 } |
2587 } | 2565 } |
2588 for (int i = 0; i < number_of_entries; i++) { | 2566 for (int i = 0; i < number_of_entries; i++) { |
2589 entries->clear_at(i); | 2567 entries->clear_at(i); |
2590 } | 2568 } |
2591 } | 2569 } |
2592 | 2570 |
2593 | 2571 |
2594 int MarkCompactCollector::ClearNonLiveDependentCodeInGroup( | 2572 int MarkCompactCollector::ClearNonLiveDependentCodeInGroup( |
2595 DependentCode* entries, int group, int start, int end, int new_start) { | 2573 DependentCode* entries, int group, int start, int end, int new_start) { |
2596 int survived = 0; | 2574 int survived = 0; |
2597 if (group == DependentCode::kWeakICGroup) { | 2575 for (int i = start; i < end; i++) { |
2598 // Dependent weak IC stubs form a linked list and only the head is stored | 2576 Object* obj = entries->object_at(i); |
2599 // in the dependent code array. | 2577 DCHECK(obj->IsCode() || IsMarked(obj)); |
2600 if (start != end) { | 2578 if (IsMarked(obj) && |
2601 DCHECK(start + 1 == end); | 2579 (!obj->IsCode() || !WillBeDeoptimized(Code::cast(obj)))) { |
2602 Object* old_head = entries->object_at(start); | 2580 if (new_start + survived != i) { |
2603 MarkCompactWeakObjectRetainer retainer; | 2581 entries->set_object_at(new_start + survived, obj); |
2604 Object* head = VisitWeakList<Code>(heap(), old_head, &retainer); | |
2605 entries->set_object_at(new_start, head); | |
2606 Object** slot = entries->slot_at(new_start); | |
2607 RecordSlot(slot, slot, head); | |
2608 // We do not compact this group even if the head is undefined, | |
2609 // more dependent ICs are likely to be added later. | |
2610 survived = 1; | |
2611 } | |
2612 } else { | |
2613 for (int i = start; i < end; i++) { | |
2614 Object* obj = entries->object_at(i); | |
2615 DCHECK(obj->IsCode() || IsMarked(obj)); | |
2616 if (IsMarked(obj) && | |
2617 (!obj->IsCode() || !WillBeDeoptimized(Code::cast(obj)))) { | |
2618 if (new_start + survived != i) { | |
2619 entries->set_object_at(new_start + survived, obj); | |
2620 } | |
2621 Object** slot = entries->slot_at(new_start + survived); | |
2622 RecordSlot(slot, slot, obj); | |
2623 survived++; | |
2624 } | 2582 } |
| 2583 Object** slot = entries->slot_at(new_start + survived); |
| 2584 RecordSlot(slot, slot, obj); |
| 2585 survived++; |
2625 } | 2586 } |
2626 } | 2587 } |
2627 entries->set_number_of_entries( | 2588 entries->set_number_of_entries( |
2628 static_cast<DependentCode::DependencyGroup>(group), survived); | 2589 static_cast<DependentCode::DependencyGroup>(group), survived); |
2629 return survived; | 2590 return survived; |
2630 } | 2591 } |
2631 | 2592 |
2632 | 2593 |
2633 void MarkCompactCollector::ClearNonLiveDependentCode(DependentCode* entries) { | 2594 void MarkCompactCollector::ClearNonLiveDependentCode(DependentCode* entries) { |
2634 DisallowHeapAllocation no_allocation; | 2595 DisallowHeapAllocation no_allocation; |
(...skipping 1807 matching lines...) |
4442 SlotsBuffer* buffer = *buffer_address; | 4403 SlotsBuffer* buffer = *buffer_address; |
4443 while (buffer != NULL) { | 4404 while (buffer != NULL) { |
4444 SlotsBuffer* next_buffer = buffer->next(); | 4405 SlotsBuffer* next_buffer = buffer->next(); |
4445 DeallocateBuffer(buffer); | 4406 DeallocateBuffer(buffer); |
4446 buffer = next_buffer; | 4407 buffer = next_buffer; |
4447 } | 4408 } |
4448 *buffer_address = NULL; | 4409 *buffer_address = NULL; |
4449 } | 4410 } |
4450 } | 4411 } |
4451 } // namespace v8::internal | 4412 } // namespace v8::internal |