| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/heap/heap.h" | 5 #include "src/heap/heap.h" |
| 6 | 6 |
| 7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
| 8 #include "src/api.h" | 8 #include "src/api.h" |
| 9 #include "src/ast/scopeinfo.h" | 9 #include "src/ast/scopeinfo.h" |
| 10 #include "src/base/bits.h" | 10 #include "src/base/bits.h" |
| (...skipping 3099 matching lines...) |
| 3110 const int element_size = object->IsFixedArray() ? kPointerSize : kDoubleSize; | 3110 const int element_size = object->IsFixedArray() ? kPointerSize : kDoubleSize; |
| 3111 const int bytes_to_trim = elements_to_trim * element_size; | 3111 const int bytes_to_trim = elements_to_trim * element_size; |
| 3112 Map* map = object->map(); | 3112 Map* map = object->map(); |
| 3113 | 3113 |
| 3114 // For now this trick is only applied to objects in new and paged space. | 3114 // For now this trick is only applied to objects in new and paged space. |
| 3115 // In large object space the object's start must coincide with the chunk | 3115 // In large object space the object's start must coincide with the chunk |
| 3116 // and thus the trick is just not applicable. | 3116 // and thus the trick is just not applicable. |
| 3117 DCHECK(!lo_space()->Contains(object)); | 3117 DCHECK(!lo_space()->Contains(object)); |
| 3118 DCHECK(object->map() != fixed_cow_array_map()); | 3118 DCHECK(object->map() != fixed_cow_array_map()); |
| 3119 | 3119 |
| 3120 // Ensure that no handle-scope has more than one pointer to the same | |
| 3121 // backing-store. | |
| 3122 SLOW_DCHECK(CountHandlesForObject(object) <= 1); | |
| 3123 | |
| 3124 STATIC_ASSERT(FixedArrayBase::kMapOffset == 0); | 3120 STATIC_ASSERT(FixedArrayBase::kMapOffset == 0); |
| 3125 STATIC_ASSERT(FixedArrayBase::kLengthOffset == kPointerSize); | 3121 STATIC_ASSERT(FixedArrayBase::kLengthOffset == kPointerSize); |
| 3126 STATIC_ASSERT(FixedArrayBase::kHeaderSize == 2 * kPointerSize); | 3122 STATIC_ASSERT(FixedArrayBase::kHeaderSize == 2 * kPointerSize); |
| 3127 | 3123 |
| 3128 const int len = object->length(); | 3124 const int len = object->length(); |
| 3129 DCHECK(elements_to_trim <= len); | 3125 DCHECK(elements_to_trim <= len); |
| 3130 | 3126 |
| 3131 // Calculate location of new array start. | 3127 // Calculate location of new array start. |
| 3132 Address new_start = object->address() + bytes_to_trim; | 3128 Address new_start = object->address() + bytes_to_trim; |
| 3133 | 3129 |
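The hunk above is the heart of the left-trim trick: rather than copying the surviving elements, the trimmed prefix is overwritten with a filler object and the map and length are re-written at `new_start`, so the object effectively moves forward in place. A minimal standalone sketch of the idea (a toy word-array layout where the `kArrayMap`/`kFillerMap` markers and the `LeftTrim` helper are made up for illustration, not V8's actual implementation) might look like this:

```cpp
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <iostream>
#include <vector>

// Toy heap words: a live "array" is {map, length, e0, e1, ...}, one word each.
constexpr std::uintptr_t kArrayMap = 0xA11A;   // stand-in for the array map
constexpr std::uintptr_t kFillerMap = 0xF111;  // stand-in for a filler map

// Trim the first k elements in place: the object start moves forward by k
// words, the vacated prefix is marked as filler, and the header (map, length)
// is re-written at the new start. The surviving elements are never copied.
std::uintptr_t* LeftTrim(std::uintptr_t* object, std::size_t k) {
  assert(object[0] == kArrayMap && k <= object[1]);
  const std::uintptr_t new_length = object[1] - k;
  std::uintptr_t* new_start = object + k;
  for (std::size_t i = 0; i < k; i++) object[i] = kFillerMap;
  new_start[0] = kArrayMap;
  new_start[1] = new_length;
  return new_start;
}

int main() {
  // Header (2 words) followed by 4 elements.
  std::vector<std::uintptr_t> heap = {kArrayMap, 4, 10, 20, 30, 40};
  std::uintptr_t* old_start = heap.data();
  std::uintptr_t* new_start = LeftTrim(old_start, 2);

  std::cout << std::hex << "old start word: 0x" << old_start[0]  // filler
            << std::dec << "\nnew length: " << new_start[1]      // 2
            << "\nfirst surviving element: " << new_start[2]     // 30
            << "\n";
}
```

Note that any raw pointer taken before the trim still points at the old start, which now holds filler words; that is exactly the stale-handle situation the `FixStaleLeftTrimmedHandlesVisitor` added further down has to clean up.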
| (...skipping 1622 matching lines...) |
| 4756 } | 4752 } |
| 4757 | 4753 |
| 4758 | 4754 |
| 4759 void Heap::IterateSmiRoots(ObjectVisitor* v) { | 4755 void Heap::IterateSmiRoots(ObjectVisitor* v) { |
| 4760 // Acquire execution access since we are going to read stack limit values. | 4756 // Acquire execution access since we are going to read stack limit values. |
| 4761 ExecutionAccess access(isolate()); | 4757 ExecutionAccess access(isolate()); |
| 4762 v->VisitPointers(&roots_[kSmiRootsStart], &roots_[kRootListLength]); | 4758 v->VisitPointers(&roots_[kSmiRootsStart], &roots_[kRootListLength]); |
| 4763 v->Synchronize(VisitorSynchronization::kSmiRootList); | 4759 v->Synchronize(VisitorSynchronization::kSmiRootList); |
| 4764 } | 4760 } |
| 4765 | 4761 |
| 4762 // We cannot avoid stale handles to left-trimmed objects, but can only make |
| 4763 // sure all handles still needed are updated. Filter out a stale pointer |
| 4764 // and clear the slot to allow post processing of handles (needed because |
| 4765 // the sweeper might actually free the underlying page). |
| 4766 class FixStaleLeftTrimmedHandlesVisitor : public ObjectVisitor { |
| 4767 public: |
| 4768 explicit FixStaleLeftTrimmedHandlesVisitor(Heap* heap) : heap_(heap) { |
| 4769 USE(heap_); |
| 4770 } |
| 4771 |
| 4772 void VisitPointer(Object** p) override { FixHandle(p); } |
| 4773 |
| 4774 void VisitPointers(Object** start, Object** end) override { |
| 4775 for (Object** p = start; p < end; p++) FixHandle(p); |
| 4776 } |
| 4777 |
| 4778 private: |
| 4779 inline void FixHandle(Object** p) { |
| 4780 HeapObject* current = reinterpret_cast<HeapObject*>(*p); |
| 4781 if (!current->IsHeapObject()) return; |
| 4782 const MapWord map_word = current->map_word(); |
| 4783 if (!map_word.IsForwardingAddress() && current->IsFiller()) { |
| 4784 #ifdef DEBUG |
| 4785 // We need to find a FixedArrayBase map after walking the fillers. |
| 4786 while (current->IsFiller()) { |
| 4787 Address next = reinterpret_cast<Address>(current); |
| 4788 if (current->map() == heap_->one_pointer_filler_map()) { |
| 4789 next += kPointerSize; |
| 4790 } else if (current->map() == heap_->two_pointer_filler_map()) { |
| 4791 next += 2 * kPointerSize; |
| 4792 } else { |
| 4793 next += current->Size(); |
| 4794 } |
| 4795 current = reinterpret_cast<HeapObject*>(next); |
| 4796 } |
| 4797 DCHECK(current->IsFixedArrayBase()); |
| 4798 #endif // DEBUG |
| 4799 *p = nullptr; |
| 4800 } |
| 4801 } |
| 4802 |
| 4803 Heap* heap_; |
| 4804 }; |
| 4766 | 4805 |
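Continuing the toy model from the left-trim sketch above (again with made-up markers and a plain vector standing in for the handle-scope storage, so the `FixStaleSlots` helper and the types here are illustrative rather than V8's API), the slot-clearing idea behind `FixStaleLeftTrimmedHandlesVisitor` can be sketched as: any handle slot whose referent now begins with a filler map is the old start of a left-trimmed object, and the slot is nulled so that later phases, such as the sweeper freeing the underlying page, never follow it.

```cpp
#include <cstdint>
#include <iostream>
#include <vector>

// Toy heap as in the left-trim sketch: each word is either a filler marker or
// part of a live object whose first word is its map.
constexpr std::uintptr_t kArrayMap = 0xA11A;
constexpr std::uintptr_t kFillerMap = 0xF111;

// Stand-in for the handle-scope pass: visit every handle slot and clear the
// ones that still point at the old (now filler-covered) start of a
// left-trimmed object, so no later pass ever dereferences a stale pointer.
void FixStaleSlots(std::vector<std::uintptr_t*>& slots) {
  for (std::uintptr_t*& slot : slots) {
    if (slot != nullptr && *slot == kFillerMap) slot = nullptr;
  }
}

int main() {
  // {map, length, elements...} already left-trimmed by two elements:
  // words 0..1 are filler, the live object now starts at word 2.
  std::vector<std::uintptr_t> heap = {kFillerMap, kFillerMap,
                                      kArrayMap, 2, 30, 40};
  std::vector<std::uintptr_t*> handle_slots = {&heap[0],   // stale handle
                                               &heap[2]};  // up-to-date handle
  FixStaleSlots(handle_slots);
  std::cout << std::boolalpha
            << "stale slot cleared: " << (handle_slots[0] == nullptr)
            << ", live slot kept: " << (handle_slots[1] != nullptr) << "\n";
}
```

Clearing the slot rather than forwarding it to the new start is enough here because, as the comment above the class notes, every handle that is still needed has already been updated; only the stale duplicates remain.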
| 4767 void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) { | 4806 void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) { |
| 4768 v->VisitPointers(&roots_[0], &roots_[kStrongRootListLength]); | 4807 v->VisitPointers(&roots_[0], &roots_[kStrongRootListLength]); |
| 4769 v->Synchronize(VisitorSynchronization::kStrongRootList); | 4808 v->Synchronize(VisitorSynchronization::kStrongRootList); |
| 4770 // The serializer/deserializer iterates the root list twice, first to pick | 4809 // The serializer/deserializer iterates the root list twice, first to pick |
| 4771 // off immortal immovable roots to make sure they end up on the first page, | 4810 // off immortal immovable roots to make sure they end up on the first page, |
| 4772 // and then again for the rest. | 4811 // and then again for the rest. |
| 4773 if (mode == VISIT_ONLY_STRONG_ROOT_LIST) return; | 4812 if (mode == VISIT_ONLY_STRONG_ROOT_LIST) return; |
| 4774 | 4813 |
| 4775 isolate_->bootstrapper()->Iterate(v); | 4814 isolate_->bootstrapper()->Iterate(v); |
| 4776 v->Synchronize(VisitorSynchronization::kBootstrapper); | 4815 v->Synchronize(VisitorSynchronization::kBootstrapper); |
| 4777 isolate_->Iterate(v); | 4816 isolate_->Iterate(v); |
| 4778 v->Synchronize(VisitorSynchronization::kTop); | 4817 v->Synchronize(VisitorSynchronization::kTop); |
| 4779 Relocatable::Iterate(isolate_, v); | 4818 Relocatable::Iterate(isolate_, v); |
| 4780 v->Synchronize(VisitorSynchronization::kRelocatable); | 4819 v->Synchronize(VisitorSynchronization::kRelocatable); |
| 4781 | 4820 |
| 4782 isolate_->compilation_cache()->Iterate(v); | 4821 isolate_->compilation_cache()->Iterate(v); |
| 4783 v->Synchronize(VisitorSynchronization::kCompilationCache); | 4822 v->Synchronize(VisitorSynchronization::kCompilationCache); |
| 4784 | 4823 |
| 4785 // Iterate over local handles in handle scopes. | 4824 // Iterate over local handles in handle scopes. |
| 4825 FixStaleLeftTrimmedHandlesVisitor left_trim_visitor(this); |
| 4826 isolate_->handle_scope_implementer()->Iterate(&left_trim_visitor); |
| 4786 isolate_->handle_scope_implementer()->Iterate(v); | 4827 isolate_->handle_scope_implementer()->Iterate(v); |
| 4787 isolate_->IterateDeferredHandles(v); | 4828 isolate_->IterateDeferredHandles(v); |
| 4788 v->Synchronize(VisitorSynchronization::kHandleScope); | 4829 v->Synchronize(VisitorSynchronization::kHandleScope); |
| 4789 | 4830 |
| 4790 // Iterate over the builtin code objects and code stubs in the | 4831 // Iterate over the builtin code objects and code stubs in the |
| 4791 // heap. Note that it is not necessary to iterate over code objects | 4832 // heap. Note that it is not necessary to iterate over code objects |
| 4792 // on scavenge collections. | 4833 // on scavenge collections. |
| 4793 if (mode != VISIT_ALL_IN_SCAVENGE) { | 4834 if (mode != VISIT_ALL_IN_SCAVENGE) { |
| 4794 isolate_->builtins()->IterateBuiltins(v); | 4835 isolate_->builtins()->IterateBuiltins(v); |
| 4795 v->Synchronize(VisitorSynchronization::kBuiltins); | 4836 v->Synchronize(VisitorSynchronization::kBuiltins); |
| (...skipping 829 matching lines...) |
| 5625 | 5666 |
| 5626 | 5667 |
| 5627 void Heap::PrintHandles() { | 5668 void Heap::PrintHandles() { |
| 5628 PrintF("Handles:\n"); | 5669 PrintF("Handles:\n"); |
| 5629 PrintHandleVisitor v; | 5670 PrintHandleVisitor v; |
| 5630 isolate_->handle_scope_implementer()->Iterate(&v); | 5671 isolate_->handle_scope_implementer()->Iterate(&v); |
| 5631 } | 5672 } |
| 5632 | 5673 |
| 5633 #endif | 5674 #endif |
| 5634 | 5675 |
| 5635 #ifdef ENABLE_SLOW_DCHECKS | |
| 5636 | |
| 5637 class CountHandleVisitor : public ObjectVisitor { | |
| 5638 public: | |
| 5639 explicit CountHandleVisitor(Object* object) : object_(object) {} | |
| 5640 | |
| 5641 void VisitPointers(Object** start, Object** end) override { | |
| 5642 for (Object** p = start; p < end; p++) { | |
| 5643 if (object_ == reinterpret_cast<Object*>(*p)) count_++; | |
| 5644 } | |
| 5645 } | |
| 5646 | |
| 5647 int count() { return count_; } | |
| 5648 | |
| 5649 private: | |
| 5650 Object* object_; | |
| 5651 int count_ = 0; | |
| 5652 }; | |
| 5653 | |
| 5654 int Heap::CountHandlesForObject(Object* object) { | |
| 5655 CountHandleVisitor v(object); | |
| 5656 isolate_->handle_scope_implementer()->Iterate(&v); | |
| 5657 return v.count(); | |
| 5658 } | |
| 5659 #endif | |
| 5660 | |
| 5661 class CheckHandleCountVisitor : public ObjectVisitor { | 5676 class CheckHandleCountVisitor : public ObjectVisitor { |
| 5662 public: | 5677 public: |
| 5663 CheckHandleCountVisitor() : handle_count_(0) {} | 5678 CheckHandleCountVisitor() : handle_count_(0) {} |
| 5664 ~CheckHandleCountVisitor() override { | 5679 ~CheckHandleCountVisitor() override { |
| 5665 CHECK(handle_count_ < HandleScope::kCheckHandleThreshold); | 5680 CHECK(handle_count_ < HandleScope::kCheckHandleThreshold); |
| 5666 } | 5681 } |
| 5667 void VisitPointers(Object** start, Object** end) override { | 5682 void VisitPointers(Object** start, Object** end) override { |
| 5668 handle_count_ += end - start; | 5683 handle_count_ += end - start; |
| 5669 } | 5684 } |
| 5670 | 5685 |
| (...skipping 689 matching lines...) |
| 6360 } | 6375 } |
| 6361 | 6376 |
| 6362 | 6377 |
| 6363 // static | 6378 // static |
| 6364 int Heap::GetStaticVisitorIdForMap(Map* map) { | 6379 int Heap::GetStaticVisitorIdForMap(Map* map) { |
| 6365 return StaticVisitorBase::GetVisitorId(map); | 6380 return StaticVisitorBase::GetVisitorId(map); |
| 6366 } | 6381 } |
| 6367 | 6382 |
| 6368 } // namespace internal | 6383 } // namespace internal |
| 6369 } // namespace v8 | 6384 } // namespace v8 |