OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_HEAP_HEAP_H_ | 5 #ifndef V8_HEAP_HEAP_H_ |
6 #define V8_HEAP_HEAP_H_ | 6 #define V8_HEAP_HEAP_H_ |
7 | 7 |
8 #include <cmath> | 8 #include <cmath> |
9 #include <map> | 9 #include <map> |
10 | 10 |
(...skipping 523 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
534 static const double kMaxHeapGrowingFactor; | 534 static const double kMaxHeapGrowingFactor; |
535 static const double kMaxHeapGrowingFactorMemoryConstrained; | 535 static const double kMaxHeapGrowingFactorMemoryConstrained; |
536 static const double kMaxHeapGrowingFactorIdle; | 536 static const double kMaxHeapGrowingFactorIdle; |
537 static const double kTargetMutatorUtilization; | 537 static const double kTargetMutatorUtilization; |
538 | 538 |
539 static const int kNoGCFlags = 0; | 539 static const int kNoGCFlags = 0; |
540 static const int kReduceMemoryFootprintMask = 1; | 540 static const int kReduceMemoryFootprintMask = 1; |
541 static const int kAbortIncrementalMarkingMask = 2; | 541 static const int kAbortIncrementalMarkingMask = 2; |
542 static const int kFinalizeIncrementalMarkingMask = 4; | 542 static const int kFinalizeIncrementalMarkingMask = 4; |
543 | 543 |
| 544 // Making the heap iterable requires us to abort incremental marking. |
| 545 static const int kMakeHeapIterableMask = kAbortIncrementalMarkingMask; |
| 546 |
544 // The roots that have an index less than this are always in old space. | 547 // The roots that have an index less than this are always in old space. |
545 static const int kOldSpaceRoots = 0x20; | 548 static const int kOldSpaceRoots = 0x20; |
546 | 549 |
547 // The minimum size of a HeapObject on the heap. | 550 // The minimum size of a HeapObject on the heap. |
548 static const int kMinObjectSizeInWords = 2; | 551 static const int kMinObjectSizeInWords = 2; |
549 | 552 |
550 STATIC_ASSERT(kUndefinedValueRootIndex == | 553 STATIC_ASSERT(kUndefinedValueRootIndex == |
551 Internals::kUndefinedValueRootIndex); | 554 Internals::kUndefinedValueRootIndex); |
552 STATIC_ASSERT(kTheHoleValueRootIndex == Internals::kTheHoleValueRootIndex); | 555 STATIC_ASSERT(kTheHoleValueRootIndex == Internals::kTheHoleValueRootIndex); |
553 STATIC_ASSERT(kNullValueRootIndex == Internals::kNullValueRootIndex); | 556 STATIC_ASSERT(kNullValueRootIndex == Internals::kNullValueRootIndex); |
(...skipping 100 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
654 // start and hence is only valid if there is only a single reference to it. | 657 // start and hence is only valid if there is only a single reference to it. |
655 FixedArrayBase* LeftTrimFixedArray(FixedArrayBase* obj, int elements_to_trim); | 658 FixedArrayBase* LeftTrimFixedArray(FixedArrayBase* obj, int elements_to_trim); |
656 | 659 |
657 // Trim the given array from the right. | 660 // Trim the given array from the right. |
658 template<Heap::InvocationMode mode> | 661 template<Heap::InvocationMode mode> |
659 void RightTrimFixedArray(FixedArrayBase* obj, int elements_to_trim); | 662 void RightTrimFixedArray(FixedArrayBase* obj, int elements_to_trim); |
660 | 663 |
661 // Converts the given boolean condition to JavaScript boolean value. | 664 // Converts the given boolean condition to JavaScript boolean value. |
662 inline Oddball* ToBoolean(bool condition); | 665 inline Oddball* ToBoolean(bool condition); |
663 | 666 |
| 667 // Check whether the heap is currently iterable. |
| 668 bool IsHeapIterable(); |
| 669 |
664 // Notify the heap that a context has been disposed. | 670 // Notify the heap that a context has been disposed. |
665 int NotifyContextDisposed(bool dependant_context); | 671 int NotifyContextDisposed(bool dependant_context); |
666 | 672 |
667 void set_native_contexts_list(Object* object) { | 673 void set_native_contexts_list(Object* object) { |
668 native_contexts_list_ = object; | 674 native_contexts_list_ = object; |
669 } | 675 } |
670 Object* native_contexts_list() const { return native_contexts_list_; } | 676 Object* native_contexts_list() const { return native_contexts_list_; } |
671 | 677 |
672 void set_allocation_sites_list(Object* object) { | 678 void set_allocation_sites_list(Object* object) { |
673 allocation_sites_list_ = object; | 679 allocation_sites_list_ = object; |
(...skipping 346 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1020 // Methods triggering GCs. =================================================== | 1026 // Methods triggering GCs. =================================================== |
1021 // =========================================================================== | 1027 // =========================================================================== |
1022 | 1028 |
1023 // Performs garbage collection operation. | 1029 // Performs garbage collection operation. |
1024 // Returns whether there is a chance that another major GC could | 1030 // Returns whether there is a chance that another major GC could |
1025 // collect more garbage. | 1031 // collect more garbage. |
1026 inline bool CollectGarbage( | 1032 inline bool CollectGarbage( |
1027 AllocationSpace space, const char* gc_reason = NULL, | 1033 AllocationSpace space, const char* gc_reason = NULL, |
1028 const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags); | 1034 const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags); |
1029 | 1035 |
1030 // Performs a full garbage collection. | 1036 // Performs a full garbage collection. If (flags & kMakeHeapIterableMask) is |
| 1037 // non-zero, then the slower precise sweeper is used, which leaves the heap |
| 1038 // in a state where we can iterate over the heap visiting all objects. |
1031 void CollectAllGarbage( | 1039 void CollectAllGarbage( |
1032 int flags = kFinalizeIncrementalMarkingMask, const char* gc_reason = NULL, | 1040 int flags = kFinalizeIncrementalMarkingMask, const char* gc_reason = NULL, |
1033 const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags); | 1041 const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags); |
1034 | 1042 |
1035 // Last hope GC, should try to squeeze as much as possible. | 1043 // Last hope GC, should try to squeeze as much as possible. |
1036 void CollectAllAvailableGarbage(const char* gc_reason = NULL); | 1044 void CollectAllAvailableGarbage(const char* gc_reason = NULL); |
1037 | 1045 |
1038 // Reports an external memory pressure event, either performs a major GC or | 1046 // Reports an external memory pressure event, either performs a major GC or |
1039 // completes incremental marking in order to free external resources. | 1047 // completes incremental marking in order to free external resources. |
1040 void ReportExternalMemoryPressure(const char* gc_reason = NULL); | 1048 void ReportExternalMemoryPressure(const char* gc_reason = NULL); |
(...skipping 481 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1522 // Checks whether a global GC is necessary | 1530 // Checks whether a global GC is necessary |
1523 GarbageCollector SelectGarbageCollector(AllocationSpace space, | 1531 GarbageCollector SelectGarbageCollector(AllocationSpace space, |
1524 const char** reason); | 1532 const char** reason); |
1525 | 1533 |
1526 // Make sure there is a filler value behind the top of the new space | 1534 // Make sure there is a filler value behind the top of the new space |
1527 // so that the GC does not confuse some uninitialized/stale memory | 1535 // so that the GC does not confuse some uninitialized/stale memory |
1528 // with the allocation memento of the object at the top | 1536 // with the allocation memento of the object at the top |
1529 void EnsureFillerObjectAtTop(); | 1537 void EnsureFillerObjectAtTop(); |
1530 | 1538 |
1531 // Ensure that we have swept all spaces in such a way that we can iterate | 1539 // Ensure that we have swept all spaces in such a way that we can iterate |
1532 // over all objects. | 1540 // over all objects. May cause a GC. |
1533 void MakeHeapIterable(); | 1541 void MakeHeapIterable(); |
1534 | 1542 |
1535 // Performs garbage collection operation. | 1543 // Performs garbage collection operation. |
1536 // Returns whether there is a chance that another major GC could | 1544 // Returns whether there is a chance that another major GC could |
1537 // collect more garbage. | 1545 // collect more garbage. |
1538 bool CollectGarbage( | 1546 bool CollectGarbage( |
1539 GarbageCollector collector, const char* gc_reason, | 1547 GarbageCollector collector, const char* gc_reason, |
1540 const char* collector_reason, | 1548 const char* collector_reason, |
1541 const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags); | 1549 const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags); |
1542 | 1550 |
(...skipping 818 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2361 ObjectIterator* next(); | 2369 ObjectIterator* next(); |
2362 | 2370 |
2363 private: | 2371 private: |
2364 ObjectIterator* CreateIterator(); | 2372 ObjectIterator* CreateIterator(); |
2365 | 2373 |
2366 Heap* heap_; | 2374 Heap* heap_; |
2367 int current_space_; // from enum AllocationSpace. | 2375 int current_space_; // from enum AllocationSpace. |
2368 ObjectIterator* iterator_; // object iterator for the current space. | 2376 ObjectIterator* iterator_; // object iterator for the current space. |
2369 }; | 2377 }; |
2370 | 2378 |
2371 enum class HeapObjectsFiltering { kNoFiltering, kFilterUnreachable }; | |
2372 | 2379 |
2373 // A HeapIterator provides iteration over the whole heap. It | 2380 // A HeapIterator provides iteration over the whole heap. It |
2374 // aggregates the specific iterators for the different spaces as | 2381 // aggregates the specific iterators for the different spaces as |
2375 // these can iterate over one space only. | 2382 // these can iterate over one space only. |
2376 // | 2383 // |
2377 // HeapIterator ensures there is no allocation during its lifetime | 2384 // HeapIterator ensures there is no allocation during its lifetime |
2378 // (using an embedded DisallowHeapAllocation instance). | 2385 // (using an embedded DisallowHeapAllocation instance). |
2379 // | 2386 // |
2380 // HeapIterator can skip free list nodes (that is, de-allocated heap | 2387 // HeapIterator can skip free list nodes (that is, de-allocated heap |
2381 // objects that still remain in the heap). As implementation of free | 2388 // objects that still remain in the heap). As implementation of free |
2382 // nodes filtering uses GC marks, it can't be used during MS/MC GC | 2389 // nodes filtering uses GC marks, it can't be used during MS/MC GC |
2383 // phases. Also, it is forbidden to interrupt iteration in this mode, | 2390 // phases. Also, it is forbidden to interrupt iteration in this mode, |
2384 // as this will leave heap objects marked (and thus, unusable). | 2391 // as this will leave heap objects marked (and thus, unusable). |
2385 class HeapIterator BASE_EMBEDDED { | 2392 class HeapIterator BASE_EMBEDDED { |
2386 public: | 2393 public: |
2387 explicit HeapIterator(Heap* heap, HeapObjectsFiltering filtering = | 2394 enum HeapObjectsFiltering { kNoFiltering, kFilterUnreachable }; |
2388 HeapObjectsFiltering::kNoFiltering); | 2395 |
| 2396 explicit HeapIterator(Heap* heap, |
| 2397 HeapObjectsFiltering filtering = kNoFiltering); |
2389 ~HeapIterator(); | 2398 ~HeapIterator(); |
2390 | 2399 |
2391 HeapObject* next(); | 2400 HeapObject* next(); |
2392 | 2401 |
2393 private: | 2402 private: |
2394 DisallowHeapAllocation* disallow_heap_allocation_; | 2403 struct MakeHeapIterableHelper { |
| 2404 explicit MakeHeapIterableHelper(Heap* heap) { heap->MakeHeapIterable(); } |
| 2405 }; |
| 2406 |
| 2407 HeapObject* NextObject(); |
| 2408 |
| 2409 // The following two fields need to be declared in this order. Initialization |
| 2410 // order guarantees that we first make the heap iterable (which may involve |
| 2411 // allocations) and only then lock it down by not allowing further |
| 2412 // allocations. |
| 2413 MakeHeapIterableHelper make_heap_iterable_helper_; |
| 2414 DisallowHeapAllocation no_heap_allocation_; |
2395 | 2415 |
2396 Heap* heap_; | 2416 Heap* heap_; |
| 2417 HeapObjectsFiltering filtering_; |
| 2418 HeapObjectsFilter* filter_; |
2397 // Space iterator for iterating all the spaces. | 2419 // Space iterator for iterating all the spaces. |
2398 SpaceIterator* space_iterator_; | 2420 SpaceIterator* space_iterator_; |
2399 // Object iterator for the space currently being iterated. | 2421 // Object iterator for the space currently being iterated. |
2400 ObjectIterator* object_iterator_; | 2422 ObjectIterator* object_iterator_; |
2401 }; | 2423 }; |
2402 | 2424 |
2403 | 2425 |
2404 // Cache for mapping (map, property name) into field offset. | 2426 // Cache for mapping (map, property name) into field offset. |
2405 // Cleared at startup and prior to mark sweep collection. | 2427 // Cleared at startup and prior to mark sweep collection. |
2406 class KeyedLookupCache { | 2428 class KeyedLookupCache { |
(...skipping 218 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2625 friend class LargeObjectSpace; | 2647 friend class LargeObjectSpace; |
2626 friend class NewSpace; | 2648 friend class NewSpace; |
2627 friend class PagedSpace; | 2649 friend class PagedSpace; |
2628 DISALLOW_COPY_AND_ASSIGN(AllocationObserver); | 2650 DISALLOW_COPY_AND_ASSIGN(AllocationObserver); |
2629 }; | 2651 }; |
2630 | 2652 |
2631 } // namespace internal | 2653 } // namespace internal |
2632 } // namespace v8 | 2654 } // namespace v8 |
2633 | 2655 |
2634 #endif // V8_HEAP_HEAP_H_ | 2656 #endif // V8_HEAP_HEAP_H_ |
OLD | NEW |