Chromium Code Reviews

Unified Diff: src/heap/heap.h

Issue 1992913004: [heap] Do not invoke GC to make heap iterable. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 4 years, 6 months ago
@@ old line 1, new line 1 @@
  // Copyright 2012 the V8 project authors. All rights reserved.
  // Use of this source code is governed by a BSD-style license that can be
  // found in the LICENSE file.

  #ifndef V8_HEAP_HEAP_H_
  #define V8_HEAP_HEAP_H_

  #include <cmath>
  #include <map>

(...skipping 523 matching lines...)
@@ old line 534, new line 534 @@
  static const double kMaxHeapGrowingFactor;
  static const double kMaxHeapGrowingFactorMemoryConstrained;
  static const double kMaxHeapGrowingFactorIdle;
  static const double kTargetMutatorUtilization;

  static const int kNoGCFlags = 0;
  static const int kReduceMemoryFootprintMask = 1;
  static const int kAbortIncrementalMarkingMask = 2;
  static const int kFinalizeIncrementalMarkingMask = 4;

- // Making the heap iterable requires us to abort incremental marking.
- static const int kMakeHeapIterableMask = kAbortIncrementalMarkingMask;
-
  // The roots that have an index less than this are always in old space.
  static const int kOldSpaceRoots = 0x20;

  // The minimum size of a HeapObject on the heap.
  static const int kMinObjectSizeInWords = 2;

  STATIC_ASSERT(kUndefinedValueRootIndex ==
                Internals::kUndefinedValueRootIndex);
  STATIC_ASSERT(kTheHoleValueRootIndex == Internals::kTheHoleValueRootIndex);
  STATIC_ASSERT(kNullValueRootIndex == Internals::kNullValueRootIndex);
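Note on the flag constants in this hunk: they are bit masks that a caller combines into the int taken by Heap::CollectAllGarbage (declared further down in this header). A minimal sketch of such a call site, assuming a Heap* already obtained inside V8 (for example from the owning Isolate); the helper name and reason string are hypothetical:

// Illustrative sketch, not part of this CL. With kMakeHeapIterableMask gone,
// a caller that previously relied on it would name kAbortIncrementalMarkingMask
// directly.
void CollectWithReducedFootprint(Heap* heap) {  // hypothetical helper
  const int flags =
      Heap::kReduceMemoryFootprintMask | Heap::kAbortIncrementalMarkingMask;
  heap->CollectAllGarbage(flags, "example: reduce memory footprint");
}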
(...skipping 100 matching lines...)
@@ old line 657, new line 654 @@
  // start and hence is only valid if there is only a single reference to it.
  FixedArrayBase* LeftTrimFixedArray(FixedArrayBase* obj, int elements_to_trim);

  // Trim the given array from the right.
  template<Heap::InvocationMode mode>
  void RightTrimFixedArray(FixedArrayBase* obj, int elements_to_trim);

  // Converts the given boolean condition to JavaScript boolean value.
  inline Oddball* ToBoolean(bool condition);

- // Check whether the heap is currently iterable.
- bool IsHeapIterable();
-
  // Notify the heap that a context has been disposed.
  int NotifyContextDisposed(bool dependant_context);

  void set_native_contexts_list(Object* object) {
    native_contexts_list_ = object;
  }
  Object* native_contexts_list() const { return native_contexts_list_; }

  void set_allocation_sites_list(Object* object) {
    allocation_sites_list_ = object;
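Per the comment above LeftTrimFixedArray, the returned object is the array's new start and is only valid while it is the sole reference, so a caller must replace its pointer with the return value. A hedged sketch of an internal call site (the helper name and element count are illustrative, not taken from this CL):

// Illustrative sketch only: drop the first two elements and hand back the
// trimmed array; the caller must use the returned object from now on, since
// the original pointer no longer refers to a valid object start.
FixedArrayBase* DropFirstTwoElements(Heap* heap, FixedArrayBase* elements) {
  return heap->LeftTrimFixedArray(elements, 2);
}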
(...skipping 346 matching lines...)
@@ old line 1026, new line 1020 @@
  // Methods triggering GCs. ===================================================
  // ===========================================================================

  // Performs garbage collection operation.
  // Returns whether there is a chance that another major GC could
  // collect more garbage.
  inline bool CollectGarbage(
      AllocationSpace space, const char* gc_reason = NULL,
      const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);

- // Performs a full garbage collection. If (flags & kMakeHeapIterableMask) is
- // non-zero, then the slower precise sweeper is used, which leaves the heap
- // in a state where we can iterate over the heap visiting all objects.
+ // Performs a full garbage collection.
  void CollectAllGarbage(
      int flags = kFinalizeIncrementalMarkingMask, const char* gc_reason = NULL,
      const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);

  // Last hope GC, should try to squeeze as much as possible.
  void CollectAllAvailableGarbage(const char* gc_reason = NULL);

  // Reports and external memory pressure event, either performs a major GC or
  // completes incremental marking in order to free external resources.
  void ReportExternalMemoryPressure(const char* gc_reason = NULL);
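CollectGarbage reports whether another major GC might still reclaim more memory, which is the kind of contract a last-resort collection such as CollectAllAvailableGarbage can build on. A hedged sketch of a retry loop over that return value (illustrative only, not this CL's code; OLD_SPACE is V8's old-generation AllocationSpace, and the attempt bound is arbitrary):

// Illustrative sketch: keep collecting while the heap reports that another
// major GC could still free more memory, up to an arbitrary bound.
void SqueezeHeap(Heap* heap) {  // hypothetical helper
  const int kMaxAttempts = 7;   // arbitrary bound for this sketch
  for (int attempt = 0; attempt < kMaxAttempts; attempt++) {
    if (!heap->CollectGarbage(OLD_SPACE, "example: squeeze heap")) break;
  }
}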
(...skipping 481 matching lines...)
@@ old line 1530, new line 1522 @@
  // Checks whether a global GC is necessary
  GarbageCollector SelectGarbageCollector(AllocationSpace space,
                                          const char** reason);

  // Make sure there is a filler value behind the top of the new space
  // so that the GC does not confuse some unintialized/stale memory
  // with the allocation memento of the object at the top
  void EnsureFillerObjectAtTop();

  // Ensure that we have swept all spaces in such a way that we can iterate
- // over all objects. May cause a GC.
+ // over all objects.
  void MakeHeapIterable();

  // Performs garbage collection operation.
  // Returns whether there is a chance that another major GC could
  // collect more garbage.
  bool CollectGarbage(
      GarbageCollector collector, const char* gc_reason,
      const char* collector_reason,
      const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);

(...skipping 818 matching lines...)
@@ old line 2369, new line 2361 @@
  ObjectIterator* next();

  private:
  ObjectIterator* CreateIterator();

  Heap* heap_;
  int current_space_; // from enum AllocationSpace.
  ObjectIterator* iterator_; // object iterator for the current space.
  };

+ enum class HeapObjectsFiltering { kNoFiltering, kFilterUnreachable };

  // A HeapIterator provides iteration over the whole heap. It
  // aggregates the specific iterators for the different spaces as
  // these can only iterate over one space only.
  //
  // HeapIterator ensures there is no allocation during its lifetime
  // (using an embedded DisallowHeapAllocation instance).
  //
  // HeapIterator can skip free list nodes (that is, de-allocated heap
  // objects that still remain in the heap). As implementation of free
  // nodes filtering uses GC marks, it can't be used during MS/MC GC
  // phases. Also, it is forbidden to interrupt iteration in this mode,
  // as this will leave heap objects marked (and thus, unusable).
  class HeapIterator BASE_EMBEDDED {
  public:
- enum HeapObjectsFiltering { kNoFiltering, kFilterUnreachable };
-
- explicit HeapIterator(Heap* heap,
-                       HeapObjectsFiltering filtering = kNoFiltering);
+ explicit HeapIterator(Heap* heap, HeapObjectsFiltering filtering =
+                                       HeapObjectsFiltering::kNoFiltering);
  ~HeapIterator();

  HeapObject* next();

  private:
- struct MakeHeapIterableHelper {
-   explicit MakeHeapIterableHelper(Heap* heap) { heap->MakeHeapIterable(); }
- };
-
- HeapObject* NextObject();
-
- // The following two fields need to be declared in this order. Initialization
- // order guarantees that we first make the heap iterable (which may involve
- // allocations) and only then lock it down by not allowing further
- // allocations.
- MakeHeapIterableHelper make_heap_iterable_helper_;
- DisallowHeapAllocation no_heap_allocation_;
+ DisallowHeapAllocation* disallow_heap_allocation_;

  Heap* heap_;
- HeapObjectsFiltering filtering_;
- HeapObjectsFilter* filter_;
  // Space iterator for iterating all the spaces.
  SpaceIterator* space_iterator_;
  // Object iterator for the space currently being iterated.
  ObjectIterator* object_iterator_;
  };
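Putting the new pieces together: a heap walk constructs a HeapIterator and pulls objects with next() until it returns NULL, with the filtering mode now spelled through the namespace-level enum class. A hedged usage sketch (hypothetical caller, not part of this CL):

// Illustrative sketch: visit every object in the heap. next() yields NULL once
// all spaces are exhausted, and no allocation may happen inside the loop since
// HeapIterator disallows heap allocation for its lifetime.
void VisitAllHeapObjects(Heap* heap) {  // hypothetical helper
  HeapIterator iterator(heap, HeapObjectsFiltering::kNoFiltering);
  for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
    // ... inspect obj here, without allocating ...
  }
}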

  // Cache for mapping (map, property name) into field offset.
  // Cleared at startup and prior to mark sweep collection.
  class KeyedLookupCache {
(...skipping 218 matching lines...)
@@ old line 2647, new line 2625 @@
  friend class LargeObjectSpace;
  friend class NewSpace;
  friend class PagedSpace;
  DISALLOW_COPY_AND_ASSIGN(AllocationObserver);
  };

  } // namespace internal
  } // namespace v8

  #endif // V8_HEAP_HEAP_H_