| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_HEAP_HEAP_H_ | 5 #ifndef V8_HEAP_HEAP_H_ |
| 6 #define V8_HEAP_HEAP_H_ | 6 #define V8_HEAP_HEAP_H_ |
| 7 | 7 |
| 8 #include <cmath> | 8 #include <cmath> |
| 9 #include <map> | 9 #include <map> |
| 10 | 10 |
| (...skipping 680 matching lines...) |
| 691 static const int kReduceMemoryFootprintMask = 1; | 691 static const int kReduceMemoryFootprintMask = 1; |
| 692 static const int kAbortIncrementalMarkingMask = 2; | 692 static const int kAbortIncrementalMarkingMask = 2; |
| 693 static const int kFinalizeIncrementalMarkingMask = 4; | 693 static const int kFinalizeIncrementalMarkingMask = 4; |
| 694 | 694 |
| 695 // Making the heap iterable requires us to abort incremental marking. | 695 // Making the heap iterable requires us to abort incremental marking. |
| 696 static const int kMakeHeapIterableMask = kAbortIncrementalMarkingMask; | 696 static const int kMakeHeapIterableMask = kAbortIncrementalMarkingMask; |
| 697 | 697 |
| 698 // The roots that have an index less than this are always in old space. | 698 // The roots that have an index less than this are always in old space. |
| 699 static const int kOldSpaceRoots = 0x20; | 699 static const int kOldSpaceRoots = 0x20; |
| 700 | 700 |
| 701 // The minimum size of a HeapObject on the heap. |
| 702 static const int kMinObjectSizeInWords = 2; |
| 703 |
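A side note on the new constant: because the smallest heap object spans two words (typically a map pointer plus one payload word), any allocation request is effectively at least that large. A minimal sketch, assuming a word equals a pointer; the helper name is hypothetical:

```cpp
#include <algorithm>
#include <cstdio>

static const int kPointerSize = static_cast<int>(sizeof(void*));
static const int kMinObjectSizeInWords = 2;

// Round a requested size up to the smallest object the heap can represent.
int ClampToMinimumObjectSize(int size_in_bytes) {
  return std::max(size_in_bytes, kMinObjectSizeInWords * kPointerSize);
}

int main() {
  // Prints 16 on a 64-bit target, 8 on a 32-bit one.
  std::printf("%d\n", ClampToMinimumObjectSize(0));
}
```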
| 701 STATIC_ASSERT(kUndefinedValueRootIndex == | 704 STATIC_ASSERT(kUndefinedValueRootIndex == |
| 702 Internals::kUndefinedValueRootIndex); | 705 Internals::kUndefinedValueRootIndex); |
| 703 STATIC_ASSERT(kNullValueRootIndex == Internals::kNullValueRootIndex); | 706 STATIC_ASSERT(kNullValueRootIndex == Internals::kNullValueRootIndex); |
| 704 STATIC_ASSERT(kTrueValueRootIndex == Internals::kTrueValueRootIndex); | 707 STATIC_ASSERT(kTrueValueRootIndex == Internals::kTrueValueRootIndex); |
| 705 STATIC_ASSERT(kFalseValueRootIndex == Internals::kFalseValueRootIndex); | 708 STATIC_ASSERT(kFalseValueRootIndex == Internals::kFalseValueRootIndex); |
| 706 STATIC_ASSERT(kempty_stringRootIndex == Internals::kEmptyStringRootIndex); | 709 STATIC_ASSERT(kempty_stringRootIndex == Internals::kEmptyStringRootIndex); |
| 707 | 710 |
| 708 // Calculates the maximum amount of filler that could be required by the | 711 // Calculates the maximum amount of filler that could be required by the |
| 709 // given alignment. | 712 // given alignment. |
| 710 static int GetMaximumFillToAlign(AllocationAlignment alignment); | 713 static int GetMaximumFillToAlign(AllocationAlignment alignment); |
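For context, the "maximum fill" is the worst-case number of filler bytes that could be needed in front of an object to reach the requested alignment. A minimal sketch of the idea under the usual word/double sizes (not V8's implementation; the function name here is illustrative):

```cpp
#include <cstdio>

enum AllocationAlignment { kWordAligned, kDoubleAligned, kDoubleUnaligned };

// Worst-case filler bytes needed in front of an object for `alignment`.
int MaxFillToAlign(AllocationAlignment alignment) {
  const int kPointerSize = static_cast<int>(sizeof(void*));
  const int kDoubleSize = static_cast<int>(sizeof(double));
  switch (alignment) {
    case kDoubleAligned:
    case kDoubleUnaligned:
      // Only non-zero where doubles are wider than the allocation word,
      // e.g. 32-bit targets need at most one word of filler.
      return kDoubleSize - kPointerSize;
    case kWordAligned:
    default:
      return 0;
  }
}

int main() { std::printf("%d\n", MaxFillToAlign(kDoubleAligned)); }
```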
| (...skipping 38 matching lines...) |
| 749 static double HeapGrowingFactor(double gc_speed, double mutator_speed); | 752 static double HeapGrowingFactor(double gc_speed, double mutator_speed); |
| 750 | 753 |
| 751 // Copies a block of memory from src to dst. The block size must be a | 754 // Copies a block of memory from src to dst. The block size must be a |
| 752 // multiple of the pointer size. | 755 // multiple of the pointer size. |
| 753 static inline void CopyBlock(Address dst, Address src, int byte_size); | 756 static inline void CopyBlock(Address dst, Address src, int byte_size); |
| 754 | 757 |
| 755 // Optimized version of memmove for blocks with pointer-size-aligned sizes | 758 // Optimized version of memmove for blocks with pointer-size-aligned sizes |
| 756 // and pointer-size-aligned addresses. | 759 // and pointer-size-aligned addresses. |
| 757 static inline void MoveBlock(Address dst, Address src, int byte_size); | 760 static inline void MoveBlock(Address dst, Address src, int byte_size); |
| 758 | 761 |
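To make the CopyBlock/MoveBlock contract concrete: both take a byte count that is a multiple of the pointer size, and only MoveBlock tolerates overlapping ranges. A minimal sketch of the semantics only (the real helpers are optimized word copies; the names and the Address alias below are assumptions):

```cpp
#include <cstdint>
#include <cstring>

using Address = uint8_t*;  // assumed: Address behaves like a byte pointer

// CopyBlock-like: non-overlapping ranges, byte_size a multiple of the pointer size.
inline void CopyBlockSketch(Address dst, Address src, int byte_size) {
  std::memcpy(dst, src, static_cast<std::size_t>(byte_size));
}

// MoveBlock-like: same contract, but overlapping ranges are allowed.
inline void MoveBlockSketch(Address dst, Address src, int byte_size) {
  std::memmove(dst, src, static_cast<std::size_t>(byte_size));
}
```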
| 762 // Determines a static visitor id based on the given {map} that can then be |
| 763 // stored on the map to facilitate fast dispatch for {StaticVisitorBase}. |
| 764 static int GetStaticVisitorIdForMap(Map* map); |
| 765 |
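The new GetStaticVisitorIdForMap hook lets a map cache a small id that visitors later use for table-based dispatch. A standalone sketch of that dispatch pattern; every name below is illustrative rather than V8's:

```cpp
#include <cstdio>

struct Map { int visitor_id; };         // the cached id lives on the map
struct HeapObject { Map* map; };

using VisitFn = void (*)(HeapObject*);
void VisitFixedArray(HeapObject*) { std::puts("visiting a fixed array"); }
void VisitJSObject(HeapObject*) { std::puts("visiting a JS object"); }

// Visitors index this table by the cached id instead of re-examining the
// instance type on every visit.
VisitFn kVisitorTable[] = {VisitFixedArray, VisitJSObject};

void Visit(HeapObject* object) {
  kVisitorTable[object->map->visitor_id](object);
}

int main() {
  Map fixed_array_map{0};
  HeapObject array{&fixed_array_map};
  Visit(&array);  // dispatches through the cached id
}
```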
| 759 // Notifies the heap that it is ok to start marking or other activities | 766 // Notifies the heap that it is ok to start marking or other activities |
| 760 // that should not happen during deserialization. | 767 // that should not happen during deserialization. |
| 761 void NotifyDeserializationComplete(); | 768 void NotifyDeserializationComplete(); |
| 762 | 769 |
| 763 intptr_t old_generation_allocation_limit() const { | 770 intptr_t old_generation_allocation_limit() const { |
| 764 return old_generation_allocation_limit_; | 771 return old_generation_allocation_limit_; |
| 765 } | 772 } |
| 766 | 773 |
| 767 bool always_allocate() { return always_allocate_scope_count_.Value() != 0; } | 774 bool always_allocate() { return always_allocate_scope_count_.Value() != 0; } |
| 768 | 775 |
| (...skipping 719 matching lines...) |
| 1488 | 1495 |
| 1489 // Creates a filler object if needed for alignment and returns a heap object | 1496 // Creates a filler object if needed for alignment and returns a heap object |
| 1490 // immediately after it. If any space is left after the returned object, | 1497 // immediately after it. If any space is left after the returned object, |
| 1491 // another filler object is created so the over-allocated memory is iterable. | 1498 // another filler object is created so the over-allocated memory is iterable. |
| 1492 MUST_USE_RESULT HeapObject* AlignWithFiller(HeapObject* object, | 1499 MUST_USE_RESULT HeapObject* AlignWithFiller(HeapObject* object, |
| 1493 int object_size, | 1500 int object_size, |
| 1494 int allocation_size, | 1501 int allocation_size, |
| 1495 AllocationAlignment alignment); | 1502 AllocationAlignment alignment); |
| 1496 | 1503 |
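To illustrate what AlignWithFiller does with the over-sized allocation: a minimal address-arithmetic sketch, assuming the alignment is a power-of-two byte count (rather than V8's AllocationAlignment enum) and reducing filler creation to a stub; all names here are hypothetical:

```cpp
#include <cstdint>

using Address = std::uintptr_t;  // addresses as plain integers for the sketch

void CreateFillerAt(Address /*start*/, int /*size_in_bytes*/) {
  // The real heap writes a filler map into the gap so it stays iterable.
}

// Place an object of `object_size` bytes, aligned to `alignment` (a power of
// two), inside an over-sized allocation of `allocation_size` bytes at `base`.
Address AlignWithFillerSketch(Address base, int object_size,
                              int allocation_size, int alignment) {
  Address aligned =
      (base + alignment - 1) & ~static_cast<Address>(alignment - 1);
  int pre = static_cast<int>(aligned - base);
  if (pre > 0) CreateFillerAt(base, pre);  // filler in front of the object
  int post = allocation_size - pre - object_size;
  if (post > 0) CreateFillerAt(aligned + object_size, post);  // trailing filler
  return aligned;  // the usable object starts here
}
```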
| 1497 // =========================================================================== | 1504 // =========================================================================== |
| 1498 // ArrayBufferTracker. ======================================================= | 1505 // ArrayBuffer tracking. ===================================================== |
| 1499 // =========================================================================== | 1506 // =========================================================================== |
| 1507 |
| 1500 void RegisterNewArrayBuffer(JSArrayBuffer* buffer); | 1508 void RegisterNewArrayBuffer(JSArrayBuffer* buffer); |
| 1501 void UnregisterArrayBuffer(JSArrayBuffer* buffer); | 1509 void UnregisterArrayBuffer(JSArrayBuffer* buffer); |
| 1502 | 1510 |
| 1503 inline ArrayBufferTracker* array_buffer_tracker() { | 1511 inline ArrayBufferTracker* array_buffer_tracker() { |
| 1504 return array_buffer_tracker_; | 1512 return array_buffer_tracker_; |
| 1505 } | 1513 } |
| 1506 | 1514 |
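For a sense of what registering and unregistering buffers buys the heap: a standalone bookkeeping sketch that tracks retained external bytes per buffer (illustrative only; ArrayBufferTracker's real interface is not part of this diff):

```cpp
#include <cstddef>
#include <unordered_map>

// Tracks live external buffers and the bytes they retain.
class ArrayBufferTrackerSketch {
 public:
  void RegisterNew(void* buffer, std::size_t byte_length) {
    live_[buffer] = byte_length;
    retained_bytes_ += byte_length;
  }
  void Unregister(void* buffer) {
    auto it = live_.find(buffer);
    if (it == live_.end()) return;
    retained_bytes_ -= it->second;
    live_.erase(it);
  }
  std::size_t retained_bytes() const { return retained_bytes_; }

 private:
  std::unordered_map<void*, std::size_t> live_;
  std::size_t retained_bytes_ = 0;
};
```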
| 1507 // ============================================================================= | 1515 // ============================================================================= |
| 1508 | 1516 |
| 1509 #ifdef VERIFY_HEAP | 1517 #ifdef VERIFY_HEAP |
| (...skipping 1181 matching lines...) |
| 2691 DisallowHeapAllocation no_allocation; // i.e. no gc allowed. | 2699 DisallowHeapAllocation no_allocation; // i.e. no gc allowed. |
| 2692 | 2700 |
| 2693 private: | 2701 private: |
| 2694 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer); | 2702 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer); |
| 2695 }; | 2703 }; |
| 2696 #endif // DEBUG | 2704 #endif // DEBUG |
| 2697 } | 2705 } |
| 2698 } // namespace v8::internal | 2706 } // namespace v8::internal |
| 2699 | 2707 |
| 2700 #endif // V8_HEAP_HEAP_H_ | 2708 #endif // V8_HEAP_HEAP_H_ |