Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_HEAP_HEAP_H_ | 5 #ifndef V8_HEAP_HEAP_H_ |
| 6 #define V8_HEAP_HEAP_H_ | 6 #define V8_HEAP_HEAP_H_ |
| 7 | 7 |
| 8 #include <cmath> | 8 #include <cmath> |
| 9 #include <map> | 9 #include <map> |
| 10 | 10 |
| 11 #include "src/allocation.h" | 11 #include "src/allocation.h" |
| 12 #include "src/assert-scope.h" | 12 #include "src/assert-scope.h" |
| | 13 #include "src/base/flags.h" |
| 13 #include "src/globals.h" | 14 #include "src/globals.h" |
| 14 #include "src/heap/gc-idle-time-handler.h" | 15 #include "src/heap/gc-idle-time-handler.h" |
| 15 #include "src/heap/incremental-marking.h" | 16 #include "src/heap/incremental-marking.h" |
| 16 #include "src/heap/mark-compact.h" | 17 #include "src/heap/mark-compact.h" |
| 17 #include "src/heap/objects-visiting.h" | 18 #include "src/heap/objects-visiting.h" |
| 18 #include "src/heap/spaces.h" | 19 #include "src/heap/spaces.h" |
| 19 #include "src/heap/store-buffer.h" | 20 #include "src/heap/store-buffer.h" |
| 20 #include "src/list.h" | 21 #include "src/list.h" |
| 21 | 22 |
| 22 namespace v8 { | 23 namespace v8 { |
| (...skipping 591 matching lines...) | |
| 614 kStringTableRootIndex, | 615 kStringTableRootIndex, |
| 615 | 616 |
| 616 #define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex, | 617 #define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex, |
| 617 SMI_ROOT_LIST(ROOT_INDEX_DECLARATION) | 618 SMI_ROOT_LIST(ROOT_INDEX_DECLARATION) |
| 618 #undef ROOT_INDEX_DECLARATION | 619 #undef ROOT_INDEX_DECLARATION |
| 619 kRootListLength, | 620 kRootListLength, |
| 620 kStrongRootListLength = kStringTableRootIndex, | 621 kStrongRootListLength = kStringTableRootIndex, |
| 621 kSmiRootsStart = kStringTableRootIndex + 1 | 622 kSmiRootsStart = kStringTableRootIndex + 1 |
| 622 }; | 623 }; |
| 623 | 624 |
| | 625 // Flags to indicate modes for a GC run. |
| | 626 enum GCFlag { |
| | 627 kNoGCFlags = 0u, |
| | 628 kReduceMemoryFootprintMask = 1u << 0, |
| | 629 kAbortIncrementalMarkingMask = 1u << 1, |
| | 630 kFinalizeIncrementalMarkingMask = 1u << 2, |
| | 631 |
| | 632 // Making the heap iterable requires us to abort incremental marking. |
| | 633 kMakeHeapIterableMask = kAbortIncrementalMarkingMask, |
| | 634 }; |
| | 635 typedef base::Flags<GCFlag> GCFlags; |
| | 636 |
| | 637 enum GCFlagOverride { |
| | 638 kOverride, |
| | 639 kDontOverride, |
| | 640 }; |
| | 641 |
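
The patch replaces the old `static const int` GC flag constants with a typed `GCFlags` set built on `base::Flags<GCFlag>`. As a quick illustration of the idea, bitwise composition stays type-checked instead of degrading to a plain `int`. This is a self-contained sketch with a hand-rolled wrapper class, not V8's actual `base::Flags` implementation:

```cpp
// Standalone sketch of a typed flag set over an enum; the GCFlags class here
// is a stand-in for base::Flags<GCFlag>, written only for illustration.
#include <cstdint>
#include <iostream>

enum GCFlag : uint32_t {
  kNoGCFlags = 0u,
  kReduceMemoryFootprintMask = 1u << 0,
  kAbortIncrementalMarkingMask = 1u << 1,
  kFinalizeIncrementalMarkingMask = 1u << 2,
  // Making the heap iterable requires aborting incremental marking.
  kMakeHeapIterableMask = kAbortIncrementalMarkingMask,
};

class GCFlags {
 public:
  constexpr GCFlags(GCFlag flag) : bits_(flag) {}  // implicit from a single flag
  constexpr GCFlags operator|(GCFlags other) const {
    return GCFlags(bits_ | other.bits_);
  }
  constexpr bool operator&(GCFlag flag) const { return (bits_ & flag) != 0; }

 private:
  constexpr explicit GCFlags(uint32_t bits) : bits_(bits) {}
  uint32_t bits_;
};

int main() {
  GCFlags flags = GCFlags(kReduceMemoryFootprintMask) |
                  GCFlags(kFinalizeIncrementalMarkingMask);
  std::cout << std::boolalpha
            << (flags & kReduceMemoryFootprintMask) << "\n"     // true
            << (flags & kAbortIncrementalMarkingMask) << "\n";  // false
}
```

A `GCFlag` still converts implicitly into a `GCFlags` set, so call sites that pass a single flag keep working unchanged.
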
| 624 // Indicates whether live bytes adjustment is triggered | 642 // Indicates whether live bytes adjustment is triggered |
| 625 // - from within the GC code before sweeping started (SEQUENTIAL_TO_SWEEPER), | 643 // - from within the GC code before sweeping started (SEQUENTIAL_TO_SWEEPER), |
| 626 // - or from within GC (CONCURRENT_TO_SWEEPER), | 644 // - or from within GC (CONCURRENT_TO_SWEEPER), |
| 627 // - or mutator code (CONCURRENT_TO_SWEEPER). | 645 // - or mutator code (CONCURRENT_TO_SWEEPER). |
| 628 enum InvocationMode { SEQUENTIAL_TO_SWEEPER, CONCURRENT_TO_SWEEPER }; | 646 enum InvocationMode { SEQUENTIAL_TO_SWEEPER, CONCURRENT_TO_SWEEPER }; |
| 629 | 647 |
| 630 enum ScratchpadSlotMode { IGNORE_SCRATCHPAD_SLOT, RECORD_SCRATCHPAD_SLOT }; | 648 enum ScratchpadSlotMode { IGNORE_SCRATCHPAD_SLOT, RECORD_SCRATCHPAD_SLOT }; |
| 631 | 649 |
| 632 enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT }; | 650 enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT }; |
| 633 | 651 |
| 634 // ObjectStats are kept in two arrays, counts and sizes. Related stats are | 652 // ObjectStats are kept in two arrays, counts and sizes. Related stats are |
| 635 // stored in a contiguous linear buffer. Stats groups are stored one after | 653 // stored in a contiguous linear buffer. Stats groups are stored one after |
| 636 // another. | 654 // another. |
| 637 enum { | 655 enum { |
| 638 FIRST_CODE_KIND_SUB_TYPE = LAST_TYPE + 1, | 656 FIRST_CODE_KIND_SUB_TYPE = LAST_TYPE + 1, |
| 639 FIRST_FIXED_ARRAY_SUB_TYPE = | 657 FIRST_FIXED_ARRAY_SUB_TYPE = |
| 640 FIRST_CODE_KIND_SUB_TYPE + Code::NUMBER_OF_KINDS, | 658 FIRST_CODE_KIND_SUB_TYPE + Code::NUMBER_OF_KINDS, |
| 641 FIRST_CODE_AGE_SUB_TYPE = | 659 FIRST_CODE_AGE_SUB_TYPE = |
| 642 FIRST_FIXED_ARRAY_SUB_TYPE + LAST_FIXED_ARRAY_SUB_TYPE + 1, | 660 FIRST_FIXED_ARRAY_SUB_TYPE + LAST_FIXED_ARRAY_SUB_TYPE + 1, |
| 643 OBJECT_STATS_COUNT = FIRST_CODE_AGE_SUB_TYPE + Code::kCodeAgeCount + 1 | 661 OBJECT_STATS_COUNT = FIRST_CODE_AGE_SUB_TYPE + Code::kCodeAgeCount + 1 |
| 644 }; | 662 }; |
| 645 | 663 |
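
The layout described above (one slot per instance type, then the code-kind, fixed-array and code-age groups packed back to back in one buffer) can be pictured with a toy model. The numeric bounds below are invented placeholders, not V8's real `LAST_TYPE`, `Code::NUMBER_OF_KINDS`, `LAST_FIXED_ARRAY_SUB_TYPE` or `Code::kCodeAgeCount` values:

```cpp
// Toy model of the contiguous stats buffer; all bounds are made-up
// placeholders used only to show how the group offsets chain together.
#include <cstddef>
#include <iostream>

constexpr int kLastType = 10;              // placeholder for LAST_TYPE
constexpr int kNumberOfCodeKinds = 4;      // placeholder for Code::NUMBER_OF_KINDS
constexpr int kLastFixedArraySubType = 3;  // placeholder for LAST_FIXED_ARRAY_SUB_TYPE
constexpr int kCodeAgeCount = 6;           // placeholder for Code::kCodeAgeCount

constexpr int kFirstCodeKindSubType = kLastType + 1;
constexpr int kFirstFixedArraySubType = kFirstCodeKindSubType + kNumberOfCodeKinds;
constexpr int kFirstCodeAgeSubType =
    kFirstFixedArraySubType + kLastFixedArraySubType + 1;
constexpr int kObjectStatsCount = kFirstCodeAgeSubType + kCodeAgeCount + 1;

int main() {
  // Two parallel arrays, one for counts and one for sizes, indexed the same way.
  size_t counts[kObjectStatsCount] = {0};
  size_t sizes[kObjectStatsCount] = {0};

  int code_kind = 2;  // hypothetical code kind
  counts[kFirstCodeKindSubType + code_kind] += 1;
  sizes[kFirstCodeKindSubType + code_kind] += 128;

  std::cout << "slots: " << kObjectStatsCount
            << ", code-kind group starts at " << kFirstCodeKindSubType << "\n";
}
```
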
| | 664 class GCFlagScope { |
| | 665 public: |
| | 666 GCFlagScope(Heap* heap, GCFlags gc_flags, GCCallbackFlags callback_flags, |
| | 667 GCFlagOverride override) |
| | 668 : heap_(heap), override_(override) { |
| | 669 if (override_ == kDontOverride) { |
| | 670 saved_gc_flags_ = heap->current_gc_flags_; |
| | 671 saved_gc_callback_flags_ = heap->current_gc_callback_flags_; |
| | 672 } |
| | 673 heap->set_current_gc_flags(gc_flags); |
| | 674 heap->current_gc_callback_flags_ = callback_flags; |
| | 675 } |
| | 676 |
| | 677 ~GCFlagScope() { |
| | 678 if (override_ == kDontOverride) { |
| | 679 heap_->set_current_gc_flags(saved_gc_flags_); |
| | 680 heap_->current_gc_callback_flags_ = saved_gc_callback_flags_; |
| | 681 } else { |
| | 682 heap_->set_current_gc_flags(kNoGCFlags); |
| | 683 heap_->current_gc_callback_flags_ = kNoGCCallbackFlags; |
| | 684 } |
| | 685 } |
| | 686 |
| | 687 private: |
| | 688 Heap* heap_; |
| | 689 GCFlagOverride override_; |
| | 690 GCFlags saved_gc_flags_; |
| | 691 GCCallbackFlags saved_gc_callback_flags_; |
| | 692 }; |
| | 693 |
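
A quick way to read the `kOverride`/`kDontOverride` distinction in the new `GCFlagScope`: `kDontOverride` restores whatever flags were in effect when the scope was entered, while `kOverride` resets them to `kNoGCFlags` on exit. The following standalone sketch mimics that save/restore behavior; `FakeHeap` and its plain integer flag field are stand-ins invented for illustration, not part of this patch:

```cpp
// Standalone sketch of the GCFlagScope save/restore pattern; FakeHeap and its
// uint32_t flag field stand in for the real Heap and GCFlags types.
#include <cassert>
#include <cstdint>

enum GCFlag : uint32_t { kNoGCFlags = 0u, kReduceMemoryFootprintMask = 1u << 0 };
enum GCFlagOverride { kOverride, kDontOverride };

struct FakeHeap {
  uint32_t current_gc_flags = kNoGCFlags;
};

class GCFlagScope {
 public:
  GCFlagScope(FakeHeap* heap, uint32_t flags, GCFlagOverride mode)
      : heap_(heap), mode_(mode), saved_(heap->current_gc_flags) {
    heap_->current_gc_flags = flags;
  }
  ~GCFlagScope() {
    // kDontOverride: put back the flags the caller had. kOverride: clear them.
    heap_->current_gc_flags = (mode_ == kDontOverride) ? saved_ : kNoGCFlags;
  }

 private:
  FakeHeap* heap_;
  GCFlagOverride mode_;
  uint32_t saved_;
};

int main() {
  FakeHeap heap;
  heap.current_gc_flags = kReduceMemoryFootprintMask;
  {
    GCFlagScope scope(&heap, kNoGCFlags, kDontOverride);
    assert(heap.current_gc_flags == kNoGCFlags);  // overridden inside the scope
  }
  assert(heap.current_gc_flags == kReduceMemoryFootprintMask);  // restored on exit
  return 0;
}
```
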
| 646 // Taking this lock prevents the GC from entering a phase that relocates | 694 // Taking this lock prevents the GC from entering a phase that relocates |
| 647 // object references. | 695 // object references. |
| 648 class RelocationLock { | 696 class RelocationLock { |
| 649 public: | 697 public: |
| 650 explicit RelocationLock(Heap* heap) : heap_(heap) { | 698 explicit RelocationLock(Heap* heap) : heap_(heap) { |
| 651 heap_->relocation_mutex_.Lock(); | 699 heap_->relocation_mutex_.Lock(); |
| 652 } | 700 } |
| 653 | 701 |
| 654 ~RelocationLock() { heap_->relocation_mutex_.Unlock(); } | 702 ~RelocationLock() { heap_->relocation_mutex_.Unlock(); } |
| 655 | 703 |
| (...skipping 82 matching lines...) | |
| 738 // Strict mode arguments has no callee so it is smaller. | 786 // Strict mode arguments has no callee so it is smaller. |
| 739 static const int kStrictArgumentsObjectSize = | 787 static const int kStrictArgumentsObjectSize = |
| 740 JSObject::kHeaderSize + 1 * kPointerSize; | 788 JSObject::kHeaderSize + 1 * kPointerSize; |
| 741 | 789 |
| 742 // Indicies for direct access into argument objects. | 790 // Indicies for direct access into argument objects. |
| 743 static const int kArgumentsLengthIndex = 0; | 791 static const int kArgumentsLengthIndex = 0; |
| 744 | 792 |
| 745 // callee is only valid in sloppy mode. | 793 // callee is only valid in sloppy mode. |
| 746 static const int kArgumentsCalleeIndex = 1; | 794 static const int kArgumentsCalleeIndex = 1; |
| 747 | 795 |
| 748 static const int kNoGCFlags = 0; | |
| 749 static const int kReduceMemoryFootprintMask = 1; | |
| 750 static const int kAbortIncrementalMarkingMask = 2; | |
| 751 static const int kFinalizeIncrementalMarkingMask = 4; | |
| 752 | |
| 753 // Making the heap iterable requires us to abort incremental marking. | |
| 754 static const int kMakeHeapIterableMask = kAbortIncrementalMarkingMask; | |
| 755 | |
| 756 // The roots that have an index less than this are always in old space. | 796 // The roots that have an index less than this are always in old space. |
| 757 static const int kOldSpaceRoots = 0x20; | 797 static const int kOldSpaceRoots = 0x20; |
| 758 | 798 |
| 759 STATIC_ASSERT(kUndefinedValueRootIndex == | 799 STATIC_ASSERT(kUndefinedValueRootIndex == |
| 760 Internals::kUndefinedValueRootIndex); | 800 Internals::kUndefinedValueRootIndex); |
| 761 STATIC_ASSERT(kNullValueRootIndex == Internals::kNullValueRootIndex); | 801 STATIC_ASSERT(kNullValueRootIndex == Internals::kNullValueRootIndex); |
| 762 STATIC_ASSERT(kTrueValueRootIndex == Internals::kTrueValueRootIndex); | 802 STATIC_ASSERT(kTrueValueRootIndex == Internals::kTrueValueRootIndex); |
| 763 STATIC_ASSERT(kFalseValueRootIndex == Internals::kFalseValueRootIndex); | 803 STATIC_ASSERT(kFalseValueRootIndex == Internals::kFalseValueRootIndex); |
| 764 STATIC_ASSERT(kempty_stringRootIndex == Internals::kEmptyStringRootIndex); | 804 STATIC_ASSERT(kempty_stringRootIndex == Internals::kEmptyStringRootIndex); |
| 765 | 805 |
| (...skipping 539 matching lines...) | |
| 1305 bool inline_allocation_disabled() { return inline_allocation_disabled_; } | 1345 bool inline_allocation_disabled() { return inline_allocation_disabled_; } |
| 1306 | 1346 |
| 1307 // Switch whether inline bump-pointer allocation should be used. | 1347 // Switch whether inline bump-pointer allocation should be used. |
| 1308 void EnableInlineAllocation(); | 1348 void EnableInlineAllocation(); |
| 1309 void DisableInlineAllocation(); | 1349 void DisableInlineAllocation(); |
| 1310 | 1350 |
| 1311 // =========================================================================== | 1351 // =========================================================================== |
| 1312 // Methods triggering GCs. =================================================== | 1352 // Methods triggering GCs. =================================================== |
| 1313 // =========================================================================== | 1353 // =========================================================================== |
| 1314 | 1354 |
| 1315 // Performs garbage collection operation. | 1355 // Perform a garbage collection operation in a given space. |
| 1316 // Returns whether there is a chance that another major GC could | 1356 // Returns whether there is a chance that another major GC could |
| 1317 // collect more garbage. | 1357 // collect more garbage. |
| Hannes Payer (out of office), 2015/08/27 12:41:14: Explain in detail here how the flags work. | |
| 1318 inline bool CollectGarbage( | 1358 inline bool CollectGarbage( |
| 1319 AllocationSpace space, const char* gc_reason = NULL, | 1359 AllocationSpace space, const char* gc_reason = nullptr, |
| 1320 const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags); | 1360 const GCFlags flags = kNoGCFlags, |
| | 1361 const GCCallbackFlags callback_flags = kNoGCCallbackFlags, |
| | 1362 const GCFlagOverride override = kOverride); |
| 1321 | 1363 |
| 1322 // Performs a full garbage collection. If (flags & kMakeHeapIterableMask) is | 1364 inline bool CollectGarbageNewSpace(const char* gc_reason = nullptr); |
| Hannes Payer (out of office), 2015/08/27 12:41:14: Also mention here what impact the flags have. | |
| 1323 // non-zero, then the slower precise sweeper is used, which leaves the heap | 1365 |
| 1324 // in a state where we can iterate over the heap visiting all objects. | 1366 // Performs a full garbage collection. |
| 1325 void CollectAllGarbage( | 1367 void CollectAllGarbage( |
| 1326 int flags = kFinalizeIncrementalMarkingMask, const char* gc_reason = NULL, | 1368 const char* gc_reason = nullptr, |
| | 1369 const GCFlags flags = Heap::kFinalizeIncrementalMarkingMask, |
| 1327 const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags); | 1370 const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags); |
| 1328 | 1371 |
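
The reordered parameters mean existing call sites can keep passing just a space (or a space and a reason) while new call sites opt into flags explicitly. The stub below shows how the defaults resolve at call sites; `StubHeap` only echoes its arguments and is an invention for this sketch, not the real `v8::internal::Heap`:

```cpp
// Stub demonstration of the new defaulted-parameter order; StubHeap is not
// the real Heap, it only prints what it was called with.
#include <iostream>

enum GCFlags { kNoGCFlags = 0, kFinalizeIncrementalMarkingMask = 1 << 2 };
enum GCCallbackFlags { kNoGCCallbackFlags = 0 };
enum GCFlagOverride { kOverride, kDontOverride };
enum AllocationSpace { NEW_SPACE, OLD_SPACE };

struct StubHeap {
  bool CollectGarbage(AllocationSpace space, const char* gc_reason = nullptr,
                      GCFlags flags = kNoGCFlags,
                      GCCallbackFlags callback_flags = kNoGCCallbackFlags,
                      GCFlagOverride override_mode = kOverride) {
    std::cout << "CollectGarbage space=" << space
              << " reason=" << (gc_reason ? gc_reason : "(none)")
              << " flags=" << flags << "\n";
    return true;
  }
  void CollectAllGarbage(const char* gc_reason = nullptr,
                         GCFlags flags = kFinalizeIncrementalMarkingMask,
                         GCCallbackFlags callback_flags = kNoGCCallbackFlags) {
    std::cout << "CollectAllGarbage reason=" << (gc_reason ? gc_reason : "(none)")
              << " flags=" << flags << "\n";
  }
};

int main() {
  StubHeap heap;
  heap.CollectGarbage(NEW_SPACE);                // everything defaulted
  heap.CollectGarbage(OLD_SPACE, "testing",      // explicit reason and flags
                      kFinalizeIncrementalMarkingMask);
  heap.CollectAllGarbage("low memory");          // flags default to finalize-marking
}
```
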
| 1329 // Last hope GC, should try to squeeze as much as possible. | 1372 // Last hope GC, should try to squeeze as much as possible. |
| 1330 void CollectAllAvailableGarbage(const char* gc_reason = NULL); | 1373 void CollectAllAvailableGarbage(const char* gc_reason = nullptr); |
| 1331 | 1374 |
| 1332 // Invoked when GC was requested via the stack guard. | 1375 // Invoked when GC was requested via the stack guard. |
| 1333 void HandleGCRequest(); | 1376 void HandleGCRequest(); |
| 1334 | 1377 |
| 1335 // =========================================================================== | 1378 // =========================================================================== |
| 1336 // Iterators. ================================================================ | 1379 // Iterators. ================================================================ |
| 1337 // =========================================================================== | 1380 // =========================================================================== |
| 1338 | 1381 |
| 1339 // Iterates over all roots in the heap. | 1382 // Iterates over all roots in the heap. |
| 1340 void IterateRoots(ObjectVisitor* v, VisitMode mode); | 1383 void IterateRoots(ObjectVisitor* v, VisitMode mode); |
| (...skipping 28 matching lines...) | |
| 1369 // =========================================================================== | 1412 // =========================================================================== |
| 1370 // Incremental marking API. ================================================== | 1413 // Incremental marking API. ================================================== |
| 1371 // =========================================================================== | 1414 // =========================================================================== |
| 1372 | 1415 |
| 1373 // Start incremental marking and ensure that idle time handler can perform | 1416 // Start incremental marking and ensure that idle time handler can perform |
| 1374 // incremental steps. | 1417 // incremental steps. |
| 1375 void StartIdleIncrementalMarking(); | 1418 void StartIdleIncrementalMarking(); |
| 1376 | 1419 |
| 1377 // Starts incremental marking assuming incremental marking is currently | 1420 // Starts incremental marking assuming incremental marking is currently |
| 1378 // stopped. | 1421 // stopped. |
| 1379 void StartIncrementalMarking(int gc_flags = kNoGCFlags, | 1422 void StartIncrementalMarking(const GCFlags = kNoGCFlags, |
| 1380 const GCCallbackFlags gc_callback_flags = | 1423 const GCCallbackFlags gc_callback_flags = |
| 1381 GCCallbackFlags::kNoGCCallbackFlags, | 1424 GCCallbackFlags::kNoGCCallbackFlags, |
| 1382 const char* reason = nullptr); | 1425 const char* reason = nullptr); |
| 1383 | 1426 |
| 1384 // Performs incremental marking steps of step_size_in_bytes as long as | 1427 // Performs incremental marking steps of step_size_in_bytes as long as |
| 1385 // deadline_ins_ms is not reached. step_size_in_bytes can be 0 to compute | 1428 // deadline_ins_ms is not reached. step_size_in_bytes can be 0 to compute |
| 1386 // an estimate increment. Returns the remaining time that cannot be used | 1429 // an estimate increment. Returns the remaining time that cannot be used |
| 1387 // for incremental marking anymore because a single step would exceed the | 1430 // for incremental marking anymore because a single step would exceed the |
| 1388 // deadline. | 1431 // deadline. |
| 1389 double AdvanceIncrementalMarking( | 1432 double AdvanceIncrementalMarking( |
| (...skipping 290 matching lines...) | |
| 1680 return (pretenure == TENURED) ? OLD_SPACE : NEW_SPACE; | 1723 return (pretenure == TENURED) ? OLD_SPACE : NEW_SPACE; |
| 1681 } | 1724 } |
| 1682 | 1725 |
| 1683 #define ROOT_ACCESSOR(type, name, camel_name) \ | 1726 #define ROOT_ACCESSOR(type, name, camel_name) \ |
| 1684 inline void set_##name(type* value); | 1727 inline void set_##name(type* value); |
| 1685 ROOT_LIST(ROOT_ACCESSOR) | 1728 ROOT_LIST(ROOT_ACCESSOR) |
| 1686 #undef ROOT_ACCESSOR | 1729 #undef ROOT_ACCESSOR |
| 1687 | 1730 |
| 1688 StoreBuffer* store_buffer() { return &store_buffer_; } | 1731 StoreBuffer* store_buffer() { return &store_buffer_; } |
| 1689 | 1732 |
| 1690 void set_current_gc_flags(int flags) { | 1733 void set_current_gc_flags(GCFlags flags) { |
| 1691 current_gc_flags_ = flags; | 1734 current_gc_flags_ = flags; |
| 1692 DCHECK(!ShouldFinalizeIncrementalMarking() || | 1735 DCHECK(!ShouldFinalizeIncrementalMarking() || |
| 1693 !ShouldAbortIncrementalMarking()); | 1736 !ShouldAbortIncrementalMarking()); |
| 1694 } | 1737 } |
| 1695 | 1738 |
| 1696 inline bool ShouldReduceMemory() const { | 1739 inline bool ShouldReduceMemory() const { |
| 1697 return current_gc_flags_ & kReduceMemoryFootprintMask; | 1740 return current_gc_flags_ & kReduceMemoryFootprintMask; |
| 1698 } | 1741 } |
| 1699 | 1742 |
| 1700 inline bool ShouldAbortIncrementalMarking() const { | 1743 inline bool ShouldAbortIncrementalMarking() const { |
| (...skipping 21 matching lines...) | |
| 1722 // with the allocation memento of the object at the top | 1765 // with the allocation memento of the object at the top |
| 1723 void EnsureFillerObjectAtTop(); | 1766 void EnsureFillerObjectAtTop(); |
| 1724 | 1767 |
| 1725 // Ensure that we have swept all spaces in such a way that we can iterate | 1768 // Ensure that we have swept all spaces in such a way that we can iterate |
| 1726 // over all objects. May cause a GC. | 1769 // over all objects. May cause a GC. |
| 1727 void MakeHeapIterable(); | 1770 void MakeHeapIterable(); |
| 1728 | 1771 |
| 1729 // Performs garbage collection operation. | 1772 // Performs garbage collection operation. |
| 1730 // Returns whether there is a chance that another major GC could | 1773 // Returns whether there is a chance that another major GC could |
| 1731 // collect more garbage. | 1774 // collect more garbage. |
| 1732 bool CollectGarbage( | 1775 bool CollectGarbage(GarbageCollector collector, const char* gc_reason, |
| 1733 GarbageCollector collector, const char* gc_reason, | 1776 const char* collector_reason); |
| 1734 const char* collector_reason, | |
| 1735 const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags); | |
| 1736 | 1777 |
| 1737 // Performs garbage collection | 1778 // Performs garbage collection |
| 1738 // Returns whether there is a chance another major GC could | 1779 // Returns whether there is a chance another major GC could |
| 1739 // collect more garbage. | 1780 // collect more garbage. |
| 1740 bool PerformGarbageCollection( | 1781 bool PerformGarbageCollection(GarbageCollector collector); |
| 1741 GarbageCollector collector, | |
| 1742 const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags); | |
| 1743 | 1782 |
| 1744 inline void UpdateOldSpaceLimits(); | 1783 inline void UpdateOldSpaceLimits(); |
| 1745 | 1784 |
| 1746 // Initializes a JSObject based on its map. | 1785 // Initializes a JSObject based on its map. |
| 1747 void InitializeJSObjectFromMap(JSObject* obj, FixedArray* properties, | 1786 void InitializeJSObjectFromMap(JSObject* obj, FixedArray* properties, |
| 1748 Map* map); | 1787 Map* map); |
| 1749 void InitializeAllocationMemento(AllocationMemento* memento, | 1788 void InitializeAllocationMemento(AllocationMemento* memento, |
| 1750 AllocationSite* allocation_site); | 1789 AllocationSite* allocation_site); |
| 1751 | 1790 |
| 1752 bool CreateInitialMaps(); | 1791 bool CreateInitialMaps(); |
| (...skipping 599 matching lines...) | |
| 2352 size_t ring_buffer_end_; | 2391 size_t ring_buffer_end_; |
| 2353 | 2392 |
| 2354 // Shared state read by the scavenge collector and set by ScavengeObject. | 2393 // Shared state read by the scavenge collector and set by ScavengeObject. |
| 2355 PromotionQueue promotion_queue_; | 2394 PromotionQueue promotion_queue_; |
| 2356 | 2395 |
| 2357 // Flag is set when the heap has been configured. The heap can be repeatedly | 2396 // Flag is set when the heap has been configured. The heap can be repeatedly |
| 2358 // configured through the API until it is set up. | 2397 // configured through the API until it is set up. |
| 2359 bool configured_; | 2398 bool configured_; |
| 2360 | 2399 |
| 2361 // Currently set GC flags that are respected by all GC components. | 2400 // Currently set GC flags that are respected by all GC components. |
| 2362 int current_gc_flags_; | 2401 GCFlags current_gc_flags_; |
| 2363 | 2402 |
| 2364 // Currently set GC callback flags that are used to pass information between | 2403 // Currently set GC callback flags that are used to pass information between |
| 2365 // the embedder and V8's GC. | 2404 // the embedder and V8's GC. |
| 2366 GCCallbackFlags current_gc_callback_flags_; | 2405 GCCallbackFlags current_gc_callback_flags_; |
| 2367 | 2406 |
| 2368 ExternalStringTable external_string_table_; | 2407 ExternalStringTable external_string_table_; |
| 2369 | 2408 |
| 2370 VisitorDispatchTable<ScavengingCallback> scavenging_visitors_table_; | 2409 VisitorDispatchTable<ScavengingCallback> scavenging_visitors_table_; |
| 2371 | 2410 |
| 2372 MemoryChunk* chunks_queued_for_free_; | 2411 MemoryChunk* chunks_queued_for_free_; |
| (...skipping 410 matching lines...) | |
| 2783 DisallowHeapAllocation no_allocation; // i.e. no gc allowed. | 2822 DisallowHeapAllocation no_allocation; // i.e. no gc allowed. |
| 2784 | 2823 |
| 2785 private: | 2824 private: |
| 2786 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer); | 2825 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer); |
| 2787 }; | 2826 }; |
| 2788 #endif // DEBUG | 2827 #endif // DEBUG |
| 2789 } | 2828 } |
| 2790 } // namespace v8::internal | 2829 } // namespace v8::internal |
| 2791 | 2830 |
| 2792 #endif // V8_HEAP_HEAP_H_ | 2831 #endif // V8_HEAP_HEAP_H_ |