| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_HEAP_HEAP_H_ | 5 #ifndef V8_HEAP_HEAP_H_ |
| 6 #define V8_HEAP_HEAP_H_ | 6 #define V8_HEAP_HEAP_H_ |
| 7 | 7 |
| 8 #include <cmath> | 8 #include <cmath> |
| 9 | 9 |
| 10 #include "src/allocation.h" | 10 #include "src/allocation.h" |
| (...skipping 850 matching lines...) |
| 861 void set_array_buffers_list(Object* object) { array_buffers_list_ = object; } | 861 void set_array_buffers_list(Object* object) { array_buffers_list_ = object; } |
| 862 Object* array_buffers_list() const { return array_buffers_list_; } | 862 Object* array_buffers_list() const { return array_buffers_list_; } |
| 863 | 863 |
| 864 void set_last_array_buffer_in_list(Object* object) { | 864 void set_last_array_buffer_in_list(Object* object) { |
| 865 last_array_buffer_in_list_ = object; | 865 last_array_buffer_in_list_ = object; |
| 866 } | 866 } |
| 867 Object* last_array_buffer_in_list() const { | 867 Object* last_array_buffer_in_list() const { |
| 868 return last_array_buffer_in_list_; | 868 return last_array_buffer_in_list_; |
| 869 } | 869 } |
| 870 | 870 |
| 871 void set_new_array_buffer_views_list(Object* object) { | |
| 872 new_array_buffer_views_list_ = object; | |
| 873 } | |
| 874 Object* new_array_buffer_views_list() const { | |
| 875 return new_array_buffer_views_list_; | |
| 876 } | |
| 877 | |
| 878 void set_allocation_sites_list(Object* object) { | 871 void set_allocation_sites_list(Object* object) { |
| 879 allocation_sites_list_ = object; | 872 allocation_sites_list_ = object; |
| 880 } | 873 } |
| 881 Object* allocation_sites_list() { return allocation_sites_list_; } | 874 Object* allocation_sites_list() { return allocation_sites_list_; } |
| 882 | 875 |
| 883 // Used in CreateAllocationSiteStub and the (de)serializer. | 876 // Used in CreateAllocationSiteStub and the (de)serializer. |
| 884 Object** allocation_sites_list_address() { return &allocation_sites_list_; } | 877 Object** allocation_sites_list_address() { return &allocation_sites_list_; } |
| 885 | 878 |
| 886 void set_encountered_weak_collections(Object* weak_collection) { | 879 void set_encountered_weak_collections(Object* weak_collection) { |
| 887 encountered_weak_collections_ = weak_collection; | 880 encountered_weak_collections_ = weak_collection; |
| (...skipping 773 matching lines...) |
| 1661 // for all spaces. This is used to disable allocations in generated code. | 1654 // for all spaces. This is used to disable allocations in generated code. |
| 1662 bool inline_allocation_disabled_; | 1655 bool inline_allocation_disabled_; |
| 1663 | 1656 |
| 1664 // Weak list heads, threaded through the objects. | 1657 // Weak list heads, threaded through the objects. |
| 1665 // List heads are initialized lazily and contain the undefined_value at start. | 1658 // List heads are initialized lazily and contain the undefined_value at start. |
| 1666 Object* native_contexts_list_; | 1659 Object* native_contexts_list_; |
| 1667 Object* array_buffers_list_; | 1660 Object* array_buffers_list_; |
| 1668 Object* last_array_buffer_in_list_; | 1661 Object* last_array_buffer_in_list_; |
| 1669 Object* allocation_sites_list_; | 1662 Object* allocation_sites_list_; |
| 1670 | 1663 |
| 1671 // This is a global list of array buffer views in new space. When the views | |
| 1672 // get promoted, they are removed from the list and added to the corresponding | |
| 1673 // array buffer. | |
| 1674 Object* new_array_buffer_views_list_; | |
| 1675 | |
| 1676 // List of encountered weak collections (JSWeakMap and JSWeakSet) during | 1664 // List of encountered weak collections (JSWeakMap and JSWeakSet) during |
| 1677 // marking. It is initialized during marking, destroyed after marking and | 1665 // marking. It is initialized during marking, destroyed after marking and |
| 1678 // contains Smi(0) while marking is not active. | 1666 // contains Smi(0) while marking is not active. |
| 1679 Object* encountered_weak_collections_; | 1667 Object* encountered_weak_collections_; |
| 1680 | 1668 |
| 1681 Object* encountered_weak_cells_; | 1669 Object* encountered_weak_cells_; |
| 1682 | 1670 |
| 1683 StoreBufferRebuilder store_buffer_rebuilder_; | 1671 StoreBufferRebuilder store_buffer_rebuilder_; |
| 1684 | 1672 |
| 1685 struct StringTypeTable { | 1673 struct StringTypeTable { |
| (...skipping 310 matching lines...) |
| 1996 | 1984 |
| 1997 // Performs a major collection in the whole heap. | 1985 // Performs a major collection in the whole heap. |
| 1998 void MarkCompact(); | 1986 void MarkCompact(); |
| 1999 | 1987 |
| 2000 // Code to be run before and after mark-compact. | 1988 // Code to be run before and after mark-compact. |
| 2001 void MarkCompactPrologue(); | 1989 void MarkCompactPrologue(); |
| 2002 void MarkCompactEpilogue(); | 1990 void MarkCompactEpilogue(); |
| 2003 | 1991 |
| 2004 void ProcessNativeContexts(WeakObjectRetainer* retainer); | 1992 void ProcessNativeContexts(WeakObjectRetainer* retainer); |
| 2005 void ProcessArrayBuffers(WeakObjectRetainer* retainer, bool stop_after_young); | 1993 void ProcessArrayBuffers(WeakObjectRetainer* retainer, bool stop_after_young); |
| 2006 void ProcessNewArrayBufferViews(WeakObjectRetainer* retainer); | |
| 2007 void ProcessAllocationSites(WeakObjectRetainer* retainer); | 1994 void ProcessAllocationSites(WeakObjectRetainer* retainer); |
| 2008 | 1995 |
| 2009 // Deopts all code that contains allocation instructions which are tenured or | 1996 // Deopts all code that contains allocation instructions which are tenured or |
| 2010 // not tenured. Moreover it clears the pretenuring allocation site statistics. | 1997 // not tenured. Moreover it clears the pretenuring allocation site statistics. |
| 2011 void ResetAllAllocationSitesDependentCode(PretenureFlag flag); | 1998 void ResetAllAllocationSitesDependentCode(PretenureFlag flag); |
| 2012 | 1999 |
| 2013 // Evaluates local pretenuring for the old space and calls | 2000 // Evaluates local pretenuring for the old space and calls |
| 2014 // ResetAllTenuredAllocationSitesDependentCode if too many objects died in | 2001 // ResetAllTenuredAllocationSitesDependentCode if too many objects died in |
| 2015 // the old space. | 2002 // the old space. |
| 2016 void EvaluateOldSpaceLocalPretenuring(uint64_t size_of_objects_before_gc); | 2003 void EvaluateOldSpaceLocalPretenuring(uint64_t size_of_objects_before_gc); |
| (...skipping 609 matching lines...) |
| 2626 DisallowHeapAllocation no_allocation; // i.e. no gc allowed. | 2613 DisallowHeapAllocation no_allocation; // i.e. no gc allowed. |
| 2627 | 2614 |
| 2628 private: | 2615 private: |
| 2629 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer); | 2616 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer); |
| 2630 }; | 2617 }; |
| 2631 #endif // DEBUG | 2618 #endif // DEBUG |
| 2632 } | 2619 } |
| 2633 } // namespace v8::internal | 2620 } // namespace v8::internal |
| 2634 | 2621 |
| 2635 #endif // V8_HEAP_HEAP_H_ | 2622 #endif // V8_HEAP_HEAP_H_ |
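The list-head fields and accessors touched by this diff (`native_contexts_list_`, `array_buffers_list_`, `allocation_sites_list_`, and the removed `new_array_buffer_views_list_`) all follow the same pattern: the weak list is threaded through the objects themselves, and during GC a `WeakObjectRetainer` decides element by element what survives, with dropped elements unlinked in place. Below is a minimal, standalone sketch of that idea, not V8's actual implementation; `Node`, `Retainer`, and `VisitWeakList` are simplified stand-ins for V8's `Object*` / `WeakObjectRetainer` machinery.

```cpp
#include <iostream>

struct Node {
  int payload;
  Node* weak_next = nullptr;  // the "threaded" link lives inside the object itself
};

// Mimics the WeakObjectRetainer role: return the object to keep, or nullptr to drop it.
struct Retainer {
  virtual Node* RetainAs(Node* object) = 0;
  virtual ~Retainer() = default;
};

// Walks the list starting at `head`, re-threading survivors and unlinking
// every element the retainer drops. Returns the new list head.
Node* VisitWeakList(Node* head, Retainer* retainer) {
  Node* new_head = nullptr;
  Node** tail = &new_head;
  for (Node* current = head; current != nullptr; current = current->weak_next) {
    if (Node* kept = retainer->RetainAs(current)) {
      *tail = kept;              // link the survivor to the previous survivor
      tail = &kept->weak_next;
    }
  }
  *tail = nullptr;               // terminate after the last survivor
  return new_head;
}

int main() {
  // Build a three-element list threaded through the objects themselves.
  Node a{1}, b{2}, c{3};
  a.weak_next = &b;
  b.weak_next = &c;

  // A retainer that keeps odd payloads and drops even ones.
  struct KeepOdd : Retainer {
    Node* RetainAs(Node* object) override {
      return (object->payload % 2 != 0) ? object : nullptr;
    }
  } keep_odd;

  for (Node* n = VisitWeakList(&a, &keep_odd); n != nullptr; n = n->weak_next) {
    std::cout << n->payload << "\n";  // prints 1 then 3
  }
}
```

This mirrors what `ProcessNativeContexts`, `ProcessArrayBuffers`, and `ProcessAllocationSites` do over the surviving list heads; the removed `ProcessNewArrayBufferViews` applied the same traversal to the now-deleted per-new-space view list.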