OLD | NEW |
1 // Copyright 2006-2008 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 188 matching lines...)
199 V(closure_symbol, "(closure)") | 199 V(closure_symbol, "(closure)") |
200 | 200 |
201 | 201 |
202 // Forward declaration of the GCTracer class. | 202 // Forward declaration of the GCTracer class. |
203 class GCTracer; | 203 class GCTracer; |
204 class HeapStats; | 204 class HeapStats; |
205 | 205 |
206 | 206 |
207 typedef String* (*ExternalStringTableUpdaterCallback)(Object** pointer); | 207 typedef String* (*ExternalStringTableUpdaterCallback)(Object** pointer); |
208 | 208 |
209 typedef bool (*DirtyRegionCallback)(Address start, | |
210 Address end, | |
211 ObjectSlotCallback copy_object_func); | |
212 | |
213 | 209 |
214 // The all-static Heap class captures the interface to the global object heap. | 210 // The all-static Heap class captures the interface to the global object heap. |
215 // All JavaScript contexts in this process share the same object heap. | 211 // All JavaScript contexts in this process share the same object heap. |
216 | 212 |
217 class Heap : public AllStatic { | 213 class Heap : public AllStatic { |
218 public: | 214 public: |
219 // Configure heap size before setup. Return false if the heap has been | 215 // Configure heap size before setup. Return false if the heap has been |
220 // set up already. | 216 // set up already. |
221 static bool ConfigureHeap(int max_semispace_size, int max_old_gen_size); | 217 static bool ConfigureHeap(int max_semispace_size, int max_old_gen_size); |
222 static bool ConfigureHeapDefault(); | 218 static bool ConfigureHeapDefault(); |
(...skipping 514 matching lines...)
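A side note on the ConfigureHeap/ConfigureHeapDefault pair shown just above this skip: the limits only take effect before the heap is set up. A minimal usage sketch, assuming embedder bootstrap code and example byte values that are not part of this change:

  // Sketch only: choose explicit limits before the heap exists; once the
  // heap has been set up, ConfigureHeap() returns false and has no effect.
  const int MB = 1024 * 1024;
  if (!Heap::ConfigureHeap(2 * MB /* max semispace, assumed value */,
                           192 * MB /* max old generation, assumed value */)) {
    // Too late to configure: the heap was already set up, so whatever
    // configuration is currently in force (possibly ConfigureHeapDefault())
    // is kept.
  }
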
737 // not match the empty string. | 733 // not match the empty string. |
738 static String* hidden_symbol() { return hidden_symbol_; } | 734 static String* hidden_symbol() { return hidden_symbol_; } |
739 | 735 |
740 // Iterates over all roots in the heap. | 736 // Iterates over all roots in the heap. |
741 static void IterateRoots(ObjectVisitor* v, VisitMode mode); | 737 static void IterateRoots(ObjectVisitor* v, VisitMode mode); |
742 // Iterates over all strong roots in the heap. | 738 // Iterates over all strong roots in the heap. |
743 static void IterateStrongRoots(ObjectVisitor* v, VisitMode mode); | 739 static void IterateStrongRoots(ObjectVisitor* v, VisitMode mode); |
744 // Iterates over all the other roots in the heap. | 740 // Iterates over all the other roots in the heap. |
745 static void IterateWeakRoots(ObjectVisitor* v, VisitMode mode); | 741 static void IterateWeakRoots(ObjectVisitor* v, VisitMode mode); |
746 | 742 |
747 enum ExpectedPageWatermarkState { | 743 // Iterates remembered set of an old space. |
748 WATERMARK_SHOULD_BE_VALID, | 744 static void IterateRSet(PagedSpace* space, ObjectSlotCallback callback); |
749 WATERMARK_CAN_BE_INVALID | |
750 }; | |
751 | 745 |
752 // For each dirty region on a page in use from an old space call | 746 // Iterates a range of remembered set addresses starting with rset_start |
753 // visit_dirty_region callback. | 747 // corresponding to the range of allocated pointers |
754 // If either visit_dirty_region or callback can cause an allocation | 748 // [object_start, object_end). |
755 // in old space and changes in allocation watermark then | 749 // Returns the number of bits that were set. |
756 // can_preallocate_during_iteration should be set to true. | 750 static int IterateRSetRange(Address object_start, |
757 // All pages will be marked as having invalid watermark upon | 751 Address object_end, |
758 // iteration completion. | 752 Address rset_start, |
759 static void IterateDirtyRegions( | 753 ObjectSlotCallback copy_object_func); |
760 PagedSpace* space, | |
761 DirtyRegionCallback visit_dirty_region, | |
762 ObjectSlotCallback callback, | |
763 ExpectedPageWatermarkState expected_page_watermark_state); | |
764 | |
765 // Interpret marks as a bitvector of dirty marks for regions of size | |
766 // Page::kRegionSize aligned by Page::kRegionAlignmentMask and covering | |
767 // memory interval from start to top. For each dirty region call a | |
768 // visit_dirty_region callback. Return updated bitvector of dirty marks. | |
769 static uint32_t IterateDirtyRegions(uint32_t marks, | |
770 Address start, | |
771 Address end, | |
772 DirtyRegionCallback visit_dirty_region, | |
773 ObjectSlotCallback callback); | |
774 | |
775 // Iterate pointers to new space found in memory interval from start to end. | |
776 // Update dirty marks for page containing start address. | |
777 static void IterateAndMarkPointersToNewSpace(Address start, | |
778 Address end, | |
779 ObjectSlotCallback callback); | |
780 | |
781 // Iterate pointers to new space found in memory interval from start to end. | |
782 // Return true if pointers to new space were found. | |
783 static bool IteratePointersInDirtyRegion(Address start, | |
784 Address end, | |
785 ObjectSlotCallback callback); | |
786 | |
787 | |
788 // Iterate pointers to new space found in memory interval from start to end. | |
789 // This interval is considered to belong to the map space. | |
790 // Return true if pointers to new space were found. | |
791 static bool IteratePointersInDirtyMapsRegion(Address start, | |
792 Address end, | |
793 ObjectSlotCallback callback); | |
794 | |
795 | 754 |
796 // Returns whether the object resides in new space. | 755 // Returns whether the object resides in new space. |
797 static inline bool InNewSpace(Object* object); | 756 static inline bool InNewSpace(Object* object); |
798 static inline bool InFromSpace(Object* object); | 757 static inline bool InFromSpace(Object* object); |
799 static inline bool InToSpace(Object* object); | 758 static inline bool InToSpace(Object* object); |
800 | 759 |
801 // Checks whether an address/object is in the heap (including auxiliary | 760 // Checks whether an address/object is in the heap (including auxiliary |
802 // area and unused area). | 761 // area and unused area). |
803 static bool Contains(Address addr); | 762 static bool Contains(Address addr); |
804 static bool Contains(HeapObject* value); | 763 static bool Contains(HeapObject* value); |
(...skipping 81 matching lines...)
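For readers new to the remembered-set scheme this change returns to: IterateRSetRange in the hunk above walks a bitmap in which each bit stands for one pointer-size slot of the object area. A simplified model of that contract (not the code from this change; ObjectSlotCallback is assumed to take a HeapObject** slot, as ScavengePointer does):

  static int IterateRSetRangeModel(Address object_start,
                                   Address object_end,
                                   Address rset_start,
                                   ObjectSlotCallback copy_object_func) {
    int bits_set = 0;
    uint32_t* rset_word = reinterpret_cast<uint32_t*>(rset_start);
    Object** slot = reinterpret_cast<Object**>(object_start);
    Object** end = reinterpret_cast<Object**>(object_end);
    while (slot < end) {
      uint32_t bits = *rset_word++;                 // 32 slots per rset word
      for (int i = 0; i < 32 && slot < end; i++, slot++) {
        if ((bits & (1u << i)) != 0) {              // slot may point into new space
          bits_set++;
          copy_object_func(reinterpret_cast<HeapObject**>(slot));
        }
      }
    }
    return bits_set;
  }
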
886 static void TracePathToGlobal(); | 845 static void TracePathToGlobal(); |
887 #endif | 846 #endif |
888 | 847 |
889 // Callback function passed to Heap::Iterate etc. Copies an object if | 848 // Callback function passed to Heap::Iterate etc. Copies an object if |
890 // necessary; the object might be promoted to an old space. The caller must | 849 // necessary; the object might be promoted to an old space. The caller must |
891 // ensure the precondition that the object is (a) a heap object and (b) in | 850 // ensure the precondition that the object is (a) a heap object and (b) in |
892 // the heap's from space. | 851 // the heap's from space. |
893 static void ScavengePointer(HeapObject** p); | 852 static void ScavengePointer(HeapObject** p); |
894 static inline void ScavengeObject(HeapObject** p, HeapObject* object); | 853 static inline void ScavengeObject(HeapObject** p, HeapObject* object); |
895 | 854 |
| 855 // Clear a range of remembered set addresses corresponding to the object |
| 856 // area address 'start' with size 'size_in_bytes', e.g., when adding blocks |
| 857 // to the free list. |
| 858 static void ClearRSetRange(Address start, int size_in_bytes); |
| 859 |
| 860 // Rebuild remembered set in old and map spaces. |
| 861 static void RebuildRSets(); |
| 862 |
| 863 // Update an old object's remembered set |
| 864 static int UpdateRSet(HeapObject* obj); |
| 865 |
896 // Commits from space if it is uncommitted. | 866 // Commits from space if it is uncommitted. |
897 static void EnsureFromSpaceIsCommitted(); | 867 static void EnsureFromSpaceIsCommitted(); |
898 | 868 |
899 // Support for partial snapshots. After calling this we can allocate a | 869 // Support for partial snapshots. After calling this we can allocate a |
900 // certain number of bytes using only linear allocation (with a | 870 // certain number of bytes using only linear allocation (with a |
901 // LinearAllocationScope and an AlwaysAllocateScope) without using freelists | 871 // LinearAllocationScope and an AlwaysAllocateScope) without using freelists |
902 // or causing a GC. It returns true if space was reserved or false if a GC is | 872 // or causing a GC. It returns true if space was reserved or false if a GC is |
903 // needed. For paged spaces the space requested must include the space wasted | 873 // needed. For paged spaces the space requested must include the space wasted |
904 // at the end of each page when allocating linearly. | 874 // at the end of each page when allocating linearly. |
905 static void ReserveSpace( | 875 static void ReserveSpace( |
(...skipping 72 matching lines...)
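The ScavengePointer/ScavengeObject comment above describes the contract; the usual split between the inline fast path and ScavengeObjectSlow (declared further down) is sketched here. The MapWord forwarding check is an assumption about what the inline fast path does, not code taken from this change:

  void Heap::ScavengePointer(HeapObject** p) {
    ScavengeObject(p, *p);  // precondition: *p is a heap object in from space
  }

  void Heap::ScavengeObject(HeapObject** p, HeapObject* object) {
    ASSERT(Heap::InFromSpace(object));
    MapWord first_word = object->map_word();
    if (first_word.IsForwardingAddress()) {
      // Already copied during this scavenge: just redirect the slot.
      *p = first_word.ToForwardingAddress();
      return;
    }
    // Copy or promote the object and leave a forwarding map word behind.
    ScavengeObjectSlow(p, object);
  }
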
978 bool check_number_string_cache = true); | 948 bool check_number_string_cache = true); |
979 | 949 |
980 static Map* MapForExternalArrayType(ExternalArrayType array_type); | 950 static Map* MapForExternalArrayType(ExternalArrayType array_type); |
981 static RootListIndex RootIndexForExternalArrayType( | 951 static RootListIndex RootIndexForExternalArrayType( |
982 ExternalArrayType array_type); | 952 ExternalArrayType array_type); |
983 | 953 |
984 static void RecordStats(HeapStats* stats); | 954 static void RecordStats(HeapStats* stats); |
985 | 955 |
986 // Copy block of memory from src to dst. Size of block should be aligned | 956 // Copy block of memory from src to dst. Size of block should be aligned |
987 // by pointer size. | 957 // by pointer size. |
988 static inline void CopyBlock(Address dst, Address src, int byte_size); | 958 static inline void CopyBlock(Object** dst, Object** src, int byte_size); |
989 | |
990 static inline void CopyBlockToOldSpaceAndUpdateRegionMarks(Address dst, | |
991 Address src, | |
992 int byte_size); | |
993 | 959 |
994 // Optimized version of memmove for blocks with pointer size aligned sizes and | 960 // Optimized version of memmove for blocks with pointer size aligned sizes and |
995 // pointer size aligned addresses. | 961 // pointer size aligned addresses. |
996 static inline void MoveBlock(Address dst, Address src, int byte_size); | 962 static inline void MoveBlock(Object** dst, Object** src, int byte_size); |
997 | |
998 static inline void MoveBlockToOldSpaceAndUpdateRegionMarks(Address dst, | |
999 Address src, | |
1000 int byte_size); | |
1001 | 963 |
1002 // Check new space expansion criteria and expand semispaces if they were hit. | 964 // Check new space expansion criteria and expand semispaces if they were hit. |
1003 static void CheckNewSpaceExpansionCriteria(); | 965 static void CheckNewSpaceExpansionCriteria(); |
1004 | 966 |
1005 static inline void IncrementYoungSurvivorsCounter(int survived) { | 967 static inline void IncrementYoungSurvivorsCounter(int survived) { |
1006 survived_since_last_expansion_ += survived; | 968 survived_since_last_expansion_ += survived; |
1007 } | 969 } |
1008 | 970 |
1009 static void UpdateNewSpaceReferencesInExternalStringTable( | 971 static void UpdateNewSpaceReferencesInExternalStringTable( |
1010 ExternalStringTableUpdaterCallback updater_func); | 972 ExternalStringTableUpdaterCallback updater_func); |
(...skipping 227 matching lines...)
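The CopyBlock/MoveBlock contract above (pointer-size-aligned size and, for MoveBlock, addresses) amounts to a word-at-a-time copy. A minimal sketch, not the actual implementation, which may defer to memcpy or a word-copy helper:

  static inline void CopyBlockSketch(Object** dst, Object** src, int byte_size) {
    ASSERT(byte_size % kPointerSize == 0);
    int word_count = byte_size / kPointerSize;
    for (int i = 0; i < word_count; i++) {
      dst[i] = src[i];  // word-wise copy; no overlap handling (that is MoveBlock's job)
    }
  }
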
1238 | 1200 |
1239 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING) | 1201 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING) |
1240 // Record the copy of an object in the NewSpace's statistics. | 1202 // Record the copy of an object in the NewSpace's statistics. |
1241 static void RecordCopiedObject(HeapObject* obj); | 1203 static void RecordCopiedObject(HeapObject* obj); |
1242 | 1204 |
1243 // Record statistics before and after garbage collection. | 1205 // Record statistics before and after garbage collection. |
1244 static void ReportStatisticsBeforeGC(); | 1206 static void ReportStatisticsBeforeGC(); |
1245 static void ReportStatisticsAfterGC(); | 1207 static void ReportStatisticsAfterGC(); |
1246 #endif | 1208 #endif |
1247 | 1209 |
| 1210 // Rebuild remembered set in an old space. |
| 1211 static void RebuildRSets(PagedSpace* space); |
| 1212 |
| 1213 // Rebuild remembered set in the large object space. |
| 1214 static void RebuildRSets(LargeObjectSpace* space); |
| 1215 |
1248 // Slow part of scavenge object. | 1216 // Slow part of scavenge object. |
1249 static void ScavengeObjectSlow(HeapObject** p, HeapObject* object); | 1217 static void ScavengeObjectSlow(HeapObject** p, HeapObject* object); |
1250 | 1218 |
1251 // Initializes a function with a shared part and prototype. | 1219 // Initializes a function with a shared part and prototype. |
1252 // Returns the function. | 1220 // Returns the function. |
1253 // Note: this code was factored out of AllocateFunction such that | 1221 // Note: this code was factored out of AllocateFunction such that |
1254 // other parts of the VM could use it. Specifically, a function that creates | 1222 // other parts of the VM could use it. Specifically, a function that creates |
1255 // instances of type JS_FUNCTION_TYPE benefits from the use of this function. | 1223 // instances of type JS_FUNCTION_TYPE benefits from the use of this function. |
1256 // Please note this does not perform a garbage collection. | 1224 // Please note this does not perform a garbage collection. |
1257 static inline Object* InitializeFunction(JSFunction* function, | 1225 static inline Object* InitializeFunction(JSFunction* function, |
(...skipping 68 matching lines...)
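The RebuildRSets/UpdateRSet declarations in the hunk above re-derive the remembered set from scratch. A simplified model of the per-object step, assuming a Page::SetRSet counterpart to the Page::IsRSetSet call used by the verifier further down; the real UpdateRSet is declared above and this only models the bit-setting part:

  // Model only: mark the rset bit for every slot of an object that
  // currently holds a new-space pointer.
  class SetRSetVisitorModel : public ObjectVisitor {
   public:
    void VisitPointers(Object** start, Object** end) {
      for (Object** current = start; current < end; current++) {
        if ((*current)->IsHeapObject() &&
            Heap::InNewSpace(HeapObject::cast(*current))) {
          Page::SetRSet(reinterpret_cast<Address>(current), 0);  // assumed API
        }
      }
    }
  };
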
1326 } | 1294 } |
1327 | 1295 |
1328 ~LinearAllocationScope() { | 1296 ~LinearAllocationScope() { |
1329 Heap::linear_allocation_scope_depth_--; | 1297 Heap::linear_allocation_scope_depth_--; |
1330 ASSERT(Heap::linear_allocation_scope_depth_ >= 0); | 1298 ASSERT(Heap::linear_allocation_scope_depth_ >= 0); |
1331 } | 1299 } |
1332 }; | 1300 }; |
1333 | 1301 |
1334 | 1302 |
1335 #ifdef DEBUG | 1303 #ifdef DEBUG |
1336 // Visitor class to verify interior pointers in spaces that do not contain | 1304 // Visitor class to verify interior pointers that do not have remembered set |
1337 // or care about intergenerational references. All heap object pointers have to | 1305 // bits. All heap object pointers have to point into the heap to a location |
1338 // point into the heap to a location that has a map pointer at its first word. | 1306 // that has a map pointer at its first word. Caveat: Heap::Contains is an |
1339 // Caveat: Heap::Contains is an approximation because it can return true for | 1307 // approximation because it can return true for objects in a heap space but |
1340 // objects in a heap space but above the allocation pointer. | 1308 // above the allocation pointer. |
1341 class VerifyPointersVisitor: public ObjectVisitor { | 1309 class VerifyPointersVisitor: public ObjectVisitor { |
1342 public: | 1310 public: |
1343 void VisitPointers(Object** start, Object** end) { | 1311 void VisitPointers(Object** start, Object** end) { |
1344 for (Object** current = start; current < end; current++) { | 1312 for (Object** current = start; current < end; current++) { |
1345 if ((*current)->IsHeapObject()) { | 1313 if ((*current)->IsHeapObject()) { |
1346 HeapObject* object = HeapObject::cast(*current); | 1314 HeapObject* object = HeapObject::cast(*current); |
1347 ASSERT(Heap::Contains(object)); | 1315 ASSERT(Heap::Contains(object)); |
1348 ASSERT(object->map()->IsMap()); | 1316 ASSERT(object->map()->IsMap()); |
1349 } | 1317 } |
1350 } | 1318 } |
1351 } | 1319 } |
1352 }; | 1320 }; |
1353 | 1321 |
1354 | 1322 |
1355 // Visitor class to verify interior pointers in spaces that use region marks | 1323 // Visitor class to verify interior pointers that have remembered set bits. |
1356 // to keep track of intergenerational references. | 1324 // As VerifyPointersVisitor but also checks that remembered set bits are |
1357 // As VerifyPointersVisitor but also checks that dirty marks are set | 1325 // always set for pointers into new space. |
1358 // for regions covering intergenerational references. | 1326 class VerifyPointersAndRSetVisitor: public ObjectVisitor { |
1359 class VerifyPointersAndDirtyRegionsVisitor: public ObjectVisitor { | |
1360 public: | 1327 public: |
1361 void VisitPointers(Object** start, Object** end) { | 1328 void VisitPointers(Object** start, Object** end) { |
1362 for (Object** current = start; current < end; current++) { | 1329 for (Object** current = start; current < end; current++) { |
1363 if ((*current)->IsHeapObject()) { | 1330 if ((*current)->IsHeapObject()) { |
1364 HeapObject* object = HeapObject::cast(*current); | 1331 HeapObject* object = HeapObject::cast(*current); |
1365 ASSERT(Heap::Contains(object)); | 1332 ASSERT(Heap::Contains(object)); |
1366 ASSERT(object->map()->IsMap()); | 1333 ASSERT(object->map()->IsMap()); |
1367 if (Heap::InNewSpace(object)) { | 1334 if (Heap::InNewSpace(object)) { |
1368 ASSERT(Heap::InToSpace(object)); | 1335 ASSERT(Page::IsRSetSet(reinterpret_cast<Address>(current), 0)); |
1369 Address addr = reinterpret_cast<Address>(current); | |
1370 ASSERT(Page::FromAddress(addr)->IsRegionDirty(addr)); | |
1371 } | 1336 } |
1372 } | 1337 } |
1373 } | 1338 } |
1374 } | 1339 } |
1375 }; | 1340 }; |
1376 #endif | 1341 #endif |
1377 | 1342 |
1378 | 1343 |
1379 // Space iterator for iterating over all spaces of the heap. | 1344 // Space iterator for iterating over all spaces of the heap. |
1380 // Returns each space in turn, and null when it is done. | 1345 // Returns each space in turn, and null when it is done. |
(...skipping 547 matching lines...)
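A possible debug-only wiring of the two verification visitors defined in the hunk above; the VISIT_ALL mode value and the per-space object walk are assumptions, not part of this change:

  #ifdef DEBUG
  static void VerifyHeapSketch() {
    VerifyPointersVisitor root_visitor;
    Heap::IterateRoots(&root_visitor, VISIT_ALL);   // VISIT_ALL is assumed here

    VerifyPointersAndRSetVisitor rset_visitor;
    // For each object 'obj' in an old paged space one would then call
    //   obj->Iterate(&rset_visitor);
    // which checks both the pointer targets and the corresponding rset bits.
  }
  #endif
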
1928 | 1893 |
1929 // To speed up scavenge collections, new space strings are kept | 1894 // To speed up scavenge collections, new space strings are kept |
1930 // separate from old space strings. | 1895 // separate from old space strings. |
1931 static List<Object*> new_space_strings_; | 1896 static List<Object*> new_space_strings_; |
1932 static List<Object*> old_space_strings_; | 1897 static List<Object*> old_space_strings_; |
1933 }; | 1898 }; |
1934 | 1899 |
1935 } } // namespace v8::internal | 1900 } } // namespace v8::internal |
1936 | 1901 |
1937 #endif // V8_HEAP_H_ | 1902 #endif // V8_HEAP_H_ |