| OLD | NEW | 
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. | 
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be | 
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. | 
| 4 | 4 | 
| 5 #include "src/v8.h" | 5 #include "src/v8.h" | 
| 6 | 6 | 
| 7 #include "src/accessors.h" | 7 #include "src/accessors.h" | 
| 8 #include "src/api.h" | 8 #include "src/api.h" | 
| 9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" | 
| 10 #include "src/base/once.h" | 10 #include "src/base/once.h" | 
| (...skipping 1957 matching lines...) | |
| 1968 STATIC_ASSERT((ConstantPoolArray::kExtendedFirstOffset & | 1968 STATIC_ASSERT((ConstantPoolArray::kExtendedFirstOffset & | 
| 1969                kDoubleAlignmentMask) == 0);  // NOLINT | 1969                kDoubleAlignmentMask) == 0);  // NOLINT | 
| 1970 STATIC_ASSERT((FixedTypedArrayBase::kDataOffset & kDoubleAlignmentMask) == | 1970 STATIC_ASSERT((FixedTypedArrayBase::kDataOffset & kDoubleAlignmentMask) == | 
| 1971               0);  // NOLINT | 1971               0);  // NOLINT | 
| 1972 #ifdef V8_HOST_ARCH_32_BIT | 1972 #ifdef V8_HOST_ARCH_32_BIT | 
| 1973 STATIC_ASSERT((HeapNumber::kValueOffset & kDoubleAlignmentMask) != | 1973 STATIC_ASSERT((HeapNumber::kValueOffset & kDoubleAlignmentMask) != | 
| 1974               0);  // NOLINT | 1974               0);  // NOLINT | 
| 1975 #endif | 1975 #endif | 
| 1976 | 1976 | 
| 1977 | 1977 | 
| 1978 HeapObject* Heap::EnsureAligned(HeapObject* object, int size, | 1978 int GetMaximumMisalignment(AllocationAlignment alignment) { | 
| 1979                                 AllocationAlignment alignment) { | 1979   switch (alignment) { | 
| 1980   if (alignment == kDoubleAligned && | 1980     case kWordAligned: | 
| 1981       (OffsetFrom(object->address()) & kDoubleAlignmentMask) != 0) { | 1981       return 0; | 
| 1982     CreateFillerObjectAt(object->address(), kPointerSize); | 1982     case kDoubleAligned: | 
| 1983     return HeapObject::FromAddress(object->address() + kPointerSize); | 1983     case kDoubleValueAligned: | 
| 1984   } else if (alignment == kDoubleUnaligned && | 1984       return kDoubleSize / 2; | 
| 1985              (OffsetFrom(object->address()) & kDoubleAlignmentMask) == 0) { | 1985     default: | 
| 1986     CreateFillerObjectAt(object->address(), kPointerSize); | 1986       UNREACHABLE(); | 
| 1987     return HeapObject::FromAddress(object->address() + kPointerSize); |  | 
| 1988   } else { |  | 
| 1989     CreateFillerObjectAt(object->address() + size - kPointerSize, kPointerSize); |  | 
| 1990     return object; |  | 
| 1991   } | 1987   } | 
| 1992 } | 1988 } | 
| 1993 | 1989 | 
| 1994 | 1990 | 
| 1995 HeapObject* Heap::PrecedeWithFiller(HeapObject* object) { | 1991 int Heap::GetMisalignment(Address address, AllocationAlignment alignment) { | 
| 1996   CreateFillerObjectAt(object->address(), kPointerSize); | 1992   intptr_t offset = OffsetFrom(address); | 
| 1997   return HeapObject::FromAddress(object->address() + kPointerSize); | 1993   if (alignment == kDoubleAligned && (offset & kDoubleAlignmentMask) != 0) | 
|  | 1994     return kPointerSize; | 
|  | 1995   if (alignment == kDoubleValueAligned && (offset & kDoubleAlignmentMask) == 0) | 
|  | 1996     return kPointerSize; | 
|  | 1997   return 0; | 
|  | 1998 } | 
|  | 1999 | 
|  | 2000 | 
|  | 2001 HeapObject* Heap::PrecedeWithFiller(HeapObject* object, int filler_size) { | 
|  | 2002   CreateFillerObjectAt(object->address(), filler_size); | 
|  | 2003   return HeapObject::FromAddress(object->address() + filler_size); | 
| 1998 } | 2004 } | 
| 1999 | 2005 | 
| 2000 | 2006 | 
| 2001 HeapObject* Heap::DoubleAlignForDeserialization(HeapObject* object, int size) { | 2007 HeapObject* Heap::DoubleAlignForDeserialization(HeapObject* object, int size) { | 
| 2002   return EnsureAligned(object, size, kDoubleAligned); | 2008   Address address = object->address(); | 
|  | 2009   int fill_size = GetMisalignment(address, kDoubleAligned); | 
|  | 2010   // If object is not aligned, add fill to align it. | 
|  | 2011   if (fill_size) return PrecedeWithFiller(object, kPointerSize); | 
|  | 2012 | 
|  | 2013   // object is aligned. Add fill in the extra space at the end. | 
|  | 2014   // TODO(bbudge) Calculate alignment fill earlier to avoid this. | 
|  | 2015   CreateFillerObjectAt(address + size - kPointerSize, kPointerSize); | 
|  | 2016   return object; | 
| 2003 } | 2017 } | 
| 2004 | 2018 | 
| 2005 | 2019 | 
| 2006 enum LoggingAndProfiling { | 2020 enum LoggingAndProfiling { | 
| 2007   LOGGING_AND_PROFILING_ENABLED, | 2021   LOGGING_AND_PROFILING_ENABLED, | 
| 2008   LOGGING_AND_PROFILING_DISABLED | 2022   LOGGING_AND_PROFILING_DISABLED | 
| 2009 }; | 2023 }; | 
| 2010 | 2024 | 
| 2011 | 2025 | 
| 2012 enum MarksHandling { TRANSFER_MARKS, IGNORE_MARKS }; | 2026 enum MarksHandling { TRANSFER_MARKS, IGNORE_MARKS }; | 
| (...skipping 124 matching lines...) | |
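
The helpers in the hunk above replace `EnsureAligned`: `GetMisalignment` reports how many filler bytes must precede an object at a given address for the requested alignment, and `PrecedeWithFiller` now takes that filler size explicitly. Below is a minimal standalone sketch of the same arithmetic, assuming a 32-bit host (`kPointerSize == 4`, `kDoubleSize == 8`); the `Misalignment` helper name is hypothetical, not V8's.

```cpp
// Standalone sketch of the alignment-fill arithmetic in the hunk above.
// Constants mirror a 32-bit host; the helper name is hypothetical.
#include <cassert>
#include <cstdint>

constexpr int kPointerSize = 4;
constexpr int kDoubleSize = 8;
constexpr intptr_t kDoubleAlignmentMask = kDoubleSize - 1;

enum AllocationAlignment { kWordAligned, kDoubleAligned, kDoubleValueAligned };

// Returns how many filler bytes must precede an object allocated at
// `address` for the requested alignment to hold.
int Misalignment(intptr_t address, AllocationAlignment alignment) {
  if (alignment == kDoubleAligned && (address & kDoubleAlignmentMask) != 0)
    return kPointerSize;  // push the object start onto the next 8-byte boundary
  if (alignment == kDoubleValueAligned && (address & kDoubleAlignmentMask) == 0)
    return kPointerSize;  // push the start *off* the boundary so a field at
                          // offset kPointerSize becomes 8-byte aligned
  return 0;
}

int main() {
  // Candidate addresses are word aligned (multiples of 4) in this sketch.
  assert(Misalignment(0x1000, kDoubleAligned) == 0);
  assert(Misalignment(0x1004, kDoubleAligned) == kPointerSize);
  assert(Misalignment(0x1000, kDoubleValueAligned) == kPointerSize);
  assert(Misalignment(0x1004, kDoubleValueAligned) == 0);
}
```

In words: `kDoubleAligned` pushes a misaligned start up onto an 8-byte boundary, while `kDoubleValueAligned` deliberately pushes an aligned start off it so that a double-sized field one word into the object becomes 8-byte aligned.
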
| 2137       heap->OnMoveEvent(target, source, size); | 2151       heap->OnMoveEvent(target, source, size); | 
| 2138     } | 2152     } | 
| 2139 | 2153 | 
| 2140     if (marks_handling == TRANSFER_MARKS) { | 2154     if (marks_handling == TRANSFER_MARKS) { | 
| 2141       if (Marking::TransferColor(source, target)) { | 2155       if (Marking::TransferColor(source, target)) { | 
| 2142         MemoryChunk::IncrementLiveBytesFromGC(target->address(), size); | 2156         MemoryChunk::IncrementLiveBytesFromGC(target->address(), size); | 
| 2143       } | 2157       } | 
| 2144     } | 2158     } | 
| 2145   } | 2159   } | 
| 2146 | 2160 | 
| 2147   template <int alignment> | 2161   template <AllocationAlignment alignment> | 
| 2148   static inline bool SemiSpaceCopyObject(Map* map, HeapObject** slot, | 2162   static inline bool SemiSpaceCopyObject(Map* map, HeapObject** slot, | 
| 2149                                          HeapObject* object, int object_size) { | 2163                                          HeapObject* object, int object_size) { | 
| 2150     Heap* heap = map->GetHeap(); | 2164     Heap* heap = map->GetHeap(); | 
| 2151 | 2165 | 
| 2152     DCHECK(heap->AllowedToBeMigrated(object, NEW_SPACE)); | 2166     DCHECK(heap->AllowedToBeMigrated(object, NEW_SPACE)); | 
| 2153     AllocationAlignment align = |  | 
| 2154         alignment == kDoubleAlignment ? kDoubleAligned : kWordAligned; |  | 
| 2155     AllocationResult allocation = | 2167     AllocationResult allocation = | 
| 2156         heap->new_space()->AllocateRaw(object_size, align); | 2168         heap->new_space()->AllocateRaw(object_size, alignment); | 
| 2157 | 2169 | 
| 2158     HeapObject* target = NULL;  // Initialization to please compiler. | 2170     HeapObject* target = NULL;  // Initialization to please compiler. | 
| 2159     if (allocation.To(&target)) { | 2171     if (allocation.To(&target)) { | 
| 2160       // Order is important here: Set the promotion limit before storing a | 2172       // Order is important here: Set the promotion limit before storing a | 
| 2161       // filler for double alignment or migrating the object. Otherwise we | 2173       // filler for double alignment or migrating the object. Otherwise we | 
| 2162       // may end up overwriting promotion queue entries when we migrate the | 2174       // may end up overwriting promotion queue entries when we migrate the | 
| 2163       // object. | 2175       // object. | 
| 2164       heap->promotion_queue()->SetNewLimit(heap->new_space()->top()); | 2176       heap->promotion_queue()->SetNewLimit(heap->new_space()->top()); | 
| 2165 | 2177 | 
| 2166       MigrateObject(heap, object, target, object_size); | 2178       MigrateObject(heap, object, target, object_size); | 
| 2167 | 2179 | 
| 2168       // Update slot to new target. | 2180       // Update slot to new target. | 
| 2169       *slot = target; | 2181       *slot = target; | 
| 2170 | 2182 | 
| 2171       heap->IncrementSemiSpaceCopiedObjectSize(object_size); | 2183       heap->IncrementSemiSpaceCopiedObjectSize(object_size); | 
| 2172       return true; | 2184       return true; | 
| 2173     } | 2185     } | 
| 2174     return false; | 2186     return false; | 
| 2175   } | 2187   } | 
| 2176 | 2188 | 
| 2177 | 2189 | 
| 2178   template <ObjectContents object_contents, int alignment> | 2190   template <ObjectContents object_contents, AllocationAlignment alignment> | 
| 2179   static inline bool PromoteObject(Map* map, HeapObject** slot, | 2191   static inline bool PromoteObject(Map* map, HeapObject** slot, | 
| 2180                                    HeapObject* object, int object_size) { | 2192                                    HeapObject* object, int object_size) { | 
| 2181     Heap* heap = map->GetHeap(); | 2193     Heap* heap = map->GetHeap(); | 
| 2182 | 2194 | 
| 2183     AllocationAlignment align = |  | 
| 2184         alignment == kDoubleAlignment ? kDoubleAligned : kWordAligned; |  | 
| 2185     AllocationResult allocation = | 2195     AllocationResult allocation = | 
| 2186         heap->old_space()->AllocateRaw(object_size, align); | 2196         heap->old_space()->AllocateRaw(object_size, alignment); | 
| 2187 | 2197 | 
| 2188     HeapObject* target = NULL;  // Initialization to please compiler. | 2198     HeapObject* target = NULL;  // Initialization to please compiler. | 
| 2189     if (allocation.To(&target)) { | 2199     if (allocation.To(&target)) { | 
| 2190       MigrateObject(heap, object, target, object_size); | 2200       MigrateObject(heap, object, target, object_size); | 
| 2191 | 2201 | 
| 2192       // Update slot to new target. | 2202       // Update slot to new target. | 
| 2193       *slot = target; | 2203       *slot = target; | 
| 2194 | 2204 | 
| 2195       if (object_contents == POINTER_OBJECT) { | 2205       if (object_contents == POINTER_OBJECT) { | 
| 2196         if (map->instance_type() == JS_FUNCTION_TYPE) { | 2206         if (map->instance_type() == JS_FUNCTION_TYPE) { | 
| 2197           heap->promotion_queue()->insert(target, | 2207           heap->promotion_queue()->insert(target, | 
| 2198                                           JSFunction::kNonWeakFieldsEndOffset); | 2208                                           JSFunction::kNonWeakFieldsEndOffset); | 
| 2199         } else { | 2209         } else { | 
| 2200           heap->promotion_queue()->insert(target, object_size); | 2210           heap->promotion_queue()->insert(target, object_size); | 
| 2201         } | 2211         } | 
| 2202       } | 2212       } | 
| 2203       heap->IncrementPromotedObjectsSize(object_size); | 2213       heap->IncrementPromotedObjectsSize(object_size); | 
| 2204       return true; | 2214       return true; | 
| 2205     } | 2215     } | 
| 2206     return false; | 2216     return false; | 
| 2207   } | 2217   } | 
| 2208 | 2218 | 
| 2209 | 2219 | 
| 2210   template <ObjectContents object_contents, int alignment> | 2220   template <ObjectContents object_contents, AllocationAlignment alignment> | 
| 2211   static inline void EvacuateObject(Map* map, HeapObject** slot, | 2221   static inline void EvacuateObject(Map* map, HeapObject** slot, | 
| 2212                                     HeapObject* object, int object_size) { | 2222                                     HeapObject* object, int object_size) { | 
| 2213     SLOW_DCHECK(object_size <= Page::kMaxRegularHeapObjectSize); | 2223     SLOW_DCHECK(object_size <= Page::kMaxRegularHeapObjectSize); | 
| 2214     SLOW_DCHECK(object->Size() == object_size); | 2224     SLOW_DCHECK(object->Size() == object_size); | 
| 2215     Heap* heap = map->GetHeap(); | 2225     Heap* heap = map->GetHeap(); | 
| 2216 | 2226 | 
| 2217     if (!heap->ShouldBePromoted(object->address(), object_size)) { | 2227     if (!heap->ShouldBePromoted(object->address(), object_size)) { | 
| 2218       // A semi-space copy may fail due to fragmentation. In that case, we | 2228       // A semi-space copy may fail due to fragmentation. In that case, we | 
| 2219       // try to promote the object. | 2229       // try to promote the object. | 
| 2220       if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) { | 2230       if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) { | 
| (...skipping 29 matching lines...) | |
| 2250       // promotion queue processing (IterateAndMarkPointersToFromSpace) will | 2260       // promotion queue processing (IterateAndMarkPointersToFromSpace) will | 
| 2251       // miss it as it is not HeapObject-tagged. | 2261       // miss it as it is not HeapObject-tagged. | 
| 2252       Address code_entry_slot = | 2262       Address code_entry_slot = | 
| 2253           target->address() + JSFunction::kCodeEntryOffset; | 2263           target->address() + JSFunction::kCodeEntryOffset; | 
| 2254       Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot)); | 2264       Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot)); | 
| 2255       map->GetHeap()->mark_compact_collector()->RecordCodeEntrySlot( | 2265       map->GetHeap()->mark_compact_collector()->RecordCodeEntrySlot( | 
| 2256           code_entry_slot, code); | 2266           code_entry_slot, code); | 
| 2257     } | 2267     } | 
| 2258   } | 2268   } | 
| 2259 | 2269 | 
|  | 2270 #if V8_HOST_ARCH_64_BIT | 
|  | 2271 #define kObjectAligned kDoubleAligned | 
|  | 2272 #else | 
|  | 2273 #define kObjectAligned kWordAligned | 
|  | 2274 #endif | 
| 2260 | 2275 | 
| 2261   static inline void EvacuateFixedArray(Map* map, HeapObject** slot, | 2276   static inline void EvacuateFixedArray(Map* map, HeapObject** slot, | 
| 2262                                         HeapObject* object) { | 2277                                         HeapObject* object) { | 
| 2263     int object_size = FixedArray::BodyDescriptor::SizeOf(map, object); | 2278     int object_size = FixedArray::BodyDescriptor::SizeOf(map, object); | 
| 2264     EvacuateObject<POINTER_OBJECT, kObjectAlignment>(map, slot, object, | 2279     EvacuateObject<POINTER_OBJECT, kObjectAligned>(map, slot, object, | 
| 2265                                                      object_size); | 2280                                                    object_size); | 
| 2266   } | 2281   } | 
| 2267 | 2282 | 
| 2268 | 2283 | 
| 2269   static inline void EvacuateFixedDoubleArray(Map* map, HeapObject** slot, | 2284   static inline void EvacuateFixedDoubleArray(Map* map, HeapObject** slot, | 
| 2270                                               HeapObject* object) { | 2285                                               HeapObject* object) { | 
| 2271     int length = reinterpret_cast<FixedDoubleArray*>(object)->length(); | 2286     int length = reinterpret_cast<FixedDoubleArray*>(object)->length(); | 
| 2272     int object_size = FixedDoubleArray::SizeFor(length); | 2287     int object_size = FixedDoubleArray::SizeFor(length); | 
| 2273     EvacuateObject<DATA_OBJECT, kDoubleAlignment>(map, slot, object, | 2288     EvacuateObject<DATA_OBJECT, kDoubleAligned>(map, slot, object, object_size); | 
| 2274                                                   object_size); |  | 
| 2275   } | 2289   } | 
| 2276 | 2290 | 
| 2277 | 2291 | 
| 2278   static inline void EvacuateFixedTypedArray(Map* map, HeapObject** slot, | 2292   static inline void EvacuateFixedTypedArray(Map* map, HeapObject** slot, | 
| 2279                                              HeapObject* object) { | 2293                                              HeapObject* object) { | 
| 2280     int object_size = reinterpret_cast<FixedTypedArrayBase*>(object)->size(); | 2294     int object_size = reinterpret_cast<FixedTypedArrayBase*>(object)->size(); | 
| 2281     EvacuateObject<DATA_OBJECT, kObjectAlignment>(map, slot, object, | 2295     EvacuateObject<DATA_OBJECT, kObjectAligned>(map, slot, object, object_size); | 
| 2282                                                   object_size); |  | 
| 2283   } | 2296   } | 
| 2284 | 2297 | 
| 2285 | 2298 | 
| 2286   static inline void EvacuateFixedFloat64Array(Map* map, HeapObject** slot, | 2299   static inline void EvacuateFixedFloat64Array(Map* map, HeapObject** slot, | 
| 2287                                                HeapObject* object) { | 2300                                                HeapObject* object) { | 
| 2288     int object_size = reinterpret_cast<FixedFloat64Array*>(object)->size(); | 2301     int object_size = reinterpret_cast<FixedFloat64Array*>(object)->size(); | 
| 2289     EvacuateObject<DATA_OBJECT, kDoubleAlignment>(map, slot, object, | 2302     EvacuateObject<DATA_OBJECT, kDoubleAligned>(map, slot, object, object_size); | 
| 2290                                                   object_size); |  | 
| 2291   } | 2303   } | 
| 2292 | 2304 | 
| 2293 | 2305 | 
| 2294   static inline void EvacuateByteArray(Map* map, HeapObject** slot, | 2306   static inline void EvacuateByteArray(Map* map, HeapObject** slot, | 
| 2295                                        HeapObject* object) { | 2307                                        HeapObject* object) { | 
| 2296     int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize(); | 2308     int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize(); | 
| 2297     EvacuateObject<DATA_OBJECT, kObjectAlignment>(map, slot, object, | 2309     EvacuateObject<DATA_OBJECT, kObjectAligned>(map, slot, object, object_size); | 
| 2298                                                   object_size); |  | 
| 2299   } | 2310   } | 
| 2300 | 2311 | 
| 2301 | 2312 | 
| 2302   static inline void EvacuateSeqOneByteString(Map* map, HeapObject** slot, | 2313   static inline void EvacuateSeqOneByteString(Map* map, HeapObject** slot, | 
| 2303                                               HeapObject* object) { | 2314                                               HeapObject* object) { | 
| 2304     int object_size = SeqOneByteString::cast(object) | 2315     int object_size = SeqOneByteString::cast(object) | 
| 2305                           ->SeqOneByteStringSize(map->instance_type()); | 2316                           ->SeqOneByteStringSize(map->instance_type()); | 
| 2306     EvacuateObject<DATA_OBJECT, kObjectAlignment>(map, slot, object, | 2317     EvacuateObject<DATA_OBJECT, kObjectAligned>(map, slot, object, object_size); | 
| 2307                                                   object_size); |  | 
| 2308   } | 2318   } | 
| 2309 | 2319 | 
| 2310 | 2320 | 
| 2311   static inline void EvacuateSeqTwoByteString(Map* map, HeapObject** slot, | 2321   static inline void EvacuateSeqTwoByteString(Map* map, HeapObject** slot, | 
| 2312                                               HeapObject* object) { | 2322                                               HeapObject* object) { | 
| 2313     int object_size = SeqTwoByteString::cast(object) | 2323     int object_size = SeqTwoByteString::cast(object) | 
| 2314                           ->SeqTwoByteStringSize(map->instance_type()); | 2324                           ->SeqTwoByteStringSize(map->instance_type()); | 
| 2315     EvacuateObject<DATA_OBJECT, kObjectAlignment>(map, slot, object, | 2325     EvacuateObject<DATA_OBJECT, kObjectAligned>(map, slot, object, object_size); | 
| 2316                                                   object_size); |  | 
| 2317   } | 2326   } | 
| 2318 | 2327 | 
| 2319 | 2328 | 
| 2320   static inline void EvacuateShortcutCandidate(Map* map, HeapObject** slot, | 2329   static inline void EvacuateShortcutCandidate(Map* map, HeapObject** slot, | 
| 2321                                                HeapObject* object) { | 2330                                                HeapObject* object) { | 
| 2322     DCHECK(IsShortcutCandidate(map->instance_type())); | 2331     DCHECK(IsShortcutCandidate(map->instance_type())); | 
| 2323 | 2332 | 
| 2324     Heap* heap = map->GetHeap(); | 2333     Heap* heap = map->GetHeap(); | 
| 2325 | 2334 | 
| 2326     if (marks_handling == IGNORE_MARKS && | 2335     if (marks_handling == IGNORE_MARKS && | 
| (...skipping 16 matching lines...) | |
| 2343         object->set_map_word(MapWord::FromForwardingAddress(target)); | 2352         object->set_map_word(MapWord::FromForwardingAddress(target)); | 
| 2344         return; | 2353         return; | 
| 2345       } | 2354       } | 
| 2346 | 2355 | 
| 2347       heap->DoScavengeObject(first->map(), slot, first); | 2356       heap->DoScavengeObject(first->map(), slot, first); | 
| 2348       object->set_map_word(MapWord::FromForwardingAddress(*slot)); | 2357       object->set_map_word(MapWord::FromForwardingAddress(*slot)); | 
| 2349       return; | 2358       return; | 
| 2350     } | 2359     } | 
| 2351 | 2360 | 
| 2352     int object_size = ConsString::kSize; | 2361     int object_size = ConsString::kSize; | 
| 2353     EvacuateObject<POINTER_OBJECT, kObjectAlignment>(map, slot, object, | 2362     EvacuateObject<POINTER_OBJECT, kObjectAligned>(map, slot, object, | 
| 2354                                                      object_size); | 2363                                                    object_size); | 
| 2355   } | 2364   } | 
| 2356 | 2365 | 
| 2357   template <ObjectContents object_contents> | 2366   template <ObjectContents object_contents> | 
| 2358   class ObjectEvacuationStrategy { | 2367   class ObjectEvacuationStrategy { | 
| 2359    public: | 2368    public: | 
| 2360     template <int object_size> | 2369     template <int object_size> | 
| 2361     static inline void VisitSpecialized(Map* map, HeapObject** slot, | 2370     static inline void VisitSpecialized(Map* map, HeapObject** slot, | 
| 2362                                         HeapObject* object) { | 2371                                         HeapObject* object) { | 
| 2363       EvacuateObject<object_contents, kObjectAlignment>(map, slot, object, | 2372       EvacuateObject<object_contents, kObjectAligned>(map, slot, object, | 
| 2364                                                         object_size); | 2373                                                       object_size); | 
| 2365     } | 2374     } | 
| 2366 | 2375 | 
| 2367     static inline void Visit(Map* map, HeapObject** slot, HeapObject* object) { | 2376     static inline void Visit(Map* map, HeapObject** slot, HeapObject* object) { | 
| 2368       int object_size = map->instance_size(); | 2377       int object_size = map->instance_size(); | 
| 2369       EvacuateObject<object_contents, kObjectAlignment>(map, slot, object, | 2378       EvacuateObject<object_contents, kObjectAligned>(map, slot, object, | 
| 2370                                                         object_size); | 2379                                                       object_size); | 
| 2371     } | 2380     } | 
| 2372   }; | 2381   }; | 
| 2373 | 2382 | 
| 2374   static VisitorDispatchTable<ScavengingCallback> table_; | 2383   static VisitorDispatchTable<ScavengingCallback> table_; | 
| 2375 }; | 2384 }; | 
| 2376 | 2385 | 
| 2377 | 2386 | 
| 2378 template <MarksHandling marks_handling, | 2387 template <MarksHandling marks_handling, | 
| 2379           LoggingAndProfiling logging_and_profiling_mode> | 2388           LoggingAndProfiling logging_and_profiling_mode> | 
| 2380 VisitorDispatchTable<ScavengingCallback> | 2389 VisitorDispatchTable<ScavengingCallback> | 
| (...skipping 462 matching lines...) | |
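
The scavenger hunk above switches the evacuation templates from an `int` alignment parameter (plus an inline translation to `AllocationAlignment`) to taking `AllocationAlignment` directly, with the `kObjectAligned` macro selecting double alignment only on 64-bit hosts. Here is a hedged sketch of that pattern, with a hypothetical `Allocator` standing in for `new_space()`/`old_space()`; it is an illustration of the template plumbing, not the V8 code itself.

```cpp
// Sketch of threading an alignment enum through a template parameter, as the
// patched EvacuateObject/PromoteObject/SemiSpaceCopyObject do. Allocator and
// Evacuate are hypothetical stand-ins for the V8 spaces and visitors.
#include <cstdint>
#include <cstdio>

enum AllocationAlignment { kWordAligned, kDoubleAligned, kDoubleValueAligned };

// Stand-in for the V8_HOST_ARCH_64_BIT selection of kObjectAligned.
#if INTPTR_MAX == INT64_MAX
constexpr AllocationAlignment kObjectAligned = kDoubleAligned;
#else
constexpr AllocationAlignment kObjectAligned = kWordAligned;
#endif

struct Allocator {
  void AllocateRaw(int size, AllocationAlignment alignment) {
    std::printf("allocate %d bytes, alignment %d\n", size,
                static_cast<int>(alignment));
  }
};

// Before the patch the template parameter was an int that each caller had to
// translate back into an AllocationAlignment; now the enum is passed through.
template <AllocationAlignment alignment>
void Evacuate(Allocator* space, int object_size) {
  space->AllocateRaw(object_size, alignment);
}

int main() {
  Allocator space;
  Evacuate<kObjectAligned>(&space, 16);   // tagged-pointer objects
  Evacuate<kDoubleAligned>(&space, 24);   // e.g. FixedDoubleArray payloads
}
```
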
| 2843   // Statically ensure that it is safe to allocate heap numbers in paged | 2852   // Statically ensure that it is safe to allocate heap numbers in paged | 
| 2844   // spaces. | 2853   // spaces. | 
| 2845   int size = HeapNumber::kSize; | 2854   int size = HeapNumber::kSize; | 
| 2846   STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxRegularHeapObjectSize); | 2855   STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxRegularHeapObjectSize); | 
| 2847 | 2856 | 
| 2848   AllocationSpace space = SelectSpace(size, pretenure); | 2857   AllocationSpace space = SelectSpace(size, pretenure); | 
| 2849 | 2858 | 
| 2850   HeapObject* result; | 2859   HeapObject* result; | 
| 2851   { | 2860   { | 
| 2852     AllocationResult allocation = | 2861     AllocationResult allocation = | 
| 2853         AllocateRaw(size, space, OLD_SPACE, kDoubleUnaligned); | 2862         AllocateRaw(size, space, OLD_SPACE, kDoubleValueAligned); | 
| 2854     if (!allocation.To(&result)) return allocation; | 2863     if (!allocation.To(&result)) return allocation; | 
| 2855   } | 2864   } | 
| 2856 | 2865 | 
| 2857   Map* map = mode == MUTABLE ? mutable_heap_number_map() : heap_number_map(); | 2866   Map* map = mode == MUTABLE ? mutable_heap_number_map() : heap_number_map(); | 
| 2858   HeapObject::cast(result)->set_map_no_write_barrier(map); | 2867   HeapObject::cast(result)->set_map_no_write_barrier(map); | 
| 2859   HeapNumber::cast(result)->set_value(value); | 2868   HeapNumber::cast(result)->set_value(value); | 
| 2860   return result; | 2869   return result; | 
| 2861 } | 2870 } | 
| 2862 | 2871 | 
| 2863 | 2872 | 
| (...skipping 3723 matching lines...) | |
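
In the hunk above, `AllocateHeapNumber` now requests `kDoubleValueAligned`, which replaces `kDoubleUnaligned` here: on a 32-bit host the 8-byte value field sits at an offset that is not itself 8-byte aligned (one tagged word past the map), which is exactly what the `STATIC_ASSERT` on `HeapNumber::kValueOffset` near the top of this diff checks. A small worked example under those assumptions (offsets illustrative, not taken from the V8 headers):

```cpp
// Why a HeapNumber asks for kDoubleValueAligned on a 32-bit host: a 4-byte
// map word followed by an 8-byte double. Offsets here are illustrative.
#include <cassert>
#include <cstdint>

constexpr int kPointerSize = 4;
constexpr int kValueOffset = kPointerSize;      // double follows the map word
constexpr intptr_t kDoubleAlignmentMask = 8 - 1;

int main() {
  // Object start on an 8-byte boundary: the embedded double is misaligned.
  intptr_t aligned_start = 0x1000;
  assert(((aligned_start + kValueOffset) & kDoubleAlignmentMask) != 0);

  // Start the object one word *off* the boundary (what kDoubleValueAligned
  // requests) and the double value lands on an 8-byte boundary.
  intptr_t value_aligned_start = aligned_start + kPointerSize;
  assert(((value_aligned_start + kValueOffset) & kDoubleAlignmentMask) == 0);
}
```
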
| 6587     *object_type = "CODE_TYPE";                                                \ | 6596     *object_type = "CODE_TYPE";                                                \ | 
| 6588     *object_sub_type = "CODE_AGE/" #name;                                      \ | 6597     *object_sub_type = "CODE_AGE/" #name;                                      \ | 
| 6589     return true; | 6598     return true; | 
| 6590     CODE_AGE_LIST_COMPLETE(COMPARE_AND_RETURN_NAME) | 6599     CODE_AGE_LIST_COMPLETE(COMPARE_AND_RETURN_NAME) | 
| 6591 #undef COMPARE_AND_RETURN_NAME | 6600 #undef COMPARE_AND_RETURN_NAME | 
| 6592   } | 6601   } | 
| 6593   return false; | 6602   return false; | 
| 6594 } | 6603 } | 
| 6595 } | 6604 } | 
| 6596 }  // namespace v8::internal | 6605 }  // namespace v8::internal | 