| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 996 matching lines...) |
| 1007 | 1007 |
| 1008 gc_post_processing_depth_++; | 1008 gc_post_processing_depth_++; |
| 1009 { AllowHeapAllocation allow_allocation; | 1009 { AllowHeapAllocation allow_allocation; |
| 1010 GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL); | 1010 GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL); |
| 1011 next_gc_likely_to_collect_more = | 1011 next_gc_likely_to_collect_more = |
| 1012 isolate_->global_handles()->PostGarbageCollectionProcessing( | 1012 isolate_->global_handles()->PostGarbageCollectionProcessing( |
| 1013 collector, tracer); | 1013 collector, tracer); |
| 1014 } | 1014 } |
| 1015 gc_post_processing_depth_--; | 1015 gc_post_processing_depth_--; |
| 1016 | 1016 |
| 1017 isolate_->eternal_handles()->PostGarbageCollectionProcessing(this); | |
| 1018 | |
| 1019 // Update relocatables. | 1017 // Update relocatables. |
| 1020 Relocatable::PostGarbageCollectionProcessing(); | 1018 Relocatable::PostGarbageCollectionProcessing(); |
| 1021 | 1019 |
| 1022 if (collector == MARK_COMPACTOR) { | 1020 if (collector == MARK_COMPACTOR) { |
| 1023 // Register the amount of external allocated memory. | 1021 // Register the amount of external allocated memory. |
| 1024 amount_of_external_allocated_memory_at_last_global_gc_ = | 1022 amount_of_external_allocated_memory_at_last_global_gc_ = |
| 1025 amount_of_external_allocated_memory_; | 1023 amount_of_external_allocated_memory_; |
| 1026 } | 1024 } |
| 1027 | 1025 |
| 1028 { | 1026 { |
| (...skipping 972 matching lines...) |
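
The MARK_COMPACTOR branch in the hunk above snapshots the embedder-reported external memory at each full GC. That snapshot is what makes delta-based triggering possible: later external allocations are compared against the value recorded here. A minimal sketch of that accounting pattern, with hypothetical names and a made-up limit (not V8's actual API):

    #include <cstdint>
    #include <cstdio>

    // Hypothetical, simplified model of external-memory accounting: the heap
    // remembers how much embedder-owned memory was registered at the last
    // full GC, and the growth since then decides when to force another one.
    class ExternalMemoryAccounting {
     public:
      // Called from the embedder; returns true if a full GC should be forced.
      bool Adjust(int64_t change_in_bytes) {
        amount_ += change_in_bytes;
        if (amount_ < 0) amount_ = 0;  // Defensive clamp.
        return amount_ - amount_at_last_global_gc_ > kAllocationLimit;
      }
      // Mirrors the hunk above: runs after every MARK_COMPACTOR collection.
      void OnFullGC() { amount_at_last_global_gc_ = amount_; }

     private:
      static const int64_t kAllocationLimit = 192 * 1024 * 1024;  // Made up.
      int64_t amount_ = 0;
      int64_t amount_at_last_global_gc_ = 0;
    };

    int main() {
      ExternalMemoryAccounting accounting;
      if (accounting.Adjust(256LL * 1024 * 1024)) {
        std::printf("limit exceeded since last full GC; collecting\n");
        accounting.OnFullGC();
      }
      return 0;
    }
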
| 2001 kVisitStruct, | 1999 kVisitStruct, |
| 2002 kVisitStructGeneric>(); | 2000 kVisitStructGeneric>(); |
| 2003 } | 2001 } |
| 2004 | 2002 |
| 2005 static VisitorDispatchTable<ScavengingCallback>* GetTable() { | 2003 static VisitorDispatchTable<ScavengingCallback>* GetTable() { |
| 2006 return &table_; | 2004 return &table_; |
| 2007 } | 2005 } |
| 2008 | 2006 |
| 2009 private: | 2007 private: |
| 2010 enum ObjectContents { DATA_OBJECT, POINTER_OBJECT }; | 2008 enum ObjectContents { DATA_OBJECT, POINTER_OBJECT }; |
| | 2009 enum SizeRestriction { SMALL, UNKNOWN_SIZE }; |
| 2011 | 2010 |
| 2012 static void RecordCopiedObject(Heap* heap, HeapObject* obj) { | 2011 static void RecordCopiedObject(Heap* heap, HeapObject* obj) { |
| 2013 bool should_record = false; | 2012 bool should_record = false; |
| 2014 #ifdef DEBUG | 2013 #ifdef DEBUG |
| 2015 should_record = FLAG_heap_stats; | 2014 should_record = FLAG_heap_stats; |
| 2016 #endif | 2015 #endif |
| 2017 should_record = should_record || FLAG_log_gc; | 2016 should_record = should_record || FLAG_log_gc; |
| 2018 if (should_record) { | 2017 if (should_record) { |
| 2019 if (heap->new_space()->Contains(obj)) { | 2018 if (heap->new_space()->Contains(obj)) { |
| 2020 heap->new_space()->RecordAllocation(obj); | 2019 heap->new_space()->RecordAllocation(obj); |
| (...skipping 31 matching lines...) |
| 2052 } | 2051 } |
| 2053 | 2052 |
| 2054 if (marks_handling == TRANSFER_MARKS) { | 2053 if (marks_handling == TRANSFER_MARKS) { |
| 2055 if (Marking::TransferColor(source, target)) { | 2054 if (Marking::TransferColor(source, target)) { |
| 2056 MemoryChunk::IncrementLiveBytesFromGC(target->address(), size); | 2055 MemoryChunk::IncrementLiveBytesFromGC(target->address(), size); |
| 2057 } | 2056 } |
| 2058 } | 2057 } |
| 2059 } | 2058 } |
| 2060 | 2059 |
| 2061 | 2060 |
| 2062 template<ObjectContents object_contents, int alignment> | 2061 template<ObjectContents object_contents, |
| | 2062 SizeRestriction size_restriction, |
| | 2063 int alignment> |
| 2063 static inline void EvacuateObject(Map* map, | 2064 static inline void EvacuateObject(Map* map, |
| 2064 HeapObject** slot, | 2065 HeapObject** slot, |
| 2065 HeapObject* object, | 2066 HeapObject* object, |
| 2066 int object_size) { | 2067 int object_size) { |
| 2067 SLOW_ASSERT(object_size <= Page::kMaxNonCodeHeapObjectSize); | 2068 SLOW_ASSERT((size_restriction != SMALL) || |
| | 2069 (object_size <= Page::kMaxNonCodeHeapObjectSize)); |
| 2068 SLOW_ASSERT(object->Size() == object_size); | 2070 SLOW_ASSERT(object->Size() == object_size); |
| 2069 | 2071 |
| 2070 int allocation_size = object_size; | 2072 int allocation_size = object_size; |
| 2071 if (alignment != kObjectAlignment) { | 2073 if (alignment != kObjectAlignment) { |
| 2072 ASSERT(alignment == kDoubleAlignment); | 2074 ASSERT(alignment == kDoubleAlignment); |
| 2073 allocation_size += kPointerSize; | 2075 allocation_size += kPointerSize; |
| 2074 } | 2076 } |
| 2075 | 2077 |
| 2076 Heap* heap = map->GetHeap(); | 2078 Heap* heap = map->GetHeap(); |
| 2077 if (heap->ShouldBePromoted(object->address(), object_size)) { | 2079 if (heap->ShouldBePromoted(object->address(), object_size)) { |
| 2078 MaybeObject* maybe_result; | 2080 MaybeObject* maybe_result; |
| 2079 | 2081 |
| 2080 if (object_contents == DATA_OBJECT) { | 2082 if ((size_restriction != SMALL) && |
| 2081 maybe_result = heap->old_data_space()->AllocateRaw(allocation_size); | 2083 (allocation_size > Page::kMaxNonCodeHeapObjectSize)) { |
| | 2084 maybe_result = heap->lo_space()->AllocateRaw(allocation_size, |
| | 2085 NOT_EXECUTABLE); |
| 2082 } else { | 2086 } else { |
| 2083 maybe_result = | 2087 if (object_contents == DATA_OBJECT) { |
| 2084 heap->old_pointer_space()->AllocateRaw(allocation_size); | 2088 maybe_result = heap->old_data_space()->AllocateRaw(allocation_size); |
| | 2089 } else { |
| | 2090 maybe_result = |
| | 2091 heap->old_pointer_space()->AllocateRaw(allocation_size); |
| | 2092 } |
| 2085 } | 2093 } |
| 2086 | 2094 |
| 2087 Object* result = NULL; // Initialization to please compiler. | 2095 Object* result = NULL; // Initialization to please compiler. |
| 2088 if (maybe_result->ToObject(&result)) { | 2096 if (maybe_result->ToObject(&result)) { |
| 2089 HeapObject* target = HeapObject::cast(result); | 2097 HeapObject* target = HeapObject::cast(result); |
| 2090 | 2098 |
| 2091 if (alignment != kObjectAlignment) { | 2099 if (alignment != kObjectAlignment) { |
| 2092 target = EnsureDoubleAligned(heap, target, allocation_size); | 2100 target = EnsureDoubleAligned(heap, target, allocation_size); |
| 2093 } | 2101 } |
| 2094 | 2102 |
| (...skipping 53 matching lines...) |
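
The NEW column above reintroduces the SizeRestriction template parameter so that promotion can route over-page-size objects into the large-object space; SMALL callers keep the old two-way choice between the data and pointer old spaces. A condensed, hypothetical sketch of just that routing decision (simplified stand-ins, not V8's allocator API):

    #include <cstdio>

    enum ObjectContents { DATA_OBJECT, POINTER_OBJECT };
    enum SizeRestriction { SMALL, UNKNOWN_SIZE };

    // Hypothetical page limit; V8 uses Page::kMaxNonCodeHeapObjectSize here.
    const int kMaxRegularObjectSize = 512 * 1024;

    enum TargetSpace { OLD_DATA, OLD_POINTER, LARGE_OBJECT };

    // Mirrors the branch structure EvacuateObject gains in the NEW column:
    // only UNKNOWN_SIZE callers can see objects too big for a regular page,
    // and those must be promoted into the large-object space.
    template <ObjectContents object_contents, SizeRestriction size_restriction>
    TargetSpace ChoosePromotionSpace(int allocation_size) {
      if ((size_restriction != SMALL) &&
          (allocation_size > kMaxRegularObjectSize)) {
        return LARGE_OBJECT;
      }
      return (object_contents == DATA_OBJECT) ? OLD_DATA : OLD_POINTER;
    }

    int main() {
      // A pointer object of unknown size exceeding the page limit goes large.
      std::printf("%d\n", ChoosePromotionSpace<POINTER_OBJECT, UNKNOWN_SIZE>(
                              600 * 1024));  // Prints 2 (LARGE_OBJECT).
      return 0;
    }
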
| 2148 map->GetHeap()->mark_compact_collector()-> | 2156 map->GetHeap()->mark_compact_collector()-> |
| 2149 RecordCodeEntrySlot(code_entry_slot, code); | 2157 RecordCodeEntrySlot(code_entry_slot, code); |
| 2150 } | 2158 } |
| 2151 } | 2159 } |
| 2152 | 2160 |
| 2153 | 2161 |
| 2154 static inline void EvacuateFixedArray(Map* map, | 2162 static inline void EvacuateFixedArray(Map* map, |
| 2155 HeapObject** slot, | 2163 HeapObject** slot, |
| 2156 HeapObject* object) { | 2164 HeapObject* object) { |
| 2157 int object_size = FixedArray::BodyDescriptor::SizeOf(map, object); | 2165 int object_size = FixedArray::BodyDescriptor::SizeOf(map, object); |
| 2158 EvacuateObject<POINTER_OBJECT, kObjectAlignment>( | 2166 EvacuateObject<POINTER_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(map, |
| 2159 map, slot, object, object_size); | 2167 slot, |
| | 2168 object, |
| | 2169 object_size); |
| 2160 } | 2170 } |
| 2161 | 2171 |
| 2162 | 2172 |
| 2163 static inline void EvacuateFixedDoubleArray(Map* map, | 2173 static inline void EvacuateFixedDoubleArray(Map* map, |
| 2164 HeapObject** slot, | 2174 HeapObject** slot, |
| 2165 HeapObject* object) { | 2175 HeapObject* object) { |
| 2166 int length = reinterpret_cast<FixedDoubleArray*>(object)->length(); | 2176 int length = reinterpret_cast<FixedDoubleArray*>(object)->length(); |
| 2167 int object_size = FixedDoubleArray::SizeFor(length); | 2177 int object_size = FixedDoubleArray::SizeFor(length); |
| 2168 EvacuateObject<DATA_OBJECT, kDoubleAlignment>( | 2178 EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kDoubleAlignment>( |
| 2169 map, slot, object, object_size); | 2179 map, |
| | 2180 slot, |
| | 2181 object, |
| | 2182 object_size); |
| 2170 } | 2183 } |
| 2171 | 2184 |
| 2172 | 2185 |
| 2173 static inline void EvacuateByteArray(Map* map, | 2186 static inline void EvacuateByteArray(Map* map, |
| 2174 HeapObject** slot, | 2187 HeapObject** slot, |
| 2175 HeapObject* object) { | 2188 HeapObject* object) { |
| 2176 int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize(); | 2189 int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize(); |
| 2177 EvacuateObject<DATA_OBJECT, kObjectAlignment>( | 2190 EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kObjectAlignment>( |
| 2178 map, slot, object, object_size); | 2191 map, slot, object, object_size); |
| 2179 } | 2192 } |
| 2180 | 2193 |
| 2181 | 2194 |
| 2182 static inline void EvacuateSeqOneByteString(Map* map, | 2195 static inline void EvacuateSeqOneByteString(Map* map, |
| 2183 HeapObject** slot, | 2196 HeapObject** slot, |
| 2184 HeapObject* object) { | 2197 HeapObject* object) { |
| 2185 int object_size = SeqOneByteString::cast(object)-> | 2198 int object_size = SeqOneByteString::cast(object)-> |
| 2186 SeqOneByteStringSize(map->instance_type()); | 2199 SeqOneByteStringSize(map->instance_type()); |
| 2187 EvacuateObject<DATA_OBJECT, kObjectAlignment>( | 2200 EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kObjectAlignment>( |
| 2188 map, slot, object, object_size); | 2201 map, slot, object, object_size); |
| 2189 } | 2202 } |
| 2190 | 2203 |
| 2191 | 2204 |
| 2192 static inline void EvacuateSeqTwoByteString(Map* map, | 2205 static inline void EvacuateSeqTwoByteString(Map* map, |
| 2193 HeapObject** slot, | 2206 HeapObject** slot, |
| 2194 HeapObject* object) { | 2207 HeapObject* object) { |
| 2195 int object_size = SeqTwoByteString::cast(object)-> | 2208 int object_size = SeqTwoByteString::cast(object)-> |
| 2196 SeqTwoByteStringSize(map->instance_type()); | 2209 SeqTwoByteStringSize(map->instance_type()); |
| 2197 EvacuateObject<DATA_OBJECT, kObjectAlignment>( | 2210 EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kObjectAlignment>( |
| 2198 map, slot, object, object_size); | 2211 map, slot, object, object_size); |
| 2199 } | 2212 } |
| 2200 | 2213 |
| 2201 | 2214 |
| 2202 static inline bool IsShortcutCandidate(int type) { | 2215 static inline bool IsShortcutCandidate(int type) { |
| 2203 return ((type & kShortcutTypeMask) == kShortcutTypeTag); | 2216 return ((type & kShortcutTypeMask) == kShortcutTypeTag); |
| 2204 } | 2217 } |
| 2205 | 2218 |
| 2206 static inline void EvacuateShortcutCandidate(Map* map, | 2219 static inline void EvacuateShortcutCandidate(Map* map, |
| 2207 HeapObject** slot, | 2220 HeapObject** slot, |
| (...skipping 23 matching lines...) |
| 2231 object->set_map_word(MapWord::FromForwardingAddress(target)); | 2244 object->set_map_word(MapWord::FromForwardingAddress(target)); |
| 2232 return; | 2245 return; |
| 2233 } | 2246 } |
| 2234 | 2247 |
| 2235 heap->DoScavengeObject(first->map(), slot, first); | 2248 heap->DoScavengeObject(first->map(), slot, first); |
| 2236 object->set_map_word(MapWord::FromForwardingAddress(*slot)); | 2249 object->set_map_word(MapWord::FromForwardingAddress(*slot)); |
| 2237 return; | 2250 return; |
| 2238 } | 2251 } |
| 2239 | 2252 |
| 2240 int object_size = ConsString::kSize; | 2253 int object_size = ConsString::kSize; |
| 2241 EvacuateObject<POINTER_OBJECT, kObjectAlignment>( | 2254 EvacuateObject<POINTER_OBJECT, SMALL, kObjectAlignment>( |
| 2242 map, slot, object, object_size); | 2255 map, slot, object, object_size); |
| 2243 } | 2256 } |
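
EvacuateShortcutCandidate, partially visible above, implements the scavenger's cons-string shortcut: when a cons string's second half is the empty string, the slot is redirected straight to the first half and the wrapper is dropped, leaving a forwarding address behind. A self-contained sketch of the idea under hypothetical types (no forwarding words or write barriers):

    #include <cassert>
    #include <string>

    // Hypothetical miniature of a cons string: either a leaf with flat text
    // or an interior node concatenating two halves.
    struct Str {
      std::string flat;
      Str* first;
      Str* second;
      Str(const std::string& s) : flat(s), first(0), second(0) {}
      Str(Str* a, Str* b) : first(a), second(b) {}
    };

    bool IsEmptyString(const Str* s) {
      return s->first == 0 && s->flat.empty();
    }

    // If *slot points at a cons whose second half is empty, short-circuit the
    // slot to the first half so no visitor ever copies the degenerate wrapper.
    // (The real code then stores a forwarding address in the old object.)
    void ShortcutConsString(Str** slot) {
      Str* s = *slot;
      if (s->first != 0 && IsEmptyString(s->second)) {
        *slot = s->first;
      }
    }

    int main() {
      Str leaf("hello");
      Str empty("");
      Str cons(&leaf, &empty);
      Str* slot = &cons;
      ShortcutConsString(&slot);
      assert(slot == &leaf);  // The wrapper was skipped.
      return 0;
    }
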
| 2244 | 2257 |
| 2245 template<ObjectContents object_contents> | 2258 template<ObjectContents object_contents> |
| 2246 class ObjectEvacuationStrategy { | 2259 class ObjectEvacuationStrategy { |
| 2247 public: | 2260 public: |
| 2248 template<int object_size> | 2261 template<int object_size> |
| 2249 static inline void VisitSpecialized(Map* map, | 2262 static inline void VisitSpecialized(Map* map, |
| 2250 HeapObject** slot, | 2263 HeapObject** slot, |
| 2251 HeapObject* object) { | 2264 HeapObject* object) { |
| 2252 EvacuateObject<object_contents, kObjectAlignment>( | 2265 EvacuateObject<object_contents, SMALL, kObjectAlignment>( |
| 2253 map, slot, object, object_size); | 2266 map, slot, object, object_size); |
| 2254 } | 2267 } |
| 2255 | 2268 |
| 2256 static inline void Visit(Map* map, | 2269 static inline void Visit(Map* map, |
| 2257 HeapObject** slot, | 2270 HeapObject** slot, |
| 2258 HeapObject* object) { | 2271 HeapObject* object) { |
| 2259 int object_size = map->instance_size(); | 2272 int object_size = map->instance_size(); |
| 2260 EvacuateObject<object_contents, kObjectAlignment>( | 2273 EvacuateObject<object_contents, SMALL, kObjectAlignment>( |
| 2261 map, slot, object, object_size); | 2274 map, slot, object, object_size); |
| 2262 } | 2275 } |
| 2263 }; | 2276 }; |
| 2264 | 2277 |
| 2265 static VisitorDispatchTable<ScavengingCallback> table_; | 2278 static VisitorDispatchTable<ScavengingCallback> table_; |
| 2266 }; | 2279 }; |
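
The scavenging visitor class that ends above compiles one dispatch table per combination of its template parameters, so mode decisions (marks handling, logging and profiling) are baked in when a table is selected rather than branched on per object. A toy sketch of that static-dispatch-table pattern with hypothetical names:

    #include <cstdio>

    enum VisitorId { kVisitByteArray, kVisitFixedArray, kNumVisitorIds };
    typedef void (*ScavengeCallback)(int object_size);

    // One table of callbacks per specialization; filled once, then indexed by
    // the object's visitor id on the hot path with no mode branches left.
    template <bool logging_enabled>
    class ToyScavengingVisitor {
     public:
      static void Initialize() {
        table_[kVisitByteArray] = &Evacuate<false>;   // data-only object
        table_[kVisitFixedArray] = &Evacuate<true>;   // pointer-bearing object
      }
      static ScavengeCallback* GetTable() { return table_; }

     private:
      template <bool has_pointers>
      static void Evacuate(int object_size) {
        if (logging_enabled) {  // Resolved at compile time per table.
          std::printf("copied %d bytes (%s)\n", object_size,
                      has_pointers ? "pointers" : "data");
        }
        // A real visitor would copy the object and record slots here.
      }

      static ScavengeCallback table_[kNumVisitorIds];
    };

    template <bool logging_enabled>
    ScavengeCallback
        ToyScavengingVisitor<logging_enabled>::table_[kNumVisitorIds];

    int main() {
      ToyScavengingVisitor<true>::Initialize();
      ToyScavengingVisitor<true>::GetTable()[kVisitFixedArray](32);
      return 0;
    }
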
| 2267 | 2280 |
| 2268 | 2281 |
| 2269 template<MarksHandling marks_handling, | 2282 template<MarksHandling marks_handling, |
| 2270 LoggingAndProfiling logging_and_profiling_mode> | 2283 LoggingAndProfiling logging_and_profiling_mode> |
| (...skipping 927 matching lines...) |
| 3198 if (!maybe_obj->ToObject(&obj)) return false; | 3211 if (!maybe_obj->ToObject(&obj)) return false; |
| 3199 } | 3212 } |
| 3200 SeededNumberDictionary::cast(obj)->set_requires_slow_elements(); | 3213 SeededNumberDictionary::cast(obj)->set_requires_slow_elements(); |
| 3201 set_empty_slow_element_dictionary(SeededNumberDictionary::cast(obj)); | 3214 set_empty_slow_element_dictionary(SeededNumberDictionary::cast(obj)); |
| 3202 | 3215 |
| 3203 { MaybeObject* maybe_obj = AllocateSymbol(); | 3216 { MaybeObject* maybe_obj = AllocateSymbol(); |
| 3204 if (!maybe_obj->ToObject(&obj)) return false; | 3217 if (!maybe_obj->ToObject(&obj)) return false; |
| 3205 } | 3218 } |
| 3206 set_observed_symbol(Symbol::cast(obj)); | 3219 set_observed_symbol(Symbol::cast(obj)); |
| 3207 | 3220 |
| | 3221 set_i18n_template_one(the_hole_value()); |
| | 3222 set_i18n_template_two(the_hole_value()); |
| | 3223 |
| 3208 // Handling of script id generation is in Factory::NewScript. | 3224 // Handling of script id generation is in Factory::NewScript. |
| 3209 set_last_script_id(Smi::FromInt(v8::Script::kNoScriptId)); | 3225 set_last_script_id(Smi::FromInt(v8::Script::kNoScriptId)); |
| 3210 | 3226 |
| 3211 // Initialize keyed lookup cache. | 3227 // Initialize keyed lookup cache. |
| 3212 isolate_->keyed_lookup_cache()->Clear(); | 3228 isolate_->keyed_lookup_cache()->Clear(); |
| 3213 | 3229 |
| 3214 // Initialize context slot cache. | 3230 // Initialize context slot cache. |
| 3215 isolate_->context_slot_cache()->Clear(); | 3231 isolate_->context_slot_cache()->Clear(); |
| 3216 | 3232 |
| 3217 // Initialize descriptor cache. | 3233 // Initialize descriptor cache. |
| (...skipping 3367 matching lines...) |
| 6585 case VISIT_ALL_IN_SCAVENGE: | 6601 case VISIT_ALL_IN_SCAVENGE: |
| 6586 isolate_->global_handles()->IterateNewSpaceStrongAndDependentRoots(v); | 6602 isolate_->global_handles()->IterateNewSpaceStrongAndDependentRoots(v); |
| 6587 break; | 6603 break; |
| 6588 case VISIT_ALL_IN_SWEEP_NEWSPACE: | 6604 case VISIT_ALL_IN_SWEEP_NEWSPACE: |
| 6589 case VISIT_ALL: | 6605 case VISIT_ALL: |
| 6590 isolate_->global_handles()->IterateAllRoots(v); | 6606 isolate_->global_handles()->IterateAllRoots(v); |
| 6591 break; | 6607 break; |
| 6592 } | 6608 } |
| 6593 v->Synchronize(VisitorSynchronization::kGlobalHandles); | 6609 v->Synchronize(VisitorSynchronization::kGlobalHandles); |
| 6594 | 6610 |
| 6595 // Iterate over eternal handles. | |
| 6596 if (mode == VISIT_ALL_IN_SCAVENGE) { | |
| 6597 isolate_->eternal_handles()->IterateNewSpaceRoots(v); | |
| 6598 } else { | |
| 6599 isolate_->eternal_handles()->IterateAllRoots(v); | |
| 6600 } | |
| 6601 v->Synchronize(VisitorSynchronization::kEternalHandles); | |
| 6602 | |
| 6603 // Iterate over pointers being held by inactive threads. | 6611 // Iterate over pointers being held by inactive threads. |
| 6604 isolate_->thread_manager()->Iterate(v); | 6612 isolate_->thread_manager()->Iterate(v); |
| 6605 v->Synchronize(VisitorSynchronization::kThreadManager); | 6613 v->Synchronize(VisitorSynchronization::kThreadManager); |
| 6606 | 6614 |
| 6607 // Iterate over the pointers the Serialization/Deserialization code is | 6615 // Iterate over the pointers the Serialization/Deserialization code is |
| 6608 // holding. | 6616 // holding. |
| 6609 // During garbage collection this keeps the partial snapshot cache alive. | 6617 // During garbage collection this keeps the partial snapshot cache alive. |
| 6610 // During deserialization of the startup snapshot this creates the partial | 6618 // During deserialization of the startup snapshot this creates the partial |
| 6611 // snapshot cache and deserializes the objects it refers to. During | 6619 // snapshot cache and deserializes the objects it refers to. During |
| 6612 // serialization this does nothing, since the partial snapshot cache is | 6620 // serialization this does nothing, since the partial snapshot cache is |
| (...skipping 1415 matching lines...) |
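
The iteration code above follows the file's general root-visiting shape: a VisitMode selects which subset of roots gets walked (a scavenge only needs new-space roots), and Synchronize checkpoints let the serializer and deserializer verify they visit roots in identical order. A stripped-down sketch of that shape with hypothetical types:

    #include <cstddef>
    #include <cstdio>
    #include <vector>

    enum VisitMode {
      VISIT_ALL,
      VISIT_ALL_IN_SCAVENGE,
      VISIT_ALL_IN_SWEEP_NEWSPACE
    };

    class RootVisitor {
     public:
      void VisitPointer(void* root) { std::printf("root %p\n", root); }
      // Order checkpoint, analogous to the VisitorSynchronization tags above.
      void Synchronize(const char* tag) { std::printf("sync: %s\n", tag); }
    };

    struct ToyGlobalHandles {
      std::vector<void*> all_roots;
      std::vector<void*> new_space_roots;  // Subset a scavenge must see.

      void IterateAllRoots(RootVisitor* v) {
        for (std::size_t i = 0; i < all_roots.size(); ++i)
          v->VisitPointer(all_roots[i]);
      }
      void IterateNewSpaceRoots(RootVisitor* v) {
        for (std::size_t i = 0; i < new_space_roots.size(); ++i)
          v->VisitPointer(new_space_roots[i]);
      }
    };

    void IterateStrongRoots(ToyGlobalHandles* handles, RootVisitor* v,
                            VisitMode mode) {
      switch (mode) {
        case VISIT_ALL_IN_SCAVENGE:
          handles->IterateNewSpaceRoots(v);  // Young-generation roots only.
          break;
        case VISIT_ALL_IN_SWEEP_NEWSPACE:
        case VISIT_ALL:
          handles->IterateAllRoots(v);
          break;
      }
      v->Synchronize("globalhandles");
    }

    int main() {
      ToyGlobalHandles handles;
      int a = 0, b = 0;
      handles.all_roots.push_back(&a);
      handles.new_space_roots.push_back(&b);
      RootVisitor v;
      IterateStrongRoots(&handles, &v, VISIT_ALL_IN_SCAVENGE);
      return 0;
    }
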
| 8028 if (FLAG_parallel_recompilation) { | 8036 if (FLAG_parallel_recompilation) { |
| 8029 heap_->relocation_mutex_->Lock(); | 8037 heap_->relocation_mutex_->Lock(); |
| 8030 #ifdef DEBUG | 8038 #ifdef DEBUG |
| 8031 heap_->relocation_mutex_locked_by_optimizer_thread_ = | 8039 heap_->relocation_mutex_locked_by_optimizer_thread_ = |
| 8032 heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread(); | 8040 heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread(); |
| 8033 #endif // DEBUG | 8041 #endif // DEBUG |
| 8034 } | 8042 } |
| 8035 } | 8043 } |
| 8036 | 8044 |
| 8037 } } // namespace v8::internal | 8045 } } // namespace v8::internal |