| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
| 8 #include "src/api.h" | 8 #include "src/api.h" |
| 9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
| 10 #include "src/base/once.h" | 10 #include "src/base/once.h" |
| (...skipping 688 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 699 #ifdef DEBUG | 699 #ifdef DEBUG |
| 700 ReportStatisticsAfterGC(); | 700 ReportStatisticsAfterGC(); |
| 701 #endif // DEBUG | 701 #endif // DEBUG |
| 702 | 702 |
| 703 // Remember the last top pointer so that we can later find out | 703 // Remember the last top pointer so that we can later find out |
| 704 // whether we allocated in new space since the last GC. | 704 // whether we allocated in new space since the last GC. |
| 705 new_space_top_after_last_gc_ = new_space()->top(); | 705 new_space_top_after_last_gc_ = new_space()->top(); |
| 706 } | 706 } |
| 707 | 707 |
| 708 | 708 |
| 709 void Heap::PreprocessStackTraces() { |
| 710 if (!weak_stack_trace_list()->IsWeakFixedArray()) return; |
| 711 WeakFixedArray* array = WeakFixedArray::cast(weak_stack_trace_list()); |
| 712 int length = array->Length(); |
| 713 for (int i = 0; i < length; i++) { |
| 714 if (array->IsEmptySlot(i)) continue; |
| 715 FixedArray* elements = FixedArray::cast(array->Get(i)); |
| 716 for (int j = 1; j < elements->length(); j += 4) { |
| 717 Object* maybe_code = elements->get(j + 2); |
| 718 // If GC happens while adding a stack trace to the weak fixed array, |
| 719 // which has been copied into a larger backing store, we may run into |
| 720 // a stack trace that has already been preprocessed. Guard against this. |
| 721 if (!maybe_code->IsCode()) break; |
| 722 Code* code = Code::cast(maybe_code); |
| 723 int offset = Smi::cast(elements->get(j + 3))->value(); |
| 724 Address pc = code->address() + offset; |
| 725 int pos = code->SourcePosition(pc); |
| 726 elements->set(j + 2, Smi::FromInt(pos)); |
| 727 } |
| 728 } |
| 729 // We must not compact the weak fixed list here, as we may be in the middle |
| 730 // of writing to it when the GC was triggered. Instead, we reset the root value. |
| 731 set_weak_stack_trace_list(Smi::FromInt(0)); |
| 732 } |
| 733 |
| 734 |
| 709 void Heap::HandleGCRequest() { | 735 void Heap::HandleGCRequest() { |
| 710 if (incremental_marking()->request_type() == | 736 if (incremental_marking()->request_type() == |
| 711 IncrementalMarking::COMPLETE_MARKING) { | 737 IncrementalMarking::COMPLETE_MARKING) { |
| 712 CollectAllGarbage(Heap::kNoGCFlags, "GC interrupt"); | 738 CollectAllGarbage(Heap::kNoGCFlags, "GC interrupt"); |
| 713 return; | 739 return; |
| 714 } | 740 } |
| 715 DCHECK(FLAG_overapproximate_weak_closure); | 741 DCHECK(FLAG_overapproximate_weak_closure); |
| 716 if (!incremental_marking()->weak_closure_was_overapproximated()) { | 742 if (!incremental_marking()->weak_closure_was_overapproximated()) { |
| 717 OverApproximateWeakClosure("GC interrupt"); | 743 OverApproximateWeakClosure("GC interrupt"); |
| 718 } | 744 } |
| (...skipping 546 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1265 } | 1291 } |
| 1266 } | 1292 } |
| 1267 | 1293 |
| 1268 | 1294 |
| 1269 void Heap::MarkCompactEpilogue() { | 1295 void Heap::MarkCompactEpilogue() { |
| 1270 gc_state_ = NOT_IN_GC; | 1296 gc_state_ = NOT_IN_GC; |
| 1271 | 1297 |
| 1272 isolate_->counters()->objs_since_last_full()->Set(0); | 1298 isolate_->counters()->objs_since_last_full()->Set(0); |
| 1273 | 1299 |
| 1274 incremental_marking()->Epilogue(); | 1300 incremental_marking()->Epilogue(); |
| 1301 |
| 1302 PreprocessStackTraces(); |
| 1275 } | 1303 } |
| 1276 | 1304 |
| 1277 | 1305 |
| 1278 void Heap::MarkCompactPrologue() { | 1306 void Heap::MarkCompactPrologue() { |
| 1279 // At any old GC clear the keyed lookup cache to enable collection of unused | 1307 // At any old GC clear the keyed lookup cache to enable collection of unused |
| 1280 // maps. | 1308 // maps. |
| 1281 isolate_->keyed_lookup_cache()->Clear(); | 1309 isolate_->keyed_lookup_cache()->Clear(); |
| 1282 isolate_->context_slot_cache()->Clear(); | 1310 isolate_->context_slot_cache()->Clear(); |
| 1283 isolate_->descriptor_lookup_cache()->Clear(); | 1311 isolate_->descriptor_lookup_cache()->Clear(); |
| 1284 RegExpResultsCache::Clear(string_split_cache()); | 1312 RegExpResultsCache::Clear(string_split_cache()); |
| (...skipping 1790 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3075 | 3103 |
| 3076 set_materialized_objects(*factory->NewFixedArray(0, TENURED)); | 3104 set_materialized_objects(*factory->NewFixedArray(0, TENURED)); |
| 3077 | 3105 |
| 3078 // Handling of script id generation is in Factory::NewScript. | 3106 // Handling of script id generation is in Factory::NewScript. |
| 3079 set_last_script_id(Smi::FromInt(v8::UnboundScript::kNoScriptId)); | 3107 set_last_script_id(Smi::FromInt(v8::UnboundScript::kNoScriptId)); |
| 3080 | 3108 |
| 3081 Handle<PropertyCell> cell = factory->NewPropertyCell(); | 3109 Handle<PropertyCell> cell = factory->NewPropertyCell(); |
| 3082 cell->set_value(Smi::FromInt(Isolate::kArrayProtectorValid)); | 3110 cell->set_value(Smi::FromInt(Isolate::kArrayProtectorValid)); |
| 3083 set_array_protector(*cell); | 3111 set_array_protector(*cell); |
| 3084 | 3112 |
| 3113 set_weak_stack_trace_list(Smi::FromInt(0)); |
| 3114 |
| 3085 set_allocation_sites_scratchpad( | 3115 set_allocation_sites_scratchpad( |
| 3086 *factory->NewFixedArray(kAllocationSiteScratchpadSize, TENURED)); | 3116 *factory->NewFixedArray(kAllocationSiteScratchpadSize, TENURED)); |
| 3087 InitializeAllocationSitesScratchpad(); | 3117 InitializeAllocationSitesScratchpad(); |
| 3088 | 3118 |
| 3089 // Initialize keyed lookup cache. | 3119 // Initialize keyed lookup cache. |
| 3090 isolate_->keyed_lookup_cache()->Clear(); | 3120 isolate_->keyed_lookup_cache()->Clear(); |
| 3091 | 3121 |
| 3092 // Initialize context slot cache. | 3122 // Initialize context slot cache. |
| 3093 isolate_->context_slot_cache()->Clear(); | 3123 isolate_->context_slot_cache()->Clear(); |
| 3094 | 3124 |
| (...skipping 16 matching lines...) Expand all Loading... |
| 3111 case kNonMonomorphicCacheRootIndex: | 3141 case kNonMonomorphicCacheRootIndex: |
| 3112 case kPolymorphicCodeCacheRootIndex: | 3142 case kPolymorphicCodeCacheRootIndex: |
| 3113 case kEmptyScriptRootIndex: | 3143 case kEmptyScriptRootIndex: |
| 3114 case kSymbolRegistryRootIndex: | 3144 case kSymbolRegistryRootIndex: |
| 3115 case kMaterializedObjectsRootIndex: | 3145 case kMaterializedObjectsRootIndex: |
| 3116 case kAllocationSitesScratchpadRootIndex: | 3146 case kAllocationSitesScratchpadRootIndex: |
| 3117 case kMicrotaskQueueRootIndex: | 3147 case kMicrotaskQueueRootIndex: |
| 3118 case kDetachedContextsRootIndex: | 3148 case kDetachedContextsRootIndex: |
| 3119 case kWeakObjectToCodeTableRootIndex: | 3149 case kWeakObjectToCodeTableRootIndex: |
| 3120 case kRetainedMapsRootIndex: | 3150 case kRetainedMapsRootIndex: |
| 3151 case kWeakStackTraceListRootIndex: |
| 3121 // Smi values | 3152 // Smi values |
| 3122 #define SMI_ENTRY(type, name, Name) case k##Name##RootIndex: | 3153 #define SMI_ENTRY(type, name, Name) case k##Name##RootIndex: |
| 3123 SMI_ROOT_LIST(SMI_ENTRY) | 3154 SMI_ROOT_LIST(SMI_ENTRY) |
| 3124 #undef SMI_ENTRY | 3155 #undef SMI_ENTRY |
| 3125 // String table | 3156 // String table |
| 3126 case kStringTableRootIndex: | 3157 case kStringTableRootIndex: |
| 3127 return true; | 3158 return true; |
| 3128 | 3159 |
| 3129 default: | 3160 default: |
| 3130 return false; | 3161 return false; |
| (...skipping 3250 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 6381 static_cast<int>(object_sizes_last_time_[index])); | 6412 static_cast<int>(object_sizes_last_time_[index])); |
| 6382 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) | 6413 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) |
| 6383 #undef ADJUST_LAST_TIME_OBJECT_COUNT | 6414 #undef ADJUST_LAST_TIME_OBJECT_COUNT |
| 6384 | 6415 |
| 6385 MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); | 6416 MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); |
| 6386 MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); | 6417 MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); |
| 6387 ClearObjectStats(); | 6418 ClearObjectStats(); |
| 6388 } | 6419 } |
| 6389 } | 6420 } |
| 6390 } // namespace v8::internal | 6421 } // namespace v8::internal |
| OLD | NEW |