| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "v8.h" | 5 #include "v8.h" |
| 6 | 6 |
| 7 #include "accessors.h" | 7 #include "accessors.h" |
| 8 #include "api.h" | 8 #include "api.h" |
| 9 #include "bootstrapper.h" | 9 #include "bootstrapper.h" |
| 10 #include "codegen.h" | 10 #include "codegen.h" |
| (...skipping 126 matching lines...) |
| 137 // V8 with snapshots and a non-default max semispace size is much | 137 // V8 with snapshots and a non-default max semispace size is much |
| 138 // easier if you can define it as part of the build environment. | 138 // easier if you can define it as part of the build environment. |
| 139 #if defined(V8_MAX_SEMISPACE_SIZE) | 139 #if defined(V8_MAX_SEMISPACE_SIZE) |
| 140 max_semi_space_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE; | 140 max_semi_space_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE; |
| 141 #endif | 141 #endif |
| 142 | 142 |
| 143 // Ensure old_generation_size_ is a multiple of kPageSize. | 143 // Ensure old_generation_size_ is a multiple of kPageSize. |
| 144 ASSERT(MB >= Page::kPageSize); | 144 ASSERT(MB >= Page::kPageSize); |
| 145 | 145 |
| 146 memset(roots_, 0, sizeof(roots_[0]) * kRootListLength); | 146 memset(roots_, 0, sizeof(roots_[0]) * kRootListLength); |
| 147 native_contexts_list_ = NULL; | 147 set_native_contexts_list(NULL); |
| 148 array_buffers_list_ = Smi::FromInt(0); | 148 set_array_buffers_list(Smi::FromInt(0)); |
| 149 allocation_sites_list_ = Smi::FromInt(0); | 149 set_allocation_sites_list(Smi::FromInt(0)); |
| 150 // Put a dummy entry in the remembered pages so we can find the list in the | 150 // Put a dummy entry in the remembered pages so we can find the list in the |
| 151 // minidump even if there are no real unmapped pages. | 151 // minidump even if there are no real unmapped pages. |
| 152 RememberUnmappedPage(NULL, false); | 152 RememberUnmappedPage(NULL, false); |
| 153 | 153 |
| 154 ClearObjectStats(true); | 154 ClearObjectStats(true); |
| 155 } | 155 } |
| 156 | 156 |
| 157 | 157 |
| 158 intptr_t Heap::Capacity() { | 158 intptr_t Heap::Capacity() { |
| 159 if (!HasBeenSetUp()) return 0; | 159 if (!HasBeenSetUp()) return 0; |
| (...skipping 795 matching lines...) |
| 955 | 955 |
| 956 // Committing memory to from space failed. | 956 // Committing memory to from space failed. |
| 957 // Memory is exhausted and we will die. | 957 // Memory is exhausted and we will die. |
| 958 V8::FatalProcessOutOfMemory("Committing semi space failed."); | 958 V8::FatalProcessOutOfMemory("Committing semi space failed."); |
| 959 } | 959 } |
| 960 | 960 |
| 961 | 961 |
| 962 void Heap::ClearJSFunctionResultCaches() { | 962 void Heap::ClearJSFunctionResultCaches() { |
| 963 if (isolate_->bootstrapper()->IsActive()) return; | 963 if (isolate_->bootstrapper()->IsActive()) return; |
| 964 | 964 |
| 965 Object* context = native_contexts_list_; | 965 Object* context = native_contexts_list(); |
| 966 while (!context->IsUndefined()) { | 966 while (!context->IsUndefined()) { |
| 967 // Get the caches for this context. GC can happen when the context | 967 // Get the caches for this context. GC can happen when the context |
| 968 // is not fully initialized, so the caches can be undefined. | 968 // is not fully initialized, so the caches can be undefined. |
| 969 Object* caches_or_undefined = | 969 Object* caches_or_undefined = |
| 970 Context::cast(context)->get(Context::JSFUNCTION_RESULT_CACHES_INDEX); | 970 Context::cast(context)->get(Context::JSFUNCTION_RESULT_CACHES_INDEX); |
| 971 if (!caches_or_undefined->IsUndefined()) { | 971 if (!caches_or_undefined->IsUndefined()) { |
| 972 FixedArray* caches = FixedArray::cast(caches_or_undefined); | 972 FixedArray* caches = FixedArray::cast(caches_or_undefined); |
| 973 // Clear the caches: | 973 // Clear the caches: |
| 974 int length = caches->length(); | 974 int length = caches->length(); |
| 975 for (int i = 0; i < length; i++) { | 975 for (int i = 0; i < length; i++) { |
| 976 JSFunctionResultCache::cast(caches->get(i))->Clear(); | 976 JSFunctionResultCache::cast(caches->get(i))->Clear(); |
| 977 } | 977 } |
| 978 } | 978 } |
| 979 // Get the next context: | 979 // Get the next context: |
| 980 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); | 980 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); |
| 981 } | 981 } |
| 982 } | 982 } |
| 983 | 983 |
| 984 | 984 |
| 985 void Heap::ClearNormalizedMapCaches() { | 985 void Heap::ClearNormalizedMapCaches() { |
| 986 if (isolate_->bootstrapper()->IsActive() && | 986 if (isolate_->bootstrapper()->IsActive() && |
| 987 !incremental_marking()->IsMarking()) { | 987 !incremental_marking()->IsMarking()) { |
| 988 return; | 988 return; |
| 989 } | 989 } |
| 990 | 990 |
| 991 Object* context = native_contexts_list_; | 991 Object* context = native_contexts_list(); |
| 992 while (!context->IsUndefined()) { | 992 while (!context->IsUndefined()) { |
| 993 // GC can happen when the context is not fully initialized, | 993 // GC can happen when the context is not fully initialized, |
| 994 // so the cache can be undefined. | 994 // so the cache can be undefined. |
| 995 Object* cache = | 995 Object* cache = |
| 996 Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX); | 996 Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX); |
| 997 if (!cache->IsUndefined()) { | 997 if (!cache->IsUndefined()) { |
| 998 NormalizedMapCache::cast(cache)->Clear(); | 998 NormalizedMapCache::cast(cache)->Clear(); |
| 999 } | 999 } |
| 1000 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); | 1000 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); |
| 1001 } | 1001 } |
| (...skipping 560 matching lines...) |
| 1562 scavenge_visitor.VisitPointer(reinterpret_cast<Object**>(type_address)); | 1562 scavenge_visitor.VisitPointer(reinterpret_cast<Object**>(type_address)); |
| 1563 } | 1563 } |
| 1564 } | 1564 } |
| 1565 | 1565 |
| 1566 // Copy objects reachable from the code flushing candidates list. | 1566 // Copy objects reachable from the code flushing candidates list. |
| 1567 MarkCompactCollector* collector = mark_compact_collector(); | 1567 MarkCompactCollector* collector = mark_compact_collector(); |
| 1568 if (collector->is_code_flushing_enabled()) { | 1568 if (collector->is_code_flushing_enabled()) { |
| 1569 collector->code_flusher()->IteratePointersToFromSpace(&scavenge_visitor); | 1569 collector->code_flusher()->IteratePointersToFromSpace(&scavenge_visitor); |
| 1570 } | 1570 } |
| 1571 | 1571 |
| 1572 // Scavenge object reachable from the native contexts list directly. | |
| 1573 scavenge_visitor.VisitPointer(BitCast<Object**>(&native_contexts_list_)); | |
| 1574 | |
| 1575 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); | 1572 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); |
| 1576 | 1573 |
| 1577 while (isolate()->global_handles()->IterateObjectGroups( | 1574 while (isolate()->global_handles()->IterateObjectGroups( |
| 1578 &scavenge_visitor, &IsUnscavengedHeapObject)) { | 1575 &scavenge_visitor, &IsUnscavengedHeapObject)) { |
| 1579 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); | 1576 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); |
| 1580 } | 1577 } |
| 1581 isolate()->global_handles()->RemoveObjectGroups(); | 1578 isolate()->global_handles()->RemoveObjectGroups(); |
| 1582 isolate()->global_handles()->RemoveImplicitRefGroups(); | 1579 isolate()->global_handles()->RemoveImplicitRefGroups(); |
| 1583 | 1580 |
| 1584 isolate_->global_handles()->IdentifyNewSpaceWeakIndependentHandles( | 1581 isolate_->global_handles()->IdentifyNewSpaceWeakIndependentHandles( |
| (...skipping 112 matching lines...) |
| 1697 // MARK_COMPACT, as they live in old space. Verify and address. | 1694 // MARK_COMPACT, as they live in old space. Verify and address. |
| 1698 ProcessAllocationSites(retainer, record_slots); | 1695 ProcessAllocationSites(retainer, record_slots); |
| 1699 } | 1696 } |
| 1700 | 1697 |
| 1701 void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer, | 1698 void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer, |
| 1702 bool record_slots) { | 1699 bool record_slots) { |
| 1703 Object* head = | 1700 Object* head = |
| 1704 VisitWeakList<Context>( | 1701 VisitWeakList<Context>( |
| 1705 this, native_contexts_list(), retainer, record_slots); | 1702 this, native_contexts_list(), retainer, record_slots); |
| 1706 // Update the head of the list of contexts. | 1703 // Update the head of the list of contexts. |
| 1707 native_contexts_list_ = head; | 1704 set_native_contexts_list(head); |
| 1708 } | 1705 } |
| 1709 | 1706 |
| 1710 | 1707 |
| 1711 void Heap::ProcessArrayBuffers(WeakObjectRetainer* retainer, | 1708 void Heap::ProcessArrayBuffers(WeakObjectRetainer* retainer, |
| 1712 bool record_slots) { | 1709 bool record_slots) { |
| 1713 Object* array_buffer_obj = | 1710 Object* array_buffer_obj = |
| 1714 VisitWeakList<JSArrayBuffer>(this, | 1711 VisitWeakList<JSArrayBuffer>(this, |
| 1715 array_buffers_list(), | 1712 array_buffers_list(), |
| 1716 retainer, record_slots); | 1713 retainer, record_slots); |
| 1717 set_array_buffers_list(array_buffer_obj); | 1714 set_array_buffers_list(array_buffer_obj); |
| 1718 } | 1715 } |
| 1719 | 1716 |
| 1720 | 1717 |
| 1721 void Heap::TearDownArrayBuffers() { | 1718 void Heap::TearDownArrayBuffers() { |
| 1722 Object* undefined = undefined_value(); | 1719 Object* undefined = undefined_value(); |
| 1723 for (Object* o = array_buffers_list(); o != undefined;) { | 1720 for (Object* o = array_buffers_list(); o != undefined;) { |
| 1724 JSArrayBuffer* buffer = JSArrayBuffer::cast(o); | 1721 JSArrayBuffer* buffer = JSArrayBuffer::cast(o); |
| 1725 Runtime::FreeArrayBuffer(isolate(), buffer); | 1722 Runtime::FreeArrayBuffer(isolate(), buffer); |
| 1726 o = buffer->weak_next(); | 1723 o = buffer->weak_next(); |
| 1727 } | 1724 } |
| 1728 array_buffers_list_ = undefined; | 1725 set_array_buffers_list(undefined); |
| 1729 } | 1726 } |
| 1730 | 1727 |
| 1731 | 1728 |
| 1732 void Heap::ProcessAllocationSites(WeakObjectRetainer* retainer, | 1729 void Heap::ProcessAllocationSites(WeakObjectRetainer* retainer, |
| 1733 bool record_slots) { | 1730 bool record_slots) { |
| 1734 Object* allocation_site_obj = | 1731 Object* allocation_site_obj = |
| 1735 VisitWeakList<AllocationSite>(this, | 1732 VisitWeakList<AllocationSite>(this, |
| 1736 allocation_sites_list(), | 1733 allocation_sites_list(), |
| 1737 retainer, record_slots); | 1734 retainer, record_slots); |
| 1738 set_allocation_sites_list(allocation_site_obj); | 1735 set_allocation_sites_list(allocation_site_obj); |
| (...skipping 3537 matching lines...) |
| 5276 | 5273 |
| 5277 bool Heap::CreateHeapObjects() { | 5274 bool Heap::CreateHeapObjects() { |
| 5278 // Create initial maps. | 5275 // Create initial maps. |
| 5279 if (!CreateInitialMaps()) return false; | 5276 if (!CreateInitialMaps()) return false; |
| 5280 CreateApiObjects(); | 5277 CreateApiObjects(); |
| 5281 | 5278 |
| 5282 // Create initial objects | 5279 // Create initial objects |
| 5283 CreateInitialObjects(); | 5280 CreateInitialObjects(); |
| 5284 CHECK_EQ(0, gc_count_); | 5281 CHECK_EQ(0, gc_count_); |
| 5285 | 5282 |
| 5286 native_contexts_list_ = undefined_value(); | 5283 set_native_contexts_list(undefined_value()); |
| 5287 array_buffers_list_ = undefined_value(); | 5284 set_array_buffers_list(undefined_value()); |
| 5288 allocation_sites_list_ = undefined_value(); | 5285 set_allocation_sites_list(undefined_value()); |
| 5289 weak_object_to_code_table_ = undefined_value(); | 5286 weak_object_to_code_table_ = undefined_value(); |
| 5290 return true; | 5287 return true; |
| 5291 } | 5288 } |
| 5292 | 5289 |
| 5293 | 5290 |
| 5294 void Heap::SetStackLimits() { | 5291 void Heap::SetStackLimits() { |
| 5295 ASSERT(isolate_ != NULL); | 5292 ASSERT(isolate_ != NULL); |
| 5296 ASSERT(isolate_ == isolate()); | 5293 ASSERT(isolate_ == isolate()); |
| 5297 // On 64 bit machines, pointers are generally out of range of Smis. We write | 5294 // On 64 bit machines, pointers are generally out of range of Smis. We write |
| 5298 // something that looks like an out of range Smi to the GC. | 5295 // something that looks like an out of range Smi to the GC. |
| (...skipping 1170 matching lines...) |
| 6469 static_cast<int>(object_sizes_last_time_[index])); | 6466 static_cast<int>(object_sizes_last_time_[index])); |
| 6470 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) | 6467 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) |
| 6471 #undef ADJUST_LAST_TIME_OBJECT_COUNT | 6468 #undef ADJUST_LAST_TIME_OBJECT_COUNT |
| 6472 | 6469 |
| 6473 OS::MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); | 6470 OS::MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); |
| 6474 OS::MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); | 6471 OS::MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); |
| 6475 ClearObjectStats(); | 6472 ClearObjectStats(); |
| 6476 } | 6473 } |
| 6477 | 6474 |
| 6478 } } // namespace v8::internal | 6475 } } // namespace v8::internal |
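
The recurring edit in this diff is mechanical: every direct read or write of the heap's weak-list fields (`native_contexts_list_`, `array_buffers_list_`, `allocation_sites_list_`) is replaced by the matching getter or `set_*` call, and the scavenger's explicit `VisitPointer(BitCast<Object**>(&native_contexts_list_))` (old lines 1572-1573) is dropped, presumably because the list is now reached through the regular root-visiting path. Below is a minimal, hedged sketch of the accessor pattern this change converges on; it is not V8's actual implementation, and the `Object` stub is invented so the example is self-contained:

```cpp
// Sketch only: route all access to a weak object list through accessors,
// so later bookkeeping (a write barrier, verification, root registration)
// can be added in exactly one place instead of at every call site.
struct Object {};  // stand-in for v8::internal::Object

class Heap {
 public:
  // Writers call the setter instead of assigning to the field directly.
  void set_native_contexts_list(Object* object) {
    native_contexts_list_ = object;
  }
  Object* native_contexts_list() const { return native_contexts_list_; }

 private:
  Object* native_contexts_list_ = nullptr;
};
```

The design payoff is visible in the diff itself: call sites like `ProcessNativeContexts()` and `CreateHeapObjects()` no longer need to know how the list is stored, only that `set_native_contexts_list(head)` keeps the heap's invariants.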