OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
8 #include "src/api.h" | 8 #include "src/api.h" |
9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
10 #include "src/base/once.h" | 10 #include "src/base/once.h" |
(...skipping 132 matching lines...) | |
143 #endif | 143 #endif |
144 | 144 |
145 // Ensure old_generation_size_ is a multiple of kPageSize. | 145 // Ensure old_generation_size_ is a multiple of kPageSize. |
146 DCHECK(MB >= Page::kPageSize); | 146 DCHECK(MB >= Page::kPageSize); |
147 | 147 |
148 memset(roots_, 0, sizeof(roots_[0]) * kRootListLength); | 148 memset(roots_, 0, sizeof(roots_[0]) * kRootListLength); |
149 set_native_contexts_list(NULL); | 149 set_native_contexts_list(NULL); |
150 set_array_buffers_list(Smi::FromInt(0)); | 150 set_array_buffers_list(Smi::FromInt(0)); |
151 set_allocation_sites_list(Smi::FromInt(0)); | 151 set_allocation_sites_list(Smi::FromInt(0)); |
152 set_encountered_weak_collections(Smi::FromInt(0)); | 152 set_encountered_weak_collections(Smi::FromInt(0)); |
153 set_encountered_weak_cells(Smi::FromInt(0)); | |
153 // Put a dummy entry in the remembered pages so we can find the list in the | 154 // Put a dummy entry in the remembered pages so we can find the list in the |
154 // minidump even if there are no real unmapped pages. | 155 // minidump even if there are no real unmapped pages. |
155 RememberUnmappedPage(NULL, false); | 156 RememberUnmappedPage(NULL, false); |
156 | 157 |
157 ClearObjectStats(true); | 158 ClearObjectStats(true); |
158 } | 159 } |
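The new `encountered_weak_cells` root added above follows the same pattern as `encountered_weak_collections`: it is the head of an intrusive list that the GC threads through the cells' own `next` slots, seeded with `Smi::FromInt(0)` as an empty-list sentinel before any heap objects exist, visited as a strong pointer during scavenge (see the hunk around line 1514 below), and reset to `undefined_value()` once the initial objects are created (line 5192). A minimal, self-contained sketch of that pattern, with illustrative names rather than V8's real classes:

```cpp
// Standalone illustration of an "encountered" list: weak cells are linked
// through their own `next` field while the GC runs, and a non-heap sentinel
// (here nullptr, playing the role of Smi::FromInt(0)) marks the empty list.
// Names and layout are simplified; this is not V8's actual WeakCell.
#include <iostream>
#include <vector>

struct WeakCellLike {
  void* value = nullptr;         // weakly held payload
  WeakCellLike* next = nullptr;  // intrusive link, only meaningful during GC
};

int main() {
  std::vector<WeakCellLike> cells(3);
  WeakCellLike* encountered = nullptr;  // empty-list sentinel, as in the ctor

  // During a collection, each weak cell the GC encounters is pushed onto the
  // list by splicing it in at the head.
  for (WeakCellLike& cell : cells) {
    cell.next = encountered;
    encountered = &cell;
  }

  // Afterwards the GC walks the list once (e.g. to clear cells whose values
  // did not survive) and then resets the head to the sentinel.
  int visited = 0;
  for (WeakCellLike* c = encountered; c != nullptr; c = c->next) ++visited;
  std::cout << "visited " << visited << " weak cells\n";  // prints 3
}
```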
159 | 160 |
160 | 161 |
161 intptr_t Heap::Capacity() { | 162 intptr_t Heap::Capacity() { |
162 if (!HasBeenSetUp()) return 0; | 163 if (!HasBeenSetUp()) return 0; |
(...skipping 1339 matching lines...) | |
1502 PropertyCell* cell = PropertyCell::cast(heap_object); | 1503 PropertyCell* cell = PropertyCell::cast(heap_object); |
1503 Address value_address = cell->ValueAddress(); | 1504 Address value_address = cell->ValueAddress(); |
1504 scavenge_visitor.VisitPointer(reinterpret_cast<Object**>(value_address)); | 1505 scavenge_visitor.VisitPointer(reinterpret_cast<Object**>(value_address)); |
1505 Address type_address = cell->TypeAddress(); | 1506 Address type_address = cell->TypeAddress(); |
1506 scavenge_visitor.VisitPointer(reinterpret_cast<Object**>(type_address)); | 1507 scavenge_visitor.VisitPointer(reinterpret_cast<Object**>(type_address)); |
1507 } | 1508 } |
1508 } | 1509 } |
1509 | 1510 |
1510 // Copy objects reachable from the encountered weak collections list. | 1511 // Copy objects reachable from the encountered weak collections list. |
1511 scavenge_visitor.VisitPointer(&encountered_weak_collections_); | 1512 scavenge_visitor.VisitPointer(&encountered_weak_collections_); |
1513 // Copy objects reachable from the encountered weak cells. | |
1514 scavenge_visitor.VisitPointer(&encountered_weak_cells_); | |
1512 | 1515 |
1513 // Copy objects reachable from the code flushing candidates list. | 1516 // Copy objects reachable from the code flushing candidates list. |
1514 MarkCompactCollector* collector = mark_compact_collector(); | 1517 MarkCompactCollector* collector = mark_compact_collector(); |
1515 if (collector->is_code_flushing_enabled()) { | 1518 if (collector->is_code_flushing_enabled()) { |
1516 collector->code_flusher()->IteratePointersToFromSpace(&scavenge_visitor); | 1519 collector->code_flusher()->IteratePointersToFromSpace(&scavenge_visitor); |
1517 } | 1520 } |
1518 | 1521 |
1519 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); | 1522 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); |
1520 | 1523 |
1521 while (isolate()->global_handles()->IterateObjectGroups( | 1524 while (isolate()->global_handles()->IterateObjectGroups( |
(...skipping 1030 matching lines...) | |
2552 | 2555 |
2553 TYPED_ARRAYS(ALLOCATE_FIXED_TYPED_ARRAY_MAP) | 2556 TYPED_ARRAYS(ALLOCATE_FIXED_TYPED_ARRAY_MAP) |
2554 #undef ALLOCATE_FIXED_TYPED_ARRAY_MAP | 2557 #undef ALLOCATE_FIXED_TYPED_ARRAY_MAP |
2555 | 2558 |
2556 ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, sloppy_arguments_elements) | 2559 ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, sloppy_arguments_elements) |
2557 | 2560 |
2558 ALLOCATE_VARSIZE_MAP(CODE_TYPE, code) | 2561 ALLOCATE_VARSIZE_MAP(CODE_TYPE, code) |
2559 | 2562 |
2560 ALLOCATE_MAP(CELL_TYPE, Cell::kSize, cell) | 2563 ALLOCATE_MAP(CELL_TYPE, Cell::kSize, cell) |
2561 ALLOCATE_MAP(PROPERTY_CELL_TYPE, PropertyCell::kSize, global_property_cell) | 2564 ALLOCATE_MAP(PROPERTY_CELL_TYPE, PropertyCell::kSize, global_property_cell) |
2565 ALLOCATE_MAP(WEAK_CELL_TYPE, WeakCell::kSize, weak_cell) | |
2562 ALLOCATE_MAP(FILLER_TYPE, kPointerSize, one_pointer_filler) | 2566 ALLOCATE_MAP(FILLER_TYPE, kPointerSize, one_pointer_filler) |
2563 ALLOCATE_MAP(FILLER_TYPE, 2 * kPointerSize, two_pointer_filler) | 2567 ALLOCATE_MAP(FILLER_TYPE, 2 * kPointerSize, two_pointer_filler) |
2564 | 2568 |
2565 | 2569 |
2566 for (unsigned i = 0; i < arraysize(struct_table); i++) { | 2570 for (unsigned i = 0; i < arraysize(struct_table); i++) { |
2567 const StructTable& entry = struct_table[i]; | 2571 const StructTable& entry = struct_table[i]; |
2568 Map* map; | 2572 Map* map; |
2569 if (!AllocateMap(entry.type, entry.size).To(&map)) return false; | 2573 if (!AllocateMap(entry.type, entry.size).To(&map)) return false; |
2570 roots_[entry.index] = map; | 2574 roots_[entry.index] = map; |
2571 } | 2575 } |
(...skipping 106 matching lines...) | |
2678 result->set_map_no_write_barrier(global_property_cell_map()); | 2682 result->set_map_no_write_barrier(global_property_cell_map()); |
2679 PropertyCell* cell = PropertyCell::cast(result); | 2683 PropertyCell* cell = PropertyCell::cast(result); |
2680 cell->set_dependent_code(DependentCode::cast(empty_fixed_array()), | 2684 cell->set_dependent_code(DependentCode::cast(empty_fixed_array()), |
2681 SKIP_WRITE_BARRIER); | 2685 SKIP_WRITE_BARRIER); |
2682 cell->set_value(the_hole_value()); | 2686 cell->set_value(the_hole_value()); |
2683 cell->set_type(HeapType::None()); | 2687 cell->set_type(HeapType::None()); |
2684 return result; | 2688 return result; |
2685 } | 2689 } |
2686 | 2690 |
2687 | 2691 |
2692 AllocationResult Heap::AllocateWeakCell(HeapObject* value) { | |
2693 int size = WeakCell::kSize; | |
2694 STATIC_ASSERT(WeakCell::kSize <= Page::kMaxRegularHeapObjectSize); | |
2695 HeapObject* result; | |
2696 { | |
2697 AllocationResult allocation = | |
2698 AllocateRaw(size, OLD_POINTER_SPACE, OLD_POINTER_SPACE); | |
2699 if (!allocation.To(&result)) return allocation; | |
2700 } | |
2701 result->set_map_no_write_barrier(weak_cell_map()); | |
2702 WeakCell::cast(result)->update_value_from_gc(value); | |
Erik Corry, 2014/10/13 15:56:17:
This is from allocation, not from GC, so probably…
ulan, 2014/10/14 10:17:22:
Introduced a new function "initialize" for this an…
| |
2703 WeakCell::cast(result)->set_next(undefined_value()); | |
Erik Corry, 2014/10/13 15:56:17:
There's no need to set the write barrier when sett…
ulan, 2014/10/14 10:17:22:
Done.
| |
2704 return result; | |
2705 } | |
2706 | |
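The two review comments above make related points: the value is written at allocation time rather than from the GC (hence the `initialize` helper mentioned in the reply), and the `next` store of `undefined_value()` needs no write barrier. A toy model of the write-barrier point, with assumed names rather than V8's real API: the generational barrier exists to record old-to-new pointers in a remembered set, and storing an immortal old-space root such as undefined can never create such a pointer, so there is nothing to record.

```cpp
// Simplified model (not V8 code) of a generational write barrier and why it
// can be skipped when the stored value is an immortal old-space root such as
// undefined: no old-to-new edge can be created by the store.
#include <cstdio>
#include <unordered_set>

enum class Space { kNew, kOld };

struct Obj {
  Space space;
  Obj* field;
};

// Remembered set: old-space objects that point into new space.
std::unordered_set<Obj*> remembered_set;

// Full write barrier: record old-to-new stores so the scavenger can find them.
void WriteWithBarrier(Obj* host, Obj* value) {
  host->field = value;
  if (host->space == Space::kOld && value && value->space == Space::kNew) {
    remembered_set.insert(host);
  }
}

// Barrier-free store: safe only when the value is known never to live in new
// space (e.g. the undefined sentinel, an immortal root).
void WriteSkipBarrier(Obj* host, Obj* value) { host->field = value; }

int main() {
  Obj undefined_root{Space::kOld, nullptr};  // immortal, never in new space
  Obj fresh_cell{Space::kOld, nullptr};      // freshly allocated, like the WeakCell

  // Mirrors `set_next(undefined_value())` in the hunk above: skipping the
  // barrier is safe because no old-to-new pointer can result.
  WriteSkipBarrier(&fresh_cell, &undefined_root);

  std::printf("remembered set entries: %zu\n", remembered_set.size());  // 0
}
```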
2707 | |
2688 void Heap::CreateApiObjects() { | 2708 void Heap::CreateApiObjects() { |
2689 HandleScope scope(isolate()); | 2709 HandleScope scope(isolate()); |
2690 Factory* factory = isolate()->factory(); | 2710 Factory* factory = isolate()->factory(); |
2691 Handle<Map> new_neander_map = | 2711 Handle<Map> new_neander_map = |
2692 factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize); | 2712 factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize); |
2693 | 2713 |
2694 // Don't use Smi-only elements optimizations for objects with the neander | 2714 // Don't use Smi-only elements optimizations for objects with the neander |
2695 // map. There are too many cases where element values are set directly with a | 2715 // map. There are too many cases where element values are set directly with a |
2696 // bottleneck to trap the Smi-only -> fast elements transition, and there | 2716 // bottleneck to trap the Smi-only -> fast elements transition, and there |
2697 // appears to be no benefit in optimizing this case. | 2717 // appears to be no benefit in optimizing this case. |
(...skipping 2464 matching lines...) | |
5162 if (!CreateInitialMaps()) return false; | 5182 if (!CreateInitialMaps()) return false; |
5163 CreateApiObjects(); | 5183 CreateApiObjects(); |
5164 | 5184 |
5165 // Create initial objects | 5185 // Create initial objects |
5166 CreateInitialObjects(); | 5186 CreateInitialObjects(); |
5167 CHECK_EQ(0, gc_count_); | 5187 CHECK_EQ(0, gc_count_); |
5168 | 5188 |
5169 set_native_contexts_list(undefined_value()); | 5189 set_native_contexts_list(undefined_value()); |
5170 set_array_buffers_list(undefined_value()); | 5190 set_array_buffers_list(undefined_value()); |
5171 set_allocation_sites_list(undefined_value()); | 5191 set_allocation_sites_list(undefined_value()); |
5192 set_encountered_weak_cells(undefined_value()); | |
5172 weak_object_to_code_table_ = undefined_value(); | 5193 weak_object_to_code_table_ = undefined_value(); |
5173 return true; | 5194 return true; |
5174 } | 5195 } |
5175 | 5196 |
5176 | 5197 |
5177 void Heap::SetStackLimits() { | 5198 void Heap::SetStackLimits() { |
5178 DCHECK(isolate_ != NULL); | 5199 DCHECK(isolate_ != NULL); |
5179 DCHECK(isolate_ == isolate()); | 5200 DCHECK(isolate_ == isolate()); |
5180 // On 64 bit machines, pointers are generally out of range of Smis. We write | 5201 // On 64 bit machines, pointers are generally out of range of Smis. We write |
5181 // something that looks like an out of range Smi to the GC. | 5202 // something that looks like an out of range Smi to the GC. |
(...skipping 975 matching lines...) | |
6157 static_cast<int>(object_sizes_last_time_[index])); | 6178 static_cast<int>(object_sizes_last_time_[index])); |
6158 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) | 6179 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) |
6159 #undef ADJUST_LAST_TIME_OBJECT_COUNT | 6180 #undef ADJUST_LAST_TIME_OBJECT_COUNT |
6160 | 6181 |
6161 MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); | 6182 MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); |
6162 MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); | 6183 MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); |
6163 ClearObjectStats(); | 6184 ClearObjectStats(); |
6164 } | 6185 } |
6165 } | 6186 } |
6166 } // namespace v8::internal | 6187 } // namespace v8::internal |