Chromium Code Reviews

Unified Diff: src/heap/heap.cc

Issue 640303006: Weak Cells (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Skip cleared weak cells (created 6 years, 2 months ago). Lines added by this patch are prefixed with '+'.
// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#include "src/accessors.h"
#include "src/api.h"
#include "src/base/bits.h"
#include "src/base/once.h"
(...skipping 132 matching lines...)
#endif

  // Ensure old_generation_size_ is a multiple of kPageSize.
  DCHECK(MB >= Page::kPageSize);

  memset(roots_, 0, sizeof(roots_[0]) * kRootListLength);
  set_native_contexts_list(NULL);
  set_array_buffers_list(Smi::FromInt(0));
  set_allocation_sites_list(Smi::FromInt(0));
  set_encountered_weak_collections(Smi::FromInt(0));
+  set_encountered_weak_cells(Smi::FromInt(0));
  // Put a dummy entry in the remembered pages so we can find the list in the
  // minidump even if there are no real unmapped pages.
  RememberUnmappedPage(NULL, false);

  ClearObjectStats(true);
}


intptr_t Heap::Capacity() {
  if (!HasBeenSetUp()) return 0;
(...skipping 1339 matching lines...)
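Note on the constructor hunk above: the new encountered_weak_cells list mirrors the existing encountered_weak_collections list. It is an intrusive, singly linked list threaded through the cells themselves, and Smi::FromInt(0) acts as the empty-list sentinel. A minimal standalone sketch of that structure (illustrative only, not V8 code; nullptr stands in for the Smi sentinel):

    // Standalone sketch, not V8 code: the "encountered" lists are intrusive
    // singly linked lists threaded through the cells themselves. In the heap
    // the terminator is Smi::FromInt(0); here nullptr plays that role.
    #include <cassert>

    struct WeakCellNode {
      WeakCellNode* next = nullptr;  // Corresponds to a WeakCell's next field.
    };

    struct EncounteredList {
      WeakCellNode* head = nullptr;  // set_encountered_weak_cells(Smi::FromInt(0))

      void Push(WeakCellNode* cell) {  // The marker prepends newly found cells.
        cell->next = head;
        head = cell;
      }
    };

    int main() {
      EncounteredList list;
      WeakCellNode a, b;
      list.Push(&a);
      list.Push(&b);
      assert(list.head == &b && b.next == &a && a.next == nullptr);
      return 0;
    }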
      PropertyCell* cell = PropertyCell::cast(heap_object);
      Address value_address = cell->ValueAddress();
      scavenge_visitor.VisitPointer(reinterpret_cast<Object**>(value_address));
      Address type_address = cell->TypeAddress();
      scavenge_visitor.VisitPointer(reinterpret_cast<Object**>(type_address));
    }
  }

  // Copy objects reachable from the encountered weak collections list.
  scavenge_visitor.VisitPointer(&encountered_weak_collections_);
+  // Copy objects reachable from the encountered weak cells.
+  scavenge_visitor.VisitPointer(&encountered_weak_cells_);

  // Copy objects reachable from the code flushing candidates list.
  MarkCompactCollector* collector = mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    collector->code_flusher()->IteratePointersToFromSpace(&scavenge_visitor);
  }

  new_space_front = DoScavenge(&scavenge_visitor, new_space_front);

  while (isolate()->global_handles()->IterateObjectGroups(
(...skipping 1030 matching lines...)
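Note on the scavenge hunk above: during a scavenge, live new-space objects are evacuated, so every root slot must be revisited and possibly rewritten with a forwarding address. encountered_weak_cells_ is such a slot, hence the added VisitPointer call. A minimal standalone sketch of the slot-visitor idea (illustrative only, not V8 code):

    // Standalone sketch, not V8 code: a scavenge hands every root slot
    // (Object**) to a visitor, which may replace the pointer in place with
    // the object's new address after evacuation.
    #include <cassert>

    struct Object {};

    struct ScavengeVisitor {
      Object* to_space_copy;  // Hypothetical forwarding target.

      void VisitPointer(Object** slot) {
        // A real scavenger copies **slot to to-space and stores the
        // forwarding address back into the slot; here we simulate that.
        *slot = to_space_copy;
      }
    };

    int main() {
      Object old_head, new_head;
      Object* encountered_weak_cells = &old_head;  // The root slot.
      ScavengeVisitor visitor{&new_head};
      visitor.VisitPointer(&encountered_weak_cells);
      assert(encountered_weak_cells == &new_head);  // Slot was updated.
      return 0;
    }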

  TYPED_ARRAYS(ALLOCATE_FIXED_TYPED_ARRAY_MAP)
#undef ALLOCATE_FIXED_TYPED_ARRAY_MAP

  ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, sloppy_arguments_elements)

  ALLOCATE_VARSIZE_MAP(CODE_TYPE, code)

  ALLOCATE_MAP(CELL_TYPE, Cell::kSize, cell)
  ALLOCATE_MAP(PROPERTY_CELL_TYPE, PropertyCell::kSize, global_property_cell)
+  ALLOCATE_MAP(WEAK_CELL_TYPE, WeakCell::kSize, weak_cell)
  ALLOCATE_MAP(FILLER_TYPE, kPointerSize, one_pointer_filler)
  ALLOCATE_MAP(FILLER_TYPE, 2 * kPointerSize, two_pointer_filler)


  for (unsigned i = 0; i < arraysize(struct_table); i++) {
    const StructTable& entry = struct_table[i];
    Map* map;
    if (!AllocateMap(entry.type, entry.size).To(&map)) return false;
    roots_[entry.index] = map;
  }
(...skipping 106 matching lines...)
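Note on the map-allocation hunk above: a heap object's first word points at its Map, and the map's instance type is how the collector identifies the object. Allocating a dedicated weak_cell map with WEAK_CELL_TYPE is what later lets the mark-compactor special-case WeakCells. A standalone sketch of that dispatch (illustrative only, not V8 code; the size constant is a placeholder, not the real WeakCell::kSize):

    // Standalone sketch, not V8 code: every heap object begins with a map
    // pointer; the GC dispatches on the map's instance type.
    #include <cassert>

    enum class InstanceType { CELL_TYPE, PROPERTY_CELL_TYPE, WEAK_CELL_TYPE };

    struct Map {
      InstanceType instance_type;
      int instance_size;  // Placeholder value; not the real WeakCell::kSize.
    };

    struct HeapObject {
      const Map* map;  // First word of every heap object.
    };

    bool IsWeakCell(const HeapObject& object) {
      return object.map->instance_type == InstanceType::WEAK_CELL_TYPE;
    }

    int main() {
      Map weak_cell_map{InstanceType::WEAK_CELL_TYPE, 16};
      HeapObject cell{&weak_cell_map};
      assert(IsWeakCell(cell));  // How a collector recognizes weak cells.
      return 0;
    }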
  result->set_map_no_write_barrier(global_property_cell_map());
  PropertyCell* cell = PropertyCell::cast(result);
  cell->set_dependent_code(DependentCode::cast(empty_fixed_array()),
                           SKIP_WRITE_BARRIER);
  cell->set_value(the_hole_value());
  cell->set_type(HeapType::None());
  return result;
}


+AllocationResult Heap::AllocateWeakCell(HeapObject* value) {
+  int size = WeakCell::kSize;
+  STATIC_ASSERT(WeakCell::kSize <= Page::kMaxRegularHeapObjectSize);
+  HeapObject* result;
+  {
+    AllocationResult allocation =
+        AllocateRaw(size, OLD_POINTER_SPACE, OLD_POINTER_SPACE);
+    if (!allocation.To(&result)) return allocation;
+  }
+  result->set_map_no_write_barrier(weak_cell_map());
+  WeakCell::cast(result)->initialize(value);
+  WeakCell::cast(result)->set_next(undefined_value(), SKIP_WRITE_BARRIER);
+  return result;
+}
+
+
void Heap::CreateApiObjects() {
  HandleScope scope(isolate());
  Factory* factory = isolate()->factory();
  Handle<Map> new_neander_map =
      factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);

  // Don't use Smi-only elements optimizations for objects with the neander
  // map. There are too many cases where element values are set directly with a
  // bottleneck to trap the Smi-only -> fast elements transition, and there
  // appears to be no benefit to optimizing this case.
(...skipping 3474 matching lines...)
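Note on AllocateWeakCell above: it follows the heap's usual AllocationResult idiom. A raw allocation either succeeds or returns a retry-after-GC failure, which is propagated unchanged to the caller; only on success are the map, value, and next field installed. A standalone sketch of that control flow (illustrative only, not V8 code):

    // Standalone sketch, not V8 code: an allocation either yields an object
    // or signals "retry after GC"; To(&result) unpacks the success case.
    #include <cstdio>

    struct HeapObject {};

    struct AllocationResult {
      HeapObject* object;  // nullptr stands in for a retry-after-GC failure.

      bool To(HeapObject** out) {
        if (object == nullptr) return false;
        *out = object;
        return true;
      }
    };

    AllocationResult AllocateRaw(bool fail) {
      static HeapObject heap_object;
      return AllocationResult{fail ? nullptr : &heap_object};
    }

    int main() {
      HeapObject* result;
      AllocationResult allocation = AllocateRaw(/*fail=*/false);
      if (!allocation.To(&result)) {
        std::puts("allocation failed: collect garbage and retry");
        return 1;
      }
      std::puts("success: install weak_cell map, initialize value and next");
      return 0;
    }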
      static_cast<int>(object_sizes_last_time_[index]));
  CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT)
#undef ADJUST_LAST_TIME_OBJECT_COUNT

  MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_));
  MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_));
  ClearObjectStats();
}
}
}  // namespace v8::internal