Index: src/heap.cc
===================================================================
--- src/heap.cc (revision 2377)
+++ src/heap.cc (working copy)
@@ -64,6 +64,7 @@
 OldSpace* Heap::old_data_space_ = NULL;
 OldSpace* Heap::code_space_ = NULL;
 MapSpace* Heap::map_space_ = NULL;
+CellSpace* Heap::cell_space_ = NULL;
 LargeObjectSpace* Heap::lo_space_ = NULL;
 
 static const int kMinimumPromotionLimit = 2*MB;
@@ -121,7 +122,8 @@
       old_pointer_space_->Capacity() +
       old_data_space_->Capacity() +
       code_space_->Capacity() +
-      map_space_->Capacity();
+      map_space_->Capacity() +
+      cell_space_->Capacity();
 }
 
 
@@ -132,7 +134,8 @@
       old_pointer_space_->Available() +
       old_data_space_->Available() +
       code_space_->Available() +
-      map_space_->Available();
+      map_space_->Available() +
+      cell_space_->Available();
 }
 
 
@@ -141,6 +144,7 @@
          old_data_space_ != NULL &&
         code_space_ != NULL &&
         map_space_ != NULL &&
+        cell_space_ != NULL &&
         lo_space_ != NULL;
 }
 
@@ -371,6 +375,8 @@
       return code_space_->Available() >= requested_size;
     case MAP_SPACE:
       return map_space_->Available() >= requested_size;
+    case CELL_SPACE:
+      return cell_space_->Available() >= requested_size;
     case LO_SPACE:
       return lo_space_->Available() >= requested_size;
   }
@@ -666,7 +672,7 @@
   // Copy objects reachable from weak pointers.
   GlobalHandles::IterateWeakRoots(&scavenge_visitor);
 
-#if V8_HOST_ARCH_64_BIT
+#ifdef V8_HOST_ARCH_64_BIT
   // TODO(X64): Make this go away again. We currently disable RSets for
   // 64-bit-mode.
   HeapObjectIterator old_pointer_iterator(old_pointer_space_);
@@ -686,13 +692,14 @@
       heap_object->Iterate(&scavenge_visitor);
     }
   }
-#else  // V8_HOST_ARCH_64_BIT
+#else  // !defined(V8_HOST_ARCH_64_BIT)
   // Copy objects reachable from the old generation.  By definition,
   // there are no intergenerational pointers in code or data spaces.
   IterateRSet(old_pointer_space_, &ScavengePointer);
+  IterateRSet(cell_space_, &ScavengePointer);
   IterateRSet(map_space_, &ScavengePointer);
   lo_space_->IterateRSet(&ScavengePointer);
-#endif  // V8_HOST_ARCH_64_BIT
+#endif
 
   do {
     ASSERT(new_space_front <= new_space_.top());
@@ -840,6 +847,9 @@
   old_pointer_space_->ClearRSet();
   RebuildRSets(old_pointer_space_);
 
+  cell_space_->ClearRSet();
+  RebuildRSets(cell_space_);
+
   Heap::lo_space_->ClearRSet();
   RebuildRSets(lo_space_);
 }
@@ -1008,7 +1018,7 @@
 
 Object* Heap::AllocatePartialMap(InstanceType instance_type,
                                  int instance_size) {
-  Object* result = AllocateRawMap(Map::kSize);
+  Object* result = AllocateRawMap();
   if (result->IsFailure()) return result;
 
   // Map::cast cannot be used due to uninitialized map field.
@@ -1022,7 +1032,7 @@
 
 
 Object* Heap::AllocateMap(InstanceType instance_type, int instance_size) {
-  Object* result = AllocateRawMap(Map::kSize);
+  Object* result = AllocateRawMap();
   if (result->IsFailure()) return result;
 
   Map* map = reinterpret_cast<Map*>(result);
@@ -1044,7 +1054,6 @@
 bool Heap::CreateInitialMaps() {
   Object* obj = AllocatePartialMap(MAP_TYPE, Map::kSize);
   if (obj->IsFailure()) return false;
-
   // Map::cast cannot be used due to uninitialized map field.
   meta_map_ = reinterpret_cast<Map*>(obj);
   meta_map()->set_map(meta_map());
@@ -1057,11 +1066,6 @@
   if (obj->IsFailure()) return false;
   oddball_map_ = Map::cast(obj);
 
-  obj = AllocatePartialMap(JS_GLOBAL_PROPERTY_CELL_TYPE,
-                           JSGlobalPropertyCell::kSize);
-  if (obj->IsFailure()) return false;
-  global_property_cell_map_ = Map::cast(obj);
-
   // Allocate the empty array
   obj = AllocateEmptyFixedArray();
   if (obj->IsFailure()) return false;
@@ -1071,11 +1075,11 @@
   if (obj->IsFailure()) return false;
   null_value_ = obj;
 
-  // Allocate the empty descriptor array.  AllocateMap can now be used.
+  // Allocate the empty descriptor array.
   obj = AllocateEmptyFixedArray();
   if (obj->IsFailure()) return false;
-  // There is a check against empty_descriptor_array() in cast().
-  empty_descriptor_array_ = reinterpret_cast<DescriptorArray*>(obj);
+  empty_descriptor_array_ = DescriptorArray::cast(obj);
+  // AllocateMap can now be used.
 
   // Fix the instance_descriptors for the existing maps.
   meta_map()->set_instance_descriptors(empty_descriptor_array());
@@ -1087,22 +1091,16 @@
   oddball_map()->set_instance_descriptors(empty_descriptor_array());
   oddball_map()->set_code_cache(empty_fixed_array());
 
-  global_property_cell_map()->set_instance_descriptors(
-      empty_descriptor_array());
-  global_property_cell_map()->set_code_cache(empty_fixed_array());
-
   // Fix prototype object for existing maps.
   meta_map()->set_prototype(null_value());
   meta_map()->set_constructor(null_value());
 
   fixed_array_map()->set_prototype(null_value());
   fixed_array_map()->set_constructor(null_value());
+
   oddball_map()->set_prototype(null_value());
   oddball_map()->set_constructor(null_value());
 
-  global_property_cell_map()->set_prototype(null_value());
-  global_property_cell_map()->set_constructor(null_value());
-
   obj = AllocateMap(HEAP_NUMBER_TYPE, HeapNumber::kSize);
   if (obj->IsFailure()) return false;
   heap_number_map_ = Map::cast(obj);
@@ -1156,13 +1154,18 @@
   if (obj->IsFailure()) return false;
   code_map_ = Map::cast(obj);
 
+  obj = AllocateMap(JS_GLOBAL_PROPERTY_CELL_TYPE,
+                    JSGlobalPropertyCell::kSize);
+  if (obj->IsFailure()) return false;
+  global_property_cell_map_ = Map::cast(obj);
+
   obj = AllocateMap(FILLER_TYPE, kPointerSize);
   if (obj->IsFailure()) return false;
-  one_word_filler_map_ = Map::cast(obj);
+  one_pointer_filler_map_ = Map::cast(obj);
 
   obj = AllocateMap(FILLER_TYPE, 2 * kPointerSize);
   if (obj->IsFailure()) return false;
-  two_word_filler_map_ = Map::cast(obj);
+  two_pointer_filler_map_ = Map::cast(obj);
 
 #define ALLOCATE_STRUCT_MAP(NAME, Name, name) \
   obj = AllocateMap(NAME##_TYPE, Name::kSize); \
@@ -1230,9 +1233,7 @@
 
 
 Object* Heap::AllocateJSGlobalPropertyCell(Object* value) {
-  Object* result = AllocateRaw(JSGlobalPropertyCell::kSize,
-                               OLD_POINTER_SPACE,
-                               OLD_POINTER_SPACE);
+  Object* result = AllocateRawCell();
   if (result->IsFailure()) return result;
   HeapObject::cast(result)->set_map(global_property_cell_map());
   JSGlobalPropertyCell::cast(result)->set_value(value);
@@ -1809,7 +1810,7 @@
   if (size == 0) return;
   HeapObject* filler = HeapObject::FromAddress(addr);
   if (size == kPointerSize) {
-    filler->set_map(Heap::one_word_filler_map());
+    filler->set_map(Heap::one_pointer_filler_map());
   } else {
     filler->set_map(Heap::byte_array_map());
     ByteArray::cast(filler)->set_length(ByteArray::LengthFor(size));
@@ -2685,6 +2686,8 @@
   code_space_->ReportStatistics();
   PrintF("Map space : ");
   map_space_->ReportStatistics();
+  PrintF("Cell space : ");
+  cell_space_->ReportStatistics();
   PrintF("Large object space : ");
   lo_space_->ReportStatistics();
   PrintF(">>>>>> ========================================= >>>>>>\n");
@@ -2705,6 +2708,7 @@
      old_data_space_->Contains(addr) ||
      code_space_->Contains(addr) ||
      map_space_->Contains(addr) ||
+     cell_space_->Contains(addr) ||
      lo_space_->SlowContains(addr));
 }
 
@@ -2729,6 +2733,8 @@
       return code_space_->Contains(addr);
     case MAP_SPACE:
       return map_space_->Contains(addr);
+    case CELL_SPACE:
+      return cell_space_->Contains(addr);
     case LO_SPACE:
       return lo_space_->SlowContains(addr);
   }
@@ -2840,7 +2846,9 @@
 
 void Heap::IterateRSet(PagedSpace* space, ObjectSlotCallback copy_object_func) {
   ASSERT(Page::is_rset_in_use());
-  ASSERT(space == old_pointer_space_ || space == map_space_);
+  ASSERT(space == old_pointer_space_ ||
+         space == cell_space_ ||
+         space == map_space_);
 
   static void* paged_rset_histogram = StatsTable::CreateHistogram(
       "V8.RSetPaged",
@@ -2964,6 +2972,7 @@
       + old_data_space_->Size()
       + code_space_->Size()
       + map_space_->Size()
+      + cell_space_->Size()
       + lo_space_->Size();
 }
 
@@ -3041,6 +3050,13 @@
   // enough to hold at least a page will cause it to allocate.
   if (!map_space_->Setup(NULL, 0)) return false;
 
+  // Initialize global property cell space.
+  cell_space_ = new CellSpace(old_generation_size_, CELL_SPACE);
+  if (cell_space_ == NULL) return false;
+  // Setting up a paged space without giving it a virtual memory range big
+  // enough to hold at least a page will cause it to allocate.
+  if (!cell_space_->Setup(NULL, 0)) return false;
+
   // The large object code space may contain code or data.  We set the memory
   // to be non-executable here for safety, but this means we need to enable it
   // explicitly when allocating large code objects.
@@ -3093,6 +3109,12 @@
     map_space_ = NULL;
   }
 
+  if (cell_space_ != NULL) {
+    cell_space_->TearDown();
+    delete cell_space_;
+    cell_space_ = NULL;
+  }
+
   if (lo_space_ != NULL) {
     lo_space_->TearDown();
     delete lo_space_;
@@ -3104,11 +3126,9 @@
 
 
 void Heap::Shrink() {
-  // Try to shrink map, old, and code spaces.
-  map_space_->Shrink();
-  old_pointer_space_->Shrink();
-  old_data_space_->Shrink();
-  code_space_->Shrink();
+  // Try to shrink all paged spaces.
+  PagedSpaces spaces;
+  while (PagedSpace* space = spaces.next()) space->Shrink();
 }
 
 
@@ -3116,24 +3136,16 @@
 
 void Heap::Protect() {
   if (HasBeenSetup()) {
-    new_space_.Protect();
-    map_space_->Protect();
-    old_pointer_space_->Protect();
-    old_data_space_->Protect();
-    code_space_->Protect();
-    lo_space_->Protect();
+    AllSpaces spaces;
+    while (Space* space = spaces.next()) space->Protect();
   }
 }
 
 
 void Heap::Unprotect() {
   if (HasBeenSetup()) {
-    new_space_.Unprotect();
-    map_space_->Unprotect();
-    old_pointer_space_->Unprotect();
-    old_data_space_->Unprotect();
-    code_space_->Unprotect();
-    lo_space_->Unprotect();
+    AllSpaces spaces;
+    while (Space* space = spaces.next()) space->Unprotect();
  }
 }
 
@@ -3171,6 +3183,8 @@
       return Heap::code_space();
     case MAP_SPACE:
       return Heap::map_space();
+    case CELL_SPACE:
+      return Heap::cell_space();
     case LO_SPACE:
       return Heap::lo_space();
     default:
@@ -3189,6 +3203,8 @@
       return Heap::code_space();
     case MAP_SPACE:
       return Heap::map_space();
+    case CELL_SPACE:
+      return Heap::cell_space();
     default:
       return NULL;
   }
@@ -3262,6 +3278,9 @@
     case MAP_SPACE:
       iterator_ = new HeapObjectIterator(Heap::map_space());
       break;
+    case CELL_SPACE:
+      iterator_ = new HeapObjectIterator(Heap::cell_space());
+      break;
     case LO_SPACE:
       iterator_ = new LargeObjectIterator(Heap::lo_space());
       break;
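
Why the scavenge and RebuildRSets hunks add cell space: global property cells now
live in a dedicated old-generation space, but a cell's value slot can point into
the new space, so the scavenger must find and update those slots through the
remembered set (RSet), exactly as it already does for old pointer space and map
space. The standalone sketch below illustrates only the remembered-set idea; the
Cell struct, RecordWrite, and the std::set representation are hypothetical
stand-ins, not V8's actual per-page bit-set RSet encoding.

#include <cstddef>
#include <cstdio>
#include <set>

// Toy old-generation object with a single pointer slot, standing in for a
// JSGlobalPropertyCell's value field (illustrative only, not V8's type).
struct Cell {
  void* value;
};

// Toy remembered set: the old-generation slots that were stored through, so a
// scavenge can scan just these slots instead of the whole old generation.
// (V8 instead keeps per-page bit sets -- the RSets cleared and rebuilt above.)
static std::set<void**> remembered_slots;

// Write barrier: remember the slot whenever it is stored through.  A real
// barrier records only stores whose target lies in the new space.
static void RecordWrite(void** slot, void* new_value) {
  *slot = new_value;
  remembered_slots.insert(slot);
}

// Analogue of IterateRSet(cell_space_, &ScavengePointer): hand every
// remembered slot to a callback that can evacuate the target object and
// update the slot in place.
static void IterateRememberedSlots(void (*callback)(void** slot)) {
  for (std::set<void**>::iterator it = remembered_slots.begin();
       it != remembered_slots.end(); ++it) {
    callback(*it);
  }
}

static void PrintSlot(void** slot) {
  std::printf("visiting slot %p -> %p\n", static_cast<void*>(slot), *slot);
}

int main() {
  static int new_space_object = 42;  // pretend this lives in the new space
  Cell cell = { NULL };              // pretend this lives in cell space
  RecordWrite(&cell.value, &new_space_object);
  IterateRememberedSlots(PrintSlot);
  return 0;
}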
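
The rewritten Heap::Shrink(), Heap::Protect(), and Heap::Unprotect() bodies rely
on the PagedSpaces and AllSpaces iterator classes, whose next() returns each
space in turn and NULL when done; the two switch hunks at new lines 3183 and
3203 appear to be exactly those next() bodies gaining a CELL_SPACE case. Below
is a minimal standalone sketch of the pattern; the space objects and the
iteration order are assumptions for illustration, not the real declarations
from heap.h.

#include <cstddef>
#include <cstdio>

// Stand-in for a V8 paged space; only Shrink() is needed for the sketch.
class PagedSpace {
 public:
  explicit PagedSpace(const char* name) : name_(name) {}
  void Shrink() { std::printf("shrinking %s\n", name_); }
 private:
  const char* name_;
};

// Hypothetical stand-ins for the five paged spaces after this patch.
static PagedSpace old_pointer_space("old pointer space");
static PagedSpace old_data_space("old data space");
static PagedSpace code_space("code space");
static PagedSpace map_space("map space");
static PagedSpace cell_space("cell space");

// Analogue of the PagedSpaces iterator: next() returns each paged space in
// turn and NULL once every space has been produced.
class PagedSpaces {
 public:
  PagedSpaces() : counter_(0) {}
  PagedSpace* next() {
    static PagedSpace* const kSpaces[] = {
      &old_pointer_space, &old_data_space, &code_space,
      &map_space, &cell_space
    };
    static const size_t kCount = sizeof(kSpaces) / sizeof(kSpaces[0]);
    return counter_ < kCount ? kSpaces[counter_++] : NULL;
  }
 private:
  size_t counter_;
};

int main() {
  // Same shape as the patched Heap::Shrink(): one loop covers every paged
  // space, so a newly added space cannot be forgotten at the call site.
  PagedSpaces spaces;
  while (PagedSpace* space = spaces.next()) space->Shrink();
  return 0;
}

Funneling all call sites through one cursor class is presumably the point of the
rewrite: when this patch adds cell space, Shrink(), Protect(), and Unprotect()
pick it up automatically instead of each needing another hand-written call.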