Side by Side Diff: src/heap.cc

Issue 155211: Create a new paged heap space for global property cells. The new... (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
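The diff below introduces a dedicated CellSpace for JSGlobalPropertyCell objects and routes their allocation through AllocateRawCell(), which takes no size argument, so the new space apparently holds objects of a single fixed size. As a rough illustration of that idea only (a minimal, self-contained sketch, not V8 code; all names below are hypothetical), a fixed-size cell space can allocate by bumping an index within the current page:

// Minimal illustrative sketch (not V8 code): a space whose objects all have
// one fixed size. Allocation within a page is a bump of an index; a new page
// is chained on when the current one fills up.
#include <cstddef>
#include <cstdint>
#include <new>
#include <vector>

class FixedSizeCellSpace {
 public:
  FixedSizeCellSpace(std::size_t cell_size, std::size_t cells_per_page)
      : cell_size_(cell_size), cells_per_page_(cells_per_page) {}

  FixedSizeCellSpace(const FixedSizeCellSpace&) = delete;
  FixedSizeCellSpace& operator=(const FixedSizeCellSpace&) = delete;

  ~FixedSizeCellSpace() {
    for (std::uint8_t* page : pages_) ::operator delete(page);
  }

  // Returns a pointer to an uninitialized cell of cell_size_ bytes, or
  // nullptr if a fresh page cannot be obtained from the system.
  void* AllocateCell() {
    if (pages_.empty() || next_index_ == cells_per_page_) {
      void* page = ::operator new(cell_size_ * cells_per_page_, std::nothrow);
      if (page == nullptr) return nullptr;
      pages_.push_back(static_cast<std::uint8_t*>(page));
      next_index_ = 0;
    }
    return pages_.back() + next_index_++ * cell_size_;
  }

  // Total bytes reserved for cells across all pages.
  std::size_t Capacity() const {
    return pages_.size() * cells_per_page_ * cell_size_;
  }

 private:
  std::size_t cell_size_;
  std::size_t cells_per_page_;
  std::size_t next_index_ = 0;
  std::vector<std::uint8_t*> pages_;
};

Because every slot has the same size, such a space needs no per-object size bookkeeping, which is consistent with the patch also dropping the size argument from AllocateRawMap().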
Patch Set: '' Created 11 years, 5 months ago
1 // Copyright 2009 the V8 project authors. All rights reserved. 1 // Copyright 2009 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 46 matching lines...)
57 SYMBOL_LIST(SYMBOL_ALLOCATION) 57 SYMBOL_LIST(SYMBOL_ALLOCATION)
58 #undef SYMBOL_ALLOCATION 58 #undef SYMBOL_ALLOCATION
59 59
60 String* Heap::hidden_symbol_; 60 String* Heap::hidden_symbol_;
61 61
62 NewSpace Heap::new_space_; 62 NewSpace Heap::new_space_;
63 OldSpace* Heap::old_pointer_space_ = NULL; 63 OldSpace* Heap::old_pointer_space_ = NULL;
64 OldSpace* Heap::old_data_space_ = NULL; 64 OldSpace* Heap::old_data_space_ = NULL;
65 OldSpace* Heap::code_space_ = NULL; 65 OldSpace* Heap::code_space_ = NULL;
66 MapSpace* Heap::map_space_ = NULL; 66 MapSpace* Heap::map_space_ = NULL;
67 CellSpace* Heap::cell_space_ = NULL;
67 LargeObjectSpace* Heap::lo_space_ = NULL; 68 LargeObjectSpace* Heap::lo_space_ = NULL;
68 69
69 static const int kMinimumPromotionLimit = 2*MB; 70 static const int kMinimumPromotionLimit = 2*MB;
70 static const int kMinimumAllocationLimit = 8*MB; 71 static const int kMinimumAllocationLimit = 8*MB;
71 72
72 int Heap::old_gen_promotion_limit_ = kMinimumPromotionLimit; 73 int Heap::old_gen_promotion_limit_ = kMinimumPromotionLimit;
73 int Heap::old_gen_allocation_limit_ = kMinimumAllocationLimit; 74 int Heap::old_gen_allocation_limit_ = kMinimumAllocationLimit;
74 75
75 int Heap::old_gen_exhausted_ = false; 76 int Heap::old_gen_exhausted_ = false;
76 77
(...skipping 37 matching lines...)
114 #endif // DEBUG 115 #endif // DEBUG
115 116
116 117
117 int Heap::Capacity() { 118 int Heap::Capacity() {
118 if (!HasBeenSetup()) return 0; 119 if (!HasBeenSetup()) return 0;
119 120
120 return new_space_.Capacity() + 121 return new_space_.Capacity() +
121 old_pointer_space_->Capacity() + 122 old_pointer_space_->Capacity() +
122 old_data_space_->Capacity() + 123 old_data_space_->Capacity() +
123 code_space_->Capacity() + 124 code_space_->Capacity() +
124 map_space_->Capacity(); 125 map_space_->Capacity() +
126 cell_space_->Capacity();
125 } 127 }
126 128
127 129
128 int Heap::Available() { 130 int Heap::Available() {
129 if (!HasBeenSetup()) return 0; 131 if (!HasBeenSetup()) return 0;
130 132
131 return new_space_.Available() + 133 return new_space_.Available() +
132 old_pointer_space_->Available() + 134 old_pointer_space_->Available() +
133 old_data_space_->Available() + 135 old_data_space_->Available() +
134 code_space_->Available() + 136 code_space_->Available() +
135 map_space_->Available(); 137 map_space_->Available() +
138 cell_space_->Available();
136 } 139 }
137 140
138 141
139 bool Heap::HasBeenSetup() { 142 bool Heap::HasBeenSetup() {
140 return old_pointer_space_ != NULL && 143 return old_pointer_space_ != NULL &&
141 old_data_space_ != NULL && 144 old_data_space_ != NULL &&
142 code_space_ != NULL && 145 code_space_ != NULL &&
143 map_space_ != NULL && 146 map_space_ != NULL &&
147 cell_space_ != NULL &&
144 lo_space_ != NULL; 148 lo_space_ != NULL;
145 } 149 }
146 150
147 151
148 GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space) { 152 GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space) {
149 // Is global GC requested? 153 // Is global GC requested?
150 if (space != NEW_SPACE || FLAG_gc_global) { 154 if (space != NEW_SPACE || FLAG_gc_global) {
151 Counters::gc_compactor_caused_by_request.Increment(); 155 Counters::gc_compactor_caused_by_request.Increment();
152 return MARK_COMPACTOR; 156 return MARK_COMPACTOR;
153 } 157 }
(...skipping 210 matching lines...)
364 case NEW_SPACE: 368 case NEW_SPACE:
365 return new_space_.Available() >= requested_size; 369 return new_space_.Available() >= requested_size;
366 case OLD_POINTER_SPACE: 370 case OLD_POINTER_SPACE:
367 return old_pointer_space_->Available() >= requested_size; 371 return old_pointer_space_->Available() >= requested_size;
368 case OLD_DATA_SPACE: 372 case OLD_DATA_SPACE:
369 return old_data_space_->Available() >= requested_size; 373 return old_data_space_->Available() >= requested_size;
370 case CODE_SPACE: 374 case CODE_SPACE:
371 return code_space_->Available() >= requested_size; 375 return code_space_->Available() >= requested_size;
372 case MAP_SPACE: 376 case MAP_SPACE:
373 return map_space_->Available() >= requested_size; 377 return map_space_->Available() >= requested_size;
378 case CELL_SPACE:
379 return cell_space_->Available() >= requested_size;
374 case LO_SPACE: 380 case LO_SPACE:
375 return lo_space_->Available() >= requested_size; 381 return lo_space_->Available() >= requested_size;
376 } 382 }
377 return false; 383 return false;
378 } 384 }
379 385
380 386
381 void Heap::PerformScavenge() { 387 void Heap::PerformScavenge() {
382 GCTracer tracer; 388 GCTracer tracer;
383 PerformGarbageCollection(NEW_SPACE, SCAVENGER, &tracer); 389 PerformGarbageCollection(NEW_SPACE, SCAVENGER, &tracer);
(...skipping 275 matching lines...)
659 Address new_space_front = new_space_.ToSpaceLow(); 665 Address new_space_front = new_space_.ToSpaceLow();
660 promotion_queue.Initialize(new_space_.ToSpaceHigh()); 666 promotion_queue.Initialize(new_space_.ToSpaceHigh());
661 667
662 ScavengeVisitor scavenge_visitor; 668 ScavengeVisitor scavenge_visitor;
663 // Copy roots. 669 // Copy roots.
664 IterateRoots(&scavenge_visitor); 670 IterateRoots(&scavenge_visitor);
665 671
666 // Copy objects reachable from weak pointers. 672 // Copy objects reachable from weak pointers.
667 GlobalHandles::IterateWeakRoots(&scavenge_visitor); 673 GlobalHandles::IterateWeakRoots(&scavenge_visitor);
668 674
669 #if V8_HOST_ARCH_64_BIT 675 #ifdef V8_HOST_ARCH_64_BIT
670 // TODO(X64): Make this go away again. We currently disable RSets for 676 // TODO(X64): Make this go away again. We currently disable RSets for
671 // 64-bit-mode. 677 // 64-bit-mode.
672 HeapObjectIterator old_pointer_iterator(old_pointer_space_); 678 HeapObjectIterator old_pointer_iterator(old_pointer_space_);
673 while (old_pointer_iterator.has_next()) { 679 while (old_pointer_iterator.has_next()) {
674 HeapObject* heap_object = old_pointer_iterator.next(); 680 HeapObject* heap_object = old_pointer_iterator.next();
675 heap_object->Iterate(&scavenge_visitor); 681 heap_object->Iterate(&scavenge_visitor);
676 } 682 }
677 HeapObjectIterator map_iterator(map_space_); 683 HeapObjectIterator map_iterator(map_space_);
678 while (map_iterator.has_next()) { 684 while (map_iterator.has_next()) {
679 HeapObject* heap_object = map_iterator.next(); 685 HeapObject* heap_object = map_iterator.next();
680 heap_object->Iterate(&scavenge_visitor); 686 heap_object->Iterate(&scavenge_visitor);
681 } 687 }
682 LargeObjectIterator lo_iterator(lo_space_); 688 LargeObjectIterator lo_iterator(lo_space_);
683 while (lo_iterator.has_next()) { 689 while (lo_iterator.has_next()) {
684 HeapObject* heap_object = lo_iterator.next(); 690 HeapObject* heap_object = lo_iterator.next();
685 if (heap_object->IsFixedArray()) { 691 if (heap_object->IsFixedArray()) {
686 heap_object->Iterate(&scavenge_visitor); 692 heap_object->Iterate(&scavenge_visitor);
687 } 693 }
688 } 694 }
689 #else // V8_HOST_ARCH_64_BIT 695 #else // !defined(V8_HOST_ARCH_64_BIT)
690 // Copy objects reachable from the old generation. By definition, 696 // Copy objects reachable from the old generation. By definition,
691 // there are no intergenerational pointers in code or data spaces. 697 // there are no intergenerational pointers in code or data spaces.
692 IterateRSet(old_pointer_space_, &ScavengePointer); 698 IterateRSet(old_pointer_space_, &ScavengePointer);
699 IterateRSet(cell_space_, &ScavengePointer);
693 IterateRSet(map_space_, &ScavengePointer); 700 IterateRSet(map_space_, &ScavengePointer);
694 lo_space_->IterateRSet(&ScavengePointer); 701 lo_space_->IterateRSet(&ScavengePointer);
695 #endif // V8_HOST_ARCH_64_BIT 702 #endif
696 703
697 do { 704 do {
698 ASSERT(new_space_front <= new_space_.top()); 705 ASSERT(new_space_front <= new_space_.top());
699 706
700 // The addresses new_space_front and new_space_.top() define a 707 // The addresses new_space_front and new_space_.top() define a
701 // queue of unprocessed copied objects. Process them until the 708 // queue of unprocessed copied objects. Process them until the
702 // queue is empty. 709 // queue is empty.
703 while (new_space_front < new_space_.top()) { 710 while (new_space_front < new_space_.top()) {
704 HeapObject* object = HeapObject::FromAddress(new_space_front); 711 HeapObject* object = HeapObject::FromAddress(new_space_front);
705 object->Iterate(&scavenge_visitor); 712 object->Iterate(&scavenge_visitor);
(...skipping 127 matching lines...)
833 840
834 void Heap::RebuildRSets() { 841 void Heap::RebuildRSets() {
835 // By definition, we do not care about remembered set bits in code or data 842 // By definition, we do not care about remembered set bits in code or data
836 // spaces. 843 // spaces.
837 map_space_->ClearRSet(); 844 map_space_->ClearRSet();
838 RebuildRSets(map_space_); 845 RebuildRSets(map_space_);
839 846
840 old_pointer_space_->ClearRSet(); 847 old_pointer_space_->ClearRSet();
841 RebuildRSets(old_pointer_space_); 848 RebuildRSets(old_pointer_space_);
842 849
850 cell_space_->ClearRSet();
851 RebuildRSets(cell_space_);
852
843 Heap::lo_space_->ClearRSet(); 853 Heap::lo_space_->ClearRSet();
844 RebuildRSets(lo_space_); 854 RebuildRSets(lo_space_);
845 } 855 }
846 856
847 857
848 void Heap::RebuildRSets(PagedSpace* space) { 858 void Heap::RebuildRSets(PagedSpace* space) {
849 HeapObjectIterator it(space); 859 HeapObjectIterator it(space);
850 while (it.has_next()) Heap::UpdateRSet(it.next()); 860 while (it.has_next()) Heap::UpdateRSet(it.next());
851 } 861 }
852 862
(...skipping 148 matching lines...)
1001 } 1011 }
1002 1012
1003 1013
1004 void Heap::ScavengePointer(HeapObject** p) { 1014 void Heap::ScavengePointer(HeapObject** p) {
1005 ScavengeObject(p, *p); 1015 ScavengeObject(p, *p);
1006 } 1016 }
1007 1017
1008 1018
1009 Object* Heap::AllocatePartialMap(InstanceType instance_type, 1019 Object* Heap::AllocatePartialMap(InstanceType instance_type,
1010 int instance_size) { 1020 int instance_size) {
1011 Object* result = AllocateRawMap(Map::kSize); 1021 Object* result = AllocateRawMap();
1012 if (result->IsFailure()) return result; 1022 if (result->IsFailure()) return result;
1013 1023
1014 // Map::cast cannot be used due to uninitialized map field. 1024 // Map::cast cannot be used due to uninitialized map field.
1015 reinterpret_cast<Map*>(result)->set_map(meta_map()); 1025 reinterpret_cast<Map*>(result)->set_map(meta_map());
1016 reinterpret_cast<Map*>(result)->set_instance_type(instance_type); 1026 reinterpret_cast<Map*>(result)->set_instance_type(instance_type);
1017 reinterpret_cast<Map*>(result)->set_instance_size(instance_size); 1027 reinterpret_cast<Map*>(result)->set_instance_size(instance_size);
1018 reinterpret_cast<Map*>(result)->set_inobject_properties(0); 1028 reinterpret_cast<Map*>(result)->set_inobject_properties(0);
1019 reinterpret_cast<Map*>(result)->set_unused_property_fields(0); 1029 reinterpret_cast<Map*>(result)->set_unused_property_fields(0);
1020 return result; 1030 return result;
1021 } 1031 }
1022 1032
1023 1033
1024 Object* Heap::AllocateMap(InstanceType instance_type, int instance_size) { 1034 Object* Heap::AllocateMap(InstanceType instance_type, int instance_size) {
1025 Object* result = AllocateRawMap(Map::kSize); 1035 Object* result = AllocateRawMap();
1026 if (result->IsFailure()) return result; 1036 if (result->IsFailure()) return result;
1027 1037
1028 Map* map = reinterpret_cast<Map*>(result); 1038 Map* map = reinterpret_cast<Map*>(result);
1029 map->set_map(meta_map()); 1039 map->set_map(meta_map());
1030 map->set_instance_type(instance_type); 1040 map->set_instance_type(instance_type);
1031 map->set_prototype(null_value()); 1041 map->set_prototype(null_value());
1032 map->set_constructor(null_value()); 1042 map->set_constructor(null_value());
1033 map->set_instance_size(instance_size); 1043 map->set_instance_size(instance_size);
1034 map->set_inobject_properties(0); 1044 map->set_inobject_properties(0);
1035 map->set_instance_descriptors(empty_descriptor_array()); 1045 map->set_instance_descriptors(empty_descriptor_array());
1036 map->set_code_cache(empty_fixed_array()); 1046 map->set_code_cache(empty_fixed_array());
1037 map->set_unused_property_fields(0); 1047 map->set_unused_property_fields(0);
1038 map->set_bit_field(0); 1048 map->set_bit_field(0);
1039 map->set_bit_field2(0); 1049 map->set_bit_field2(0);
1040 return map; 1050 return map;
1041 } 1051 }
1042 1052
1043 1053
1044 bool Heap::CreateInitialMaps() { 1054 bool Heap::CreateInitialMaps() {
1045 Object* obj = AllocatePartialMap(MAP_TYPE, Map::kSize); 1055 Object* obj = AllocatePartialMap(MAP_TYPE, Map::kSize);
1046 if (obj->IsFailure()) return false; 1056 if (obj->IsFailure()) return false;
1047
1048 // Map::cast cannot be used due to uninitialized map field. 1057 // Map::cast cannot be used due to uninitialized map field.
1049 meta_map_ = reinterpret_cast<Map*>(obj); 1058 meta_map_ = reinterpret_cast<Map*>(obj);
1050 meta_map()->set_map(meta_map()); 1059 meta_map()->set_map(meta_map());
1051 1060
1052 obj = AllocatePartialMap(FIXED_ARRAY_TYPE, FixedArray::kHeaderSize); 1061 obj = AllocatePartialMap(FIXED_ARRAY_TYPE, FixedArray::kHeaderSize);
1053 if (obj->IsFailure()) return false; 1062 if (obj->IsFailure()) return false;
1054 fixed_array_map_ = Map::cast(obj); 1063 fixed_array_map_ = Map::cast(obj);
1055 1064
1056 obj = AllocatePartialMap(ODDBALL_TYPE, Oddball::kSize); 1065 obj = AllocatePartialMap(ODDBALL_TYPE, Oddball::kSize);
1057 if (obj->IsFailure()) return false; 1066 if (obj->IsFailure()) return false;
1058 oddball_map_ = Map::cast(obj); 1067 oddball_map_ = Map::cast(obj);
1059 1068
1060 obj = AllocatePartialMap(JS_GLOBAL_PROPERTY_CELL_TYPE,
1061 JSGlobalPropertyCell::kSize);
1062 if (obj->IsFailure()) return false;
1063 global_property_cell_map_ = Map::cast(obj);
1064
1065 // Allocate the empty array 1069 // Allocate the empty array
1066 obj = AllocateEmptyFixedArray(); 1070 obj = AllocateEmptyFixedArray();
1067 if (obj->IsFailure()) return false; 1071 if (obj->IsFailure()) return false;
1068 empty_fixed_array_ = FixedArray::cast(obj); 1072 empty_fixed_array_ = FixedArray::cast(obj);
1069 1073
1070 obj = Allocate(oddball_map(), OLD_DATA_SPACE); 1074 obj = Allocate(oddball_map(), OLD_DATA_SPACE);
1071 if (obj->IsFailure()) return false; 1075 if (obj->IsFailure()) return false;
1072 null_value_ = obj; 1076 null_value_ = obj;
1073 1077
1074 // Allocate the empty descriptor array. AllocateMap can now be used. 1078 // Allocate the empty descriptor array.
1075 obj = AllocateEmptyFixedArray(); 1079 obj = AllocateEmptyFixedArray();
1076 if (obj->IsFailure()) return false; 1080 if (obj->IsFailure()) return false;
1077 // There is a check against empty_descriptor_array() in cast(). 1081 empty_descriptor_array_ = DescriptorArray::cast(obj);
1078 empty_descriptor_array_ = reinterpret_cast<DescriptorArray*>(obj); 1082 // AllocateMap can now be used.
1079 1083
1080 // Fix the instance_descriptors for the existing maps. 1084 // Fix the instance_descriptors for the existing maps.
1081 meta_map()->set_instance_descriptors(empty_descriptor_array()); 1085 meta_map()->set_instance_descriptors(empty_descriptor_array());
1082 meta_map()->set_code_cache(empty_fixed_array()); 1086 meta_map()->set_code_cache(empty_fixed_array());
1083 1087
1084 fixed_array_map()->set_instance_descriptors(empty_descriptor_array()); 1088 fixed_array_map()->set_instance_descriptors(empty_descriptor_array());
1085 fixed_array_map()->set_code_cache(empty_fixed_array()); 1089 fixed_array_map()->set_code_cache(empty_fixed_array());
1086 1090
1087 oddball_map()->set_instance_descriptors(empty_descriptor_array()); 1091 oddball_map()->set_instance_descriptors(empty_descriptor_array());
1088 oddball_map()->set_code_cache(empty_fixed_array()); 1092 oddball_map()->set_code_cache(empty_fixed_array());
1089 1093
1090 global_property_cell_map()->set_instance_descriptors(
1091 empty_descriptor_array());
1092 global_property_cell_map()->set_code_cache(empty_fixed_array());
1093
1094 // Fix prototype object for existing maps. 1094 // Fix prototype object for existing maps.
1095 meta_map()->set_prototype(null_value()); 1095 meta_map()->set_prototype(null_value());
1096 meta_map()->set_constructor(null_value()); 1096 meta_map()->set_constructor(null_value());
1097 1097
1098 fixed_array_map()->set_prototype(null_value()); 1098 fixed_array_map()->set_prototype(null_value());
1099 fixed_array_map()->set_constructor(null_value()); 1099 fixed_array_map()->set_constructor(null_value());
1100
1100 oddball_map()->set_prototype(null_value()); 1101 oddball_map()->set_prototype(null_value());
1101 oddball_map()->set_constructor(null_value()); 1102 oddball_map()->set_constructor(null_value());
1102 1103
1103 global_property_cell_map()->set_prototype(null_value());
1104 global_property_cell_map()->set_constructor(null_value());
1105
1106 obj = AllocateMap(HEAP_NUMBER_TYPE, HeapNumber::kSize); 1104 obj = AllocateMap(HEAP_NUMBER_TYPE, HeapNumber::kSize);
1107 if (obj->IsFailure()) return false; 1105 if (obj->IsFailure()) return false;
1108 heap_number_map_ = Map::cast(obj); 1106 heap_number_map_ = Map::cast(obj);
1109 1107
1110 obj = AllocateMap(PROXY_TYPE, Proxy::kSize); 1108 obj = AllocateMap(PROXY_TYPE, Proxy::kSize);
1111 if (obj->IsFailure()) return false; 1109 if (obj->IsFailure()) return false;
1112 proxy_map_ = Map::cast(obj); 1110 proxy_map_ = Map::cast(obj);
1113 1111
1114 #define ALLOCATE_STRING_MAP(type, size, name) \ 1112 #define ALLOCATE_STRING_MAP(type, size, name) \
1115 obj = AllocateMap(type, size); \ 1113 obj = AllocateMap(type, size); \
(...skipping 33 matching lines...)
1149 undetectable_long_ascii_string_map_->set_is_undetectable(); 1147 undetectable_long_ascii_string_map_->set_is_undetectable();
1150 1148
1151 obj = AllocateMap(BYTE_ARRAY_TYPE, Array::kAlignedSize); 1149 obj = AllocateMap(BYTE_ARRAY_TYPE, Array::kAlignedSize);
1152 if (obj->IsFailure()) return false; 1150 if (obj->IsFailure()) return false;
1153 byte_array_map_ = Map::cast(obj); 1151 byte_array_map_ = Map::cast(obj);
1154 1152
1155 obj = AllocateMap(CODE_TYPE, Code::kHeaderSize); 1153 obj = AllocateMap(CODE_TYPE, Code::kHeaderSize);
1156 if (obj->IsFailure()) return false; 1154 if (obj->IsFailure()) return false;
1157 code_map_ = Map::cast(obj); 1155 code_map_ = Map::cast(obj);
1158 1156
1157 obj = AllocateMap(JS_GLOBAL_PROPERTY_CELL_TYPE,
1158 JSGlobalPropertyCell::kSize);
1159 if (obj->IsFailure()) return false;
1160 global_property_cell_map_ = Map::cast(obj);
1161
1159 obj = AllocateMap(FILLER_TYPE, kPointerSize); 1162 obj = AllocateMap(FILLER_TYPE, kPointerSize);
1160 if (obj->IsFailure()) return false; 1163 if (obj->IsFailure()) return false;
1161 one_word_filler_map_ = Map::cast(obj); 1164 one_pointer_filler_map_ = Map::cast(obj);
1162 1165
1163 obj = AllocateMap(FILLER_TYPE, 2 * kPointerSize); 1166 obj = AllocateMap(FILLER_TYPE, 2 * kPointerSize);
1164 if (obj->IsFailure()) return false; 1167 if (obj->IsFailure()) return false;
1165 two_word_filler_map_ = Map::cast(obj); 1168 two_pointer_filler_map_ = Map::cast(obj);
1166 1169
1167 #define ALLOCATE_STRUCT_MAP(NAME, Name, name) \ 1170 #define ALLOCATE_STRUCT_MAP(NAME, Name, name) \
1168 obj = AllocateMap(NAME##_TYPE, Name::kSize); \ 1171 obj = AllocateMap(NAME##_TYPE, Name::kSize); \
1169 if (obj->IsFailure()) return false; \ 1172 if (obj->IsFailure()) return false; \
1170 name##_map_ = Map::cast(obj); 1173 name##_map_ = Map::cast(obj);
1171 STRUCT_LIST(ALLOCATE_STRUCT_MAP) 1174 STRUCT_LIST(ALLOCATE_STRUCT_MAP)
1172 #undef ALLOCATE_STRUCT_MAP 1175 #undef ALLOCATE_STRUCT_MAP
1173 1176
1174 obj = AllocateMap(FIXED_ARRAY_TYPE, HeapObject::kHeaderSize); 1177 obj = AllocateMap(FIXED_ARRAY_TYPE, HeapObject::kHeaderSize);
1175 if (obj->IsFailure()) return false; 1178 if (obj->IsFailure()) return false;
(...skipping 47 matching lines...)
1223 ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC); 1226 ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
1224 Object* result = new_space_.AllocateRaw(HeapNumber::kSize); 1227 Object* result = new_space_.AllocateRaw(HeapNumber::kSize);
1225 if (result->IsFailure()) return result; 1228 if (result->IsFailure()) return result;
1226 HeapObject::cast(result)->set_map(heap_number_map()); 1229 HeapObject::cast(result)->set_map(heap_number_map());
1227 HeapNumber::cast(result)->set_value(value); 1230 HeapNumber::cast(result)->set_value(value);
1228 return result; 1231 return result;
1229 } 1232 }
1230 1233
1231 1234
1232 Object* Heap::AllocateJSGlobalPropertyCell(Object* value) { 1235 Object* Heap::AllocateJSGlobalPropertyCell(Object* value) {
1233 Object* result = AllocateRaw(JSGlobalPropertyCell::kSize, 1236 Object* result = AllocateRawCell();
1234 OLD_POINTER_SPACE,
1235 OLD_POINTER_SPACE);
1236 if (result->IsFailure()) return result; 1237 if (result->IsFailure()) return result;
1237 HeapObject::cast(result)->set_map(global_property_cell_map()); 1238 HeapObject::cast(result)->set_map(global_property_cell_map());
1238 JSGlobalPropertyCell::cast(result)->set_value(value); 1239 JSGlobalPropertyCell::cast(result)->set_value(value);
1239 return result; 1240 return result;
1240 } 1241 }
1241 1242
1242 1243
1243 Object* Heap::CreateOddball(Map* map, 1244 Object* Heap::CreateOddball(Map* map,
1244 const char* to_string, 1245 const char* to_string,
1245 Object* to_number) { 1246 Object* to_number) {
(...skipping 556 matching lines...)
1802 reinterpret_cast<Array*>(result)->set_map(byte_array_map()); 1803 reinterpret_cast<Array*>(result)->set_map(byte_array_map());
1803 reinterpret_cast<Array*>(result)->set_length(length); 1804 reinterpret_cast<Array*>(result)->set_length(length);
1804 return result; 1805 return result;
1805 } 1806 }
1806 1807
1807 1808
1808 void Heap::CreateFillerObjectAt(Address addr, int size) { 1809 void Heap::CreateFillerObjectAt(Address addr, int size) {
1809 if (size == 0) return; 1810 if (size == 0) return;
1810 HeapObject* filler = HeapObject::FromAddress(addr); 1811 HeapObject* filler = HeapObject::FromAddress(addr);
1811 if (size == kPointerSize) { 1812 if (size == kPointerSize) {
1812 filler->set_map(Heap::one_word_filler_map()); 1813 filler->set_map(Heap::one_pointer_filler_map());
1813 } else { 1814 } else {
1814 filler->set_map(Heap::byte_array_map()); 1815 filler->set_map(Heap::byte_array_map());
1815 ByteArray::cast(filler)->set_length(ByteArray::LengthFor(size)); 1816 ByteArray::cast(filler)->set_length(ByteArray::LengthFor(size));
1816 } 1817 }
1817 } 1818 }
1818 1819
1819 1820
1820 Object* Heap::CreateCode(const CodeDesc& desc, 1821 Object* Heap::CreateCode(const CodeDesc& desc,
1821 ZoneScopeInfo* sinfo, 1822 ZoneScopeInfo* sinfo,
1822 Code::Flags flags, 1823 Code::Flags flags,
(...skipping 855 matching lines...)
2678 PrintF("To space : "); 2679 PrintF("To space : ");
2679 new_space_.ReportStatistics(); 2680 new_space_.ReportStatistics();
2680 PrintF("Old pointer space : "); 2681 PrintF("Old pointer space : ");
2681 old_pointer_space_->ReportStatistics(); 2682 old_pointer_space_->ReportStatistics();
2682 PrintF("Old data space : "); 2683 PrintF("Old data space : ");
2683 old_data_space_->ReportStatistics(); 2684 old_data_space_->ReportStatistics();
2684 PrintF("Code space : "); 2685 PrintF("Code space : ");
2685 code_space_->ReportStatistics(); 2686 code_space_->ReportStatistics();
2686 PrintF("Map space : "); 2687 PrintF("Map space : ");
2687 map_space_->ReportStatistics(); 2688 map_space_->ReportStatistics();
2689 PrintF("Cell space : ");
2690 cell_space_->ReportStatistics();
2688 PrintF("Large object space : "); 2691 PrintF("Large object space : ");
2689 lo_space_->ReportStatistics(); 2692 lo_space_->ReportStatistics();
2690 PrintF(">>>>>> ========================================= >>>>>>\n"); 2693 PrintF(">>>>>> ========================================= >>>>>>\n");
2691 } 2694 }
2692 2695
2693 #endif // DEBUG 2696 #endif // DEBUG
2694 2697
2695 bool Heap::Contains(HeapObject* value) { 2698 bool Heap::Contains(HeapObject* value) {
2696 return Contains(value->address()); 2699 return Contains(value->address());
2697 } 2700 }
2698 2701
2699 2702
2700 bool Heap::Contains(Address addr) { 2703 bool Heap::Contains(Address addr) {
2701 if (OS::IsOutsideAllocatedSpace(addr)) return false; 2704 if (OS::IsOutsideAllocatedSpace(addr)) return false;
2702 return HasBeenSetup() && 2705 return HasBeenSetup() &&
2703 (new_space_.ToSpaceContains(addr) || 2706 (new_space_.ToSpaceContains(addr) ||
2704 old_pointer_space_->Contains(addr) || 2707 old_pointer_space_->Contains(addr) ||
2705 old_data_space_->Contains(addr) || 2708 old_data_space_->Contains(addr) ||
2706 code_space_->Contains(addr) || 2709 code_space_->Contains(addr) ||
2707 map_space_->Contains(addr) || 2710 map_space_->Contains(addr) ||
2711 cell_space_->Contains(addr) ||
2708 lo_space_->SlowContains(addr)); 2712 lo_space_->SlowContains(addr));
2709 } 2713 }
2710 2714
2711 2715
2712 bool Heap::InSpace(HeapObject* value, AllocationSpace space) { 2716 bool Heap::InSpace(HeapObject* value, AllocationSpace space) {
2713 return InSpace(value->address(), space); 2717 return InSpace(value->address(), space);
2714 } 2718 }
2715 2719
2716 2720
2717 bool Heap::InSpace(Address addr, AllocationSpace space) { 2721 bool Heap::InSpace(Address addr, AllocationSpace space) {
2718 if (OS::IsOutsideAllocatedSpace(addr)) return false; 2722 if (OS::IsOutsideAllocatedSpace(addr)) return false;
2719 if (!HasBeenSetup()) return false; 2723 if (!HasBeenSetup()) return false;
2720 2724
2721 switch (space) { 2725 switch (space) {
2722 case NEW_SPACE: 2726 case NEW_SPACE:
2723 return new_space_.ToSpaceContains(addr); 2727 return new_space_.ToSpaceContains(addr);
2724 case OLD_POINTER_SPACE: 2728 case OLD_POINTER_SPACE:
2725 return old_pointer_space_->Contains(addr); 2729 return old_pointer_space_->Contains(addr);
2726 case OLD_DATA_SPACE: 2730 case OLD_DATA_SPACE:
2727 return old_data_space_->Contains(addr); 2731 return old_data_space_->Contains(addr);
2728 case CODE_SPACE: 2732 case CODE_SPACE:
2729 return code_space_->Contains(addr); 2733 return code_space_->Contains(addr);
2730 case MAP_SPACE: 2734 case MAP_SPACE:
2731 return map_space_->Contains(addr); 2735 return map_space_->Contains(addr);
2736 case CELL_SPACE:
2737 return cell_space_->Contains(addr);
2732 case LO_SPACE: 2738 case LO_SPACE:
2733 return lo_space_->SlowContains(addr); 2739 return lo_space_->SlowContains(addr);
2734 } 2740 }
2735 2741
2736 return false; 2742 return false;
2737 } 2743 }
2738 2744
2739 2745
2740 #ifdef DEBUG 2746 #ifdef DEBUG
2741 void Heap::Verify() { 2747 void Heap::Verify() {
(...skipping 91 matching lines...)
2833 object_address += kPointerSize * kBitsPerInt; 2839 object_address += kPointerSize * kBitsPerInt;
2834 } 2840 }
2835 rset_address += kIntSize; 2841 rset_address += kIntSize;
2836 } 2842 }
2837 return set_bits_count; 2843 return set_bits_count;
2838 } 2844 }
2839 2845
2840 2846
2841 void Heap::IterateRSet(PagedSpace* space, ObjectSlotCallback copy_object_func) { 2847 void Heap::IterateRSet(PagedSpace* space, ObjectSlotCallback copy_object_func) {
2842 ASSERT(Page::is_rset_in_use()); 2848 ASSERT(Page::is_rset_in_use());
2843 ASSERT(space == old_pointer_space_ || space == map_space_); 2849 ASSERT(space == old_pointer_space_ ||
2850 space == cell_space_ ||
2851 space == map_space_);
2844 2852
2845 static void* paged_rset_histogram = StatsTable::CreateHistogram( 2853 static void* paged_rset_histogram = StatsTable::CreateHistogram(
2846 "V8.RSetPaged", 2854 "V8.RSetPaged",
2847 0, 2855 0,
2848 Page::kObjectAreaSize / kPointerSize, 2856 Page::kObjectAreaSize / kPointerSize,
2849 30); 2857 30);
2850 2858
2851 PageIterator it(space, PageIterator::PAGES_IN_USE); 2859 PageIterator it(space, PageIterator::PAGES_IN_USE);
2852 while (it.has_next()) { 2860 while (it.has_next()) {
2853 Page* page = it.next(); 2861 Page* page = it.next();
(...skipping 103 matching lines...)
2957 bool Heap::ConfigureHeapDefault() { 2965 bool Heap::ConfigureHeapDefault() {
2958 return ConfigureHeap(FLAG_new_space_size, FLAG_old_space_size); 2966 return ConfigureHeap(FLAG_new_space_size, FLAG_old_space_size);
2959 } 2967 }
2960 2968
2961 2969
2962 int Heap::PromotedSpaceSize() { 2970 int Heap::PromotedSpaceSize() {
2963 return old_pointer_space_->Size() 2971 return old_pointer_space_->Size()
2964 + old_data_space_->Size() 2972 + old_data_space_->Size()
2965 + code_space_->Size() 2973 + code_space_->Size()
2966 + map_space_->Size() 2974 + map_space_->Size()
2975 + cell_space_->Size()
2967 + lo_space_->Size(); 2976 + lo_space_->Size();
2968 } 2977 }
2969 2978
2970 2979
2971 int Heap::PromotedExternalMemorySize() { 2980 int Heap::PromotedExternalMemorySize() {
2972 if (amount_of_external_allocated_memory_ 2981 if (amount_of_external_allocated_memory_
2973 <= amount_of_external_allocated_memory_at_last_global_gc_) return 0; 2982 <= amount_of_external_allocated_memory_at_last_global_gc_) return 0;
2974 return amount_of_external_allocated_memory_ 2983 return amount_of_external_allocated_memory_
2975 - amount_of_external_allocated_memory_at_last_global_gc_; 2984 - amount_of_external_allocated_memory_at_last_global_gc_;
2976 } 2985 }
(...skipping 57 matching lines...)
3034 if (code_space_ == NULL) return false; 3043 if (code_space_ == NULL) return false;
3035 if (!code_space_->Setup(code_space_start, code_space_size)) return false; 3044 if (!code_space_->Setup(code_space_start, code_space_size)) return false;
3036 3045
3037 // Initialize map space. 3046 // Initialize map space.
3038 map_space_ = new MapSpace(kMaxMapSpaceSize, MAP_SPACE); 3047 map_space_ = new MapSpace(kMaxMapSpaceSize, MAP_SPACE);
3039 if (map_space_ == NULL) return false; 3048 if (map_space_ == NULL) return false;
3040 // Setting up a paged space without giving it a virtual memory range big 3049 // Setting up a paged space without giving it a virtual memory range big
3041 // enough to hold at least a page will cause it to allocate. 3050 // enough to hold at least a page will cause it to allocate.
3042 if (!map_space_->Setup(NULL, 0)) return false; 3051 if (!map_space_->Setup(NULL, 0)) return false;
3043 3052
3053 // Initialize global property cell space.
3054 cell_space_ = new CellSpace(old_generation_size_, CELL_SPACE);
3055 if (cell_space_ == NULL) return false;
3056 // Setting up a paged space without giving it a virtual memory range big
3057 // enough to hold at least a page will cause it to allocate.
3058 if (!cell_space_->Setup(NULL, 0)) return false;
3059
3044 // The large object code space may contain code or data. We set the memory 3060 // The large object code space may contain code or data. We set the memory
3045 // to be non-executable here for safety, but this means we need to enable it 3061 // to be non-executable here for safety, but this means we need to enable it
3046 // explicitly when allocating large code objects. 3062 // explicitly when allocating large code objects.
3047 lo_space_ = new LargeObjectSpace(LO_SPACE); 3063 lo_space_ = new LargeObjectSpace(LO_SPACE);
3048 if (lo_space_ == NULL) return false; 3064 if (lo_space_ == NULL) return false;
3049 if (!lo_space_->Setup()) return false; 3065 if (!lo_space_->Setup()) return false;
3050 3066
3051 if (create_heap_objects) { 3067 if (create_heap_objects) {
3052 // Create initial maps. 3068 // Create initial maps.
3053 if (!CreateInitialMaps()) return false; 3069 if (!CreateInitialMaps()) return false;
(...skipping 32 matching lines...)
3086 delete code_space_; 3102 delete code_space_;
3087 code_space_ = NULL; 3103 code_space_ = NULL;
3088 } 3104 }
3089 3105
3090 if (map_space_ != NULL) { 3106 if (map_space_ != NULL) {
3091 map_space_->TearDown(); 3107 map_space_->TearDown();
3092 delete map_space_; 3108 delete map_space_;
3093 map_space_ = NULL; 3109 map_space_ = NULL;
3094 } 3110 }
3095 3111
3112 if (cell_space_ != NULL) {
3113 cell_space_->TearDown();
3114 delete cell_space_;
3115 cell_space_ = NULL;
3116 }
3117
3096 if (lo_space_ != NULL) { 3118 if (lo_space_ != NULL) {
3097 lo_space_->TearDown(); 3119 lo_space_->TearDown();
3098 delete lo_space_; 3120 delete lo_space_;
3099 lo_space_ = NULL; 3121 lo_space_ = NULL;
3100 } 3122 }
3101 3123
3102 MemoryAllocator::TearDown(); 3124 MemoryAllocator::TearDown();
3103 } 3125 }
3104 3126
3105 3127
3106 void Heap::Shrink() { 3128 void Heap::Shrink() {
3107 // Try to shrink map, old, and code spaces. 3129 // Try to shrink all paged spaces.
3108 map_space_->Shrink(); 3130 PagedSpaces spaces;
3109 old_pointer_space_->Shrink(); 3131 while (PagedSpace* space = spaces.next()) space->Shrink();
3110 old_data_space_->Shrink();
3111 code_space_->Shrink();
3112 } 3132 }
3113 3133
3114 3134
3115 #ifdef ENABLE_HEAP_PROTECTION 3135 #ifdef ENABLE_HEAP_PROTECTION
3116 3136
3117 void Heap::Protect() { 3137 void Heap::Protect() {
3118 if (HasBeenSetup()) { 3138 if (HasBeenSetup()) {
3119 new_space_.Protect(); 3139 AllSpaces spaces;
3120 map_space_->Protect(); 3140 while (Space* space = spaces.next()) space->Protect();
3121 old_pointer_space_->Protect();
3122 old_data_space_->Protect();
3123 code_space_->Protect();
3124 lo_space_->Protect();
3125 } 3141 }
3126 } 3142 }
3127 3143
3128 3144
3129 void Heap::Unprotect() { 3145 void Heap::Unprotect() {
3130 if (HasBeenSetup()) { 3146 if (HasBeenSetup()) {
3131 new_space_.Unprotect(); 3147 AllSpaces spaces;
3132 map_space_->Unprotect(); 3148 while (Space* space = spaces.next()) space->Unprotect();
3133 old_pointer_space_->Unprotect();
3134 old_data_space_->Unprotect();
3135 code_space_->Unprotect();
3136 lo_space_->Unprotect();
3137 } 3149 }
3138 } 3150 }
3139 3151
3140 #endif 3152 #endif
3141 3153
3142 3154
3143 #ifdef DEBUG 3155 #ifdef DEBUG
3144 3156
3145 class PrintHandleVisitor: public ObjectVisitor { 3157 class PrintHandleVisitor: public ObjectVisitor {
3146 public: 3158 public:
(...skipping 17 matching lines...)
3164 case NEW_SPACE: 3176 case NEW_SPACE:
3165 return Heap::new_space(); 3177 return Heap::new_space();
3166 case OLD_POINTER_SPACE: 3178 case OLD_POINTER_SPACE:
3167 return Heap::old_pointer_space(); 3179 return Heap::old_pointer_space();
3168 case OLD_DATA_SPACE: 3180 case OLD_DATA_SPACE:
3169 return Heap::old_data_space(); 3181 return Heap::old_data_space();
3170 case CODE_SPACE: 3182 case CODE_SPACE:
3171 return Heap::code_space(); 3183 return Heap::code_space();
3172 case MAP_SPACE: 3184 case MAP_SPACE:
3173 return Heap::map_space(); 3185 return Heap::map_space();
3186 case CELL_SPACE:
3187 return Heap::cell_space();
3174 case LO_SPACE: 3188 case LO_SPACE:
3175 return Heap::lo_space(); 3189 return Heap::lo_space();
3176 default: 3190 default:
3177 return NULL; 3191 return NULL;
3178 } 3192 }
3179 } 3193 }
3180 3194
3181 3195
3182 PagedSpace* PagedSpaces::next() { 3196 PagedSpace* PagedSpaces::next() {
3183 switch (counter_++) { 3197 switch (counter_++) {
3184 case OLD_POINTER_SPACE: 3198 case OLD_POINTER_SPACE:
3185 return Heap::old_pointer_space(); 3199 return Heap::old_pointer_space();
3186 case OLD_DATA_SPACE: 3200 case OLD_DATA_SPACE:
3187 return Heap::old_data_space(); 3201 return Heap::old_data_space();
3188 case CODE_SPACE: 3202 case CODE_SPACE:
3189 return Heap::code_space(); 3203 return Heap::code_space();
3190 case MAP_SPACE: 3204 case MAP_SPACE:
3191 return Heap::map_space(); 3205 return Heap::map_space();
3206 case CELL_SPACE:
3207 return Heap::cell_space();
3192 default: 3208 default:
3193 return NULL; 3209 return NULL;
3194 } 3210 }
3195 } 3211 }
3196 3212
3197 3213
3198 3214
3199 OldSpace* OldSpaces::next() { 3215 OldSpace* OldSpaces::next() {
3200 switch (counter_++) { 3216 switch (counter_++) {
3201 case OLD_POINTER_SPACE: 3217 case OLD_POINTER_SPACE:
(...skipping 53 matching lines...)
3255 break; 3271 break;
3256 case OLD_DATA_SPACE: 3272 case OLD_DATA_SPACE:
3257 iterator_ = new HeapObjectIterator(Heap::old_data_space()); 3273 iterator_ = new HeapObjectIterator(Heap::old_data_space());
3258 break; 3274 break;
3259 case CODE_SPACE: 3275 case CODE_SPACE:
3260 iterator_ = new HeapObjectIterator(Heap::code_space()); 3276 iterator_ = new HeapObjectIterator(Heap::code_space());
3261 break; 3277 break;
3262 case MAP_SPACE: 3278 case MAP_SPACE:
3263 iterator_ = new HeapObjectIterator(Heap::map_space()); 3279 iterator_ = new HeapObjectIterator(Heap::map_space());
3264 break; 3280 break;
3281 case CELL_SPACE:
3282 iterator_ = new HeapObjectIterator(Heap::cell_space());
3283 break;
3265 case LO_SPACE: 3284 case LO_SPACE:
3266 iterator_ = new LargeObjectIterator(Heap::lo_space()); 3285 iterator_ = new LargeObjectIterator(Heap::lo_space());
3267 break; 3286 break;
3268 } 3287 }
3269 3288
3270 // Return the newly allocated iterator; 3289 // Return the newly allocated iterator;
3271 ASSERT(iterator_ != NULL); 3290 ASSERT(iterator_ != NULL);
3272 return iterator_; 3291 return iterator_;
3273 } 3292 }
3274 3293
(...skipping 382 matching lines...)
3657 #ifdef DEBUG 3676 #ifdef DEBUG
3658 bool Heap::GarbageCollectionGreedyCheck() { 3677 bool Heap::GarbageCollectionGreedyCheck() {
3659 ASSERT(FLAG_gc_greedy); 3678 ASSERT(FLAG_gc_greedy);
3660 if (Bootstrapper::IsActive()) return true; 3679 if (Bootstrapper::IsActive()) return true;
3661 if (disallow_allocation_failure()) return true; 3680 if (disallow_allocation_failure()) return true;
3662 return CollectGarbage(0, NEW_SPACE); 3681 return CollectGarbage(0, NEW_SPACE);
3663 } 3682 }
3664 #endif 3683 #endif
3665 3684
3666 } } // namespace v8::internal 3685 } } // namespace v8::internal