| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 1042 matching lines...) |
| 1053 guard_ = false; | 1053 guard_ = false; |
| 1054 } | 1054 } |
| 1055 | 1055 |
| 1056 | 1056 |
| 1057 void PromotionQueue::RelocateQueueHead() { | 1057 void PromotionQueue::RelocateQueueHead() { |
| 1058 ASSERT(emergency_stack_ == NULL); | 1058 ASSERT(emergency_stack_ == NULL); |
| 1059 | 1059 |
| 1060 Page* p = Page::FromAllocationTop(reinterpret_cast<Address>(rear_)); | 1060 Page* p = Page::FromAllocationTop(reinterpret_cast<Address>(rear_)); |
| 1061 intptr_t* head_start = rear_; | 1061 intptr_t* head_start = rear_; |
| 1062 intptr_t* head_end = | 1062 intptr_t* head_end = |
| 1063 Min(front_, reinterpret_cast<intptr_t*>(p->body_limit())); | 1063 Min(front_, reinterpret_cast<intptr_t*>(p->area_end())); |
| 1064 | 1064 |
| 1065 int entries_count = | 1065 int entries_count = |
| 1066 static_cast<int>(head_end - head_start) / kEntrySizeInWords; | 1066 static_cast<int>(head_end - head_start) / kEntrySizeInWords; |
| 1067 | 1067 |
| 1068 emergency_stack_ = new List<Entry>(2 * entries_count); | 1068 emergency_stack_ = new List<Entry>(2 * entries_count); |
| 1069 | 1069 |
| 1070 while (head_start != head_end) { | 1070 while (head_start != head_end) { |
| 1071 int size = static_cast<int>(*(head_start++)); | 1071 int size = static_cast<int>(*(head_start++)); |
| 1072 HeapObject* obj = reinterpret_cast<HeapObject*>(*(head_start++)); | 1072 HeapObject* obj = reinterpret_cast<HeapObject*>(*(head_start++)); |
| 1073 emergency_stack_->Add(Entry(obj, size)); | 1073 emergency_stack_->Add(Entry(obj, size)); |
| (...skipping 297 matching lines...) |
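A minimal sketch (not V8 code) of the entry layout RelocateQueueHead decodes above — each queue entry is two machine words, the object size first and the HeapObject pointer second, read from rear_ toward front_. kEntrySizeInWords == 2 is inferred from the division in the hunk, and the struct name is hypothetical:

#include <stdint.h>

// Hypothetical view of one promotion-queue entry; the field order matches
// the two reads in the loop above (size word, then object word).
struct PromotionQueueEntryWords {
  intptr_t size_in_bytes;  // consumed by the first *(head_start++)
  intptr_t heap_object;    // reinterpret_cast<HeapObject*> of the second word
};

static const int kEntrySizeInWordsSketch =
    sizeof(PromotionQueueEntryWords) / sizeof(intptr_t);  // == 2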
| 1371 // The addresses new_space_front and new_space_.top() define a | 1371 // The addresses new_space_front and new_space_.top() define a |
| 1372 // queue of unprocessed copied objects. Process them until the | 1372 // queue of unprocessed copied objects. Process them until the |
| 1373 // queue is empty. | 1373 // queue is empty. |
| 1374 while (new_space_front != new_space_.top()) { | 1374 while (new_space_front != new_space_.top()) { |
| 1375 if (!NewSpacePage::IsAtEnd(new_space_front)) { | 1375 if (!NewSpacePage::IsAtEnd(new_space_front)) { |
| 1376 HeapObject* object = HeapObject::FromAddress(new_space_front); | 1376 HeapObject* object = HeapObject::FromAddress(new_space_front); |
| 1377 new_space_front += | 1377 new_space_front += |
| 1378 NewSpaceScavenger::IterateBody(object->map(), object); | 1378 NewSpaceScavenger::IterateBody(object->map(), object); |
| 1379 } else { | 1379 } else { |
| 1380 new_space_front = | 1380 new_space_front = |
| 1381 NewSpacePage::FromLimit(new_space_front)->next_page()->body(); | 1381 NewSpacePage::FromLimit(new_space_front)->next_page()->area_start(); |
| 1382 } | 1382 } |
| 1383 } | 1383 } |
| 1384 | 1384 |
| 1385 // Promote and process all the to-be-promoted objects. | 1385 // Promote and process all the to-be-promoted objects. |
| 1386 { | 1386 { |
| 1387 StoreBufferRebuildScope scope(this, | 1387 StoreBufferRebuildScope scope(this, |
| 1388 store_buffer(), | 1388 store_buffer(), |
| 1389 &ScavengeStoreBufferCallback); | 1389 &ScavengeStoreBufferCallback); |
| 1390 while (!promotion_queue()->is_empty()) { | 1390 while (!promotion_queue()->is_empty()) { |
| 1391 HeapObject* target; | 1391 HeapObject* target; |
| (...skipping 141 matching lines...) |
| 1533 } | 1533 } |
| 1534 } | 1534 } |
| 1535 } | 1535 } |
| 1536 | 1536 |
| 1537 template<ObjectContents object_contents, SizeRestriction size_restriction> | 1537 template<ObjectContents object_contents, SizeRestriction size_restriction> |
| 1538 static inline void EvacuateObject(Map* map, | 1538 static inline void EvacuateObject(Map* map, |
| 1539 HeapObject** slot, | 1539 HeapObject** slot, |
| 1540 HeapObject* object, | 1540 HeapObject* object, |
| 1541 int object_size) { | 1541 int object_size) { |
| 1542 SLOW_ASSERT((size_restriction != SMALL) || | 1542 SLOW_ASSERT((size_restriction != SMALL) || |
| 1543 (object_size <= Page::kMaxHeapObjectSize)); | 1543 (object_size <= Page::kMaxNonCodeHeapObjectSize)); |
| 1544 SLOW_ASSERT(object->Size() == object_size); | 1544 SLOW_ASSERT(object->Size() == object_size); |
| 1545 | 1545 |
| 1546 Heap* heap = map->GetHeap(); | 1546 Heap* heap = map->GetHeap(); |
| 1547 if (heap->ShouldBePromoted(object->address(), object_size)) { | 1547 if (heap->ShouldBePromoted(object->address(), object_size)) { |
| 1548 MaybeObject* maybe_result; | 1548 MaybeObject* maybe_result; |
| 1549 | 1549 |
| 1550 if ((size_restriction != SMALL) && | 1550 if ((size_restriction != SMALL) && |
| 1551 (object_size > Page::kMaxHeapObjectSize)) { | 1551 (object_size > Page::kMaxNonCodeHeapObjectSize)) { |
| 1552 maybe_result = heap->lo_space()->AllocateRaw(object_size, | 1552 maybe_result = heap->lo_space()->AllocateRaw(object_size, |
| 1553 NOT_EXECUTABLE); | 1553 NOT_EXECUTABLE); |
| 1554 } else { | 1554 } else { |
| 1555 if (object_contents == DATA_OBJECT) { | 1555 if (object_contents == DATA_OBJECT) { |
| 1556 maybe_result = heap->old_data_space()->AllocateRaw(object_size); | 1556 maybe_result = heap->old_data_space()->AllocateRaw(object_size); |
| 1557 } else { | 1557 } else { |
| 1558 maybe_result = heap->old_pointer_space()->AllocateRaw(object_size); | 1558 maybe_result = heap->old_pointer_space()->AllocateRaw(object_size); |
| 1559 } | 1559 } |
| 1560 } | 1560 } |
| 1561 | 1561 |
| (...skipping 606 matching lines...) |
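EvacuateObject above now bounds small objects by Page::kMaxNonCodeHeapObjectSize: with code pages getting an object area of their own, the old page-wide Page::kMaxHeapObjectSize limit no longer describes data and pointer pages. A condensed sketch (illustrative names, not the V8 API) of the target choice it makes for the unrestricted case:

enum Contents { DATA_OBJECT, POINTER_OBJECT };

// Where a promoted object lands: oversized objects go to large object
// space; small data objects to old data space (no pointers to scan);
// everything else to old pointer space, which the store buffer tracks.
const char* PromotionTarget(Contents contents, int size, int max_non_code) {
  if (size > max_non_code) return "lo_space";
  if (contents == DATA_OBJECT) return "old_data_space";
  return "old_pointer_space";
}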
| 2168 set_message_object_map(Map::cast(obj)); | 2168 set_message_object_map(Map::cast(obj)); |
| 2169 | 2169 |
| 2170 ASSERT(!InNewSpace(empty_fixed_array())); | 2170 ASSERT(!InNewSpace(empty_fixed_array())); |
| 2171 return true; | 2171 return true; |
| 2172 } | 2172 } |
| 2173 | 2173 |
| 2174 | 2174 |
| 2175 MaybeObject* Heap::AllocateHeapNumber(double value, PretenureFlag pretenure) { | 2175 MaybeObject* Heap::AllocateHeapNumber(double value, PretenureFlag pretenure) { |
| 2176 // Statically ensure that it is safe to allocate heap numbers in paged | 2176 // Statically ensure that it is safe to allocate heap numbers in paged |
| 2177 // spaces. | 2177 // spaces. |
| 2178 STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxHeapObjectSize); | 2178 STATIC_ASSERT(HeapNumber::kSize <= Page::kNonCodeObjectAreaSize); |
| 2179 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; | 2179 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; |
| 2180 | 2180 |
| 2181 Object* result; | 2181 Object* result; |
| 2182 { MaybeObject* maybe_result = | 2182 { MaybeObject* maybe_result = |
| 2183 AllocateRaw(HeapNumber::kSize, space, OLD_DATA_SPACE); | 2183 AllocateRaw(HeapNumber::kSize, space, OLD_DATA_SPACE); |
| 2184 if (!maybe_result->ToObject(&result)) return maybe_result; | 2184 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 2185 } | 2185 } |
| 2186 | 2186 |
| 2187 HeapObject::cast(result)->set_map_unsafe(heap_number_map()); | 2187 HeapObject::cast(result)->set_map_unsafe(heap_number_map()); |
| 2188 HeapNumber::cast(result)->set_value(value); | 2188 HeapNumber::cast(result)->set_value(value); |
| 2189 return result; | 2189 return result; |
| 2190 } | 2190 } |
| 2191 | 2191 |
| 2192 | 2192 |
| 2193 MaybeObject* Heap::AllocateHeapNumber(double value) { | 2193 MaybeObject* Heap::AllocateHeapNumber(double value) { |
| 2194 // Use general version, if we're forced to always allocate. | 2194 // Use general version, if we're forced to always allocate. |
| 2195 if (always_allocate()) return AllocateHeapNumber(value, TENURED); | 2195 if (always_allocate()) return AllocateHeapNumber(value, TENURED); |
| 2196 | 2196 |
| 2197 // This version of AllocateHeapNumber is optimized for | 2197 // This version of AllocateHeapNumber is optimized for |
| 2198 // allocation in new space. | 2198 // allocation in new space. |
| 2199 STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxHeapObjectSize); | 2199 STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxNonCodeHeapObjectSize); |
| 2200 ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC); | 2200 ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC); |
| 2201 Object* result; | 2201 Object* result; |
| 2202 { MaybeObject* maybe_result = new_space_.AllocateRaw(HeapNumber::kSize); | 2202 { MaybeObject* maybe_result = new_space_.AllocateRaw(HeapNumber::kSize); |
| 2203 if (!maybe_result->ToObject(&result)) return maybe_result; | 2203 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 2204 } | 2204 } |
| 2205 HeapObject::cast(result)->set_map_unsafe(heap_number_map()); | 2205 HeapObject::cast(result)->set_map_unsafe(heap_number_map()); |
| 2206 HeapNumber::cast(result)->set_value(value); | 2206 HeapNumber::cast(result)->set_value(value); |
| 2207 return result; | 2207 return result; |
| 2208 } | 2208 } |
| 2209 | 2209 |
| (...skipping 511 matching lines...) |
| 2721 return Smi::FromInt(int_value); | 2721 return Smi::FromInt(int_value); |
| 2722 } | 2722 } |
| 2723 | 2723 |
| 2724 // Materialize the value in the heap. | 2724 // Materialize the value in the heap. |
| 2725 return AllocateHeapNumber(value, pretenure); | 2725 return AllocateHeapNumber(value, pretenure); |
| 2726 } | 2726 } |
| 2727 | 2727 |
| 2728 | 2728 |
| 2729 MaybeObject* Heap::AllocateForeign(Address address, PretenureFlag pretenure) { | 2729 MaybeObject* Heap::AllocateForeign(Address address, PretenureFlag pretenure) { |
| 2730 // Statically ensure that it is safe to allocate foreigns in paged spaces. | 2730 // Statically ensure that it is safe to allocate foreigns in paged spaces. |
| 2731 STATIC_ASSERT(Foreign::kSize <= Page::kMaxHeapObjectSize); | 2731 STATIC_ASSERT(Foreign::kSize <= Page::kMaxNonCodeHeapObjectSize); |
| 2732 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; | 2732 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; |
| 2733 Foreign* result; | 2733 Foreign* result; |
| 2734 MaybeObject* maybe_result = Allocate(foreign_map(), space); | 2734 MaybeObject* maybe_result = Allocate(foreign_map(), space); |
| 2735 if (!maybe_result->To(&result)) return maybe_result; | 2735 if (!maybe_result->To(&result)) return maybe_result; |
| 2736 result->set_foreign_address(address); | 2736 result->set_foreign_address(address); |
| 2737 return result; | 2737 return result; |
| 2738 } | 2738 } |
| 2739 | 2739 |
| 2740 | 2740 |
| 2741 MaybeObject* Heap::AllocateSharedFunctionInfo(Object* name) { | 2741 MaybeObject* Heap::AllocateSharedFunctionInfo(Object* name) { |
| (...skipping 395 matching lines...) |
| 3137 | 3137 |
| 3138 MaybeObject* Heap::AllocateByteArray(int length, PretenureFlag pretenure) { | 3138 MaybeObject* Heap::AllocateByteArray(int length, PretenureFlag pretenure) { |
| 3139 if (length < 0 || length > ByteArray::kMaxLength) { | 3139 if (length < 0 || length > ByteArray::kMaxLength) { |
| 3140 return Failure::OutOfMemoryException(); | 3140 return Failure::OutOfMemoryException(); |
| 3141 } | 3141 } |
| 3142 if (pretenure == NOT_TENURED) { | 3142 if (pretenure == NOT_TENURED) { |
| 3143 return AllocateByteArray(length); | 3143 return AllocateByteArray(length); |
| 3144 } | 3144 } |
| 3145 int size = ByteArray::SizeFor(length); | 3145 int size = ByteArray::SizeFor(length); |
| 3146 Object* result; | 3146 Object* result; |
| 3147 { MaybeObject* maybe_result = (size <= MaxObjectSizeInPagedSpace()) | 3147 { MaybeObject* maybe_result = (size <= Page::kMaxNonCodeHeapObjectSize) |
| 3148 ? old_data_space_->AllocateRaw(size) | 3148 ? old_data_space_->AllocateRaw(size) |
| 3149 : lo_space_->AllocateRaw(size, NOT_EXECUTABLE); | 3149 : lo_space_->AllocateRaw(size, NOT_EXECUTABLE); |
| 3150 if (!maybe_result->ToObject(&result)) return maybe_result; | 3150 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 3151 } | 3151 } |
| 3152 | 3152 |
| 3153 reinterpret_cast<ByteArray*>(result)->set_map_unsafe(byte_array_map()); | 3153 reinterpret_cast<ByteArray*>(result)->set_map_unsafe(byte_array_map()); |
| 3154 reinterpret_cast<ByteArray*>(result)->set_length(length); | 3154 reinterpret_cast<ByteArray*>(result)->set_length(length); |
| 3155 return result; | 3155 return result; |
| 3156 } | 3156 } |
| 3157 | 3157 |
| 3158 | 3158 |
| 3159 MaybeObject* Heap::AllocateByteArray(int length) { | 3159 MaybeObject* Heap::AllocateByteArray(int length) { |
| 3160 if (length < 0 || length > ByteArray::kMaxLength) { | 3160 if (length < 0 || length > ByteArray::kMaxLength) { |
| 3161 return Failure::OutOfMemoryException(); | 3161 return Failure::OutOfMemoryException(); |
| 3162 } | 3162 } |
| 3163 int size = ByteArray::SizeFor(length); | 3163 int size = ByteArray::SizeFor(length); |
| 3164 AllocationSpace space = | 3164 AllocationSpace space = |
| 3165 (size > MaxObjectSizeInPagedSpace()) ? LO_SPACE : NEW_SPACE; | 3165 (size > Page::kMaxNonCodeHeapObjectSize) ? LO_SPACE : NEW_SPACE; |
| 3166 Object* result; | 3166 Object* result; |
| 3167 { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE); | 3167 { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE); |
| 3168 if (!maybe_result->ToObject(&result)) return maybe_result; | 3168 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 3169 } | 3169 } |
| 3170 | 3170 |
| 3171 reinterpret_cast<ByteArray*>(result)->set_map_unsafe(byte_array_map()); | 3171 reinterpret_cast<ByteArray*>(result)->set_map_unsafe(byte_array_map()); |
| 3172 reinterpret_cast<ByteArray*>(result)->set_length(length); | 3172 reinterpret_cast<ByteArray*>(result)->set_length(length); |
| 3173 return result; | 3173 return result; |
| 3174 } | 3174 } |
| 3175 | 3175 |
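The two AllocateByteArray overloads above show the space-selection pattern this file repeats below for strings and fixed arrays: pretenured objects target old data space, fresh ones new space, and anything larger than a page's usable object area spills to large object space, with a retry space for a failed first attempt. A condensed sketch, with max_new and max_non_code standing in for kMaxObjectSizeInNewSpace and Page::kMaxNonCodeHeapObjectSize (names illustrative, not the V8 API):

enum Space { NEW_SPACE, OLD_DATA_SPACE, LO_SPACE };

// Picks the primary and retry allocation spaces for a data-only object.
void PickSpaces(int size, bool pretenured, int max_new, int max_non_code,
                Space* space, Space* retry) {
  *space = pretenured ? OLD_DATA_SPACE : NEW_SPACE;
  *retry = OLD_DATA_SPACE;
  if (*space == NEW_SPACE && size > max_new) {
    *space = LO_SPACE;   // too big for new space; the retry space is ignored
  } else if (*space == NEW_SPACE && size > max_non_code) {
    *retry = LO_SPACE;   // fits in new space, but retry in large object space
  } else if (*space == OLD_DATA_SPACE && size > max_non_code) {
    *space = LO_SPACE;   // too big for any paged space
  }
}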
| (...skipping 44 matching lines...) |
| 3220 MaybeObject* maybe_reloc_info = AllocateByteArray(desc.reloc_size, TENURED); | 3220 MaybeObject* maybe_reloc_info = AllocateByteArray(desc.reloc_size, TENURED); |
| 3221 if (!maybe_reloc_info->To(&reloc_info)) return maybe_reloc_info; | 3221 if (!maybe_reloc_info->To(&reloc_info)) return maybe_reloc_info; |
| 3222 | 3222 |
| 3223 // Compute size. | 3223 // Compute size. |
| 3224 int body_size = RoundUp(desc.instr_size, kObjectAlignment); | 3224 int body_size = RoundUp(desc.instr_size, kObjectAlignment); |
| 3225 int obj_size = Code::SizeFor(body_size); | 3225 int obj_size = Code::SizeFor(body_size); |
| 3226 ASSERT(IsAligned(static_cast<intptr_t>(obj_size), kCodeAlignment)); | 3226 ASSERT(IsAligned(static_cast<intptr_t>(obj_size), kCodeAlignment)); |
| 3227 MaybeObject* maybe_result; | 3227 MaybeObject* maybe_result; |
| 3228 // Large code objects and code objects which should stay at a fixed address | 3228 // Large code objects and code objects which should stay at a fixed address |
| 3229 // are allocated in large object space. | 3229 // are allocated in large object space. |
| 3230 if (obj_size > MaxObjectSizeInPagedSpace() || immovable) { | 3230 if (obj_size > code_space()->AreaSize() || immovable) { |
| 3231 maybe_result = lo_space_->AllocateRaw(obj_size, EXECUTABLE); | 3231 maybe_result = lo_space_->AllocateRaw(obj_size, EXECUTABLE); |
| 3232 } else { | 3232 } else { |
| 3233 maybe_result = code_space_->AllocateRaw(obj_size); | 3233 maybe_result = code_space_->AllocateRaw(obj_size); |
| 3234 } | 3234 } |
| 3235 | 3235 |
| 3236 Object* result; | 3236 Object* result; |
| 3237 if (!maybe_result->ToObject(&result)) return maybe_result; | 3237 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 3238 | 3238 |
| 3239 // Initialize the object | 3239 // Initialize the object |
| 3240 HeapObject::cast(result)->set_map_unsafe(code_map()); | 3240 HeapObject::cast(result)->set_map_unsafe(code_map()); |
| (...skipping 27 matching lines...) |
| 3268 } | 3268 } |
| 3269 #endif | 3269 #endif |
| 3270 return code; | 3270 return code; |
| 3271 } | 3271 } |
| 3272 | 3272 |
| 3273 | 3273 |
| 3274 MaybeObject* Heap::CopyCode(Code* code) { | 3274 MaybeObject* Heap::CopyCode(Code* code) { |
| 3275 // Allocate an object the same size as the code object. | 3275 // Allocate an object the same size as the code object. |
| 3276 int obj_size = code->Size(); | 3276 int obj_size = code->Size(); |
| 3277 MaybeObject* maybe_result; | 3277 MaybeObject* maybe_result; |
| 3278 if (obj_size > MaxObjectSizeInPagedSpace()) { | 3278 if (obj_size > code_space()->AreaSize()) { |
| 3279 maybe_result = lo_space_->AllocateRaw(obj_size, EXECUTABLE); | 3279 maybe_result = lo_space_->AllocateRaw(obj_size, EXECUTABLE); |
| 3280 } else { | 3280 } else { |
| 3281 maybe_result = code_space_->AllocateRaw(obj_size); | 3281 maybe_result = code_space_->AllocateRaw(obj_size); |
| 3282 } | 3282 } |
| 3283 | 3283 |
| 3284 Object* result; | 3284 Object* result; |
| 3285 if (!maybe_result->ToObject(&result)) return maybe_result; | 3285 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 3286 | 3286 |
| 3287 // Copy code object. | 3287 // Copy code object. |
| 3288 Address old_addr = code->address(); | 3288 Address old_addr = code->address(); |
| (...skipping 22 matching lines...) |
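CopyCode above (and CreateCode before it) now sizes the paged-versus-large decision off code_space()->AreaSize() instead of the old shared MaxObjectSizeInPagedSpace(), since a code page's usable area can differ from a data page's. The test distills to the sketch below, where code_page_area_size is assumed to be one code page's usable bytes:

// true  => allocate the code object in large object space (EXECUTABLE);
// false => allocate it in code space proper. CreateCode additionally
// forces large object space for immovable code.
bool CodeNeedsLargeObjectSpace(int obj_size, int code_page_area_size) {
  return obj_size > code_page_area_size;
}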
| 3311 int new_body_size = RoundUp(code->instruction_size(), kObjectAlignment); | 3311 int new_body_size = RoundUp(code->instruction_size(), kObjectAlignment); |
| 3312 | 3312 |
| 3313 int new_obj_size = Code::SizeFor(new_body_size); | 3313 int new_obj_size = Code::SizeFor(new_body_size); |
| 3314 | 3314 |
| 3315 Address old_addr = code->address(); | 3315 Address old_addr = code->address(); |
| 3316 | 3316 |
| 3317 size_t relocation_offset = | 3317 size_t relocation_offset = |
| 3318 static_cast<size_t>(code->instruction_end() - old_addr); | 3318 static_cast<size_t>(code->instruction_end() - old_addr); |
| 3319 | 3319 |
| 3320 MaybeObject* maybe_result; | 3320 MaybeObject* maybe_result; |
| 3321 if (new_obj_size > MaxObjectSizeInPagedSpace()) { | 3321 if (new_obj_size > code_space()->AreaSize()) { |
| 3322 maybe_result = lo_space_->AllocateRaw(new_obj_size, EXECUTABLE); | 3322 maybe_result = lo_space_->AllocateRaw(new_obj_size, EXECUTABLE); |
| 3323 } else { | 3323 } else { |
| 3324 maybe_result = code_space_->AllocateRaw(new_obj_size); | 3324 maybe_result = code_space_->AllocateRaw(new_obj_size); |
| 3325 } | 3325 } |
| 3326 | 3326 |
| 3327 Object* result; | 3327 Object* result; |
| 3328 if (!maybe_result->ToObject(&result)) return maybe_result; | 3328 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 3329 | 3329 |
| 3330 // Copy code object. | 3330 // Copy code object. |
| 3331 Address new_addr = reinterpret_cast<HeapObject*>(result)->address(); | 3331 Address new_addr = reinterpret_cast<HeapObject*>(result)->address(); |
| (...skipping 300 matching lines...) |
| 3632 map->inobject_properties(); | 3632 map->inobject_properties(); |
| 3633 ASSERT(prop_size >= 0); | 3633 ASSERT(prop_size >= 0); |
| 3634 Object* properties; | 3634 Object* properties; |
| 3635 { MaybeObject* maybe_properties = AllocateFixedArray(prop_size, pretenure); | 3635 { MaybeObject* maybe_properties = AllocateFixedArray(prop_size, pretenure); |
| 3636 if (!maybe_properties->ToObject(&properties)) return maybe_properties; | 3636 if (!maybe_properties->ToObject(&properties)) return maybe_properties; |
| 3637 } | 3637 } |
| 3638 | 3638 |
| 3639 // Allocate the JSObject. | 3639 // Allocate the JSObject. |
| 3640 AllocationSpace space = | 3640 AllocationSpace space = |
| 3641 (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE; | 3641 (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE; |
| 3642 if (map->instance_size() > MaxObjectSizeInPagedSpace()) space = LO_SPACE; | 3642 if (map->instance_size() > Page::kMaxNonCodeHeapObjectSize) space = LO_SPACE; |
| 3643 Object* obj; | 3643 Object* obj; |
| 3644 { MaybeObject* maybe_obj = Allocate(map, space); | 3644 { MaybeObject* maybe_obj = Allocate(map, space); |
| 3645 if (!maybe_obj->ToObject(&obj)) return maybe_obj; | 3645 if (!maybe_obj->ToObject(&obj)) return maybe_obj; |
| 3646 } | 3646 } |
| 3647 | 3647 |
| 3648 // Initialize the JSObject. | 3648 // Initialize the JSObject. |
| 3649 InitializeJSObjectFromMap(JSObject::cast(obj), | 3649 InitializeJSObjectFromMap(JSObject::cast(obj), |
| 3650 FixedArray::cast(properties), | 3650 FixedArray::cast(properties), |
| 3651 map); | 3651 map); |
| 3652 ASSERT(JSObject::cast(obj)->HasFastSmiOnlyElements() || | 3652 ASSERT(JSObject::cast(obj)->HasFastSmiOnlyElements() || |
| (...skipping 429 matching lines...) |
| 4082 } else { | 4082 } else { |
| 4083 if (chars > SeqTwoByteString::kMaxLength) { | 4083 if (chars > SeqTwoByteString::kMaxLength) { |
| 4084 return Failure::OutOfMemoryException(); | 4084 return Failure::OutOfMemoryException(); |
| 4085 } | 4085 } |
| 4086 map = symbol_map(); | 4086 map = symbol_map(); |
| 4087 size = SeqTwoByteString::SizeFor(chars); | 4087 size = SeqTwoByteString::SizeFor(chars); |
| 4088 } | 4088 } |
| 4089 | 4089 |
| 4090 // Allocate string. | 4090 // Allocate string. |
| 4091 Object* result; | 4091 Object* result; |
| 4092 { MaybeObject* maybe_result = (size > MaxObjectSizeInPagedSpace()) | 4092 { MaybeObject* maybe_result = (size > Page::kMaxNonCodeHeapObjectSize) |
| 4093 ? lo_space_->AllocateRaw(size, NOT_EXECUTABLE) | 4093 ? lo_space_->AllocateRaw(size, NOT_EXECUTABLE) |
| 4094 : old_data_space_->AllocateRaw(size); | 4094 : old_data_space_->AllocateRaw(size); |
| 4095 if (!maybe_result->ToObject(&result)) return maybe_result; | 4095 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 4096 } | 4096 } |
| 4097 | 4097 |
| 4098 reinterpret_cast<HeapObject*>(result)->set_map_unsafe(map); | 4098 reinterpret_cast<HeapObject*>(result)->set_map_unsafe(map); |
| 4099 // Set length and hash fields of the allocated string. | 4099 // Set length and hash fields of the allocated string. |
| 4100 String* answer = String::cast(result); | 4100 String* answer = String::cast(result); |
| 4101 answer->set_length(chars); | 4101 answer->set_length(chars); |
| 4102 answer->set_hash_field(hash_field); | 4102 answer->set_hash_field(hash_field); |
| (...skipping 16 matching lines...) |
| 4119 int size = SeqAsciiString::SizeFor(length); | 4119 int size = SeqAsciiString::SizeFor(length); |
| 4120 ASSERT(size <= SeqAsciiString::kMaxSize); | 4120 ASSERT(size <= SeqAsciiString::kMaxSize); |
| 4121 | 4121 |
| 4122 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; | 4122 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; |
| 4123 AllocationSpace retry_space = OLD_DATA_SPACE; | 4123 AllocationSpace retry_space = OLD_DATA_SPACE; |
| 4124 | 4124 |
| 4125 if (space == NEW_SPACE) { | 4125 if (space == NEW_SPACE) { |
| 4126 if (size > kMaxObjectSizeInNewSpace) { | 4126 if (size > kMaxObjectSizeInNewSpace) { |
| 4127 // Allocate in large object space, retry space will be ignored. | 4127 // Allocate in large object space, retry space will be ignored. |
| 4128 space = LO_SPACE; | 4128 space = LO_SPACE; |
| 4129 } else if (size > MaxObjectSizeInPagedSpace()) { | 4129 } else if (size > Page::kMaxNonCodeHeapObjectSize) { |
| 4130 // Allocate in new space, retry in large object space. | 4130 // Allocate in new space, retry in large object space. |
| 4131 retry_space = LO_SPACE; | 4131 retry_space = LO_SPACE; |
| 4132 } | 4132 } |
| 4133 } else if (space == OLD_DATA_SPACE && size > MaxObjectSizeInPagedSpace()) { | 4133 } else if (space == OLD_DATA_SPACE && |
| 4134 size > Page::kMaxNonCodeHeapObjectSize) { |
| 4134 space = LO_SPACE; | 4135 space = LO_SPACE; |
| 4135 } | 4136 } |
| 4136 Object* result; | 4137 Object* result; |
| 4137 { MaybeObject* maybe_result = AllocateRaw(size, space, retry_space); | 4138 { MaybeObject* maybe_result = AllocateRaw(size, space, retry_space); |
| 4138 if (!maybe_result->ToObject(&result)) return maybe_result; | 4139 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 4139 } | 4140 } |
| 4140 | 4141 |
| 4141 // Partially initialize the object. | 4142 // Partially initialize the object. |
| 4142 HeapObject::cast(result)->set_map_unsafe(ascii_string_map()); | 4143 HeapObject::cast(result)->set_map_unsafe(ascii_string_map()); |
| 4143 String::cast(result)->set_length(length); | 4144 String::cast(result)->set_length(length); |
| (...skipping 10 matching lines...) |
| 4154 } | 4155 } |
| 4155 int size = SeqTwoByteString::SizeFor(length); | 4156 int size = SeqTwoByteString::SizeFor(length); |
| 4156 ASSERT(size <= SeqTwoByteString::kMaxSize); | 4157 ASSERT(size <= SeqTwoByteString::kMaxSize); |
| 4157 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; | 4158 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; |
| 4158 AllocationSpace retry_space = OLD_DATA_SPACE; | 4159 AllocationSpace retry_space = OLD_DATA_SPACE; |
| 4159 | 4160 |
| 4160 if (space == NEW_SPACE) { | 4161 if (space == NEW_SPACE) { |
| 4161 if (size > kMaxObjectSizeInNewSpace) { | 4162 if (size > kMaxObjectSizeInNewSpace) { |
| 4162 // Allocate in large object space, retry space will be ignored. | 4163 // Allocate in large object space, retry space will be ignored. |
| 4163 space = LO_SPACE; | 4164 space = LO_SPACE; |
| 4164 } else if (size > MaxObjectSizeInPagedSpace()) { | 4165 } else if (size > Page::kMaxNonCodeHeapObjectSize) { |
| 4165 // Allocate in new space, retry in large object space. | 4166 // Allocate in new space, retry in large object space. |
| 4166 retry_space = LO_SPACE; | 4167 retry_space = LO_SPACE; |
| 4167 } | 4168 } |
| 4168 } else if (space == OLD_DATA_SPACE && size > MaxObjectSizeInPagedSpace()) { | 4169 } else if (space == OLD_DATA_SPACE && |
| 4170 size > Page::kMaxNonCodeHeapObjectSize) { |
| 4169 space = LO_SPACE; | 4171 space = LO_SPACE; |
| 4170 } | 4172 } |
| 4171 Object* result; | 4173 Object* result; |
| 4172 { MaybeObject* maybe_result = AllocateRaw(size, space, retry_space); | 4174 { MaybeObject* maybe_result = AllocateRaw(size, space, retry_space); |
| 4173 if (!maybe_result->ToObject(&result)) return maybe_result; | 4175 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 4174 } | 4176 } |
| 4175 | 4177 |
| 4176 // Partially initialize the object. | 4178 // Partially initialize the object. |
| 4177 HeapObject::cast(result)->set_map_unsafe(string_map()); | 4179 HeapObject::cast(result)->set_map_unsafe(string_map()); |
| 4178 String::cast(result)->set_length(length); | 4180 String::cast(result)->set_length(length); |
| (...skipping 98 matching lines...) |
| 4277 return Failure::OutOfMemoryException(); | 4279 return Failure::OutOfMemoryException(); |
| 4278 } | 4280 } |
| 4279 | 4281 |
| 4280 AllocationSpace space = | 4282 AllocationSpace space = |
| 4281 (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE; | 4283 (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE; |
| 4282 int size = FixedArray::SizeFor(length); | 4284 int size = FixedArray::SizeFor(length); |
| 4283 if (space == NEW_SPACE && size > kMaxObjectSizeInNewSpace) { | 4285 if (space == NEW_SPACE && size > kMaxObjectSizeInNewSpace) { |
| 4284 // Too big for new space. | 4286 // Too big for new space. |
| 4285 space = LO_SPACE; | 4287 space = LO_SPACE; |
| 4286 } else if (space == OLD_POINTER_SPACE && | 4288 } else if (space == OLD_POINTER_SPACE && |
| 4287 size > MaxObjectSizeInPagedSpace()) { | 4289 size > Page::kMaxNonCodeHeapObjectSize) { |
| 4288 // Too big for old pointer space. | 4290 // Too big for old pointer space. |
| 4289 space = LO_SPACE; | 4291 space = LO_SPACE; |
| 4290 } | 4292 } |
| 4291 | 4293 |
| 4292 AllocationSpace retry_space = | 4294 AllocationSpace retry_space = |
| 4293 (size <= MaxObjectSizeInPagedSpace()) ? OLD_POINTER_SPACE : LO_SPACE; | 4295 (size <= Page::kMaxNonCodeHeapObjectSize) ? OLD_POINTER_SPACE : LO_SPACE; |
| 4294 | 4296 |
| 4295 return AllocateRaw(size, space, retry_space); | 4297 return AllocateRaw(size, space, retry_space); |
| 4296 } | 4298 } |
| 4297 | 4299 |
| 4298 | 4300 |
| 4299 MUST_USE_RESULT static MaybeObject* AllocateFixedArrayWithFiller( | 4301 MUST_USE_RESULT static MaybeObject* AllocateFixedArrayWithFiller( |
| 4300 Heap* heap, | 4302 Heap* heap, |
| 4301 int length, | 4303 int length, |
| 4302 PretenureFlag pretenure, | 4304 PretenureFlag pretenure, |
| 4303 Object* filler) { | 4305 Object* filler) { |
| (...skipping 84 matching lines...) |
| 4388 return Failure::OutOfMemoryException(); | 4390 return Failure::OutOfMemoryException(); |
| 4389 } | 4391 } |
| 4390 | 4392 |
| 4391 AllocationSpace space = | 4393 AllocationSpace space = |
| 4392 (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; | 4394 (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; |
| 4393 int size = FixedDoubleArray::SizeFor(length); | 4395 int size = FixedDoubleArray::SizeFor(length); |
| 4394 if (space == NEW_SPACE && size > kMaxObjectSizeInNewSpace) { | 4396 if (space == NEW_SPACE && size > kMaxObjectSizeInNewSpace) { |
| 4395 // Too big for new space. | 4397 // Too big for new space. |
| 4396 space = LO_SPACE; | 4398 space = LO_SPACE; |
| 4397 } else if (space == OLD_DATA_SPACE && | 4399 } else if (space == OLD_DATA_SPACE && |
| 4398 size > MaxObjectSizeInPagedSpace()) { | 4400 size > Page::kMaxNonCodeHeapObjectSize) { |
| 4399 // Too big for old data space. | 4401 // Too big for old data space. |
| 4400 space = LO_SPACE; | 4402 space = LO_SPACE; |
| 4401 } | 4403 } |
| 4402 | 4404 |
| 4403 AllocationSpace retry_space = | 4405 AllocationSpace retry_space = |
| 4404 (size <= MaxObjectSizeInPagedSpace()) ? OLD_DATA_SPACE : LO_SPACE; | 4406 (size <= Page::kMaxNonCodeHeapObjectSize) ? OLD_DATA_SPACE : LO_SPACE; |
| 4405 | 4407 |
| 4406 return AllocateRaw(size, space, retry_space); | 4408 return AllocateRaw(size, space, retry_space); |
| 4407 } | 4409 } |
| 4408 | 4410 |
| 4409 | 4411 |
| 4410 MaybeObject* Heap::AllocateHashTable(int length, PretenureFlag pretenure) { | 4412 MaybeObject* Heap::AllocateHashTable(int length, PretenureFlag pretenure) { |
| 4411 Object* result; | 4413 Object* result; |
| 4412 { MaybeObject* maybe_result = AllocateFixedArray(length, pretenure); | 4414 { MaybeObject* maybe_result = AllocateFixedArray(length, pretenure); |
| 4413 if (!maybe_result->ToObject(&result)) return maybe_result; | 4415 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 4414 } | 4416 } |
| (...skipping 104 matching lines...) |
| 4519 #define MAKE_CASE(NAME, Name, name) \ | 4521 #define MAKE_CASE(NAME, Name, name) \ |
| 4520 case NAME##_TYPE: map = name##_map(); break; | 4522 case NAME##_TYPE: map = name##_map(); break; |
| 4521 STRUCT_LIST(MAKE_CASE) | 4523 STRUCT_LIST(MAKE_CASE) |
| 4522 #undef MAKE_CASE | 4524 #undef MAKE_CASE |
| 4523 default: | 4525 default: |
| 4524 UNREACHABLE(); | 4526 UNREACHABLE(); |
| 4525 return Failure::InternalError(); | 4527 return Failure::InternalError(); |
| 4526 } | 4528 } |
| 4527 int size = map->instance_size(); | 4529 int size = map->instance_size(); |
| 4528 AllocationSpace space = | 4530 AllocationSpace space = |
| 4529 (size > MaxObjectSizeInPagedSpace()) ? LO_SPACE : OLD_POINTER_SPACE; | 4531 (size > Page::kMaxNonCodeHeapObjectSize) ? LO_SPACE : OLD_POINTER_SPACE; |
| 4530 Object* result; | 4532 Object* result; |
| 4531 { MaybeObject* maybe_result = Allocate(map, space); | 4533 { MaybeObject* maybe_result = Allocate(map, space); |
| 4532 if (!maybe_result->ToObject(&result)) return maybe_result; | 4534 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 4533 } | 4535 } |
| 4534 Struct::cast(result)->InitializeBody(size); | 4536 Struct::cast(result)->InitializeBody(size); |
| 4535 return result; | 4537 return result; |
| 4536 } | 4538 } |
| 4537 | 4539 |
| 4538 | 4540 |
| 4539 bool Heap::IsHeapIterable() { | 4541 bool Heap::IsHeapIterable() { |
| (...skipping 320 matching lines...) |
| 4860 return symbol_table()->LookupSymbolIfExists(string, symbol); | 4862 return symbol_table()->LookupSymbolIfExists(string, symbol); |
| 4861 } | 4863 } |
| 4862 | 4864 |
| 4863 | 4865 |
| 4864 #ifdef DEBUG | 4866 #ifdef DEBUG |
| 4865 void Heap::ZapFromSpace() { | 4867 void Heap::ZapFromSpace() { |
| 4866 NewSpacePageIterator it(new_space_.FromSpaceStart(), | 4868 NewSpacePageIterator it(new_space_.FromSpaceStart(), |
| 4867 new_space_.FromSpaceEnd()); | 4869 new_space_.FromSpaceEnd()); |
| 4868 while (it.has_next()) { | 4870 while (it.has_next()) { |
| 4869 NewSpacePage* page = it.next(); | 4871 NewSpacePage* page = it.next(); |
| 4870 for (Address cursor = page->body(), limit = page->body_limit(); | 4872 for (Address cursor = page->area_start(), limit = page->area_end(); |
| 4871 cursor < limit; | 4873 cursor < limit; |
| 4872 cursor += kPointerSize) { | 4874 cursor += kPointerSize) { |
| 4873 Memory::Address_at(cursor) = kFromSpaceZapValue; | 4875 Memory::Address_at(cursor) = kFromSpaceZapValue; |
| 4874 } | 4876 } |
| 4875 } | 4877 } |
| 4876 } | 4878 } |
| 4877 #endif // DEBUG | 4879 #endif // DEBUG |
| 4878 | 4880 |
| 4879 | 4881 |
| 4880 void Heap::IterateAndMarkPointersToFromSpace(Address start, | 4882 void Heap::IterateAndMarkPointersToFromSpace(Address start, |
| (...skipping 118 matching lines...) |
| 4999 // scanning a page and ensuring that all pointers to young space are in the | 5001 // scanning a page and ensuring that all pointers to young space are in the |
| 5000 // store buffer. | 5002 // store buffer. |
| 5001 void Heap::OldPointerSpaceCheckStoreBuffer() { | 5003 void Heap::OldPointerSpaceCheckStoreBuffer() { |
| 5002 OldSpace* space = old_pointer_space(); | 5004 OldSpace* space = old_pointer_space(); |
| 5003 PageIterator pages(space); | 5005 PageIterator pages(space); |
| 5004 | 5006 |
| 5005 store_buffer()->SortUniq(); | 5007 store_buffer()->SortUniq(); |
| 5006 | 5008 |
| 5007 while (pages.has_next()) { | 5009 while (pages.has_next()) { |
| 5008 Page* page = pages.next(); | 5010 Page* page = pages.next(); |
| 5009 Object** current = reinterpret_cast<Object**>(page->ObjectAreaStart()); | 5011 Object** current = reinterpret_cast<Object**>(page->area_start()); |
| 5010 | 5012 |
| 5011 Address end = page->ObjectAreaEnd(); | 5013 Address end = page->area_end(); |
| 5012 | 5014 |
| 5013 Object*** store_buffer_position = store_buffer()->Start(); | 5015 Object*** store_buffer_position = store_buffer()->Start(); |
| 5014 Object*** store_buffer_top = store_buffer()->Top(); | 5016 Object*** store_buffer_top = store_buffer()->Top(); |
| 5015 | 5017 |
| 5016 Object** limit = reinterpret_cast<Object**>(end); | 5018 Object** limit = reinterpret_cast<Object**>(end); |
| 5017 CheckStoreBuffer(this, | 5019 CheckStoreBuffer(this, |
| 5018 current, | 5020 current, |
| 5019 limit, | 5021 limit, |
| 5020 &store_buffer_position, | 5022 &store_buffer_position, |
| 5021 store_buffer_top, | 5023 store_buffer_top, |
| 5022 &EverythingsAPointer, | 5024 &EverythingsAPointer, |
| 5023 space->top(), | 5025 space->top(), |
| 5024 space->limit()); | 5026 space->limit()); |
| 5025 } | 5027 } |
| 5026 } | 5028 } |
| 5027 | 5029 |
| 5028 | 5030 |
| 5029 void Heap::MapSpaceCheckStoreBuffer() { | 5031 void Heap::MapSpaceCheckStoreBuffer() { |
| 5030 MapSpace* space = map_space(); | 5032 MapSpace* space = map_space(); |
| 5031 PageIterator pages(space); | 5033 PageIterator pages(space); |
| 5032 | 5034 |
| 5033 store_buffer()->SortUniq(); | 5035 store_buffer()->SortUniq(); |
| 5034 | 5036 |
| 5035 while (pages.has_next()) { | 5037 while (pages.has_next()) { |
| 5036 Page* page = pages.next(); | 5038 Page* page = pages.next(); |
| 5037 Object** current = reinterpret_cast<Object**>(page->ObjectAreaStart()); | 5039 Object** current = reinterpret_cast<Object**>(page->area_start()); |
| 5038 | 5040 |
| 5039 Address end = page->ObjectAreaEnd(); | 5041 Address end = page->area_end(); |
| 5040 | 5042 |
| 5041 Object*** store_buffer_position = store_buffer()->Start(); | 5043 Object*** store_buffer_position = store_buffer()->Start(); |
| 5042 Object*** store_buffer_top = store_buffer()->Top(); | 5044 Object*** store_buffer_top = store_buffer()->Top(); |
| 5043 | 5045 |
| 5044 Object** limit = reinterpret_cast<Object**>(end); | 5046 Object** limit = reinterpret_cast<Object**>(end); |
| 5045 CheckStoreBuffer(this, | 5047 CheckStoreBuffer(this, |
| 5046 current, | 5048 current, |
| 5047 limit, | 5049 limit, |
| 5048 &store_buffer_position, | 5050 &store_buffer_position, |
| 5049 store_buffer_top, | 5051 store_buffer_top, |
| (...skipping 1470 matching lines...) |
| 6520 isolate_->heap()->store_buffer()->Compact(); | 6522 isolate_->heap()->store_buffer()->Compact(); |
| 6521 isolate_->heap()->store_buffer()->Filter(MemoryChunk::ABOUT_TO_BE_FREED); | 6523 isolate_->heap()->store_buffer()->Filter(MemoryChunk::ABOUT_TO_BE_FREED); |
| 6522 for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next) { | 6524 for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next) { |
| 6523 next = chunk->next_chunk(); | 6525 next = chunk->next_chunk(); |
| 6524 isolate_->memory_allocator()->Free(chunk); | 6526 isolate_->memory_allocator()->Free(chunk); |
| 6525 } | 6527 } |
| 6526 chunks_queued_for_free_ = NULL; | 6528 chunks_queued_for_free_ = NULL; |
| 6527 } | 6529 } |
| 6528 | 6530 |
| 6529 } } // namespace v8::internal | 6531 } } // namespace v8::internal |