OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1943 matching lines...)
1954 | 1954 |
1955 // Take another spin if there are now unswept objects in new space | 1955 // Take another spin if there are now unswept objects in new space |
1956 // (there are currently no more unswept promoted objects). | 1956 // (there are currently no more unswept promoted objects). |
1957 } while (new_space_front != new_space_.top()); | 1957 } while (new_space_front != new_space_.top()); |
1958 | 1958 |
1959 return new_space_front; | 1959 return new_space_front; |
1960 } | 1960 } |
1961 | 1961 |
1962 | 1962 |
1963 STATIC_ASSERT((FixedDoubleArray::kHeaderSize & kDoubleAlignmentMask) == 0); | 1963 STATIC_ASSERT((FixedDoubleArray::kHeaderSize & kDoubleAlignmentMask) == 0); |
| 1964 STATIC_ASSERT((ConstantPoolArray::kHeaderSize & kDoubleAlignmentMask) == 0); |
1964 | 1965 |
1965 | 1966 |
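Both STATIC_ASSERTs above pin down that the object headers of FixedDoubleArray and the new ConstantPoolArray are multiples of kDoubleAlignment, so the 8-byte entries that follow stay naturally aligned once the object itself is double aligned; EnsureDoubleAligned below handles that second half (its body is collapsed in this view). A minimal standalone sketch of the fixup arithmetic, assuming a 32-bit target with kPointerSize == 4 and kDoubleAlignment == 8 and modelling only the address shift, not V8's filler objects:

    #include <cassert>
    #include <cstdint>

    // Assumed constants for a 32-bit target; V8 defines its own versions.
    const uintptr_t kPointerSize = 4;
    const uintptr_t kDoubleAlignment = 8;
    const uintptr_t kDoubleAlignmentMask = kDoubleAlignment - 1;

    // If the allocation start is only pointer-aligned, bump the object
    // forward one word; the real heap also writes a one-word filler object
    // into whichever word is left over.
    uintptr_t DoubleAlignSketch(uintptr_t address) {
      if ((address & kDoubleAlignmentMask) != 0) {
        return address + kPointerSize;
      }
      return address;
    }

    int main() {
      assert(DoubleAlignSketch(0x1004) == 0x1008);  // misaligned start moves up
      assert(DoubleAlignSketch(0x1008) == 0x1008);  // aligned start is untouched
      return 0;
    }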
1966 INLINE(static HeapObject* EnsureDoubleAligned(Heap* heap, | 1967 INLINE(static HeapObject* EnsureDoubleAligned(Heap* heap, |
1967 HeapObject* object, | 1968 HeapObject* object, |
1968 int size)); | 1969 int size)); |
1969 | 1970 |
1970 static HeapObject* EnsureDoubleAligned(Heap* heap, | 1971 static HeapObject* EnsureDoubleAligned(Heap* heap, |
1971 HeapObject* object, | 1972 HeapObject* object, |
1972 int size) { | 1973 int size) { |
1973 if ((OffsetFrom(object->address()) & kDoubleAlignmentMask) != 0) { | 1974 if ((OffsetFrom(object->address()) & kDoubleAlignmentMask) != 0) { |
(...skipping 683 matching lines...)
2657 set_undetectable_ascii_string_map(Map::cast(obj)); | 2658 set_undetectable_ascii_string_map(Map::cast(obj)); |
2658 Map::cast(obj)->set_is_undetectable(); | 2659 Map::cast(obj)->set_is_undetectable(); |
2659 | 2660 |
2660 { MaybeObject* maybe_obj = | 2661 { MaybeObject* maybe_obj = |
2661 AllocateMap(FIXED_DOUBLE_ARRAY_TYPE, kVariableSizeSentinel); | 2662 AllocateMap(FIXED_DOUBLE_ARRAY_TYPE, kVariableSizeSentinel); |
2662 if (!maybe_obj->ToObject(&obj)) return false; | 2663 if (!maybe_obj->ToObject(&obj)) return false; |
2663 } | 2664 } |
2664 set_fixed_double_array_map(Map::cast(obj)); | 2665 set_fixed_double_array_map(Map::cast(obj)); |
2665 | 2666 |
2666 { MaybeObject* maybe_obj = | 2667 { MaybeObject* maybe_obj = |
| 2668 AllocateMap(CONSTANT_POOL_ARRAY_TYPE, kVariableSizeSentinel); |
| 2669 if (!maybe_obj->ToObject(&obj)) return false; |
| 2670 } |
| 2671 set_constant_pool_array_map(Map::cast(obj)); |
| 2672 |
| 2673 { MaybeObject* maybe_obj = |
2667 AllocateMap(BYTE_ARRAY_TYPE, kVariableSizeSentinel); | 2674 AllocateMap(BYTE_ARRAY_TYPE, kVariableSizeSentinel); |
2668 if (!maybe_obj->ToObject(&obj)) return false; | 2675 if (!maybe_obj->ToObject(&obj)) return false; |
2669 } | 2676 } |
2670 set_byte_array_map(Map::cast(obj)); | 2677 set_byte_array_map(Map::cast(obj)); |
2671 | 2678 |
2672 { MaybeObject* maybe_obj = | 2679 { MaybeObject* maybe_obj = |
2673 AllocateMap(FREE_SPACE_TYPE, kVariableSizeSentinel); | 2680 AllocateMap(FREE_SPACE_TYPE, kVariableSizeSentinel); |
2674 if (!maybe_obj->ToObject(&obj)) return false; | 2681 if (!maybe_obj->ToObject(&obj)) return false; |
2675 } | 2682 } |
2676 set_free_space_map(Map::cast(obj)); | 2683 set_free_space_map(Map::cast(obj)); |
(...skipping 2714 matching lines...)
5391 HeapObject* dst = HeapObject::cast(obj); | 5398 HeapObject* dst = HeapObject::cast(obj); |
5392 dst->set_map_no_write_barrier(map); | 5399 dst->set_map_no_write_barrier(map); |
5393 CopyBlock( | 5400 CopyBlock( |
5394 dst->address() + FixedDoubleArray::kLengthOffset, | 5401 dst->address() + FixedDoubleArray::kLengthOffset, |
5395 src->address() + FixedDoubleArray::kLengthOffset, | 5402 src->address() + FixedDoubleArray::kLengthOffset, |
5396 FixedDoubleArray::SizeFor(len) - FixedDoubleArray::kLengthOffset); | 5403 FixedDoubleArray::SizeFor(len) - FixedDoubleArray::kLengthOffset); |
5397 return obj; | 5404 return obj; |
5398 } | 5405 } |
5399 | 5406 |
5400 | 5407 |
| 5408 MaybeObject* Heap::CopyConstantPoolArrayWithMap(ConstantPoolArray* src, |
| 5409 Map* map) { |
| 5410 int int64_entries = src->count_of_int64_entries(); |
| 5411 int ptr_entries = src->count_of_ptr_entries(); |
| 5412 int int32_entries = src->count_of_int32_entries(); |
| 5413 Object* obj; |
| 5414 { MaybeObject* maybe_obj = |
| 5415 AllocateConstantPoolArray(int64_entries, ptr_entries, int32_entries); |
| 5416 if (!maybe_obj->ToObject(&obj)) return maybe_obj; |
| 5417 } |
| 5418 HeapObject* dst = HeapObject::cast(obj); |
| 5419 dst->set_map_no_write_barrier(map); |
| 5420 CopyBlock( |
| 5421 dst->address() + ConstantPoolArray::kLengthOffset, |
| 5422 src->address() + ConstantPoolArray::kLengthOffset, |
| 5423 ConstantPoolArray::SizeFor(int64_entries, ptr_entries, int32_entries) |
| 5424 - ConstantPoolArray::kLengthOffset); |
| 5425 return obj; |
| 5426 } |
| 5427 |
| 5428 |
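CopyConstantPoolArrayWithMap above mirrors CopyFixedDoubleArrayWithMap: allocate a pool with the same entry counts, install the map without a write barrier, then block-copy everything past the length field. If a plain copy helper is wanted next to it, a wrapper in the style of the other Copy* helpers might look like this sketch (an assumption, not part of the patch):

    // Hypothetical convenience wrapper, following the CopyFixedArray /
    // CopyFixedDoubleArray pattern used elsewhere in the heap code.
    MaybeObject* Heap::CopyConstantPoolArray(ConstantPoolArray* src) {
      // Reuse the source map so the copy differs only by address.
      return CopyConstantPoolArrayWithMap(src, src->map());
    }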
5401 MaybeObject* Heap::AllocateRawFixedArray(int length, PretenureFlag pretenure) { | 5429 MaybeObject* Heap::AllocateRawFixedArray(int length, PretenureFlag pretenure) { |
5402 if (length < 0 || length > FixedArray::kMaxLength) { | 5430 if (length < 0 || length > FixedArray::kMaxLength) { |
5403 return Failure::OutOfMemoryException(0xe); | 5431 return Failure::OutOfMemoryException(0xe); |
5404 } | 5432 } |
5405 int size = FixedArray::SizeFor(length); | 5433 int size = FixedArray::SizeFor(length); |
5406 AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, pretenure); | 5434 AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, pretenure); |
5407 | 5435 |
5408 return AllocateRaw(size, space, OLD_POINTER_SPACE); | 5436 return AllocateRaw(size, space, OLD_POINTER_SPACE); |
5409 } | 5437 } |
5410 | 5438 |
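For reference, the size that AllocateRawFixedArray feeds into SelectSpace is just the fixed-array header plus one tagged word per element. A standalone sketch of that arithmetic for a 32-bit target (the two-word header is an assumption for illustration, not a value taken from this patch):

    #include <cassert>

    // Assumed 32-bit layout: map word plus length word ahead of the elements.
    const int kPointerSize = 4;
    const int kFixedArrayHeaderSize = 2 * kPointerSize;

    int FixedArraySizeForSketch(int length) {
      return kFixedArrayHeaderSize + length * kPointerSize;
    }

    int main() {
      assert(FixedArraySizeForSketch(0) == 8);    // header only
      assert(FixedArraySizeForSketch(16) == 72);  // header + 16 tagged slots
      return 0;
    }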
(...skipping 119 matching lines...)
5530 | 5558 |
5531 HeapObject* object; | 5559 HeapObject* object; |
5532 { MaybeObject* maybe_object = AllocateRaw(size, space, OLD_DATA_SPACE); | 5560 { MaybeObject* maybe_object = AllocateRaw(size, space, OLD_DATA_SPACE); |
5533 if (!maybe_object->To<HeapObject>(&object)) return maybe_object; | 5561 if (!maybe_object->To<HeapObject>(&object)) return maybe_object; |
5534 } | 5562 } |
5535 | 5563 |
5536 return EnsureDoubleAligned(this, object, size); | 5564 return EnsureDoubleAligned(this, object, size); |
5537 } | 5565 } |
5538 | 5566 |
5539 | 5567 |
| 5568 MaybeObject* Heap::AllocateConstantPoolArray(int number_of_int64_entries, |
| 5569 int number_of_ptr_entries, |
| 5570 int number_of_int32_entries) { |
| 5571 ASSERT(number_of_int64_entries > 0 || number_of_ptr_entries > 0 || |
| 5572 number_of_int32_entries > 0); |
| 5573 int size = ConstantPoolArray::SizeFor(number_of_int64_entries, |
| 5574 number_of_ptr_entries, |
| 5575 number_of_int32_entries); |
| 5576 #ifndef V8_HOST_ARCH_64_BIT |
| 5577 size += kPointerSize; |
| 5578 #endif |
| 5579 |
| 5580 HeapObject* object; |
| 5581 { MaybeObject* maybe_object = old_pointer_space_->AllocateRaw(size); |
| 5582 if (!maybe_object->To<HeapObject>(&object)) return maybe_object; |
| 5583 } |
| 5584 object = EnsureDoubleAligned(this, object, size); |
| 5585 HeapObject::cast(object)->set_map_no_write_barrier(constant_pool_array_map()); |
| 5586 |
| 5587 ConstantPoolArray* constant_pool = |
| 5588 reinterpret_cast<ConstantPoolArray*>(object); |
| 5589 constant_pool->SetEntryCounts(number_of_int64_entries, |
| 5590 number_of_ptr_entries, |
| 5591 number_of_int32_entries); |
| 5592 MemsetPointer( |
| 5593 HeapObject::RawField( |
| 5594 constant_pool, |
| 5595 constant_pool->OffsetOfElementAt(constant_pool->first_ptr_index())), |
| 5596 undefined_value(), |
| 5597 number_of_ptr_entries); |
| 5598 return constant_pool; |
| 5599 } |
| 5600 |
| 5601 |
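AllocateConstantPoolArray always allocates in old pointer space and always double-aligns: on 32-bit targets the request is padded by one extra pointer so EnsureDoubleAligned has room to shift the object, and the tagged section is pre-filled with undefined so the GC never sees uninitialized slots. A hypothetical caller might drive it roughly as sketched below (this helper and its entry counts are illustrative only; nothing beyond the allocator itself is in the patch):

    // Hypothetical helper showing one way the new allocator could be called;
    // it follows the MaybeObject unwrapping pattern used throughout this file.
    static MaybeObject* AllocateSmallPool(Heap* heap) {
      // 1 int64 entry, 2 tagged pointer entries, 4 int32 entries.  The int64
      // section comes first, which is why ConstantPoolArray::kHeaderSize must
      // be double aligned (see the STATIC_ASSERT added near the top).
      MaybeObject* maybe_pool = heap->AllocateConstantPoolArray(1, 2, 4);
      ConstantPoolArray* pool;
      if (!maybe_pool->To(&pool)) return maybe_pool;
      // The pointer entries already read as undefined_value() thanks to the
      // MemsetPointer call above, so the object is GC-safe immediately.
      return pool;
    }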
5540 MaybeObject* Heap::AllocateHashTable(int length, PretenureFlag pretenure) { | 5602 MaybeObject* Heap::AllocateHashTable(int length, PretenureFlag pretenure) { |
5541 Object* result; | 5603 Object* result; |
5542 { MaybeObject* maybe_result = AllocateFixedArray(length, pretenure); | 5604 { MaybeObject* maybe_result = AllocateFixedArray(length, pretenure); |
5543 if (!maybe_result->ToObject(&result)) return maybe_result; | 5605 if (!maybe_result->ToObject(&result)) return maybe_result; |
5544 } | 5606 } |
5545 reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier( | 5607 reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier( |
5546 hash_table_map()); | 5608 hash_table_map()); |
5547 ASSERT(result->IsHashTable()); | 5609 ASSERT(result->IsHashTable()); |
5548 return result; | 5610 return result; |
5549 } | 5611 } |
(...skipping 2350 matching lines...)
7900 if (FLAG_concurrent_recompilation) { | 7962 if (FLAG_concurrent_recompilation) { |
7901 heap_->relocation_mutex_->Lock(); | 7963 heap_->relocation_mutex_->Lock(); |
7902 #ifdef DEBUG | 7964 #ifdef DEBUG |
7903 heap_->relocation_mutex_locked_by_optimizer_thread_ = | 7965 heap_->relocation_mutex_locked_by_optimizer_thread_ = |
7904 heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread(); | 7966 heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread(); |
7905 #endif // DEBUG | 7967 #endif // DEBUG |
7906 } | 7968 } |
7907 } | 7969 } |
7908 | 7970 |
7909 } } // namespace v8::internal | 7971 } } // namespace v8::internal |