Chromium Code Reviews

Unified Diff: src/heap/heap.cc

Issue 1131783003: Embedded constant pools. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 7 months ago
// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#include "src/accessors.h"
#include "src/api.h"
#include "src/base/bits.h"
#include "src/base/once.h"
(...skipping 1922 matching lines...)
    // Take another spin if there are now unswept objects in new space
    // (there are currently no more unswept promoted objects).
  } while (new_space_front != new_space_.top());

  return new_space_front;
}


STATIC_ASSERT((FixedDoubleArray::kHeaderSize & kDoubleAlignmentMask) ==
              0);  // NOLINT
-STATIC_ASSERT((ConstantPoolArray::kFirstEntryOffset & kDoubleAlignmentMask) ==
-              0);  // NOLINT
-STATIC_ASSERT((ConstantPoolArray::kExtendedFirstOffset &
-               kDoubleAlignmentMask) == 0);  // NOLINT
STATIC_ASSERT((FixedTypedArrayBase::kDataOffset & kDoubleAlignmentMask) ==
              0);  // NOLINT


HeapObject* Heap::EnsureDoubleAligned(HeapObject* object, int size) {
  if ((OffsetFrom(object->address()) & kDoubleAlignmentMask) != 0) {
    CreateFillerObjectAt(object->address(), kPointerSize);
    return HeapObject::FromAddress(object->address() + kPointerSize);
  } else {
    CreateFillerObjectAt(object->address() + size - kPointerSize, kPointerSize);
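Note: EnsureDoubleAligned relies on the caller having over-allocated by one pointer word, so a one-word filler can be placed either in front of the object (misaligned start) or in the slack at its end (aligned start). Below is a minimal standalone sketch of that arithmetic; the *Sketch names and the 32-bit constants are illustrative assumptions, not V8 declarations.

// Sketch only: the padding/filler idea behind EnsureDoubleAligned, using plain
// integers instead of V8's HeapObject/Address types (assumed 32-bit host).
#include <cstddef>
#include <cstdint>
#include <cstdio>

constexpr uintptr_t kPointerSizeSketch = 4;
constexpr uintptr_t kDoubleAlignmentMaskSketch = 8 - 1;

// 'addr' is pointer-aligned; 'size' already includes one extra word of slack.
// Returns a double-aligned start address inside [addr, addr + size).
uintptr_t EnsureDoubleAlignedSketch(uintptr_t addr, size_t size) {
  if ((addr & kDoubleAlignmentMaskSketch) != 0) {
    // Misaligned start: a one-word filler would be written at 'addr' and the
    // object begins one word later.
    return addr + kPointerSizeSketch;
  }
  // Aligned start: the slack word at addr + size - kPointerSize becomes filler.
  (void)size;
  return addr;
}

int main() {
  std::printf("%lx %lx\n",
              static_cast<unsigned long>(EnsureDoubleAlignedSketch(0x1000, 24)),
              static_cast<unsigned long>(EnsureDoubleAlignedSketch(0x1004, 24)));
  return 0;  // prints 1000 1008: one aligned start, one shifted by a word
}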
(...skipping 595 matching lines...)
#define ALLOCATE_PARTIAL_MAP(instance_type, size, field_name)                \
  {                                                                          \
    Map* map;                                                                \
    if (!AllocatePartialMap((instance_type), (size)).To(&map)) return false; \
    set_##field_name##_map(map);                                             \
  }

    ALLOCATE_PARTIAL_MAP(FIXED_ARRAY_TYPE, kVariableSizeSentinel, fixed_array);
    ALLOCATE_PARTIAL_MAP(ODDBALL_TYPE, Oddball::kSize, undefined);
    ALLOCATE_PARTIAL_MAP(ODDBALL_TYPE, Oddball::kSize, null);
-    ALLOCATE_PARTIAL_MAP(CONSTANT_POOL_ARRAY_TYPE, kVariableSizeSentinel,
-                         constant_pool_array);

#undef ALLOCATE_PARTIAL_MAP
  }

  // Allocate the empty array.
  {
    AllocationResult allocation = AllocateEmptyFixedArray();
    if (!allocation.To(&obj)) return false;
  }
  set_empty_fixed_array(FixedArray::cast(obj));
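Note: the bootstrap macro above is only token-pasting shorthand; hand-expanding the fixed_array entry of ALLOCATE_PARTIAL_MAP gives the block below. The removed constant_pool_array entry expanded the same way, which is why deleting it needs no other bookkeeping here.

// Hand expansion of
//   ALLOCATE_PARTIAL_MAP(FIXED_ARRAY_TYPE, kVariableSizeSentinel, fixed_array);
// set_fixed_array_map() comes from pasting set_##field_name##_map.
{
  Map* map;
  if (!AllocatePartialMap((FIXED_ARRAY_TYPE), (kVariableSizeSentinel)).To(&map))
    return false;
  set_fixed_array_map(map);
}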
(...skipping 16 matching lines...)
  // Set preliminary exception sentinel value before actually initializing it.
  set_exception(null_value());

  // Allocate the empty descriptor array.
  {
    AllocationResult allocation = AllocateEmptyFixedArray();
    if (!allocation.To(&obj)) return false;
  }
  set_empty_descriptor_array(DescriptorArray::cast(obj));

-  // Allocate the constant pool array.
-  {
-    AllocationResult allocation = AllocateEmptyConstantPoolArray();
-    if (!allocation.To(&obj)) return false;
-  }
-  set_empty_constant_pool_array(ConstantPoolArray::cast(obj));
-
  // Fix the instance_descriptors for the existing maps.
  meta_map()->set_code_cache(empty_fixed_array());
  meta_map()->set_dependent_code(DependentCode::cast(empty_fixed_array()));
  meta_map()->set_raw_transitions(Smi::FromInt(0));
  meta_map()->set_instance_descriptors(empty_descriptor_array());
  if (FLAG_unbox_double_fields) {
    meta_map()->set_layout_descriptor(LayoutDescriptor::FastPointerLayout());
  }

  fixed_array_map()->set_code_cache(empty_fixed_array());
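Note: the recurring "AllocationResult allocation = ...; if (!allocation.To(&obj)) return false;" blocks are V8's allocation-result idiom: To() either hands back the object or reports that the allocation failed and the caller must bail out. A rough standalone analogue of the shape, with a made-up ResultSketch type rather than the real AllocationResult:

// Sketch of the "result.To(&obj) or bail out" idiom with a hypothetical type.
struct Obj {};

struct ResultSketch {
  Obj* value;  // non-null on success
  bool To(Obj** out) const {
    if (value == nullptr) return false;  // allocation failed
    *out = value;
    return true;
  }
};

ResultSketch AllocateSomethingSketch() {
  static Obj the_obj;
  return ResultSketch{&the_obj};
}

bool InitializeRootsSketch() {
  Obj* obj = nullptr;
  {
    ResultSketch allocation = AllocateSomethingSketch();
    if (!allocation.To(&obj)) return false;  // propagate the failure upward
  }
  return obj != nullptr;  // ... continue using obj ...
}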
(...skipping 16 matching lines...)
  }

  null_map()->set_code_cache(empty_fixed_array());
  null_map()->set_dependent_code(DependentCode::cast(empty_fixed_array()));
  null_map()->set_raw_transitions(Smi::FromInt(0));
  null_map()->set_instance_descriptors(empty_descriptor_array());
  if (FLAG_unbox_double_fields) {
    null_map()->set_layout_descriptor(LayoutDescriptor::FastPointerLayout());
  }

-  constant_pool_array_map()->set_code_cache(empty_fixed_array());
-  constant_pool_array_map()->set_dependent_code(
-      DependentCode::cast(empty_fixed_array()));
-  constant_pool_array_map()->set_raw_transitions(Smi::FromInt(0));
-  constant_pool_array_map()->set_instance_descriptors(empty_descriptor_array());
-  if (FLAG_unbox_double_fields) {
-    constant_pool_array_map()->set_layout_descriptor(
-        LayoutDescriptor::FastPointerLayout());
-  }
-
  // Fix prototype object for existing maps.
  meta_map()->set_prototype(null_value());
  meta_map()->set_constructor_or_backpointer(null_value());

  fixed_array_map()->set_prototype(null_value());
  fixed_array_map()->set_constructor_or_backpointer(null_value());

  undefined_map()->set_prototype(null_value());
  undefined_map()->set_constructor_or_backpointer(null_value());

  null_map()->set_prototype(null_value());
  null_map()->set_constructor_or_backpointer(null_value());

-  constant_pool_array_map()->set_prototype(null_value());
-  constant_pool_array_map()->set_constructor_or_backpointer(null_value());
-
  {  // Map allocation
#define ALLOCATE_MAP(instance_type, size, field_name)               \
  {                                                                 \
    Map* map;                                                       \
    if (!AllocateMap((instance_type), size).To(&map)) return false; \
    set_##field_name##_map(map);                                    \
  }

#define ALLOCATE_VARSIZE_MAP(instance_type, field_name) \
  ALLOCATE_MAP(instance_type, kVariableSizeSentinel, field_name)
(...skipping 1040 matching lines...)
  DCHECK(isolate_->code_range() == NULL || !isolate_->code_range()->valid() ||
         isolate_->code_range()->contains(code->address()));
  code->set_gc_metadata(Smi::FromInt(0));
  code->set_ic_age(global_ic_age_);
  return code;
}


AllocationResult Heap::CopyCode(Code* code) {
  AllocationResult allocation;
-  HeapObject* new_constant_pool;
-  if (FLAG_enable_ool_constant_pool &&
-      code->constant_pool() != empty_constant_pool_array()) {
-    // Copy the constant pool, since edits to the copied code may modify
-    // the constant pool.
-    allocation = CopyConstantPoolArray(code->constant_pool());
-    if (!allocation.To(&new_constant_pool)) return allocation;
-  } else {
-    new_constant_pool = empty_constant_pool_array();
-  }

  HeapObject* result = NULL;
  // Allocate an object the same size as the code object.
  int obj_size = code->Size();
  allocation = AllocateRaw(obj_size, CODE_SPACE, CODE_SPACE);
  if (!allocation.To(&result)) return allocation;

  // Copy code object.
  Address old_addr = code->address();
  Address new_addr = result->address();
  CopyBlock(new_addr, old_addr, obj_size);
  Code* new_code = Code::cast(result);

-  // Update the constant pool.
-  new_code->set_constant_pool(new_constant_pool);
-
  // Relocate the copy.
  DCHECK(IsAligned(bit_cast<intptr_t>(new_code->address()), kCodeAlignment));
  DCHECK(isolate_->code_range() == NULL || !isolate_->code_range()->valid() ||
         isolate_->code_range()->contains(code->address()));
  new_code->Relocate(new_addr - old_addr);
  return new_code;
}
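Note: CopyCode is a raw byte copy (CopyBlock) followed by Relocate, which shifts absolute pointers inside the copied object by the move delta; with embedded constant pools the pool data moves together with the instructions, so the separate out-of-line pool copy removed above is no longer needed. A toy illustration of the copy-then-fix-up-by-delta idea follows; Blob, InitBlob and CopyAndRelocate are invented for the sketch and say nothing about V8's actual Code layout.

// Toy copy-then-relocate: the payload carries one absolute "pointer" into
// itself, which must be shifted by the move delta after a raw memcpy.
#include <cstdint>
#include <cstdio>
#include <cstring>

struct Blob {
  uintptr_t self_ref;  // absolute address of 'data' inside this same object
  char data[8];
};

void InitBlob(Blob* b) {
  b->self_ref = reinterpret_cast<uintptr_t>(b->data);
  std::memcpy(b->data, "constant", 8);
}

Blob* CopyAndRelocate(const Blob* src, Blob* dst_storage) {
  std::memcpy(dst_storage, src, sizeof(Blob));      // CopyBlock analogue
  intptr_t delta = reinterpret_cast<intptr_t>(dst_storage) -
                   reinterpret_cast<intptr_t>(src);
  dst_storage->self_ref += delta;                   // Relocate analogue
  return dst_storage;
}

int main() {
  Blob a, b;
  InitBlob(&a);
  Blob* copy = CopyAndRelocate(&a, &b);
  std::printf("%d\n", copy->self_ref == reinterpret_cast<uintptr_t>(copy->data));
  return 0;  // prints 1: the internal pointer tracks the new location
}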


AllocationResult Heap::CopyCode(Code* code, Vector<byte> reloc_info) {
-  // Allocate ByteArray and ConstantPoolArray before the Code object, so that we
-  // do not risk leaving uninitialized Code object (and breaking the heap).
+  // Allocate ByteArray before the Code object, so that we do not risk
+  // leaving uninitialized Code object (and breaking the heap).
  ByteArray* reloc_info_array;
  {
    AllocationResult allocation =
        AllocateByteArray(reloc_info.length(), TENURED);
    if (!allocation.To(&reloc_info_array)) return allocation;
  }
-  HeapObject* new_constant_pool;
-  if (FLAG_enable_ool_constant_pool &&
-      code->constant_pool() != empty_constant_pool_array()) {
-    // Copy the constant pool, since edits to the copied code may modify
-    // the constant pool.
-    AllocationResult allocation = CopyConstantPoolArray(code->constant_pool());
-    if (!allocation.To(&new_constant_pool)) return allocation;
-  } else {
-    new_constant_pool = empty_constant_pool_array();
-  }

  int new_body_size = RoundUp(code->instruction_size(), kObjectAlignment);

  int new_obj_size = Code::SizeFor(new_body_size);

  Address old_addr = code->address();

  size_t relocation_offset =
      static_cast<size_t>(code->instruction_end() - old_addr);

  HeapObject* result;
  AllocationResult allocation =
      AllocateRaw(new_obj_size, CODE_SPACE, CODE_SPACE);
  if (!allocation.To(&result)) return allocation;

  // Copy code object.
  Address new_addr = result->address();

  // Copy header and instructions.
  CopyBytes(new_addr, old_addr, relocation_offset);

  Code* new_code = Code::cast(result);
  new_code->set_relocation_info(reloc_info_array);

-  // Update constant pool.
-  new_code->set_constant_pool(new_constant_pool);
-
  // Copy patched rinfo.
  CopyBytes(new_code->relocation_start(), reloc_info.start(),
            static_cast<size_t>(reloc_info.length()));

  // Relocate the copy.
  DCHECK(IsAligned(bit_cast<intptr_t>(new_code->address()), kCodeAlignment));
  DCHECK(isolate_->code_range() == NULL || !isolate_->code_range()->valid() ||
         isolate_->code_range()->contains(code->address()));
  new_code->Relocate(new_addr - old_addr);

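Note: the new body size is the instruction size rounded up to the object-alignment granule. For a power-of-two alignment, RoundUp is the usual add-then-mask trick; a quick worked check with an assumed 8-byte granule (RoundUpSketch is illustrative, not the V8 helper):

// RoundUp to a power-of-two alignment: add (align - 1), then clear low bits.
#include <cstdio>

constexpr int RoundUpSketch(int x, int align) {
  return (x + align - 1) & ~(align - 1);
}

static_assert(RoundUpSketch(61, 8) == 64, "61 bytes of instructions -> 64");
static_assert(RoundUpSketch(64, 8) == 64, "already aligned stays put");

int main() {
  std::printf("%d %d\n", RoundUpSketch(61, 8), RoundUpSketch(64, 8));
  return 0;
}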
(...skipping 472 matching lines...)
    if (!allocation.To(&obj)) return allocation;
  }
  obj->set_map_no_write_barrier(map);
  CopyBlock(obj->address() + FixedDoubleArray::kLengthOffset,
            src->address() + FixedDoubleArray::kLengthOffset,
            FixedDoubleArray::SizeFor(len) - FixedDoubleArray::kLengthOffset);
  return obj;
}


-AllocationResult Heap::CopyConstantPoolArrayWithMap(ConstantPoolArray* src,
-                                                    Map* map) {
-  HeapObject* obj;
-  if (src->is_extended_layout()) {
-    ConstantPoolArray::NumberOfEntries small(src,
-                                             ConstantPoolArray::SMALL_SECTION);
-    ConstantPoolArray::NumberOfEntries extended(
-        src, ConstantPoolArray::EXTENDED_SECTION);
-    AllocationResult allocation =
-        AllocateExtendedConstantPoolArray(small, extended);
-    if (!allocation.To(&obj)) return allocation;
-  } else {
-    ConstantPoolArray::NumberOfEntries small(src,
-                                             ConstantPoolArray::SMALL_SECTION);
-    AllocationResult allocation = AllocateConstantPoolArray(small);
-    if (!allocation.To(&obj)) return allocation;
-  }
-  obj->set_map_no_write_barrier(map);
-  CopyBlock(obj->address() + ConstantPoolArray::kFirstEntryOffset,
-            src->address() + ConstantPoolArray::kFirstEntryOffset,
-            src->size() - ConstantPoolArray::kFirstEntryOffset);
-  return obj;
-}
-
-
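Note: CopyFixedDoubleArrayWithMap above and the removed CopyConstantPoolArrayWithMap share one shape: allocate a destination, install the map word first, then CopyBlock everything from a fixed header offset onward so the freshly written header is not overwritten. A simplified standalone sketch of that offset copy; ArraySketch and its fields are a made-up layout, not V8's object model.

// Copy-with-map sketch: write the new header first, then copy the payload
// starting at a fixed offset so the header is preserved.
#include <cstddef>
#include <cstdint>
#include <cstring>

struct ArraySketch {
  uint32_t map_id;   // "map" word, set on the destination before copying
  uint32_t length;   // start of the copied region (kLengthOffset analogue)
  double data[4];
};

constexpr size_t kLengthOffsetSketch = offsetof(ArraySketch, length);

void CopyWithMapSketch(const ArraySketch* src, ArraySketch* dst,
                       uint32_t new_map_id) {
  dst->map_id = new_map_id;  // set_map_no_write_barrier analogue
  std::memcpy(reinterpret_cast<char*>(dst) + kLengthOffsetSketch,
              reinterpret_cast<const char*>(src) + kLengthOffsetSketch,
              sizeof(ArraySketch) - kLengthOffsetSketch);  // CopyBlock analogue
}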
AllocationResult Heap::AllocateRawFixedArray(int length,
                                             PretenureFlag pretenure) {
  if (length < 0 || length > FixedArray::kMaxLength) {
    v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true);
  }
  int size = FixedArray::SizeFor(length);
  AllocationSpace space = SelectSpace(size, pretenure);

  return AllocateRaw(size, space, OLD_SPACE);
}
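Note: FixedArray::SizeFor turns an element count into a byte size, which as far as I can tell is the header size plus length times kPointerSize. A back-of-the-envelope version with assumed 64-bit constants (all *Sketch names are placeholders, not the real declarations):

// Illustrative size computation for a pointer array (assumed 64-bit layout:
// 8-byte map word + 8-byte length word + 8 bytes per element).
#include <cstdio>

constexpr int kPointerSizeSketch = 8;
constexpr int kFixedArrayHeaderSketch = 2 * kPointerSizeSketch;  // map + length

constexpr int FixedArraySizeForSketch(int length) {
  return kFixedArrayHeaderSketch + length * kPointerSizeSketch;
}

static_assert(FixedArraySizeForSketch(0) == 16, "empty array is just a header");
static_assert(FixedArraySizeForSketch(3) == 40, "16 + 3 * 8");

int main() {
  std::printf("%d\n", FixedArraySizeForSketch(3));
  return 0;
}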
(...skipping 69 matching lines...)
  HeapObject* object;
  {
    AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
    if (!allocation.To(&object)) return allocation;
  }

  return EnsureDoubleAligned(object, size);
}


-AllocationResult Heap::AllocateConstantPoolArray(
-    const ConstantPoolArray::NumberOfEntries& small) {
-  CHECK(small.are_in_range(0, ConstantPoolArray::kMaxSmallEntriesPerType));
-  int size = ConstantPoolArray::SizeFor(small);
-#ifndef V8_HOST_ARCH_64_BIT
-  size += kPointerSize;
-#endif
-  AllocationSpace space = SelectSpace(size, TENURED);
-
-  HeapObject* object;
-  {
-    AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
-    if (!allocation.To(&object)) return allocation;
-  }
-  object = EnsureDoubleAligned(object, size);
-  object->set_map_no_write_barrier(constant_pool_array_map());
-
-  ConstantPoolArray* constant_pool = ConstantPoolArray::cast(object);
-  constant_pool->Init(small);
-  constant_pool->ClearPtrEntries(isolate());
-  return constant_pool;
-}
-
-
-AllocationResult Heap::AllocateExtendedConstantPoolArray(
-    const ConstantPoolArray::NumberOfEntries& small,
-    const ConstantPoolArray::NumberOfEntries& extended) {
-  CHECK(small.are_in_range(0, ConstantPoolArray::kMaxSmallEntriesPerType));
-  CHECK(extended.are_in_range(0, kMaxInt));
-  int size = ConstantPoolArray::SizeForExtended(small, extended);
-#ifndef V8_HOST_ARCH_64_BIT
-  size += kPointerSize;
-#endif
-  AllocationSpace space = SelectSpace(size, TENURED);
-
-  HeapObject* object;
-  {
-    AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
-    if (!allocation.To(&object)) return allocation;
-  }
-  object = EnsureDoubleAligned(object, size);
-  object->set_map_no_write_barrier(constant_pool_array_map());
-
-  ConstantPoolArray* constant_pool = ConstantPoolArray::cast(object);
-  constant_pool->InitExtended(small, extended);
-  constant_pool->ClearPtrEntries(isolate());
-  return constant_pool;
-}
-
-
-AllocationResult Heap::AllocateEmptyConstantPoolArray() {
-  ConstantPoolArray::NumberOfEntries small(0, 0, 0, 0);
-  int size = ConstantPoolArray::SizeFor(small);
-  HeapObject* result = NULL;
-  {
-    AllocationResult allocation = AllocateRaw(size, OLD_SPACE, OLD_SPACE);
-    if (!allocation.To(&result)) return allocation;
-  }
-  result->set_map_no_write_barrier(constant_pool_array_map());
-  ConstantPoolArray::cast(result)->Init(small);
-  return result;
-}
-
-
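Note: the removed allocators grow the request by kPointerSize only on non-64-bit hosts: with 4-byte words a pointer-aligned address is double-aligned only every other slot, so EnsureDoubleAligned needs one word of slack, whereas 8-byte words are already double-aligned. A small worked check of that reasoning (assumed word sizes, not V8 constants):

// Why the extra word is only needed when the pointer size is smaller than a
// double: with 4-byte words, pointer-aligned addresses hit an 8-byte boundary
// only every other slot; with 8-byte words they always do.
#include <cstdio>

bool NeedsAlignmentSlackSketch(unsigned pointer_size, unsigned double_alignment) {
  return pointer_size < double_alignment;
}

int main() {
  std::printf("32-bit host needs slack: %d\n", NeedsAlignmentSlackSketch(4, 8));  // 1
  std::printf("64-bit host needs slack: %d\n", NeedsAlignmentSlackSketch(8, 8));  // 0
  // On a 32-bit host, of the pointer-aligned addresses 0x1000 and 0x1004 only
  // the first is double-aligned, hence the size += kPointerSize padding.
  return 0;
}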
AllocationResult Heap::AllocateSymbol() {
  // Statically ensure that it is safe to allocate symbols in paged spaces.
  STATIC_ASSERT(Symbol::kSize <= Page::kMaxRegularHeapObjectSize);

  HeapObject* result = NULL;
  AllocationResult allocation =
      AllocateRaw(Symbol::kSize, OLD_SPACE, OLD_SPACE);
  if (!allocation.To(&result)) return allocation;

  result->set_map_no_write_barrier(symbol_map());
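Note: the STATIC_ASSERT turns the fits-in-a-regular-page check into a compile-time error rather than a runtime failure. The equivalent guard in standard C++11, with placeholder limits rather than V8's actual Symbol::kSize and Page::kMaxRegularHeapObjectSize:

// Compile-time size guard; the constants are placeholders for illustration.
constexpr int kSymbolSizeSketch = 32;
constexpr int kMaxRegularHeapObjectSizeSketch = 16 * 1024;

static_assert(kSymbolSizeSketch <= kMaxRegularHeapObjectSizeSketch,
              "symbols must be allocatable in a regular page");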
(...skipping 2006 matching lines...)
      }
      delete list;
    } else {
      prev = list;
    }
    list = next;
  }
}
}
}  // namespace v8::internal
