Chromium Code Reviews
| Index: src/code-stub-assembler.cc |
| diff --git a/src/code-stub-assembler.cc b/src/code-stub-assembler.cc |
| index 95818bda1837339c71b4fc2807e3345b63046149..08ab7ef71bfb810565b4636cf39ee6363ef1982f 100644 |
| --- a/src/code-stub-assembler.cc |
| +++ b/src/code-stub-assembler.cc |
| @@ -555,6 +555,11 @@ Node* CodeStubAssembler::LoadFixedArrayElementConstantIndex(Node* object, |
| return Load(MachineType::AnyTagged(), object, offset); |
| } |
| +Node* CodeStubAssembler::LoadNativeContext(Node* context) { |
| + return LoadFixedArrayElementConstantIndex(context, |
| + Context::NATIVE_CONTEXT_INDEX); |
| +} |
| + |
| Node* CodeStubAssembler::StoreHeapNumberValue(Node* object, Node* value) { |
| return StoreNoWriteBarrier( |
| MachineRepresentation::kFloat64, object, |
| @@ -582,6 +587,9 @@ Node* CodeStubAssembler::StoreMapNoWriteBarrier(Node* object, Node* map) { |
| Node* CodeStubAssembler::StoreFixedArrayElementNoWriteBarrier(Node* object, |
| Node* index, |
| Node* value) { |
| + if (Is64()) { |
| + index = ChangeInt32ToInt64(index); |
| + } |
| Node* offset = |
| IntPtrAdd(WordShl(index, IntPtrConstant(kPointerSizeLog2)), |
| IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag)); |
| @@ -601,6 +609,45 @@ Node* CodeStubAssembler::StoreFixedArrayElementInt32Index(Node* object, |
| return Store(MachineRepresentation::kTagged, object, offset, value); |
| } |
| +Node* CodeStubAssembler::StoreFixedDoubleArrayElementInt32Index(Node* object, |
| + Node* index, |
| + Node* value) { |
| + if (Is64()) { |
| + index = ChangeInt32ToInt64(index); |
| + } |

[Review comment — Benedikt Meurer, 2016/05/03 03:57:49]
How about adding a ChangeInt32ToIntPtr helper that [comment truncated in page capture — presumably: performs the ChangeInt32ToInt64 conversion only on 64-bit platforms, so the `if (Is64())` check is not repeated at every call site].

[Reply — danno, 2016/05/03 07:10:48]
Done.
| + Node* offset = |
| + IntPtrAdd(WordShl(index, IntPtrConstant(kPointerSizeLog2)), |
| + IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag)); |
| + return StoreNoWriteBarrier(MachineRepresentation::kFloat64, object, offset, |
| + value); |
| +} |
| + |
| +Node* CodeStubAssembler::StoreFixedArrayElementInt32Index(Node* object, |
| + int index, |
| + Node* value) { |
| + Node* offset = IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag + |
| + index * kPointerSize); |
| + return Store(MachineRepresentation::kTagged, object, offset, value); |
| +} |
| + |
| +Node* CodeStubAssembler::StoreFixedArrayElementNoWriteBarrier(Node* object, |
| + int index, |
| + Node* value) { |
| + Node* offset = IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag + |
| + index * kPointerSize); |
| + return StoreNoWriteBarrier(MachineRepresentation::kTagged, object, offset, |
| + value); |
| +} |
| + |
| +Node* CodeStubAssembler::StoreFixedDoubleArrayElementInt32Index(Node* object, |
| + int index, |
| + Node* value) { |
| + Node* offset = IntPtrConstant(FixedDoubleArray::kHeaderSize - kHeapObjectTag + |
| + index * kDoubleSize); |
| + return StoreNoWriteBarrier(MachineRepresentation::kFloat64, object, offset, |
| + value); |
| +} |
| + |
| Node* CodeStubAssembler::AllocateHeapNumber() { |
| Node* result = Allocate(HeapNumber::kSize, kNone); |
| StoreMapNoWriteBarrier(result, HeapNumberMapConstant()); |
| @@ -633,6 +680,85 @@ Node* CodeStubAssembler::AllocateSeqTwoByteString(int length) { |
| return result; |
| } |
| +Node* CodeStubAssembler::AllocateJSArray(ElementsKind kind, |
| + Node* native_context, int capacity, |
| + int length, |
| + compiler::Node* allocation_site) { |
| + bool is_double = IsFastDoubleElementsKind(kind); |
| + int element_size = is_double ? kDoubleSize : kPointerSize; |
| + int total_size = |
| + JSArray::kSize + FixedArray::kHeaderSize + element_size * capacity; |
| + int elements_offset = JSArray::kSize; |
| + |
| + if (allocation_site != nullptr) { |
| + total_size += AllocationMemento::kSize; |
| + elements_offset += AllocationMemento::kSize; |
| + } |
| + |
| + // Allocate both array and elements object, and initialize the JSArray. |
| + Heap* heap = isolate()->heap(); |
| + Node* array = Allocate(total_size); |
| + Node* array_map = LoadFixedArrayElementConstantIndex( |
| + native_context, Context::ArrayMapIndex(kind)); |
| + StoreMapNoWriteBarrier(array, array_map); |
| + Node* empty_properties = |
| + HeapConstant(Handle<HeapObject>(heap->empty_fixed_array())); |
| + StoreObjectFieldNoWriteBarrier(array, JSArray::kPropertiesOffset, |
| + empty_properties); |
| + StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, |
| + SmiConstant(Smi::FromInt(length))); |
| + |
| + if (allocation_site != nullptr) { |
| + CreateAllocationMemento(array, JSArray::kSize, allocation_site); |
| + } |
| + |
| + // Setup elements object. |
| + Node* elements = InnerAllocate(array, elements_offset); |
| + StoreObjectFieldNoWriteBarrier(array, JSArray::kElementsOffset, elements); |
| + Handle<Map> elements_map(is_double ? heap->fixed_double_array_map() |
| + : heap->fixed_array_map()); |
| + StoreMapNoWriteBarrier(elements, HeapConstant(elements_map)); |
| + StoreObjectFieldNoWriteBarrier(elements, FixedArray::kLengthOffset, |
| + SmiConstant(Smi::FromInt(capacity))); |
| + |
| + Node* double_hole = Float64Constant(bit_cast<double>(kHoleNanInt64)); |
| + Node* hole = HeapConstant(Handle<HeapObject>(heap->the_hole_value())); |
| + if (capacity <= kElementLoopUnrollThreshold) { |
| + for (int i = 0; i < capacity; ++i) { |
| + if (is_double) { |
| + StoreFixedDoubleArrayElementInt32Index(elements, i, double_hole); |
| + } else { |
| + StoreFixedArrayElementNoWriteBarrier(elements, i, hole); |
| + } |
| + } |
| + } else { |
| + // TODO(danno): Add a loop for initialization |
| + UNIMPLEMENTED(); |
| + } |
| + |
| + return array; |
| +} |
| + |
| +void CodeStubAssembler::CreateAllocationMemento( |
| + compiler::Node* base_allocation, int base_allocation_size, |
| + compiler::Node* allocation_site) { |
| + StoreObjectFieldNoWriteBarrier( |
| + base_allocation, AllocationMemento::kMapOffset + base_allocation_size, |
| + HeapConstant(Handle<Map>(isolate()->heap()->allocation_memento_map()))); |
| + StoreObjectFieldNoWriteBarrier( |
| + base_allocation, |
| + AllocationMemento::kAllocationSiteOffset + base_allocation_size, |
| + allocation_site); |
| + if (FLAG_allocation_site_pretenuring) { |
| + Node* count = LoadObjectField(allocation_site, |
| + AllocationSite::kPretenureCreateCountOffset); |
| + Node* incremented_count = IntPtrAdd(count, SmiConstant(Smi::FromInt(1))); |
| + StoreObjectFieldNoWriteBarrier(allocation_site, |
| + AllocationSite::kPretenureCreateCountOffset, |
| + incremented_count); |
| + } |
| +} |
| + |
| Node* CodeStubAssembler::TruncateTaggedToFloat64(Node* context, Node* value) { |
| // We might need to loop once due to ToNumber conversion. |
| Variable var_value(this, MachineRepresentation::kTagged), |