Chromium Code Reviews
| Index: src/x64/stub-cache-x64.cc |
| diff --git a/src/x64/stub-cache-x64.cc b/src/x64/stub-cache-x64.cc |
| index 71ce856169ad6bcf185b3c02b9b9d9aaec1fbd20..4ebf694dc4b6a8e4c65fea4f5988983465b970fe 100644 |
| --- a/src/x64/stub-cache-x64.cc |
| +++ b/src/x64/stub-cache-x64.cc |
| @@ -3584,6 +3584,57 @@ void KeyedLoadStubCompiler::GenerateLoadFastElement(MacroAssembler* masm) { |
| } |
| +void KeyedLoadStubCompiler::GenerateLoadFastDoubleElement( |
| + MacroAssembler* masm) { |
| + // ----------- S t a t e ------------- |
| + // -- rax : key |
| + // -- rdx : receiver |
| + // -- rsp[0] : return address |
| + // ----------------------------------- |
| + Label miss_force_generic, slow_allocate_heapnumber; |
| + |
| + // This stub is meant to be tail-jumped to, the receiver must already |
| + // have been verified by the caller to not be a smi. |
| + |
| + // Check that the key is a smi. |
| + __ JumpIfNotSmi(rax, &miss_force_generic); |
| + |
| + // Get the elements array. |
| + __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset)); |
| + __ AssertFastElements(rcx); |
| + |
| + // Check that the key is within bounds. |
| + __ SmiCompare(rax, FieldOperand(rcx, FixedArray::kLengthOffset)); |
| + __ j(above_equal, &miss_force_generic); |
| + |
| + // Check for the hole |
| + __ SmiToInteger32(kScratchRegister, rax); |
| + uint32_t offset = FixedDoubleArray::kHeaderSize + sizeof(kHoleNanLower32); |
| + __ cmpl(FieldOperand(rcx, kScratchRegister, times_8, offset), |
| + Immediate(kHoleNanUpper32)); |
| + __ j(equal, &miss_force_generic); |
| + |
| + // Always allocate a heap number for the result. |
| + __ fld_d(FieldOperand(rcx, kScratchRegister, times_8, |
|
Mads Ager (chromium)
2011/07/12 12:03:26
Same as on ia32 this can lead to unbalanced push/pop [comment truncated in extraction; likely refers to the x87 FPU stack being left unbalanced when `fld_d` precedes an `AllocateHeapNumber` that can bail out]
danno
2011/07/13 08:59:52
Done.
|
| + FixedDoubleArray::kHeaderSize)); |
| + __ AllocateHeapNumber(rcx, rbx, &slow_allocate_heapnumber); |
| + // Set the value. |
| + __ movq(rax, rcx); |
| + __ fstp_d(FieldOperand(rcx, HeapNumber::kValueOffset)); |
| + __ ret(0); |
| + |
| + __ bind(&slow_allocate_heapnumber); |
| + Handle<Code> slow_ic = |
| + masm->isolate()->builtins()->KeyedLoadIC_Slow(); |
| + __ jmp(slow_ic, RelocInfo::CODE_TARGET); |
| + |
| + __ bind(&miss_force_generic); |
| + Handle<Code> miss_ic = |
| + masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric(); |
| + __ jmp(miss_ic, RelocInfo::CODE_TARGET); |
| +} |
| + |
| + |
| void KeyedStoreStubCompiler::GenerateStoreFastElement(MacroAssembler* masm, |
| bool is_js_array) { |
| // ----------- S t a t e ------------- |
| @@ -3634,6 +3685,83 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(MacroAssembler* masm, |
| } |
| +void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement( |
| + MacroAssembler* masm, |
| + bool is_js_array) { |
| + // ----------- S t a t e ------------- |
| + // -- rax : value |
| + // -- rcx : key |
| + // -- rdx : receiver |
| + // -- rsp[0] : return address |
| + // ----------------------------------- |
| + Label miss_force_generic, smi_value, is_nan, have_double_value; |
| + |
| + // This stub is meant to be tail-jumped to, the receiver must already |
| + // have been verified by the caller to not be a smi. |
| + |
| + // Check that the key is a smi. |
| + __ JumpIfNotSmi(rcx, &miss_force_generic); |
| + |
| + // Get the elements array. |
| + __ movq(rdi, FieldOperand(rdx, JSObject::kElementsOffset)); |
| + __ AssertFastElements(rdi); |
| + |
| + // Check that the key is within bounds. |
| + if (is_js_array) { |
| + __ SmiCompare(rcx, FieldOperand(rdx, JSArray::kLengthOffset)); |
| + __ j(above_equal, &miss_force_generic); |
|
Mads Ager (chromium)
2011/07/12 12:03:26
Move the branch out of the conditional?
danno
2011/07/13 08:59:52
Done.
|
| + } else { |
| + __ SmiCompare(rcx, FieldOperand(rdi, FixedDoubleArray::kLengthOffset)); |
| + __ j(above_equal, &miss_force_generic); |
| + } |
| + |
| + // Handle smi values specially |
| + __ JumpIfSmi(rax, &smi_value, Label::kNear); |
| + |
| + __ CheckMap(rax, |
| + masm->isolate()->factory()->heap_number_map(), |
| + &miss_force_generic, |
| + DONT_DO_SMI_CHECK); |
| + |
| + // Double value, check for any nan and canonicalize. |
| + uint32_t offset = HeapNumber::kValueOffset + sizeof(kHoleNanLower32); |
| + __ cmpl(FieldOperand(rax, offset), Immediate(0x7ff00000)); |
| + __ j(greater, &is_nan, Label::kNear); |
| + |
| + ExternalReference canonical_nan_reference = |
| + ExternalReference::address_of_canonical_non_hole_nan(); |
| + __ movsd(xmm0, FieldOperand(rax, HeapNumber::kValueOffset)); |
| + __ bind(&have_double_value); |
| + |
| + __ SmiToInteger32(rcx, rcx); |
| + __ movsd(FieldOperand(rdi, rcx, times_8, FixedDoubleArray::kHeaderSize), |
| + xmm0); |
| + __ ret(0); |
| + |
| + __ bind(&is_nan); |
| + // Convert all nans to the same canonical nan value in the double array. |
| + __ Set(kScratchRegister, kCanonicalNonHoleNanInt64); |
| + __ movq(xmm0, kScratchRegister); |
| + __ jmp(&have_double_value, Label::kNear); |
| + |
| + __ bind(&smi_value); |
| + // Value is a smi. convert to a double and store. |
| + __ SmiToInteger32(rax, rax); |
| + __ push(rax); |
| + __ fild_s(Operand(rsp, 0)); |
| + __ pop(rax); |
| + __ SmiToInteger32(rcx, rcx); |
| + __ fstp_d(FieldOperand(rdi, rcx, times_8, FixedDoubleArray::kHeaderSize)); |
| + __ ret(0); |
| + |
| + // Handle store cache miss, replacing the ic with the generic stub. |
| + __ bind(&miss_force_generic); |
| + Handle<Code> ic_force_generic = |
| + masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric(); |
| + __ jmp(ic_force_generic, RelocInfo::CODE_TARGET); |
| +} |
| + |
| + |
| #undef __ |
| } } // namespace v8::internal |