| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "v8.h" | 5 #include "v8.h" |
| 6 | 6 |
| 7 #if V8_TARGET_ARCH_IA32 | 7 #if V8_TARGET_ARCH_X87 |
| 8 | 8 |
| 9 #include "codegen.h" | 9 #include "codegen.h" |
| 10 #include "ic-inl.h" | 10 #include "ic-inl.h" |
| 11 #include "runtime.h" | 11 #include "runtime.h" |
| 12 #include "stub-cache.h" | 12 #include "stub-cache.h" |
| 13 | 13 |
| 14 namespace v8 { | 14 namespace v8 { |
| 15 namespace internal { | 15 namespace internal { |
| 16 | 16 |
| 17 // ---------------------------------------------------------------------------- | 17 // ---------------------------------------------------------------------------- |
| (...skipping 165 matching lines...) |
| 183 Immediate(kTypeAndReadOnlyMask)); | 183 Immediate(kTypeAndReadOnlyMask)); |
| 184 __ j(not_zero, miss_label); | 184 __ j(not_zero, miss_label); |
| 185 | 185 |
| 186 // Store the value at the masked, scaled index. | 186 // Store the value at the masked, scaled index. |
| 187 const int kValueOffset = kElementsStartOffset + kPointerSize; | 187 const int kValueOffset = kElementsStartOffset + kPointerSize; |
| 188 __ lea(r0, Operand(elements, r0, times_4, kValueOffset - kHeapObjectTag)); | 188 __ lea(r0, Operand(elements, r0, times_4, kValueOffset - kHeapObjectTag)); |
| 189 __ mov(Operand(r0, 0), value); | 189 __ mov(Operand(r0, 0), value); |
| 190 | 190 |
| 191 // Update write barrier. Make sure not to clobber the value. | 191 // Update write barrier. Make sure not to clobber the value. |
| 192 __ mov(r1, value); | 192 __ mov(r1, value); |
| 193 __ RecordWrite(elements, r0, r1, kDontSaveFPRegs); | 193 __ RecordWrite(elements, r0, r1); |
| 194 } | 194 } |
| 195 | 195 |
| 196 | 196 |
| 197 // Checks the receiver for special cases (value type, slow case bits). | 197 // Checks the receiver for special cases (value type, slow case bits). |
| 198 // Falls through for regular JS object. | 198 // Falls through for regular JS object. |
| 199 static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm, | 199 static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm, |
| 200 Register receiver, | 200 Register receiver, |
| 201 Register map, | 201 Register map, |
| 202 int interceptor_bit, | 202 int interceptor_bit, |
| 203 Label* slow) { | 203 Label* slow) { |
| (...skipping 461 matching lines...) |
| 665 // -- ecx : key | 665 // -- ecx : key |
| 666 // -- edx : receiver | 666 // -- edx : receiver |
| 667 // -- esp[0] : return address | 667 // -- esp[0] : return address |
| 668 // ----------------------------------- | 668 // ----------------------------------- |
| 669 Label slow, notin; | 669 Label slow, notin; |
| 670 Operand mapped_location = | 670 Operand mapped_location = |
| 671 GenerateMappedArgumentsLookup(masm, edx, ecx, ebx, edi, ¬in, &slow); | 671 GenerateMappedArgumentsLookup(masm, edx, ecx, ebx, edi, ¬in, &slow); |
| 672 __ mov(mapped_location, eax); | 672 __ mov(mapped_location, eax); |
| 673 __ lea(ecx, mapped_location); | 673 __ lea(ecx, mapped_location); |
| 674 __ mov(edx, eax); | 674 __ mov(edx, eax); |
| 675 __ RecordWrite(ebx, ecx, edx, kDontSaveFPRegs); | 675 __ RecordWrite(ebx, ecx, edx); |
| 676 __ Ret(); | 676 __ Ret(); |
| 677 __ bind(¬in); | 677 __ bind(¬in); |
| 678 // The unmapped lookup expects that the parameter map is in ebx. | 678 // The unmapped lookup expects that the parameter map is in ebx. |
| 679 Operand unmapped_location = | 679 Operand unmapped_location = |
| 680 GenerateUnmappedArgumentsLookup(masm, ecx, ebx, edi, &slow); | 680 GenerateUnmappedArgumentsLookup(masm, ecx, ebx, edi, &slow); |
| 681 __ mov(unmapped_location, eax); | 681 __ mov(unmapped_location, eax); |
| 682 __ lea(edi, unmapped_location); | 682 __ lea(edi, unmapped_location); |
| 683 __ mov(edx, eax); | 683 __ mov(edx, eax); |
| 684 __ RecordWrite(ebx, edi, edx, kDontSaveFPRegs); | 684 __ RecordWrite(ebx, edi, edx); |
| 685 __ Ret(); | 685 __ Ret(); |
| 686 __ bind(&slow); | 686 __ bind(&slow); |
| 687 GenerateMiss(masm); | 687 GenerateMiss(masm); |
| 688 } | 688 } |
| 689 | 689 |
| 690 | 690 |
| 691 static void KeyedStoreGenerateGenericHelper( | 691 static void KeyedStoreGenerateGenericHelper( |
| 692 MacroAssembler* masm, | 692 MacroAssembler* masm, |
| 693 Label* fast_object, | 693 Label* fast_object, |
| 694 Label* fast_double, | 694 Label* fast_double, |
| (...skipping 49 matching lines...) |
| 744 __ bind(&finish_object_store); | 744 __ bind(&finish_object_store); |
| 745 if (increment_length == kIncrementLength) { | 745 if (increment_length == kIncrementLength) { |
| 746 // Add 1 to receiver->length. | 746 // Add 1 to receiver->length. |
| 747 __ add(FieldOperand(edx, JSArray::kLengthOffset), | 747 __ add(FieldOperand(edx, JSArray::kLengthOffset), |
| 748 Immediate(Smi::FromInt(1))); | 748 Immediate(Smi::FromInt(1))); |
| 749 } | 749 } |
| 750 __ mov(FixedArrayElementOperand(ebx, ecx), eax); | 750 __ mov(FixedArrayElementOperand(ebx, ecx), eax); |
| 751 // Update write barrier for the elements array address. | 751 // Update write barrier for the elements array address. |
| 752 __ mov(edx, eax); // Preserve the value which is returned. | 752 __ mov(edx, eax); // Preserve the value which is returned. |
| 753 __ RecordWriteArray( | 753 __ RecordWriteArray( |
| 754 ebx, edx, ecx, kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); | 754 ebx, edx, ecx, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); |
| 755 __ ret(0); | 755 __ ret(0); |
| 756 | 756 |
| 757 __ bind(fast_double); | 757 __ bind(fast_double); |
| 758 if (check_map == kCheckMap) { | 758 if (check_map == kCheckMap) { |
| 759 // Check for fast double array case. If this fails, call through to the | 759 // Check for fast double array case. If this fails, call through to the |
| 760 // runtime. | 760 // runtime. |
| 761 __ cmp(edi, masm->isolate()->factory()->fixed_double_array_map()); | 761 __ cmp(edi, masm->isolate()->factory()->fixed_double_array_map()); |
| 762 __ j(not_equal, slow); | 762 __ j(not_equal, slow); |
| 763 // If the value is a number, store it as a double in the FastDoubleElements | 763 // If the value is a number, store it as a double in the FastDoubleElements |
| 764 // array. | 764 // array. |
| 765 } | 765 } |
| 766 | 766 |
| 767 // HOLECHECK: guards "A[i] double hole?" | 767 // HOLECHECK: guards "A[i] double hole?" |
| 768 // We have to see if the double version of the hole is present. If so | 768 // We have to see if the double version of the hole is present. If so |
| 769 // go to the runtime. | 769 // go to the runtime. |
| 770 uint32_t offset = FixedDoubleArray::kHeaderSize + sizeof(kHoleNanLower32); | 770 uint32_t offset = FixedDoubleArray::kHeaderSize + sizeof(kHoleNanLower32); |
| 771 __ cmp(FieldOperand(ebx, ecx, times_4, offset), Immediate(kHoleNanUpper32)); | 771 __ cmp(FieldOperand(ebx, ecx, times_4, offset), Immediate(kHoleNanUpper32)); |
| 772 __ j(not_equal, &fast_double_without_map_check); | 772 __ j(not_equal, &fast_double_without_map_check); |
| 773 __ JumpIfDictionaryInPrototypeChain(edx, ebx, edi, slow); | 773 __ JumpIfDictionaryInPrototypeChain(edx, ebx, edi, slow); |
| 774 __ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset)); | 774 __ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset)); |
| 775 | 775 |
| 776 __ bind(&fast_double_without_map_check); | 776 __ bind(&fast_double_without_map_check); |
| 777 __ StoreNumberToDoubleElements(eax, ebx, ecx, edi, xmm0, | 777 __ StoreNumberToDoubleElements(eax, ebx, ecx, edi, |
| 778 &transition_double_elements); | 778 &transition_double_elements, false); |
| 779 if (increment_length == kIncrementLength) { | 779 if (increment_length == kIncrementLength) { |
| 780 // Add 1 to receiver->length. | 780 // Add 1 to receiver->length. |
| 781 __ add(FieldOperand(edx, JSArray::kLengthOffset), | 781 __ add(FieldOperand(edx, JSArray::kLengthOffset), |
| 782 Immediate(Smi::FromInt(1))); | 782 Immediate(Smi::FromInt(1))); |
| 783 } | 783 } |
| 784 __ ret(0); | 784 __ ret(0); |
| 785 | 785 |
| 786 __ bind(&transition_smi_elements); | 786 __ bind(&transition_smi_elements); |
| 787 __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset)); | 787 __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset)); |
| 788 | 788 |
| (...skipping 487 matching lines...) |
| 1276 *jmp_address == Assembler::kJzShortOpcode)); | 1276 *jmp_address == Assembler::kJzShortOpcode)); |
| 1277 Condition cc = (check == ENABLE_INLINED_SMI_CHECK) | 1277 Condition cc = (check == ENABLE_INLINED_SMI_CHECK) |
| 1278 ? (*jmp_address == Assembler::kJncShortOpcode ? not_zero : zero) | 1278 ? (*jmp_address == Assembler::kJncShortOpcode ? not_zero : zero) |
| 1279 : (*jmp_address == Assembler::kJnzShortOpcode ? not_carry : carry); | 1279 : (*jmp_address == Assembler::kJnzShortOpcode ? not_carry : carry); |
| 1280 *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc); | 1280 *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc); |
| 1281 } | 1281 } |
| 1282 | 1282 |
| 1283 | 1283 |
| 1284 } } // namespace v8::internal | 1284 } } // namespace v8::internal |
| 1285 | 1285 |
| 1286 #endif // V8_TARGET_ARCH_IA32 | 1286 #endif // V8_TARGET_ARCH_X87 |
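
Note on the RecordWrite / RecordWriteArray call sites above: they emit the generational-GC write barrier after a pointer store into an object or elements array. As a rough, self-contained illustration of the concept only (a toy remembered-set sketch, not V8's actual implementation; every name below is hypothetical), the bookkeeping a write barrier performs looks roughly like this:

    // Toy illustration of a generational write barrier (hypothetical types, not V8 code).
    #include <cstdint>
    #include <unordered_set>

    struct ToyHeap {
      uintptr_t young_start = 0x1000, young_end = 0x2000;   // made-up young-generation bounds
      std::unordered_set<void**> remembered_set;            // old->young slots to rescan

      bool InYoungGen(const void* p) const {
        uintptr_t a = reinterpret_cast<uintptr_t>(p);
        return a >= young_start && a < young_end;
      }

      // Called after "*slot = value": if an old-generation object now points at a
      // young-generation object, remember the slot so a minor GC can find it.
      void RecordWrite(void** slot, void* value) {
        if (!InYoungGen(slot) && InYoungGen(value)) {
          remembered_set.insert(slot);
        }
      }
    };

In the IC stubs in this diff the equivalent bookkeeping is emitted as inline machine code. The dropped kDontSaveFPRegs argument and the dropped xmm0 scratch register presumably reflect that the x87 port has no SSE registers to pass or preserve across the barrier's slow path.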