OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 194 matching lines...)
205 Immediate(kTypeAndReadOnlyMask)); | 205 Immediate(kTypeAndReadOnlyMask)); |
206 __ j(not_zero, miss_label); | 206 __ j(not_zero, miss_label); |
207 | 207 |
208 // Store the value at the masked, scaled index. | 208 // Store the value at the masked, scaled index. |
209 const int kValueOffset = kElementsStartOffset + kPointerSize; | 209 const int kValueOffset = kElementsStartOffset + kPointerSize; |
210 __ lea(r0, Operand(elements, r0, times_4, kValueOffset - kHeapObjectTag)); | 210 __ lea(r0, Operand(elements, r0, times_4, kValueOffset - kHeapObjectTag)); |
211 __ mov(Operand(r0, 0), value); | 211 __ mov(Operand(r0, 0), value); |
212 | 212 |
213 // Update write barrier. Make sure not to clobber the value. | 213 // Update write barrier. Make sure not to clobber the value. |
214 __ mov(r1, value); | 214 __ mov(r1, value); |
215 __ RecordWrite(elements, r0, r1); | 215 __ RecordWrite(elements, r0, r1, kDontSaveFPRegs); |
216 } | 216 } |
217 | 217 |
218 | 218 |
219 void LoadIC::GenerateArrayLength(MacroAssembler* masm) { | 219 void LoadIC::GenerateArrayLength(MacroAssembler* masm) { |
220 // ----------- S t a t e ------------- | 220 // ----------- S t a t e ------------- |
221 // -- eax : receiver | 221 // -- eax : receiver |
222 // -- ecx : name | 222 // -- ecx : name |
223 // -- esp[0] : return address | 223 // -- esp[0] : return address |
224 // ----------------------------------- | 224 // ----------------------------------- |
225 Label miss; | 225 Label miss; |
(...skipping 477 matching lines...)
703 // -- ecx : key | 703 // -- ecx : key |
704 // -- edx : receiver | 704 // -- edx : receiver |
705 // -- esp[0] : return address | 705 // -- esp[0] : return address |
706 // ----------------------------------- | 706 // ----------------------------------- |
707 Label slow, notin; | 707 Label slow, notin; |
708 Operand mapped_location = | 708 Operand mapped_location = |
709 GenerateMappedArgumentsLookup(masm, edx, ecx, ebx, edi, &notin, &slow); | 709 GenerateMappedArgumentsLookup(masm, edx, ecx, ebx, edi, &notin, &slow); |
710 __ mov(mapped_location, eax); | 710 __ mov(mapped_location, eax); |
711 __ lea(ecx, mapped_location); | 711 __ lea(ecx, mapped_location); |
712 __ mov(edx, eax); | 712 __ mov(edx, eax); |
713 __ RecordWrite(ebx, ecx, edx); | 713 __ RecordWrite(ebx, ecx, edx, kDontSaveFPRegs); |
714 __ Ret(); | 714 __ Ret(); |
715 __ bind(&notin); | 715 __ bind(&notin); |
716 // The unmapped lookup expects that the parameter map is in ebx. | 716 // The unmapped lookup expects that the parameter map is in ebx. |
717 Operand unmapped_location = | 717 Operand unmapped_location = |
718 GenerateUnmappedArgumentsLookup(masm, ecx, ebx, edi, &slow); | 718 GenerateUnmappedArgumentsLookup(masm, ecx, ebx, edi, &slow); |
719 __ mov(unmapped_location, eax); | 719 __ mov(unmapped_location, eax); |
720 __ lea(edi, unmapped_location); | 720 __ lea(edi, unmapped_location); |
721 __ mov(edx, eax); | 721 __ mov(edx, eax); |
722 __ RecordWrite(ebx, edi, edx); | 722 __ RecordWrite(ebx, edi, edx, kDontSaveFPRegs); |
723 __ Ret(); | 723 __ Ret(); |
724 __ bind(&slow); | 724 __ bind(&slow); |
725 GenerateMiss(masm, false); | 725 GenerateMiss(masm, false); |
726 } | 726 } |
727 | 727 |
728 | 728 |
729 void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm, | 729 void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm, |
730 StrictModeFlag strict_mode) { | 730 StrictModeFlag strict_mode) { |
731 // ----------- S t a t e ------------- | 731 // ----------- S t a t e ------------- |
732 // -- eax : value | 732 // -- eax : value |
(...skipping 71 matching lines...)
804 __ cmp(ecx, FieldOperand(edx, JSArray::kLengthOffset)); // Compare smis. | 804 __ cmp(ecx, FieldOperand(edx, JSArray::kLengthOffset)); // Compare smis. |
805 __ j(above_equal, &extra); | 805 __ j(above_equal, &extra); |
806 | 806 |
807 // Fast case: Do the store. | 807 // Fast case: Do the store. |
808 __ bind(&fast); | 808 __ bind(&fast); |
809 // eax: value | 809 // eax: value |
810 // ecx: key (a smi) | 810 // ecx: key (a smi) |
811 // edx: receiver | 811 // edx: receiver |
812 // edi: FixedArray receiver->elements | 812 // edi: FixedArray receiver->elements |
813 __ mov(CodeGenerator::FixedArrayElementOperand(edi, ecx), eax); | 813 __ mov(CodeGenerator::FixedArrayElementOperand(edi, ecx), eax); |
| 814 |
814 // Update write barrier for the elements array address. | 815 // Update write barrier for the elements array address. |
815 __ mov(edx, Operand(eax)); | 816 __ mov(edx, Operand(eax)); // Preserve the value which is returned. |
816 __ RecordWrite(edi, 0, edx, ecx); | 817 __ RecordWriteArray(edi, edx, ecx, kDontSaveFPRegs); |
817 __ ret(0); | 818 __ ret(0); |
818 } | 819 } |
819 | 820 |
820 | 821 |
821 // The generated code does not accept smi keys. | 822 // The generated code does not accept smi keys. |
822 // The generated code falls through if both probes miss. | 823 // The generated code falls through if both probes miss. |
823 static void GenerateMonomorphicCacheProbe(MacroAssembler* masm, | 824 static void GenerateMonomorphicCacheProbe(MacroAssembler* masm, |
824 int argc, | 825 int argc, |
825 Code::Kind kind, | 826 Code::Kind kind, |
826 Code::ExtraICState extra_ic_state) { | 827 Code::ExtraICState extra_ic_state) { |
(...skipping 809 matching lines...)
1636 Condition cc = *jmp_address == Assembler::kJncShortOpcode | 1637 Condition cc = *jmp_address == Assembler::kJncShortOpcode |
1637 ? not_zero | 1638 ? not_zero |
1638 : zero; | 1639 : zero; |
1639 *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc); | 1640 *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc); |
1640 } | 1641 } |
1641 | 1642 |
1642 | 1643 |
1643 } } // namespace v8::internal | 1644 } } // namespace v8::internal |
1644 | 1645 |
1645 #endif // V8_TARGET_ARCH_IA32 | 1646 #endif // V8_TARGET_ARCH_IA32 |