| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 325 matching lines...) |
| 336 Register receiver, | 336 Register receiver, |
| 337 Register result, | 337 Register result, |
| 338 Register scratch, | 338 Register scratch, |
| 339 Label* miss_label) { | 339 Label* miss_label) { |
| 340 __ TryGetFunctionPrototype(receiver, result, miss_label); | 340 __ TryGetFunctionPrototype(receiver, result, miss_label); |
| 341 if (!result.is(rax)) __ movq(rax, result); | 341 if (!result.is(rax)) __ movq(rax, result); |
| 342 __ ret(0); | 342 __ ret(0); |
| 343 } | 343 } |
| 344 | 344 |
| 345 | 345 |
| 346 void StubCompiler::DoGenerateFastPropertyLoad(MacroAssembler* masm, | 346 void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm, |
| 347 Register dst, | 347 Register dst, |
| 348 Register src, | 348 Register src, |
| 349 bool inobject, | 349 bool inobject, |
| 350 int index) { | 350 int index, |
| 351 Representation representation) { |
| 352 ASSERT(!FLAG_track_double_fields || !representation.IsDouble()); |
| 351 int offset = index * kPointerSize; | 353 int offset = index * kPointerSize; |
| 352 if (!inobject) { | 354 if (!inobject) { |
| 353 // Calculate the offset into the properties array. | 355 // Calculate the offset into the properties array. |
| 354 offset = offset + FixedArray::kHeaderSize; | 356 offset = offset + FixedArray::kHeaderSize; |
| 355 __ movq(dst, FieldOperand(src, JSObject::kPropertiesOffset)); | 357 __ movq(dst, FieldOperand(src, JSObject::kPropertiesOffset)); |
| 356 src = dst; | 358 src = dst; |
| 357 } | 359 } |
| 358 __ movq(dst, FieldOperand(src, offset)); | 360 __ movq(dst, FieldOperand(src, offset)); |
| 359 } | 361 } |
| 360 | 362 |
| (...skipping 377 matching lines...) |
| 738 void StubCompiler::GenerateStoreTransition(MacroAssembler* masm, | 740 void StubCompiler::GenerateStoreTransition(MacroAssembler* masm, |
| 739 Handle<JSObject> object, | 741 Handle<JSObject> object, |
| 740 LookupResult* lookup, | 742 LookupResult* lookup, |
| 741 Handle<Map> transition, | 743 Handle<Map> transition, |
| 742 Handle<Name> name, | 744 Handle<Name> name, |
| 743 Register receiver_reg, | 745 Register receiver_reg, |
| 744 Register name_reg, | 746 Register name_reg, |
| 745 Register value_reg, | 747 Register value_reg, |
| 746 Register scratch1, | 748 Register scratch1, |
| 747 Register scratch2, | 749 Register scratch2, |
| 750 Register unused, |
| 748 Label* miss_label, | 751 Label* miss_label, |
| 749 Label* miss_restore_name) { | 752 Label* miss_restore_name, |
| 753 Label* slow) { |
| 750 // Check that the map of the object hasn't changed. | 754 // Check that the map of the object hasn't changed. |
| 751 __ CheckMap(receiver_reg, Handle<Map>(object->map()), | 755 __ CheckMap(receiver_reg, Handle<Map>(object->map()), |
| 752 miss_label, DO_SMI_CHECK, REQUIRE_EXACT_MAP); | 756 miss_label, DO_SMI_CHECK, REQUIRE_EXACT_MAP); |
| 753 | 757 |
| 754 // Perform global security token check if needed. | 758 // Perform global security token check if needed. |
| 755 if (object->IsJSGlobalProxy()) { | 759 if (object->IsJSGlobalProxy()) { |
| 756 __ CheckAccessGlobalProxy(receiver_reg, scratch1, miss_label); | 760 __ CheckAccessGlobalProxy(receiver_reg, scratch1, miss_label); |
| 757 } | 761 } |
| 758 | 762 |
| 759 int descriptor = transition->LastAdded(); | 763 int descriptor = transition->LastAdded(); |
| 760 DescriptorArray* descriptors = transition->instance_descriptors(); | 764 DescriptorArray* descriptors = transition->instance_descriptors(); |
| 761 PropertyDetails details = descriptors->GetDetails(descriptor); | 765 PropertyDetails details = descriptors->GetDetails(descriptor); |
| 762 Representation representation = details.representation(); | 766 Representation representation = details.representation(); |
| 763 ASSERT(!representation.IsNone()); | 767 ASSERT(!representation.IsNone()); |
| 764 | 768 |
| 765 // Ensure no transitions to deprecated maps are followed. | 769 // Ensure no transitions to deprecated maps are followed. |
| 766 __ CheckMapDeprecated(transition, scratch1, miss_label); | 770 __ CheckMapDeprecated(transition, scratch1, miss_label); |
| 767 | 771 |
| 768 if (FLAG_track_fields && representation.IsSmi()) { | |
| 769 __ JumpIfNotSmi(value_reg, miss_label); | |
| 770 } else if (FLAG_track_double_fields && representation.IsDouble()) { | |
| 771 Label do_store; | |
| 772 __ JumpIfSmi(value_reg, &do_store); | |
| 773 __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(), | |
| 774 miss_label, DONT_DO_SMI_CHECK, REQUIRE_EXACT_MAP); | |
| 775 __ bind(&do_store); | |
| 776 } | |
| 777 | |
| 778 // Check that we are allowed to write this. | 772 // Check that we are allowed to write this. |
| 779 if (object->GetPrototype()->IsJSObject()) { | 773 if (object->GetPrototype()->IsJSObject()) { |
| 780 JSObject* holder; | 774 JSObject* holder; |
| 781 // holder == object indicates that no property was found. | 775 // holder == object indicates that no property was found. |
| 782 if (lookup->holder() != *object) { | 776 if (lookup->holder() != *object) { |
| 783 holder = lookup->holder(); | 777 holder = lookup->holder(); |
| 784 } else { | 778 } else { |
| 785 // Find the top object. | 779 // Find the top object. |
| 786 holder = *object; | 780 holder = *object; |
| 787 do { | 781 do { |
| 788 holder = JSObject::cast(holder->GetPrototype()); | 782 holder = JSObject::cast(holder->GetPrototype()); |
| 789 } while (holder->GetPrototype()->IsJSObject()); | 783 } while (holder->GetPrototype()->IsJSObject()); |
| 790 } | 784 } |
| 791 Register holder_reg = CheckPrototypes( | 785 Register holder_reg = CheckPrototypes( |
| 792 object, receiver_reg, Handle<JSObject>(holder), name_reg, | 786 object, receiver_reg, Handle<JSObject>(holder), name_reg, |
| 793 scratch1, scratch2, name, miss_restore_name); | 787 scratch1, scratch2, name, miss_restore_name, SKIP_RECEIVER); |
| 794 // If no property was found, and the holder (the last object in the | 788 // If no property was found, and the holder (the last object in the |
| 795 // prototype chain) is in slow mode, we need to do a negative lookup on the | 789 // prototype chain) is in slow mode, we need to do a negative lookup on the |
| 796 // holder. | 790 // holder. |
| 797 if (lookup->holder() == *object) { | 791 if (lookup->holder() == *object) { |
| 798 if (holder->IsJSGlobalObject()) { | 792 if (holder->IsJSGlobalObject()) { |
| 799 GenerateCheckPropertyCell( | 793 GenerateCheckPropertyCell( |
| 800 masm, | 794 masm, |
| 801 Handle<GlobalObject>(GlobalObject::cast(holder)), | 795 Handle<GlobalObject>(GlobalObject::cast(holder)), |
| 802 name, | 796 name, |
| 803 scratch1, | 797 scratch1, |
| 804 miss_restore_name); | 798 miss_restore_name); |
| 805 } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) { | 799 } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) { |
| 806 GenerateDictionaryNegativeLookup( | 800 GenerateDictionaryNegativeLookup( |
| 807 masm, miss_restore_name, holder_reg, name, scratch1, scratch2); | 801 masm, miss_restore_name, holder_reg, name, scratch1, scratch2); |
| 808 } | 802 } |
| 809 } | 803 } |
| 810 } | 804 } |
| 811 | 805 |
| 806 Register storage_reg = name_reg; |
| 807 |
| 808 if (FLAG_track_fields && representation.IsSmi()) { |
| 809 __ JumpIfNotSmi(value_reg, miss_restore_name); |
| 810 } else if (FLAG_track_double_fields && representation.IsDouble()) { |
| 811 Label do_store, heap_number; |
| 812 __ AllocateHeapNumber(storage_reg, scratch1, slow); |
| 813 |
| 814 __ JumpIfNotSmi(value_reg, &heap_number); |
| 815 __ SmiToInteger32(scratch1, value_reg); |
| 816 __ cvtlsi2sd(xmm0, scratch1); |
| 817 __ jmp(&do_store); |
| 818 |
| 819 __ bind(&heap_number); |
| 820 __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(), |
| 821 miss_restore_name, DONT_DO_SMI_CHECK, REQUIRE_EXACT_MAP); |
| 822 __ movsd(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset)); |
| 823 |
| 824 __ bind(&do_store); |
| 825 __ movsd(FieldOperand(storage_reg, HeapNumber::kValueOffset), xmm0); |
| 826 } |
| 827 |
| 812 // Stub never generated for non-global objects that require access | 828 // Stub never generated for non-global objects that require access |
| 813 // checks. | 829 // checks. |
| 814 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded()); | 830 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded()); |
| 815 | 831 |
| 816 // Perform map transition for the receiver if necessary. | 832 // Perform map transition for the receiver if necessary. |
| 817 if (object->map()->unused_property_fields() == 0) { | 833 if (object->map()->unused_property_fields() == 0) { |
| 818 // The properties must be extended before we can store the value. | 834 // The properties must be extended before we can store the value. |
| 819 // We jump to a runtime call that extends the properties array. | 835 // We jump to a runtime call that extends the properties array. |
| 820 __ pop(scratch1); // Return address. | 836 __ pop(scratch1); // Return address. |
| 821 __ push(receiver_reg); | 837 __ push(receiver_reg); |
| 822 __ Push(transition); | 838 __ Push(transition); |
| 823 __ push(value_reg); | 839 __ push(value_reg); |
| 824 __ push(scratch1); | 840 __ push(scratch1); |
| 825 __ TailCallExternalReference( | 841 __ TailCallExternalReference( |
| 826 ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage), | 842 ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage), |
| 827 masm->isolate()), | 843 masm->isolate()), |
| 828 3, | 844 3, |
| 829 1); | 845 1); |
| 830 return; | 846 return; |
| 831 } | 847 } |
| 832 | 848 |
| 833 // Update the map of the object. | 849 // Update the map of the object. |
| 834 __ Move(scratch1, transition); | 850 __ Move(scratch1, transition); |
| 835 __ movq(FieldOperand(receiver_reg, HeapObject::kMapOffset), scratch1); | 851 __ movq(FieldOperand(receiver_reg, HeapObject::kMapOffset), scratch1); |
| 836 | 852 |
| 837 // Update the write barrier for the map field and pass the now unused | 853 // Update the write barrier for the map field. |
| 838 // name_reg as scratch register. | |
| 839 __ RecordWriteField(receiver_reg, | 854 __ RecordWriteField(receiver_reg, |
| 840 HeapObject::kMapOffset, | 855 HeapObject::kMapOffset, |
| 841 scratch1, | 856 scratch1, |
| 842 name_reg, | 857 scratch2, |
| 843 kDontSaveFPRegs, | 858 kDontSaveFPRegs, |
| 844 OMIT_REMEMBERED_SET, | 859 OMIT_REMEMBERED_SET, |
| 845 OMIT_SMI_CHECK); | 860 OMIT_SMI_CHECK); |
| 846 | 861 |
| 847 int index = transition->instance_descriptors()->GetFieldIndex( | 862 int index = transition->instance_descriptors()->GetFieldIndex( |
| 848 transition->LastAdded()); | 863 transition->LastAdded()); |
| 849 | 864 |
| 850 // Adjust for the number of properties stored in the object. Even in the | 865 // Adjust for the number of properties stored in the object. Even in the |
| 851 // face of a transition we can use the old map here because the size of the | 866 // face of a transition we can use the old map here because the size of the |
| 852 // object and the number of in-object properties is not going to change. | 867 // object and the number of in-object properties is not going to change. |
| 853 index -= object->map()->inobject_properties(); | 868 index -= object->map()->inobject_properties(); |
| 854 | 869 |
| 855 // TODO(verwaest): Share this code as a code stub. | 870 // TODO(verwaest): Share this code as a code stub. |
| 856 if (index < 0) { | 871 if (index < 0) { |
| 857 // Set the property straight into the object. | 872 // Set the property straight into the object. |
| 858 int offset = object->map()->instance_size() + (index * kPointerSize); | 873 int offset = object->map()->instance_size() + (index * kPointerSize); |
| 859 __ movq(FieldOperand(receiver_reg, offset), value_reg); | 874 if (FLAG_track_double_fields && representation.IsDouble()) { |
| 875 __ movq(FieldOperand(receiver_reg, offset), storage_reg); |
| 876 } else { |
| 877 __ movq(FieldOperand(receiver_reg, offset), value_reg); |
| 878 } |
| 860 | 879 |
| 861 if (!FLAG_track_fields || !representation.IsSmi()) { | 880 if (!FLAG_track_fields || !representation.IsSmi()) { |
| 862 // Update the write barrier for the array address. | 881 // Update the write barrier for the array address. |
| 863 // Pass the value being stored in the now unused name_reg. | 882 // Pass the value being stored in the now unused name_reg. |
| 864 __ movq(name_reg, value_reg); | 883 if (!FLAG_track_double_fields || !representation.IsDouble()) { |
| 884 __ movq(name_reg, value_reg); |
| 885 } else { |
| 886 ASSERT(storage_reg.is(name_reg)); |
| 887 } |
| 865 __ RecordWriteField( | 888 __ RecordWriteField( |
| 866 receiver_reg, offset, name_reg, scratch1, kDontSaveFPRegs); | 889 receiver_reg, offset, name_reg, scratch1, kDontSaveFPRegs); |
| 867 } | 890 } |
| 868 } else { | 891 } else { |
| 869 // Write to the properties array. | 892 // Write to the properties array. |
| 870 int offset = index * kPointerSize + FixedArray::kHeaderSize; | 893 int offset = index * kPointerSize + FixedArray::kHeaderSize; |
| 871 // Get the properties array (optimistically). | 894 // Get the properties array (optimistically). |
| 872 __ movq(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset)); | 895 __ movq(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset)); |
| 873 __ movq(FieldOperand(scratch1, offset), value_reg); | 896 if (FLAG_track_double_fields && representation.IsDouble()) { |
| 897 __ movq(FieldOperand(scratch1, offset), storage_reg); |
| 898 } else { |
| 899 __ movq(FieldOperand(scratch1, offset), value_reg); |
| 900 } |
| 874 | 901 |
| 875 if (!FLAG_track_fields || !representation.IsSmi()) { | 902 if (!FLAG_track_fields || !representation.IsSmi()) { |
| 876 // Update the write barrier for the array address. | 903 // Update the write barrier for the array address. |
| 877 // Pass the value being stored in the now unused name_reg. | 904 // Pass the value being stored in the now unused name_reg. |
| 878 __ movq(name_reg, value_reg); | 905 if (!FLAG_track_double_fields || !representation.IsDouble()) { |
| 906 __ movq(name_reg, value_reg); |
| 907 } else { |
| 908 ASSERT(storage_reg.is(name_reg)); |
| 909 } |
| 879 __ RecordWriteField( | 910 __ RecordWriteField( |
| 880 scratch1, offset, name_reg, receiver_reg, kDontSaveFPRegs); | 911 scratch1, offset, name_reg, receiver_reg, kDontSaveFPRegs); |
| 881 } | 912 } |
| 882 } | 913 } |
| 883 | 914 |
| 884 // Return the value (register rax). | 915 // Return the value (register rax). |
| 885 ASSERT(value_reg.is(rax)); | 916 ASSERT(value_reg.is(rax)); |
| 886 __ ret(0); | 917 __ ret(0); |
| 887 } | 918 } |
| 888 | 919 |
| (...skipping 27 matching lines...) |
| 916 // Adjust for the number of properties stored in the object. Even in the | 947 // Adjust for the number of properties stored in the object. Even in the |
| 917 // face of a transition we can use the old map here because the size of the | 948 // face of a transition we can use the old map here because the size of the |
| 918 // object and the number of in-object properties is not going to change. | 949 // object and the number of in-object properties is not going to change. |
| 919 index -= object->map()->inobject_properties(); | 950 index -= object->map()->inobject_properties(); |
| 920 | 951 |
| 921 Representation representation = lookup->representation(); | 952 Representation representation = lookup->representation(); |
| 922 ASSERT(!representation.IsNone()); | 953 ASSERT(!representation.IsNone()); |
| 923 if (FLAG_track_fields && representation.IsSmi()) { | 954 if (FLAG_track_fields && representation.IsSmi()) { |
| 924 __ JumpIfNotSmi(value_reg, miss_label); | 955 __ JumpIfNotSmi(value_reg, miss_label); |
| 925 } else if (FLAG_track_double_fields && representation.IsDouble()) { | 956 } else if (FLAG_track_double_fields && representation.IsDouble()) { |
| 926 Label do_store; | 957 // Load the double storage. |
| 927 __ JumpIfSmi(value_reg, &do_store); | 958 if (index < 0) { |
| 959 int offset = object->map()->instance_size() + (index * kPointerSize); |
| 960 __ movq(scratch1, FieldOperand(receiver_reg, offset)); |
| 961 } else { |
| 962 __ movq(scratch1, |
| 963 FieldOperand(receiver_reg, JSObject::kPropertiesOffset)); |
| 964 int offset = index * kPointerSize + FixedArray::kHeaderSize; |
| 965 __ movq(scratch1, FieldOperand(scratch1, offset)); |
| 966 } |
| 967 |
| 968 // Store the value into the storage. |
| 969 Label do_store, heap_number; |
| 970 __ JumpIfNotSmi(value_reg, &heap_number); |
| 971 __ SmiToInteger32(scratch2, value_reg); |
| 972 __ cvtlsi2sd(xmm0, scratch2); |
| 973 __ jmp(&do_store); |
| 974 |
| 975 __ bind(&heap_number); |
| 928 __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(), | 976 __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(), |
| 929 miss_label, DONT_DO_SMI_CHECK, REQUIRE_EXACT_MAP); | 977 miss_label, DONT_DO_SMI_CHECK, REQUIRE_EXACT_MAP); |
| 978 __ movsd(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset)); |
| 979 |
| 930 __ bind(&do_store); | 980 __ bind(&do_store); |
| 981 __ movsd(FieldOperand(scratch1, HeapNumber::kValueOffset), xmm0); |
| 982 // Return the value (register rax). |
| 983 ASSERT(value_reg.is(rax)); |
| 984 __ ret(0); |
| 985 return; |
| 931 } | 986 } |
| 932 | 987 |
| 933 // TODO(verwaest): Share this code as a code stub. | 988 // TODO(verwaest): Share this code as a code stub. |
| 934 if (index < 0) { | 989 if (index < 0) { |
| 935 // Set the property straight into the object. | 990 // Set the property straight into the object. |
| 936 int offset = object->map()->instance_size() + (index * kPointerSize); | 991 int offset = object->map()->instance_size() + (index * kPointerSize); |
| 937 __ movq(FieldOperand(receiver_reg, offset), value_reg); | 992 __ movq(FieldOperand(receiver_reg, offset), value_reg); |
| 938 | 993 |
| 939 if (!FLAG_track_fields || !representation.IsSmi()) { | 994 if (!FLAG_track_fields || !representation.IsSmi()) { |
| 940 // Update the write barrier for the array address. | 995 // Update the write barrier for the array address. |
| (...skipping 248 matching lines...) |
| 1189 if (!global.is_null()) { | 1244 if (!global.is_null()) { |
| 1190 GenerateCheckPropertyCell(masm(), global, name, scratch2(), &miss); | 1245 GenerateCheckPropertyCell(masm(), global, name, scratch2(), &miss); |
| 1191 } | 1246 } |
| 1192 | 1247 |
| 1193 HandlerFrontendFooter(success, &miss); | 1248 HandlerFrontendFooter(success, &miss); |
| 1194 } | 1249 } |
| 1195 | 1250 |
| 1196 | 1251 |
| 1197 void BaseLoadStubCompiler::GenerateLoadField(Register reg, | 1252 void BaseLoadStubCompiler::GenerateLoadField(Register reg, |
| 1198 Handle<JSObject> holder, | 1253 Handle<JSObject> holder, |
| 1199 PropertyIndex field) { | 1254 PropertyIndex field, |
| 1255 Representation representation) { |
| 1200 if (!reg.is(receiver())) __ movq(receiver(), reg); | 1256 if (!reg.is(receiver())) __ movq(receiver(), reg); |
| 1201 if (kind() == Code::LOAD_IC) { | 1257 if (kind() == Code::LOAD_IC) { |
| 1202 LoadFieldStub stub(field.is_inobject(holder), | 1258 LoadFieldStub stub(field.is_inobject(holder), |
| 1203 field.translate(holder)); | 1259 field.translate(holder), |
| 1260 representation); |
| 1204 GenerateTailCall(masm(), stub.GetCode(isolate())); | 1261 GenerateTailCall(masm(), stub.GetCode(isolate())); |
| 1205 } else { | 1262 } else { |
| 1206 KeyedLoadFieldStub stub(field.is_inobject(holder), | 1263 KeyedLoadFieldStub stub(field.is_inobject(holder), |
| 1207 field.translate(holder)); | 1264 field.translate(holder), |
| 1265 representation); |
| 1208 GenerateTailCall(masm(), stub.GetCode(isolate())); | 1266 GenerateTailCall(masm(), stub.GetCode(isolate())); |
| 1209 } | 1267 } |
| 1210 } | 1268 } |
| 1211 | 1269 |
| 1212 | 1270 |
| 1213 void BaseLoadStubCompiler::GenerateLoadCallback( | 1271 void BaseLoadStubCompiler::GenerateLoadCallback( |
| 1214 Register reg, | 1272 Register reg, |
| 1215 Handle<ExecutableAccessorInfo> callback) { | 1273 Handle<ExecutableAccessorInfo> callback) { |
| 1216 // Insert additional parameters into the stack frame above return address. | 1274 // Insert additional parameters into the stack frame above return address. |
| 1217 ASSERT(!scratch2().is(reg)); | 1275 ASSERT(!scratch2().is(reg)); |
| (...skipping 236 matching lines...) |
| 1454 const int argc = arguments().immediate(); | 1512 const int argc = arguments().immediate(); |
| 1455 __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize)); | 1513 __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize)); |
| 1456 | 1514 |
| 1457 // Check that the receiver isn't a smi. | 1515 // Check that the receiver isn't a smi. |
| 1458 __ JumpIfSmi(rdx, &miss); | 1516 __ JumpIfSmi(rdx, &miss); |
| 1459 | 1517 |
| 1460 // Do the right check and compute the holder register. | 1518 // Do the right check and compute the holder register. |
| 1461 Register reg = CheckPrototypes(object, rdx, holder, rbx, rax, rdi, | 1519 Register reg = CheckPrototypes(object, rdx, holder, rbx, rax, rdi, |
| 1462 name, &miss); | 1520 name, &miss); |
| 1463 | 1521 |
| 1464 GenerateFastPropertyLoad(masm(), rdi, reg, holder, index); | 1522 GenerateFastPropertyLoad(masm(), rdi, reg, index.is_inobject(holder), |
| 1523 index.translate(holder), Representation::Tagged()); |
| 1465 | 1524 |
| 1466 // Check that the function really is a function. | 1525 // Check that the function really is a function. |
| 1467 __ JumpIfSmi(rdi, &miss); | 1526 __ JumpIfSmi(rdi, &miss); |
| 1468 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rbx); | 1527 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rbx); |
| 1469 __ j(not_equal, &miss); | 1528 __ j(not_equal, &miss); |
| 1470 | 1529 |
| 1471 // Patch the receiver on the stack with the global proxy if | 1530 // Patch the receiver on the stack with the global proxy if |
| 1472 // necessary. | 1531 // necessary. |
| 1473 if (object->IsGlobalObject()) { | 1532 if (object->IsGlobalObject()) { |
| 1474 __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset)); | 1533 __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset)); |
| (...skipping 2055 matching lines...) |
| 3530 TailCallBuiltin(masm, Builtins::kKeyedStoreIC_Slow); | 3589 TailCallBuiltin(masm, Builtins::kKeyedStoreIC_Slow); |
| 3531 } | 3590 } |
| 3532 } | 3591 } |
| 3533 | 3592 |
| 3534 | 3593 |
| 3535 #undef __ | 3594 #undef __ |
| 3536 | 3595 |
| 3537 } } // namespace v8::internal | 3596 } } // namespace v8::internal |
| 3538 | 3597 |
| 3539 #endif // V8_TARGET_ARCH_X64 | 3598 #endif // V8_TARGET_ARCH_X64 |
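
For orientation, the double-field handling introduced in this patch converts the incoming value (smi or heap number) to a raw double and writes it into a HeapNumber box: the transition path (GenerateStoreTransition) allocates a fresh box in storage_reg before converting, while the in-place store path (GenerateStoreField) reuses the box already held by the field and returns right after the movsd, without a field write barrier, since only the unboxed payload changes. The sketch below is a minimal, hypothetical C++ model of that store step; TaggedValue, HeapNumber, and StoreDoubleField are simplified stand-ins for illustration, not V8's actual types or API.

```cpp
// Hypothetical, simplified model of the generated store path for a field
// tracked as Representation::Double(); names are illustrative only.
#include <cstdint>
#include <stdexcept>

struct HeapNumber { double value; };   // boxed double acting as field storage

struct TaggedValue {
  bool is_smi;              // small integer encoded inline
  int32_t smi;              // valid when is_smi
  HeapNumber* heap_number;  // valid when !is_smi (null models the miss case)
};

// Mirrors the double branch of GenerateStoreField: load the existing
// HeapNumber box and overwrite its payload in place.
void StoreDoubleField(HeapNumber* storage, const TaggedValue& value) {
  double d;
  if (value.is_smi) {
    d = static_cast<double>(value.smi);        // SmiToInteger32 + cvtlsi2sd
  } else if (value.heap_number != nullptr) {
    d = value.heap_number->value;              // movsd from HeapNumber::kValueOffset
  } else {
    throw std::runtime_error("miss: value is neither smi nor heap number");
  }
  storage->value = d;                          // movsd into the storage box
}
```

The transition path does the same conversion but first allocates the box (AllocateHeapNumber into storage_reg, jumping to the slow path on allocation failure) and then stores the box pointer into the new field, which is why it still needs a write barrier there.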