OLD | NEW |
---|---|
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 875 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
886 | 886 |
887 __ mov(a3, a0); | 887 __ mov(a3, a0); |
888 __ Push(a3, a2); | 888 __ Push(a3, a2); |
889 | 889 |
890 // Perform tail call to the entry. | 890 // Perform tail call to the entry. |
891 ExternalReference ref = ExternalReference(IC_Utility(kLoadIC_Miss), isolate); | 891 ExternalReference ref = ExternalReference(IC_Utility(kLoadIC_Miss), isolate); |
892 __ TailCallExternalReference(ref, 2, 1); | 892 __ TailCallExternalReference(ref, 2, 1); |
893 } | 893 } |
894 | 894 |
895 | 895 |
896 static MemOperand GenerateMappedArgumentsLookup(MacroAssembler* masm, | |
897                                                 Register object, | |
898                                                 Register key, | |
899                                                 Register scratch1, | |
900                                                 Register scratch2, | |
901                                                 Register scratch3, | |
902                                                 Label* unmapped_case, | |
903                                                 Label* slow_case) { | |
904   Heap* heap = masm->isolate()->heap(); | |
905  | |
906   // Check that the receiver isn't a smi. | |
907   __ JumpIfSmi(object, slow_case); | |
908  | |
909   // Check that the key is a positive smi: both the sign bit and the smi | |
910   // tag bit must be clear, i.e. (key & 0x80000001) == 0. | |
910   __ And(scratch1, key, Operand(0x80000001)); | |
Karl Klose
2011/06/21 09:26:50
I think this must be:
__ And(scratch1, key, Operand(0x80000001));
 | |
911   __ Branch(slow_case, ne, scratch1, Operand(zero_reg)); | |
912  | |
913   // Load the elements into scratch1 and check its map. | |
914   Handle<Map> arguments_map(heap->non_strict_arguments_elements_map()); | |
915   __ lw(scratch1, FieldMemOperand(object, JSObject::kElementsOffset)); | |
916   __ CheckMap(scratch1, scratch2, arguments_map, slow_case, DONT_DO_SMI_CHECK); | |
917  | |
918   // Check if element is in the range of mapped arguments. If not, jump | |
919   // to the unmapped lookup with the parameter map in scratch1. | |
920   __ lw(scratch2, FieldMemOperand(scratch1, FixedArray::kLengthOffset)); | |
921   __ Subu(scratch2, scratch2, Operand(Smi::FromInt(2))); | |
922   __ Branch(unmapped_case, Ugreater_equal, key, Operand(scratch2)); | |
923  | |
924   // Load element index and check whether it is the hole. | |
925   const int kOffset = | |
926       FixedArray::kHeaderSize + 2 * kPointerSize - kHeapObjectTag; | |
927  | |
928   __ li(scratch3, Operand(kPointerSize >> 1)); | |
929   __ mul(scratch3, key, scratch3);  // key is a smi (value << 1), so this is key * kPointerSize. | |
930   __ Addu(scratch3, scratch3, Operand(kOffset)); | |
931  | |
932   __ Addu(scratch2, scratch1, scratch3); | |
933   __ lw(scratch2, MemOperand(scratch2)); | |
934   __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex); | |
935   __ Branch(unmapped_case, eq, scratch2, Operand(scratch3)); | |
936  | |
937   // Load value from context and return it. We can reuse scratch1 because | |
938   // we do not jump to the unmapped lookup (which requires the parameter | |
939   // map in scratch1). | |
940   __ lw(scratch1, FieldMemOperand(scratch1, FixedArray::kHeaderSize)); | |
941   __ li(scratch3, Operand(kPointerSize >> 1)); | |
942   __ mul(scratch3, scratch2, scratch3); | |
943   __ Addu(scratch3, scratch3, Operand(Context::kHeaderSize - kHeapObjectTag)); | |
944   __ Addu(scratch2, scratch1, scratch3); | |
945   return MemOperand(scratch2); | |
946 } | |
947 | |
948 | |
949 static MemOperand GenerateUnmappedArgumentsLookup(MacroAssembler* masm, | |
950                                                   Register key, | |
951                                                   Register parameter_map, | |
952                                                   Register scratch, | |
953                                                   Label* slow_case) { | |
954   // Element is in arguments backing store, which is referenced by the | |
955   // second element of the parameter_map. The parameter_map register | |
956   // must be loaded with the parameter map of the arguments object and is | |
957   // overwritten. Returns a MemOperand addressing the element; the caller | |
958   // must still check the loaded value for the hole where applicable. | |
958   const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize; | |
959   Register backing_store = parameter_map; | |
960   __ lw(backing_store, FieldMemOperand(parameter_map, kBackingStoreOffset)); | |
961   __ lw(scratch, FieldMemOperand(backing_store, FixedArray::kLengthOffset)); | |
962   __ Branch(slow_case, Ugreater_equal, key, Operand(scratch));  // Out of bounds. | |
963   __ li(scratch, Operand(kPointerSize >> 1)); | |
964   __ mul(scratch, key, scratch);  // key is a smi (value << 1), so this is key * kPointerSize. | |
965   __ Addu(scratch, | |
966           scratch, | |
967           Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | |
968   __ Addu(scratch, backing_store, scratch); | |
969   return MemOperand(scratch); | |
970 } | |
971 | |
972 | |
973 void KeyedLoadIC::GenerateNonStrictArguments(MacroAssembler* masm) { | |
974   // ---------- S t a t e -------------- | |
975   //  -- lr     : return address | |
976   //  -- a0     : key | |
Karl Klose
2011/06/21 09:26:50
According to the code below the key is in v0, not a0. Either the comment or
the code is wrong; GenerateMiss expects the key in a0.
 | |
977   //  -- a1     : receiver | |
978   // ----------------------------------- | |
979   Label slow, notin; | |
980   MemOperand mapped_location = | |
981       GenerateMappedArgumentsLookup(masm, a1, a0, a2, a3, t0, &notin, &slow); | |
982   __ lw(v0, mapped_location); | |
983   __ Ret(); | |
984   __ bind(&notin); | |
985   // The unmapped lookup expects that the parameter map is in a2. | |
986   MemOperand unmapped_location = | |
987       GenerateUnmappedArgumentsLookup(masm, a0, a2, a3, &slow); | |
988   __ lw(a2, unmapped_location); | |
989   __ LoadRoot(a3, Heap::kTheHoleValueRootIndex); | |
990   __ Branch(&slow, eq, a2, Operand(a3));  // The hole marks a deleted element. | |
991   __ mov(v0, a2); | |
992   __ Ret(); | |
993   __ bind(&slow); | |
994   GenerateMiss(masm, false); | |
995 } | |
996 | |
997 | |
998 void KeyedStoreIC::GenerateNonStrictArguments(MacroAssembler* masm) { | |
999   // ---------- S t a t e -------------- | |
1000   //  -- v0     : value | |
1001   //  -- a1     : key | |
1002   //  -- a2     : receiver | |
1003   //  -- lr     : return address | |
1004   // ----------------------------------- | |
1005   Label slow, notin; | |
1006   MemOperand mapped_location = | |
1007       GenerateMappedArgumentsLookup(masm, a2, a1, a3, t0, t1, &notin, &slow); | |
1008   __ sw(v0, mapped_location); | |
1009   __ Ret();  // v0 (the value) is also the IC result. | |
1010   __ bind(&notin); | |
1011   // The unmapped lookup expects that the parameter map is in a3. | |
1012   MemOperand unmapped_location = | |
1013       GenerateUnmappedArgumentsLookup(masm, a1, a3, t0, &slow); | |
1014   __ sw(v0, unmapped_location); | |
1015   __ Ret(); | |
1016   __ bind(&slow); | |
1017   GenerateMiss(masm, false); | |
1018 } | |
1019 | |
1020 | |
1021 void KeyedCallIC::GenerateNonStrictArguments(MacroAssembler* masm, | |
1022                                              int argc) { | |
1023   // ----------- S t a t e ------------- | |
1024   //  -- a2     : name | |
1025   //  -- lr     : return address | |
1026   // ----------------------------------- | |
1027   Label slow, notin; | |
1028   // Load the receiver from the stack. | |
1029   __ lw(a1, MemOperand(sp, argc * kPointerSize)); | |
1030   MemOperand mapped_location = | |
1031       GenerateMappedArgumentsLookup(masm, a1, a2, a3, t0, t1, &notin, &slow); | |
1032   __ lw(a1, mapped_location); | |
1033   GenerateFunctionTailCall(masm, argc, &slow, a3); | |
1034   __ bind(&notin); | |
1035   // The unmapped lookup expects that the parameter map is in a3. | |
1036   MemOperand unmapped_location = | |
1037       GenerateUnmappedArgumentsLookup(masm, a2, a3, t0, &slow); | |
1038   __ lw(a1, unmapped_location); | |
1039   __ LoadRoot(a3, Heap::kTheHoleValueRootIndex); | |
1040   __ Branch(&slow, eq, a1, Operand(a3));  // The hole marks a deleted element. | |
1041   GenerateFunctionTailCall(masm, argc, &slow, a3); | |
1042   __ bind(&slow); | |
1043   GenerateMiss(masm, argc); | |
1044 } | |
1045 | |
1046 | |
1047 Object* KeyedLoadIC_Miss(Arguments args); | |
1048 | |
1049 | |
896 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm, bool force_generic) { | 1050 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm, bool force_generic) { |
897 // ---------- S t a t e -------------- | 1051 // ---------- S t a t e -------------- |
898 // -- ra : return address | 1052 // -- ra : return address |
899 // -- a0 : key | 1053 // -- a0 : key |
900 // -- a1 : receiver | 1054 // -- a1 : receiver |
901 // ----------------------------------- | 1055 // ----------------------------------- |
902 Isolate* isolate = masm->isolate(); | 1056 Isolate* isolate = masm->isolate(); |
903 | 1057 |
904 __ IncrementCounter(isolate->counters()->keyed_load_miss(), 1, a3, t0); | 1058 __ IncrementCounter(isolate->counters()->keyed_load_miss(), 1, a3, t0); |
905 | 1059 |
(...skipping 665 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1571 Register reg = Register::from_code(Assembler::GetRs(instr_at_patch)); | 1725 Register reg = Register::from_code(Assembler::GetRs(instr_at_patch)); |
1572 patcher.masm()->andi(at, reg, kSmiTagMask); | 1726 patcher.masm()->andi(at, reg, kSmiTagMask); |
1573 patcher.ChangeBranchCondition(eq); | 1727 patcher.ChangeBranchCondition(eq); |
1574 } | 1728 } |
1575 } | 1729 } |
1576 | 1730 |
1577 | 1731 |
1578 } } // namespace v8::internal | 1732 } } // namespace v8::internal |
1579 | 1733 |
1580 #endif // V8_TARGET_ARCH_MIPS | 1734 #endif // V8_TARGET_ARCH_MIPS |
OLD | NEW |