| OLD | NEW |
| 1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 848 matching lines...) |
| 859 Operand(scratch1, LSL, kPointerSizeLog2 + 1)); | 859 Operand(scratch1, LSL, kPointerSizeLog2 + 1)); |
| 860 | 860 |
| 861 Register probe = mask; | 861 Register probe = mask; |
| 862 __ ldr(probe, | 862 __ ldr(probe, |
| 863 FieldMemOperand(scratch1, FixedArray::kHeaderSize)); | 863 FieldMemOperand(scratch1, FixedArray::kHeaderSize)); |
| 864 __ BranchOnSmi(probe, not_found); | 864 __ BranchOnSmi(probe, not_found); |
| 865 __ sub(scratch2, object, Operand(kHeapObjectTag)); | 865 __ sub(scratch2, object, Operand(kHeapObjectTag)); |
| 866 __ vldr(d0, scratch2, HeapNumber::kValueOffset); | 866 __ vldr(d0, scratch2, HeapNumber::kValueOffset); |
| 867 __ sub(probe, probe, Operand(kHeapObjectTag)); | 867 __ sub(probe, probe, Operand(kHeapObjectTag)); |
| 868 __ vldr(d1, probe, HeapNumber::kValueOffset); | 868 __ vldr(d1, probe, HeapNumber::kValueOffset); |
| 869 __ vcmp(d0, d1); | 869 __ VFPCompareAndSetFlags(d0, d1); |
| 870 __ vmrs(pc); | |
| 871 __ b(ne, not_found); // The cache did not contain this value. | 870 __ b(ne, not_found); // The cache did not contain this value. |
| 872 __ b(&load_result_from_cache); | 871 __ b(&load_result_from_cache); |
| 873 } else { | 872 } else { |
| 874 __ b(not_found); | 873 __ b(not_found); |
| 875 } | 874 } |
| 876 } | 875 } |
| 877 | 876 |
| 878 __ bind(&is_smi); | 877 __ bind(&is_smi); |
| 879 Register scratch = scratch1; | 878 Register scratch = scratch1; |
| 880 __ and_(scratch, mask, Operand(object, ASR, 1)); | 879 __ and_(scratch, mask, Operand(object, ASR, 1)); |
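Note on the vcmp/vmrs change in this hunk (the same substitution recurs in the CompareStub and ToBooleanStub hunks below and in the heap-number comparison hunk near line 4924): the old code issued a VFP compare and then a separate vmrs(pc) to copy the FPSCR condition bits into the normal CPSR flags, and the new VFPCompareAndSetFlags call presumably folds those two instructions into a single MacroAssembler helper so call sites cannot forget the flag transfer. A minimal sketch of the assumed shape of that helper, not necessarily V8's exact signature:

void MacroAssembler::VFPCompareAndSetFlags(const DwVfpRegister src1,
                                           const DwVfpRegister src2) {
  vcmp(src1, src2);  // Compare the doubles; the result lands in the FPSCR.
  vmrs(pc);          // Copy the FPSCR N/Z/C/V bits into the CPSR flags.
}

The ToBooleanStub hunk appears to use an overload that compares a register against the immediate 0.0.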
| (...skipping 29 matching lines...) |
| 910 GenerateLookupNumberStringCache(masm, r1, r0, r2, r3, r4, false, &runtime); | 909 GenerateLookupNumberStringCache(masm, r1, r0, r2, r3, r4, false, &runtime); |
| 911 __ add(sp, sp, Operand(1 * kPointerSize)); | 910 __ add(sp, sp, Operand(1 * kPointerSize)); |
| 912 __ Ret(); | 911 __ Ret(); |
| 913 | 912 |
| 914 __ bind(&runtime); | 913 __ bind(&runtime); |
| 915 // Handle number to string in the runtime system if not found in the cache. | 914 // Handle number to string in the runtime system if not found in the cache. |
| 916 __ TailCallRuntime(Runtime::kNumberToStringSkipCache, 1, 1); | 915 __ TailCallRuntime(Runtime::kNumberToStringSkipCache, 1, 1); |
| 917 } | 916 } |
| 918 | 917 |
| 919 | 918 |
| 920 void RecordWriteStub::Generate(MacroAssembler* masm) { | |
| 921 __ add(offset_, object_, Operand(offset_)); | |
| 922 __ RecordWriteHelper(object_, offset_, scratch_); | |
| 923 __ Ret(); | |
| 924 } | |
| 925 | |
| 926 | |
| 927 // On entry lhs_ and rhs_ are the values to be compared. | 919 // On entry lhs_ and rhs_ are the values to be compared. |
| 928 // On exit r0 is 0, positive or negative to indicate the result of | 920 // On exit r0 is 0, positive or negative to indicate the result of |
| 929 // the comparison. | 921 // the comparison. |
| 930 void CompareStub::Generate(MacroAssembler* masm) { | 922 void CompareStub::Generate(MacroAssembler* masm) { |
| 931 ASSERT((lhs_.is(r0) && rhs_.is(r1)) || | 923 ASSERT((lhs_.is(r0) && rhs_.is(r1)) || |
| 932 (lhs_.is(r1) && rhs_.is(r0))); | 924 (lhs_.is(r1) && rhs_.is(r0))); |
| 933 | 925 |
| 934 Label slow; // Call builtin. | 926 Label slow; // Call builtin. |
| 935 Label not_smis, both_loaded_as_doubles, lhs_not_nan; | 927 Label not_smis, both_loaded_as_doubles, lhs_not_nan; |
| 936 | 928 |
| (...skipping 38 matching lines...) |
| 975 EmitSmiNonsmiComparison(masm, lhs_, rhs_, &lhs_not_nan, &slow, strict_); | 967 EmitSmiNonsmiComparison(masm, lhs_, rhs_, &lhs_not_nan, &slow, strict_); |
| 976 | 968 |
| 977 __ bind(&both_loaded_as_doubles); | 969 __ bind(&both_loaded_as_doubles); |
| 978 // The arguments have been converted to doubles and stored in d6 and d7, if | 970 // The arguments have been converted to doubles and stored in d6 and d7, if |
| 979 // VFP3 is supported, or in r0, r1, r2, and r3. | 971 // VFP3 is supported, or in r0, r1, r2, and r3. |
| 980 if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) { | 972 if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) { |
| 981 __ bind(&lhs_not_nan); | 973 __ bind(&lhs_not_nan); |
| 982 CpuFeatures::Scope scope(VFP3); | 974 CpuFeatures::Scope scope(VFP3); |
| 983 Label no_nan; | 975 Label no_nan; |
| 984 // ARMv7 VFP3 instructions to implement double precision comparison. | 976 // ARMv7 VFP3 instructions to implement double precision comparison. |
| 985 __ vcmp(d7, d6); | 977 __ VFPCompareAndSetFlags(d7, d6); |
| 986 __ vmrs(pc); // Move vector status bits to normal status bits. | |
| 987 Label nan; | 978 Label nan; |
| 988 __ b(vs, &nan); | 979 __ b(vs, &nan); |
| 989 __ mov(r0, Operand(EQUAL), LeaveCC, eq); | 980 __ mov(r0, Operand(EQUAL), LeaveCC, eq); |
| 990 __ mov(r0, Operand(LESS), LeaveCC, lt); | 981 __ mov(r0, Operand(LESS), LeaveCC, lt); |
| 991 __ mov(r0, Operand(GREATER), LeaveCC, gt); | 982 __ mov(r0, Operand(GREATER), LeaveCC, gt); |
| 992 __ Ret(); | 983 __ Ret(); |
| 993 | 984 |
| 994 __ bind(&nan); | 985 __ bind(&nan); |
| 995 // If one of the sides was a NaN then the v flag is set. Load r0 with | 986 // If one of the sides was a NaN then the v flag is set. Load r0 with |
| 996 // whatever it takes to make the comparison fail, since comparisons with NaN | 987 // whatever it takes to make the comparison fail, since comparisons with NaN |
| (...skipping 99 matching lines...) |
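For reference on the NaN branch just above (the comment continues into the elided lines): every ordered comparison involving an IEEE 754 NaN is false, and an unordered VFP compare ends up setting the V flag once the status bits reach the CPSR, which is what b(vs, &nan) tests. A small standalone C++ illustration, not V8 code:

#include <cassert>
#include <cmath>

int main() {
  double nan = std::nan("");
  // Ordered comparisons with NaN are always false; only != is true.
  assert(!(nan == nan) && !(nan < 1.0) && !(nan >= 1.0));
  assert(nan != nan);
  return 0;
}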
| 1096 __ b(eq, &false_result); | 1087 __ b(eq, &false_result); |
| 1097 | 1088 |
| 1098 // HeapNumber => false iff +0, -0, or NaN. | 1089 // HeapNumber => false iff +0, -0, or NaN. |
| 1099 __ ldr(scratch, FieldMemOperand(tos_, HeapObject::kMapOffset)); | 1090 __ ldr(scratch, FieldMemOperand(tos_, HeapObject::kMapOffset)); |
| 1100 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex); | 1091 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex); |
| 1101 __ cmp(scratch, ip); | 1092 __ cmp(scratch, ip); |
| 1102 __ b(&not_heap_number, ne); | 1093 __ b(&not_heap_number, ne); |
| 1103 | 1094 |
| 1104 __ sub(ip, tos_, Operand(kHeapObjectTag)); | 1095 __ sub(ip, tos_, Operand(kHeapObjectTag)); |
| 1105 __ vldr(d1, ip, HeapNumber::kValueOffset); | 1096 __ vldr(d1, ip, HeapNumber::kValueOffset); |
| 1106 __ vcmp(d1, 0.0); | 1097 __ VFPCompareAndSetFlags(d1, 0.0); |
| 1107 __ vmrs(pc); | |
| 1108 // "tos_" is a register, and contains a non zero value by default. | 1098 // "tos_" is a register, and contains a non zero value by default. |
| 1109 // Hence we only need to overwrite "tos_" with zero to return false for | 1099 // Hence we only need to overwrite "tos_" with zero to return false for |
| 1110 // FP_ZERO or FP_NAN cases. Otherwise, by default it returns true. | 1100 // FP_ZERO or FP_NAN cases. Otherwise, by default it returns true. |
| 1111 __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, eq); // for FP_ZERO | 1101 __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, eq); // for FP_ZERO |
| 1112 __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, vs); // for FP_NAN | 1102 __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, vs); // for FP_NAN |
| 1113 __ Ret(); | 1103 __ Ret(); |
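The rule stated above (a HeapNumber converts to false only for +0, -0, or NaN) is what the VFPCompareAndSetFlags(d1, 0.0) plus the eq and vs conditional moves implement. A hypothetical plain C++ helper with the same behaviour, for illustration only:

#include <cassert>
#include <cmath>

// Mirrors the comment above: a heap number is false exactly for +0, -0, NaN.
bool HeapNumberToBoolean(double value) {
  // -0.0 == 0.0 under IEEE 754, so one equality test covers both zeros;
  // NaN fails every comparison, so it needs its own check.
  return value != 0.0 && !std::isnan(value);
}

int main() {
  assert(!HeapNumberToBoolean(0.0) && !HeapNumberToBoolean(-0.0));
  assert(!HeapNumberToBoolean(std::nan("")));
  assert(HeapNumberToBoolean(3.25));
  return 0;
}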
| 1114 | 1104 |
| 1115 __ bind(&not_heap_number); | 1105 __ bind(&not_heap_number); |
| 1116 | 1106 |
| 1117 // Check if the value is 'null'. | 1107 // Check if the value is 'null'. |
| (...skipping 106 matching lines...) |
| 1224 // After this point we have the left hand side in r1 and the right hand side | 1214 // After this point we have the left hand side in r1 and the right hand side |
| 1225 // in r0. | 1215 // in r0. |
| 1226 if (lhs.is(r0)) { | 1216 if (lhs.is(r0)) { |
| 1227 __ Swap(r0, r1, ip); | 1217 __ Swap(r0, r1, ip); |
| 1228 } | 1218 } |
| 1229 | 1219 |
| 1230 // The type transition also calculates the answer. | 1220 // The type transition also calculates the answer. |
| 1231 bool generate_code_to_calculate_answer = true; | 1221 bool generate_code_to_calculate_answer = true; |
| 1232 | 1222 |
| 1233 if (ShouldGenerateFPCode()) { | 1223 if (ShouldGenerateFPCode()) { |
| | 1224 // DIV has neither SmiSmi fast code nor specialized slow code. |
| | 1225 // So don't try to patch a DIV Stub. |
| 1234 if (runtime_operands_type_ == BinaryOpIC::DEFAULT) { | 1226 if (runtime_operands_type_ == BinaryOpIC::DEFAULT) { |
| 1235 switch (op_) { | 1227 switch (op_) { |
| 1236 case Token::ADD: | 1228 case Token::ADD: |
| 1237 case Token::SUB: | 1229 case Token::SUB: |
| 1238 case Token::MUL: | 1230 case Token::MUL: |
| 1239 case Token::DIV: | |
| 1240 GenerateTypeTransition(masm); // Tail call. | 1231 GenerateTypeTransition(masm); // Tail call. |
| 1241 generate_code_to_calculate_answer = false; | 1232 generate_code_to_calculate_answer = false; |
| 1242 break; | 1233 break; |
| 1243 | 1234 |
| | 1235 case Token::DIV: |
| | 1236 // DIV has neither SmiSmi fast code nor specialized slow code. |
| | 1237 // So don't try to patch a DIV Stub. |
| | 1238 break; |
| | 1239 |
| 1244 default: | 1240 default: |
| 1245 break; | 1241 break; |
| 1246 } | 1242 } |
| 1247 } | 1243 } |
| 1248 | 1244 |
| 1249 if (generate_code_to_calculate_answer) { | 1245 if (generate_code_to_calculate_answer) { |
| 1250 Label r0_is_smi, r1_is_smi, finished_loading_r0, finished_loading_r1; | 1246 Label r0_is_smi, r1_is_smi, finished_loading_r0, finished_loading_r1; |
| 1251 if (mode_ == NO_OVERWRITE) { | 1247 if (mode_ == NO_OVERWRITE) { |
| 1252 // In the case where there is no chance of an overwritable float we may | 1248 // In the case where there is no chance of an overwritable float we may |
| 1253 // as well do the allocation immediately while r0 and r1 are untouched. | 1249 // as well do the allocation immediately while r0 and r1 are untouched. |
| (...skipping 40 matching lines...) |
| 1294 __ mov(r7, Operand(r0)); | 1290 __ mov(r7, Operand(r0)); |
| 1295 ConvertToDoubleStub stub3(r3, r2, r7, r4); | 1291 ConvertToDoubleStub stub3(r3, r2, r7, r4); |
| 1296 __ push(lr); | 1292 __ push(lr); |
| 1297 __ Call(stub3.GetCode(), RelocInfo::CODE_TARGET); | 1293 __ Call(stub3.GetCode(), RelocInfo::CODE_TARGET); |
| 1298 __ pop(lr); | 1294 __ pop(lr); |
| 1299 } | 1295 } |
| 1300 | 1296 |
| 1301 // HEAP_NUMBERS stub is slower than GENERIC on a pair of smis. | 1297 // HEAP_NUMBERS stub is slower than GENERIC on a pair of smis. |
| 1302 // r0 is known to be a smi. If r1 is also a smi then switch to GENERIC. | 1298 // r0 is known to be a smi. If r1 is also a smi then switch to GENERIC. |
| 1303 Label r1_is_not_smi; | 1299 Label r1_is_not_smi; |
| 1304 if (runtime_operands_type_ == BinaryOpIC::HEAP_NUMBERS) { | 1300 if ((runtime_operands_type_ == BinaryOpIC::HEAP_NUMBERS) && |
| | 1301 HasSmiSmiFastPath()) { |
| 1305 __ tst(r1, Operand(kSmiTagMask)); | 1302 __ tst(r1, Operand(kSmiTagMask)); |
| 1306 __ b(ne, &r1_is_not_smi); | 1303 __ b(ne, &r1_is_not_smi); |
| 1307 GenerateTypeTransition(masm); // Tail call. | 1304 GenerateTypeTransition(masm); // Tail call. |
| 1308 } | 1305 } |
| 1309 | 1306 |
| 1310 __ bind(&finished_loading_r0); | 1307 __ bind(&finished_loading_r0); |
| 1311 | 1308 |
| 1312 // Move r1 to a double in r0-r1. | 1309 // Move r1 to a double in r0-r1. |
| 1313 __ tst(r1, Operand(kSmiTagMask)); | 1310 __ tst(r1, Operand(kSmiTagMask)); |
| 1314 __ b(eq, &r1_is_smi); // It's a Smi so don't check it's a heap number. | 1311 __ b(eq, &r1_is_smi); // It's a Smi so don't check it's a heap number. |
| (...skipping 1201 matching lines...) |
| 2516 | 2513 |
| 2517 // Set the top handler address to next handler past the current ENTRY handler. | 2514 // Set the top handler address to next handler past the current ENTRY handler. |
| 2518 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); | 2515 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); |
| 2519 __ pop(r2); | 2516 __ pop(r2); |
| 2520 __ str(r2, MemOperand(r3)); | 2517 __ str(r2, MemOperand(r3)); |
| 2521 | 2518 |
| 2522 if (type == OUT_OF_MEMORY) { | 2519 if (type == OUT_OF_MEMORY) { |
| 2523 // Set external caught exception to false. | 2520 // Set external caught exception to false. |
| 2524 ExternalReference external_caught( | 2521 ExternalReference external_caught( |
| 2525 Isolate::k_external_caught_exception_address); | 2522 Isolate::k_external_caught_exception_address); |
| 2526 __ mov(r0, Operand(false)); | 2523 __ mov(r0, Operand(false, RelocInfo::NONE)); |
| 2527 __ mov(r2, Operand(external_caught)); | 2524 __ mov(r2, Operand(external_caught)); |
| 2528 __ str(r0, MemOperand(r2)); | 2525 __ str(r0, MemOperand(r2)); |
| 2529 | 2526 |
| 2530 // Set pending exception and r0 to out of memory exception. | 2527 // Set pending exception and r0 to out of memory exception. |
| 2531 Failure* out_of_memory = Failure::OutOfMemoryException(); | 2528 Failure* out_of_memory = Failure::OutOfMemoryException(); |
| 2532 __ mov(r0, Operand(reinterpret_cast<int32_t>(out_of_memory))); | 2529 __ mov(r0, Operand(reinterpret_cast<int32_t>(out_of_memory))); |
| 2533 __ mov(r2, Operand(ExternalReference( | 2530 __ mov(r2, Operand(ExternalReference( |
| 2534 Isolate::k_pending_exception_address))); | 2531 Isolate::k_pending_exception_address))); |
| 2535 __ str(r0, MemOperand(r2)); | 2532 __ str(r0, MemOperand(r2)); |
| 2536 } | 2533 } |
| (...skipping 358 matching lines...) |
| 2895 #ifdef DEBUG | 2892 #ifdef DEBUG |
| 2896 if (FLAG_debug_code) { | 2893 if (FLAG_debug_code) { |
| 2897 __ mov(lr, Operand(pc)); | 2894 __ mov(lr, Operand(pc)); |
| 2898 } | 2895 } |
| 2899 #endif | 2896 #endif |
| 2900 __ ldm(ia_w, sp, kCalleeSaved | pc.bit()); | 2897 __ ldm(ia_w, sp, kCalleeSaved | pc.bit()); |
| 2901 } | 2898 } |
| 2902 | 2899 |
| 2903 | 2900 |
| 2904 // Uses registers r0 to r4. Expected input is | 2901 // Uses registers r0 to r4. Expected input is |
| 2905 // function in r0 (or at sp+1*ptrsz) and object in | 2902 // object in r0 (or at sp+1*kPointerSize) and function in |
| 2906 // r1 (or at sp), depending on whether or not | 2903 // r1 (or at sp), depending on whether or not |
| 2907 // args_in_registers() is true. | 2904 // args_in_registers() is true. |
| 2908 void InstanceofStub::Generate(MacroAssembler* masm) { | 2905 void InstanceofStub::Generate(MacroAssembler* masm) { |
| 2909 // Fixed register usage throughout the stub: | 2906 // Fixed register usage throughout the stub: |
| 2910 const Register object = r1; // Object (lhs). | 2907 const Register object = r0; // Object (lhs). |
| 2911 const Register map = r3; // Map of the object. | 2908 const Register map = r3; // Map of the object. |
| 2912 const Register function = r0; // Function (rhs). | 2909 const Register function = r1; // Function (rhs). |
| 2913 const Register prototype = r4; // Prototype of the function. | 2910 const Register prototype = r4; // Prototype of the function. |
| 2914 const Register scratch = r2; | 2911 const Register scratch = r2; |
| 2915 Label slow, loop, is_instance, is_not_instance, not_js_object; | 2912 Label slow, loop, is_instance, is_not_instance, not_js_object; |
| 2916 if (!args_in_registers()) { | 2913 if (!HasArgsInRegisters()) { |
| 2917 __ ldr(function, MemOperand(sp, 1 * kPointerSize)); | 2914 __ ldr(object, MemOperand(sp, 1 * kPointerSize)); |
| 2918 __ ldr(object, MemOperand(sp, 0)); | 2915 __ ldr(function, MemOperand(sp, 0)); |
| 2919 } | 2916 } |
| 2920 | 2917 |
| 2921 // Check that the left hand is a JS object and load map. | 2918 // Check that the left hand is a JS object and load map. |
| 2922 __ BranchOnSmi(object, &slow); | 2919 __ BranchOnSmi(object, &not_js_object); |
| 2923 __ IsObjectJSObjectType(object, map, scratch, &slow); | 2920 __ IsObjectJSObjectType(object, map, scratch, &not_js_object); |
| 2924 | 2921 |
| 2925 // Look up the function and the map in the instanceof cache. | 2922 // Look up the function and the map in the instanceof cache. |
| 2926 Label miss; | 2923 Label miss; |
| 2927 __ LoadRoot(ip, Heap::kInstanceofCacheFunctionRootIndex); | 2924 __ LoadRoot(ip, Heap::kInstanceofCacheFunctionRootIndex); |
| 2928 __ cmp(object, ip); | 2925 __ cmp(function, ip); |
| 2929 __ b(ne, &miss); | 2926 __ b(ne, &miss); |
| 2930 __ LoadRoot(ip, Heap::kInstanceofCacheMapRootIndex); | 2927 __ LoadRoot(ip, Heap::kInstanceofCacheMapRootIndex); |
| 2931 __ cmp(map, ip); | 2928 __ cmp(map, ip); |
| 2932 __ b(ne, &miss); | 2929 __ b(ne, &miss); |
| 2933 __ LoadRoot(function, Heap::kInstanceofCacheAnswerRootIndex); | 2930 __ LoadRoot(r0, Heap::kInstanceofCacheAnswerRootIndex); |
| 2934 __ Ret(args_in_registers() ? 0 : 2); | 2931 __ Ret(HasArgsInRegisters() ? 0 : 2); |
| 2935 | 2932 |
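The lookup above consults a single-entry instanceof cache held in three heap roots: the last function, the last object map, and the answer stored for that pair (Smi 0 on the is_instance path, Smi 1 on the is_not_instance path below). An illustrative plain C++ sketch of that fast path, with made-up names and types rather than V8 internals:

struct InstanceofCacheEntry {
  const void* function;  // Heap::kInstanceofCacheFunctionRootIndex
  const void* map;       // Heap::kInstanceofCacheMapRootIndex
  int answer;            // Heap::kInstanceofCacheAnswerRootIndex (0 = instance, 1 = not)
};

// Mirrors the LoadRoot / cmp / b(ne, &miss) sequence above.
bool TryCachedInstanceof(const InstanceofCacheEntry& cache,
                         const void* function, const void* map, int* answer) {
  if (cache.function != function || cache.map != map) return false;  // cache miss
  *answer = cache.answer;  // hit: reuse the previously stored answer
  return true;
}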
| 2936 __ bind(&miss); | 2933 __ bind(&miss); |
| 2937 __ TryGetFunctionPrototype(object, prototype, scratch, &slow); | 2934 __ TryGetFunctionPrototype(function, prototype, scratch, &slow); |
| 2938 | 2935 |
| 2939 // Check that the function prototype is a JS object. | 2936 // Check that the function prototype is a JS object. |
| 2940 __ BranchOnSmi(prototype, &slow); | 2937 __ BranchOnSmi(prototype, &slow); |
| 2941 __ IsObjectJSObjectType(prototype, scratch, scratch, &slow); | 2938 __ IsObjectJSObjectType(prototype, scratch, scratch, &slow); |
| 2942 | 2939 |
| 2943 __ StoreRoot(object, Heap::kInstanceofCacheFunctionRootIndex); | 2940 __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex); |
| 2944 __ StoreRoot(map, Heap::kInstanceofCacheMapRootIndex); | 2941 __ StoreRoot(map, Heap::kInstanceofCacheMapRootIndex); |
| 2945 | 2942 |
| 2946 // Register mapping: r3 is object map and r4 is function prototype. | 2943 // Register mapping: r3 is object map and r4 is function prototype. |
| 2947 // Get prototype of object into r2. | 2944 // Get prototype of object into r2. |
| 2948 __ ldr(scratch, FieldMemOperand(map, Map::kPrototypeOffset)); | 2945 __ ldr(scratch, FieldMemOperand(map, Map::kPrototypeOffset)); |
| 2949 | 2946 |
| 2950 // Loop through the prototype chain looking for the function prototype. | 2947 // Loop through the prototype chain looking for the function prototype. |
| 2951 __ bind(&loop); | 2948 __ bind(&loop); |
| 2952 __ cmp(scratch, Operand(prototype)); | 2949 __ cmp(scratch, Operand(prototype)); |
| 2953 __ b(eq, &is_instance); | 2950 __ b(eq, &is_instance); |
| 2954 __ LoadRoot(ip, Heap::kNullValueRootIndex); | 2951 __ LoadRoot(ip, Heap::kNullValueRootIndex); |
| 2955 __ cmp(scratch, ip); | 2952 __ cmp(scratch, ip); |
| 2956 __ b(eq, &is_not_instance); | 2953 __ b(eq, &is_not_instance); |
| 2957 __ ldr(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset)); | 2954 __ ldr(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset)); |
| 2958 __ ldr(scratch, FieldMemOperand(scratch, Map::kPrototypeOffset)); | 2955 __ ldr(scratch, FieldMemOperand(scratch, Map::kPrototypeOffset)); |
| 2959 __ jmp(&loop); | 2956 __ jmp(&loop); |
| 2960 | 2957 |
| 2961 __ bind(&is_instance); | 2958 __ bind(&is_instance); |
| 2962 __ mov(r0, Operand(Smi::FromInt(0))); | 2959 __ mov(r0, Operand(Smi::FromInt(0))); |
| 2963 __ StoreRoot(r0, Heap::kInstanceofCacheAnswerRootIndex); | 2960 __ StoreRoot(r0, Heap::kInstanceofCacheAnswerRootIndex); |
| 2964 __ Ret(args_in_registers() ? 0 : 2); | 2961 __ Ret(HasArgsInRegisters() ? 0 : 2); |
| 2965 | 2962 |
| 2966 __ bind(&is_not_instance); | 2963 __ bind(&is_not_instance); |
| 2967 __ mov(r0, Operand(Smi::FromInt(1))); | 2964 __ mov(r0, Operand(Smi::FromInt(1))); |
| 2968 __ Ret(args_in_registers() ? 0 : 2); | 2965 __ StoreRoot(r0, Heap::kInstanceofCacheAnswerRootIndex); |
| | 2966 __ Ret(HasArgsInRegisters() ? 0 : 2); |
| 2969 | 2967 |
| 2970 Label object_not_null, object_not_null_or_smi; | 2968 Label object_not_null, object_not_null_or_smi; |
| 2971 __ bind(&not_js_object); | 2969 __ bind(&not_js_object); |
| 2972 // Before null, smi and string value checks, check that the rhs is a function | 2970 // Before null, smi and string value checks, check that the rhs is a function |
| 2973 // as for a non-function rhs an exception needs to be thrown. | 2971 // as for a non-function rhs an exception needs to be thrown. |
| 2974 __ BranchOnSmi(function, &slow); | 2972 __ BranchOnSmi(function, &slow); |
| 2975 __ CompareObjectType(function, map, scratch, JS_FUNCTION_TYPE); | 2973 __ CompareObjectType(function, map, scratch, JS_FUNCTION_TYPE); |
| 2976 __ b(ne, &slow); | 2974 __ b(ne, &slow); |
| 2977 | 2975 |
| 2978 // Null is not instance of anything. | 2976 // Null is not instance of anything. |
| 2979 __ cmp(scratch, Operand(FACTORY->null_value())); | 2977 __ cmp(scratch, Operand(FACTORY->null_value())); |
| 2980 __ b(ne, &object_not_null); | 2978 __ b(ne, &object_not_null); |
| 2981 __ mov(r0, Operand(Smi::FromInt(1))); | 2979 __ mov(r0, Operand(Smi::FromInt(1))); |
| 2982 __ Ret(args_in_registers() ? 0 : 2); | 2980 __ Ret(HasArgsInRegisters() ? 0 : 2); |
| 2983 | 2981 |
| 2984 __ bind(&object_not_null); | 2982 __ bind(&object_not_null); |
| 2985 // Smi values are not instances of anything. | 2983 // Smi values are not instances of anything. |
| 2986 __ BranchOnNotSmi(object, &object_not_null_or_smi); | 2984 __ BranchOnNotSmi(object, &object_not_null_or_smi); |
| 2987 __ mov(r0, Operand(Smi::FromInt(1))); | 2985 __ mov(r0, Operand(Smi::FromInt(1))); |
| 2988 __ Ret(args_in_registers() ? 0 : 2); | 2986 __ Ret(HasArgsInRegisters() ? 0 : 2); |
| 2989 | 2987 |
| 2990 __ bind(&object_not_null_or_smi); | 2988 __ bind(&object_not_null_or_smi); |
| 2991 // String values are not instances of anything. | 2989 // String values are not instances of anything. |
| 2992 __ IsObjectJSStringType(object, scratch, &slow); | 2990 __ IsObjectJSStringType(object, scratch, &slow); |
| 2993 __ mov(r0, Operand(Smi::FromInt(1))); | 2991 __ mov(r0, Operand(Smi::FromInt(1))); |
| 2994 __ Ret(args_in_registers() ? 0 : 2); | 2992 __ Ret(HasArgsInRegisters() ? 0 : 2); |
| 2995 | 2993 |
| 2996 // Slow-case. Tail call builtin. | 2994 // Slow-case. Tail call builtin. |
| 2997 __ bind(&slow); | 2995 __ bind(&slow); |
| | 2996 if (HasArgsInRegisters()) { |
| | 2997 __ Push(r0, r1); |
| | 2998 } |
| 2998 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_JS); | 2999 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_JS); |
| 2999 } | 3000 } |
| 3000 | 3001 |
| 3001 | 3002 |
| 3002 void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) { | 3003 void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) { |
| 3003 // The displacement is the offset of the last parameter (if any) | 3004 // The displacement is the offset of the last parameter (if any) |
| 3004 // relative to the frame pointer. | 3005 // relative to the frame pointer. |
| 3005 static const int kDisplacement = | 3006 static const int kDisplacement = |
| 3006 StandardFrameConstants::kCallerSPOffset - kPointerSize; | 3007 StandardFrameConstants::kCallerSPOffset - kPointerSize; |
| 3007 | 3008 |
| 3008 // Check that the key is a smi. | 3009 // Check that the key is a smi. |
| 3009 Label slow; | 3010 Label slow; |
| 3010 __ BranchOnNotSmi(r1, &slow); | 3011 __ BranchOnNotSmi(r1, &slow); |
| 3011 | 3012 |
| 3012 // Check if the calling frame is an arguments adaptor frame. | 3013 // Check if the calling frame is an arguments adaptor frame. |
| 3013 Label adaptor; | 3014 Label adaptor; |
| 3014 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); | 3015 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); |
| 3015 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset)); | 3016 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset)); |
| 3016 __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | 3017 __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); |
| 3017 __ b(eq, &adaptor); | 3018 __ b(eq, &adaptor); |
| 3018 | 3019 |
| 3019 // Check index against formal parameters count limit passed in | 3020 // Check index against formal parameters count limit passed in |
| 3020 // through register r0. Use unsigned comparison to get negative | 3021 // through register r0. Use unsigned comparison to get negative |
| 3021 // check for free. | 3022 // check for free. |
| 3022 __ cmp(r1, r0); | 3023 __ cmp(r1, r0); |
| 3023 __ b(cs, &slow); | 3024 __ b(hs, &slow); |
| 3024 | 3025 |
| 3025 // Read the argument from the stack and return it. | 3026 // Read the argument from the stack and return it. |
| 3026 __ sub(r3, r0, r1); | 3027 __ sub(r3, r0, r1); |
| 3027 __ add(r3, fp, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize)); | 3028 __ add(r3, fp, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize)); |
| 3028 __ ldr(r0, MemOperand(r3, kDisplacement)); | 3029 __ ldr(r0, MemOperand(r3, kDisplacement)); |
| 3029 __ Jump(lr); | 3030 __ Jump(lr); |
| 3030 | 3031 |
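Side note on the bounds check above: the cs to hs change is purely cosmetic, since cs and hs encode the same ARM condition, but hs spells out the unsigned "higher or same" intent. Treating the index as unsigned is what makes the negative check free, because a negative value reinterpreted as unsigned becomes huge and fails the same bound test. A standalone C++ illustration, not V8 code:

#include <cassert>
#include <cstdint>

// A negative index reinterpreted as unsigned is enormous, so a single
// "index < count" test rejects both negative and too-large indices.
bool InRange(int32_t index, int32_t count) {
  return static_cast<uint32_t>(index) < static_cast<uint32_t>(count);
}

int main() {
  assert(InRange(0, 3) && InRange(2, 3));
  assert(!InRange(3, 3));   // too large
  assert(!InRange(-1, 3));  // negative wraps around and fails the check
  return 0;
}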
| 3031 // Arguments adaptor case: Check index against actual arguments | 3032 // Arguments adaptor case: Check index against actual arguments |
| 3032 // limit found in the arguments adaptor frame. Use unsigned | 3033 // limit found in the arguments adaptor frame. Use unsigned |
| 3033 // comparison to get negative check for free. | 3034 // comparison to get negative check for free. |
| (...skipping 1880 matching lines...) |
| 4914 if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) { | 4915 if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) { |
| 4915 CpuFeatures::Scope scope(VFP3); | 4916 CpuFeatures::Scope scope(VFP3); |
| 4916 | 4917 |
| 4917 // Load left and right operand | 4918 // Load left and right operand |
| 4918 __ sub(r2, r1, Operand(kHeapObjectTag)); | 4919 __ sub(r2, r1, Operand(kHeapObjectTag)); |
| 4919 __ vldr(d0, r2, HeapNumber::kValueOffset); | 4920 __ vldr(d0, r2, HeapNumber::kValueOffset); |
| 4920 __ sub(r2, r0, Operand(kHeapObjectTag)); | 4921 __ sub(r2, r0, Operand(kHeapObjectTag)); |
| 4921 __ vldr(d1, r2, HeapNumber::kValueOffset); | 4922 __ vldr(d1, r2, HeapNumber::kValueOffset); |
| 4922 | 4923 |
| 4923 // Compare operands | 4924 // Compare operands |
| 4924 __ vcmp(d0, d1); | 4925 __ VFPCompareAndSetFlags(d0, d1); |
| 4925 __ vmrs(pc); // Move vector status bits to normal status bits. | |
| 4926 | 4926 |
| 4927 // Don't base result on status bits when a NaN is involved. | 4927 // Don't base result on status bits when a NaN is involved. |
| 4928 __ b(vs, &unordered); | 4928 __ b(vs, &unordered); |
| 4929 | 4929 |
| 4930 // Return a result of -1, 0, or 1, based on status bits. | 4930 // Return a result of -1, 0, or 1, based on status bits. |
| 4931 __ mov(r0, Operand(EQUAL), LeaveCC, eq); | 4931 __ mov(r0, Operand(EQUAL), LeaveCC, eq); |
| 4932 __ mov(r0, Operand(LESS), LeaveCC, lt); | 4932 __ mov(r0, Operand(LESS), LeaveCC, lt); |
| 4933 __ mov(r0, Operand(GREATER), LeaveCC, gt); | 4933 __ mov(r0, Operand(GREATER), LeaveCC, gt); |
| 4934 __ Ret(); | 4934 __ Ret(); |
| 4935 | 4935 |
| (...skipping 50 matching lines...) |
| 4986 __ pop(r1); | 4986 __ pop(r1); |
| 4987 __ Jump(r2); | 4987 __ Jump(r2); |
| 4988 } | 4988 } |
| 4989 | 4989 |
| 4990 | 4990 |
| 4991 #undef __ | 4991 #undef __ |
| 4992 | 4992 |
| 4993 } } // namespace v8::internal | 4993 } } // namespace v8::internal |
| 4994 | 4994 |
| 4995 #endif // V8_TARGET_ARCH_ARM | 4995 #endif // V8_TARGET_ARCH_ARM |