| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 873 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
// Builtin entry for a SOFT deoptimization notification.
// Delegates to the shared notify-deoptimized helper with bailout type
// Deoptimizer::SOFT; the helper emits the actual stub code.
void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}
| 887 | 887 |
| 888 | 888 |
// Builtin entry for a LAZY deoptimization notification.
// Delegates to the shared notify-deoptimized helper with bailout type
// Deoptimizer::LAZY, mirroring the SOFT variant above.
void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}
| 892 | 892 |
| 893 | 893 |
// NOTE(review): this builtin exists only in the OLD column of this diff —
// the CL under review deletes it (its role is taken over by
// Generate_OsrAfterStackCheck / Generate_OnStackReplacement).
//
// Notifies the runtime of an on-stack-replacement event. Saves and restores
// ALL JS caller-saved and callee-saved registers plus lr/fp around the
// runtime call, because the saved words are not treated as tagged pointers
// by the GC — this is only safe while Runtime::kNotifyOSR does no GC, as
// the original comment below concedes.
void Builtins::Generate_NotifyOSR(MacroAssembler* masm) {
  // For now, we are relying on the fact that Runtime::NotifyOSR
  // doesn't do any garbage collection which allows us to save/restore
  // the registers without worrying about which of them contain
  // pointers. This seems a bit fragile.
  // Push the full register set (descending, writeback) onto the stack.
  __ stm(db_w, sp, kJSCallerSaved | kCalleeSaved | lr.bit() | fp.bit());
  {
    // An internal frame is required around any runtime call.
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kNotifyOSR, 0);
  }
  // Pop the same register set (ascending, writeback) and return.
  __ ldm(ia_w, sp, kJSCallerSaved | kCalleeSaved | lr.bit() | fp.bit());
  __ Ret();
}
| 907 | |
| 908 | |
| 909 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { | 894 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { |
| 910 // Lookup the function in the JavaScript frame. | 895 // Lookup the function in the JavaScript frame. |
| 911 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 896 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
| 912 { | 897 { |
| 913 FrameScope scope(masm, StackFrame::INTERNAL); | 898 FrameScope scope(masm, StackFrame::INTERNAL); |
| 914 // Lookup and calculate pc offset. | 899 // Lookup and calculate pc offset. |
| 915 __ ldr(r1, MemOperand(fp, StandardFrameConstants::kCallerPCOffset)); | 900 __ ldr(r1, MemOperand(fp, StandardFrameConstants::kCallerPCOffset)); |
| 916 __ ldr(r2, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset)); | 901 __ ldr(r2, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset)); |
| 917 __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCodeOffset)); | 902 __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCodeOffset)); |
| 918 __ sub(r1, r1, Operand(Code::kHeaderSize - kHeapObjectTag)); | 903 __ sub(r1, r1, Operand(Code::kHeaderSize - kHeapObjectTag)); |
| (...skipping 26 matching lines...) Expand all Loading... |
| 945 // Compute the target address = code_obj + header_size + osr_offset | 930 // Compute the target address = code_obj + header_size + osr_offset |
| 946 // <entry_addr> = <code_obj> + #header_size + <osr_offset> | 931 // <entry_addr> = <code_obj> + #header_size + <osr_offset> |
| 947 __ add(r0, r0, Operand::SmiUntag(r1)); | 932 __ add(r0, r0, Operand::SmiUntag(r1)); |
| 948 __ add(lr, r0, Operand(Code::kHeaderSize - kHeapObjectTag)); | 933 __ add(lr, r0, Operand(Code::kHeaderSize - kHeapObjectTag)); |
| 949 | 934 |
| 950 // And "return" to the OSR entry point of the function. | 935 // And "return" to the OSR entry point of the function. |
| 951 __ Ret(); | 936 __ Ret(); |
| 952 } | 937 } |
| 953 | 938 |
| 954 | 939 |
// NOTE(review): this builtin is newly added in the NEW column of this diff.
//
// Emitted at back-edge stack checks in full-codegen code. If the stack
// pointer is still above the stack limit, the interrupt was not real and we
// simply return. Otherwise we call Runtime::kStackGuard (which may trigger
// OSR compilation among other interrupt work) and then tail-jump to the
// OnStackReplacement builtin to attempt the actual replacement.
void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  // We check the stack limit as indicator that recompilation might be done.
  Label ok;
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  __ cmp(sp, Operand(ip));
  // sp >= stack limit (unsigned) => no interrupt pending, skip the call.
  __ b(hs, &ok);
  {
    // Runtime calls need an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kStackGuard, 0);
  }
  // Tail-call into the OSR builtin; control does not return here.
  __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
          RelocInfo::CODE_TARGET);

  __ bind(&ok);
  __ Ret();
}
| 956 |
| 957 |
| 955 void Builtins::Generate_FunctionCall(MacroAssembler* masm) { | 958 void Builtins::Generate_FunctionCall(MacroAssembler* masm) { |
| 956 // 1. Make sure we have at least one argument. | 959 // 1. Make sure we have at least one argument. |
| 957 // r0: actual number of arguments | 960 // r0: actual number of arguments |
| 958 { Label done; | 961 { Label done; |
| 959 __ cmp(r0, Operand::Zero()); | 962 __ cmp(r0, Operand::Zero()); |
| 960 __ b(ne, &done); | 963 __ b(ne, &done); |
| 961 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); | 964 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); |
| 962 __ push(r2); | 965 __ push(r2); |
| 963 __ add(r0, r0, Operand(1)); | 966 __ add(r0, r0, Operand(1)); |
| 964 __ bind(&done); | 967 __ bind(&done); |
| (...skipping 468 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1433 __ bind(&dont_adapt_arguments); | 1436 __ bind(&dont_adapt_arguments); |
| 1434 __ Jump(r3); | 1437 __ Jump(r3); |
| 1435 } | 1438 } |
| 1436 | 1439 |
| 1437 | 1440 |
| 1438 #undef __ | 1441 #undef __ |
| 1439 | 1442 |
| 1440 } } // namespace v8::internal | 1443 } } // namespace v8::internal |
| 1441 | 1444 |
| 1442 #endif // V8_TARGET_ARCH_ARM | 1445 #endif // V8_TARGET_ARCH_ARM |
| OLD | NEW |