| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_ARM | 5 #if V8_TARGET_ARCH_ARM |
| 6 | 6 |
| 7 #include "src/codegen.h" | 7 #include "src/codegen.h" |
| 8 #include "src/debug/debug.h" | 8 #include "src/debug/debug.h" |
| 9 #include "src/deoptimizer.h" | 9 #include "src/deoptimizer.h" |
| 10 #include "src/full-codegen/full-codegen.h" | 10 #include "src/full-codegen/full-codegen.h" |
| (...skipping 445 matching lines...) | |
| 456 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); | 456 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
| 457 __ Push(r2, r1, r3); // first argument, constructor, new target | 457 __ Push(r2, r1, r3); // first argument, constructor, new target |
| 458 __ CallRuntime(Runtime::kNewObject); | 458 __ CallRuntime(Runtime::kNewObject); |
| 459 __ Pop(r2); | 459 __ Pop(r2); |
| 460 } | 460 } |
| 461 __ str(r2, FieldMemOperand(r0, JSValue::kValueOffset)); | 461 __ str(r2, FieldMemOperand(r0, JSValue::kValueOffset)); |
| 462 __ Ret(); | 462 __ Ret(); |
| 463 } | 463 } |
| 464 | 464 |
| 465 | 465 |
| 466 static void CallRuntimePassFunction( | |
| 467 MacroAssembler* masm, Runtime::FunctionId function_id) { | |
| 468 // ----------- S t a t e ------------- | |
| 469 // -- r1 : target function (preserved for callee) | |
| 470 // -- r3 : new target (preserved for callee) | |
| 471 // ----------------------------------- | |
| 472 | |
| 473 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); | |
| 474 // Push a copy of the target function and the new target. | |
| 475 __ push(r1); | |
| 476 __ push(r3); | |
| 477 // Push function as parameter to the runtime call. | |
| 478 __ Push(r1); | |
| 479 | |
| 480 __ CallRuntime(function_id, 1); | |
| 481 // Restore target function and new target. | |
| 482 __ pop(r3); | |
| 483 __ pop(r1); | |
| 484 } | |
| 485 | |
| 486 | |
| 487 static void GenerateTailCallToSharedCode(MacroAssembler* masm) { | 466 static void GenerateTailCallToSharedCode(MacroAssembler* masm) { |
| 488 __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); | 467 __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); |
| 489 __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCodeOffset)); | 468 __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCodeOffset)); |
| 490 __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag)); | 469 __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag)); |
| 491 __ Jump(r2); | 470 __ Jump(r2); |
| 492 } | 471 } |
| 493 | 472 |
| 473 static void GenerateTailCallToReturnedCode(MacroAssembler* masm, |
| 474 Runtime::FunctionId function_id) { |
| 475 // ----------- S t a t e ------------- |
| 476 // -- r0 : argument count (preserved for callee) |
| 477 // -- r1 : target function (preserved for callee) |
| 478 // -- r3 : new target (preserved for callee) |
| 479 // ----------------------------------- |
| 480 { |
| 481 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
| 482 // Push the number of arguments to the callee. |
| 483 __ SmiTag(r0); |
| 484 __ push(r0); |
| 485 // Push a copy of the target function and the new target. |
| 486 __ push(r1); |
| 487 __ push(r3); |
| 488 // Push function as parameter to the runtime call. |
| 489 __ Push(r1); |
| 490 |
| 491 __ CallRuntime(function_id, 1); |
| 492 __ mov(r2, r0); |
| 493 |
| 494 // Restore target function and new target. |
| 495 __ pop(r3); |
| 496 __ pop(r1); |
| 497 __ pop(r0); |
| 498 __ SmiUntag(r0, r0); |
| 499 } |
| 500 __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag)); |
| 501 __ Jump(r2); |
| 502 } |
| 503 |
| 494 | 504 |
| 495 static void GenerateTailCallToReturnedCode(MacroAssembler* masm) { | |
| 496 __ add(r0, r0, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
| 497 __ Jump(r0); | |
| 498 } | |
| 499 | |
| 500 | |
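The notable difference from the removed `CallRuntimePassFunction` is that the new helper also preserves r0, the argument count, across the runtime call, smi-tagging it before the push so the GC never mistakes a raw integer on the stack for a heap pointer. A minimal sketch of 32-bit smi tagging, assuming V8's one-bit tag scheme on ARM; this is plain C++, not the MacroAssembler API:

```cpp
#include <cassert>
#include <cstdint>

// Sketch of 32-bit smi (small integer) tagging, assuming V8's scheme: smis
// are stored shifted left by one with tag bit 0 clear, while heap pointers
// carry tag bit 0 set, so a GC walking the stack can tell them apart. The
// functions below mirror the __ SmiTag(r0) / __ SmiUntag(r0, r0) pair in the
// helper above.
constexpr int32_t SmiTag(int32_t value) { return value << 1; }
constexpr int32_t SmiUntag(int32_t value) { return value >> 1; }

int main() {
  const int32_t argc = 3;
  const int32_t tagged = SmiTag(argc);  // 6: safe to push inside the frame
  assert((tagged & 1) == 0);            // tag bit clear marks it as a smi
  assert(SmiUntag(tagged) == argc);
  return 0;
}
```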
| 501 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) { | 505 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) { |
| 502 // Checking whether the queued function is ready for install is optional, | 506 // Checking whether the queued function is ready for install is optional, |
| 503 // since we come across interrupts and stack checks elsewhere. However, | 507 // since we come across interrupts and stack checks elsewhere. However, |
| 504 // not checking may delay installing ready functions, and always checking | 508 // not checking may delay installing ready functions, and always checking |
| 505 // would be quite expensive. A good compromise is to first check against | 509 // would be quite expensive. A good compromise is to first check against |
| 506 // stack limit as a cue for an interrupt signal. | 510 // stack limit as a cue for an interrupt signal. |
| 507 Label ok; | 511 Label ok; |
| 508 __ LoadRoot(ip, Heap::kStackLimitRootIndex); | 512 __ LoadRoot(ip, Heap::kStackLimitRootIndex); |
| 509 __ cmp(sp, Operand(ip)); | 513 __ cmp(sp, Operand(ip)); |
| 510 __ b(hs, &ok); | 514 __ b(hs, &ok); |
| 511 | 515 |
| 512 CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode); | 516 GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode); |
| 513 GenerateTailCallToReturnedCode(masm); | |
| 514 | 517 |
| 515 __ bind(&ok); | 518 __ bind(&ok); |
| 516 GenerateTailCallToSharedCode(masm); | 519 GenerateTailCallToSharedCode(masm); |
| 517 } | 520 } |
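The branch condition above encodes the compromise the comment describes: `b(hs, &ok)` takes the branch when sp is unsigned higher-or-same than the stack limit, so the runtime call is reached only when the limit has been moved to signal a pending interrupt. A minimal C-level sketch of that predicate, with illustrative names that are not V8 API:

```cpp
#include <cassert>
#include <cstdint>

// Sketch of the fall-through case of b(hs, &ok): polling the optimization
// queue only happens when sp < stack_limit, i.e. when the stack limit has
// been lowered as an interrupt cue.
bool ShouldPollOptimizationQueue(uintptr_t sp, uintptr_t stack_limit) {
  return sp < stack_limit;  // the "lo" case: fall through to the runtime call
}

int main() {
  assert(!ShouldPollOptimizationQueue(0x8000, 0x4000));  // plenty of stack
  assert(ShouldPollOptimizationQueue(0x2000, 0x4000));   // interrupt cue
  return 0;
}
```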
| 518 | 521 |
| 519 | 522 |
| 520 static void Generate_JSConstructStubHelper(MacroAssembler* masm, | 523 static void Generate_JSConstructStubHelper(MacroAssembler* masm, |
| 521 bool is_api_function, | 524 bool is_api_function, |
| 522 bool create_implicit_receiver, | 525 bool create_implicit_receiver, |
| 523 bool check_derived_construct) { | 526 bool check_derived_construct) { |
| (...skipping 714 matching lines...) | |
| 1238 // This simulates the initial call to bytecode handlers in interpreter entry | 1241 // This simulates the initial call to bytecode handlers in interpreter entry |
| 1239 // trampoline. The return will never actually be taken, but our stack walker | 1242 // trampoline. The return will never actually be taken, but our stack walker |
| 1240 // uses this address to determine whether a frame is interpreted. | 1243 // uses this address to determine whether a frame is interpreted. |
| 1241 __ Move(lr, masm->isolate()->builtins()->InterpreterEntryTrampoline()); | 1244 __ Move(lr, masm->isolate()->builtins()->InterpreterEntryTrampoline()); |
| 1242 | 1245 |
| 1243 Generate_EnterBytecodeDispatch(masm); | 1246 Generate_EnterBytecodeDispatch(masm); |
| 1244 } | 1247 } |
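The lr trick above works because the stack walker classifies frames by their return address. A hypothetical sketch of that classification; the function and its parameters are illustrative, not V8's actual walker:

```cpp
#include <cassert>
#include <cstdint>

// Hypothetical sketch of the classification the comment describes: a frame
// whose return address points into the InterpreterEntryTrampoline builtin is
// treated as an interpreted frame, even though the return is never taken.
bool LooksInterpreted(uintptr_t return_addr, uintptr_t trampoline_start,
                      uintptr_t trampoline_size) {
  // Unsigned subtraction doubles as a range check.
  return return_addr - trampoline_start < trampoline_size;
}

int main() {
  const uintptr_t start = 0x10000, size = 0x200;  // illustrative values
  assert(LooksInterpreted(start + 0x40, start, size));
  assert(!LooksInterpreted(start + size + 4, start, size));
  return 0;
}
```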
| 1245 | 1248 |
| 1246 | 1249 |
| 1247 void Builtins::Generate_CompileLazy(MacroAssembler* masm) { | 1250 void Builtins::Generate_CompileLazy(MacroAssembler* masm) { |
| 1248 CallRuntimePassFunction(masm, Runtime::kCompileLazy); | 1251 GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy); |
| 1249 GenerateTailCallToReturnedCode(masm); | |
| 1250 } | 1252 } |
| 1251 | 1253 |
| 1252 | 1254 |
| 1253 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) { | 1255 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) { |
| 1254 CallRuntimePassFunction(masm, Runtime::kCompileOptimized_NotConcurrent); | 1256 GenerateTailCallToReturnedCode(masm, |
| 1255 GenerateTailCallToReturnedCode(masm); | 1257 Runtime::kCompileOptimized_NotConcurrent); |
| 1256 } | 1258 } |
| 1257 | 1259 |
| 1258 | 1260 |
| 1259 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) { | 1261 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) { |
| 1260 CallRuntimePassFunction(masm, Runtime::kCompileOptimized_Concurrent); | 1262 GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent); |
| 1261 GenerateTailCallToReturnedCode(masm); | |
| 1262 } | 1263 } |
| 1263 | 1264 |
| 1264 | 1265 |
| 1265 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) { | 1266 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) { |
| 1266 // For now, we are relying on the fact that make_code_young doesn't do any | 1267 // For now, we are relying on the fact that make_code_young doesn't do any |
| 1267 // garbage collection which allows us to save/restore the registers without | 1268 // garbage collection which allows us to save/restore the registers without |
| 1268 // worrying about which of them contain pointers. We also don't build an | 1269 // worrying about which of them contain pointers. We also don't build an |
| 1269 // internal frame to make the code faster, since we shouldn't have to do stack | 1270 // internal frame to make the code faster, since we shouldn't have to do stack |
| 1270 // crawls in MakeCodeYoung. This seems a bit fragile. | 1271 // crawls in MakeCodeYoung. This seems a bit fragile. |
| 1271 | 1272 |
| (...skipping 1410 matching lines...) | |
| 2682 } | 2683 } |
| 2683 } | 2684 } |
| 2684 | 2685 |
| 2685 | 2686 |
| 2686 #undef __ | 2687 #undef __ |
| 2687 | 2688 |
| 2688 } // namespace internal | 2689 } // namespace internal |
| 2689 } // namespace v8 | 2690 } // namespace v8 |
| 2690 | 2691 |
| 2691 #endif // V8_TARGET_ARCH_ARM | 2692 #endif // V8_TARGET_ARCH_ARM |