| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 299 matching lines...) |
| 310 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) { | 310 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) { |
| 311 // Checking whether the queued function is ready for install is optional, | 311 // Checking whether the queued function is ready for install is optional, |
| 312 // since we come across interrupts and stack checks elsewhere. However, not | 312 // since we come across interrupts and stack checks elsewhere. However, not |
| 313 // checking may delay installing ready functions, and always checking would be | 313 // checking may delay installing ready functions, and always checking would be |
| 314 // quite expensive. A good compromise is to first check against stack limit as | 314 // quite expensive. A good compromise is to first check against stack limit as |
| 315 // a cue for an interrupt signal. | 315 // a cue for an interrupt signal. |
| 316 Label ok; | 316 Label ok; |
| 317 __ CompareRoot(masm->StackPointer(), Heap::kStackLimitRootIndex); | 317 __ CompareRoot(masm->StackPointer(), Heap::kStackLimitRootIndex); |
| 318 __ B(hs, &ok); | 318 __ B(hs, &ok); |
| 319 | 319 |
| 320 CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode); | 320 CallRuntimePassFunction(masm, Runtime::kHiddenTryInstallOptimizedCode); |
| 321 GenerateTailCallToReturnedCode(masm); | 321 GenerateTailCallToReturnedCode(masm); |
| 322 | 322 |
| 323 __ Bind(&ok); | 323 __ Bind(&ok); |
| 324 GenerateTailCallToSharedCode(masm); | 324 GenerateTailCallToSharedCode(masm); |
| 325 } | 325 } |
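A note for readers outside the V8 tree: the substantive change in this hunk is the rename Runtime::kTryInstallOptimizedCode → Runtime::kHiddenTryInstallOptimizedCode, which appears to move the entry into V8's "hidden" runtime-function table (entries not callable from JS natives); the control flow is untouched. The control flow itself is the compromise the comment describes: use the stack-limit check, which is cheap, as a cue that an interrupt is pending before paying for the install attempt. A minimal standalone C++ model follows; every name in it (StackLimits, try_install_optimized, run_shared_code) is a hypothetical stand-in, not V8 API.

```cpp
// Standalone model of the fast-path check above -- not V8 API.
#include <cstdint>
#include <iostream>

struct StackLimits {
  std::uintptr_t stack_limit;  // Models Heap::kStackLimitRootIndex.
};

using Code = void (*)();

void run_shared_code()    { std::cout << "tail call to shared (unoptimized) code\n"; }
void run_optimized_code() { std::cout << "tail call to freshly installed optimized code\n"; }

// Models Runtime::kHiddenTryInstallOptimizedCode: returns the code to run.
Code try_install_optimized() { return run_optimized_code; }

void in_optimization_queue(std::uintptr_t sp, const StackLimits& limits) {
  // B(hs, &ok): sp at or above the limit means no pending interrupt,
  // so skip the (comparatively expensive) install attempt.
  if (sp >= limits.stack_limit) {
    run_shared_code();
    return;
  }
  // Below the limit an interrupt is pending anyway, so piggyback the
  // "is the optimized code ready?" check on it.
  try_install_optimized()();
}

int main() {
  StackLimits limits{0x1000};
  in_optimization_queue(0x2000, limits);  // fast path
  in_optimization_queue(0x0800, limits);  // slow path via runtime
}
```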
| 326 | 326 |
| 327 | 327 |
| 328 static void Generate_JSConstructStubHelper(MacroAssembler* masm, | 328 static void Generate_JSConstructStubHelper(MacroAssembler* masm, |
| 329 bool is_api_function, | 329 bool is_api_function, |
| 330 bool count_constructions, | 330 bool count_constructions, |
| (...skipping 69 matching lines...) |
| 400 FieldMemOperand(x3, SharedFunctionInfo::kConstructionCountOffset); | 400 FieldMemOperand(x3, SharedFunctionInfo::kConstructionCountOffset); |
| 401 __ Ldrb(x4, constructor_count); | 401 __ Ldrb(x4, constructor_count); |
| 402 __ Subs(x4, x4, 1); | 402 __ Subs(x4, x4, 1); |
| 403 __ Strb(x4, constructor_count); | 403 __ Strb(x4, constructor_count); |
| 404 __ B(ne, &allocate); | 404 __ B(ne, &allocate); |
| 405 | 405 |
| 406 // Push the constructor and map to the stack, and the constructor again | 406 // Push the constructor and map to the stack, and the constructor again |
| 407 // as argument to the runtime call. | 407 // as argument to the runtime call. |
| 408 __ Push(constructor, init_map, constructor); | 408 __ Push(constructor, init_map, constructor); |
| 409 // The call will replace the stub, so the countdown is only done once. | 409 // The call will replace the stub, so the countdown is only done once. |
| 410 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1); | 410 __ CallRuntime(Runtime::kHiddenFinalizeInstanceSize, 1); |
| 411 __ Pop(init_map, constructor); | 411 __ Pop(init_map, constructor); |
| 412 __ Bind(&allocate); | 412 __ Bind(&allocate); |
| 413 } | 413 } |
| 414 | 414 |
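The countdown above is V8's in-object slack tracking: every constructed object decrements a one-byte counter, and only the transition to zero pays for a runtime call. A plain C++ sketch of the idea, with SharedInfo and finalize_instance_size() as hypothetical stand-ins for the SharedFunctionInfo field and Runtime::kHiddenFinalizeInstanceSize:

```cpp
// Plain C++ sketch of the construction countdown -- not V8 API.
#include <iostream>

struct SharedInfo {
  unsigned char construction_count;  // One byte, hence Ldrb/Strb above.
};

void finalize_instance_size(SharedInfo&) {
  // Models Runtime::kHiddenFinalizeInstanceSize: shrink the instance
  // size once enough objects exist to see how much slack goes unused.
  std::cout << "instance size finalized\n";
}

void construct_one(SharedInfo& info) {
  // Subs / Strb / B(ne, &allocate): decrement, act only on zero.
  if (--info.construction_count == 0) {
    // The runtime call also patches in a different stub, so this
    // branch is taken at most once per function.
    finalize_instance_size(info);
  }
  // ...fall through to the actual allocation (&allocate)...
}

int main() {
  SharedInfo info{3};
  for (int i = 0; i < 3; ++i) construct_one(info);  // fires once, on the 3rd
}
```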
| 415 // Now allocate the JSObject on the heap. | 415 // Now allocate the JSObject on the heap. |
| 416 Register obj_size = x3; | 416 Register obj_size = x3; |
| 417 Register new_obj = x4; | 417 Register new_obj = x4; |
| 418 __ Ldrb(obj_size, FieldMemOperand(init_map, Map::kInstanceSizeOffset)); | 418 __ Ldrb(obj_size, FieldMemOperand(init_map, Map::kInstanceSizeOffset)); |
| 419 if (create_memento) { | 419 if (create_memento) { |
| 420 __ Add(x7, obj_size, | 420 __ Add(x7, obj_size, |
| (...skipping 138 matching lines...) |
| 559 } | 559 } |
| 560 | 560 |
| 561 // Allocate the new receiver object using the runtime call. | 561 // Allocate the new receiver object using the runtime call. |
| 562 __ Bind(&rt_call); | 562 __ Bind(&rt_call); |
| 563 Label count_incremented; | 563 Label count_incremented; |
| 564 if (create_memento) { | 564 if (create_memento) { |
| 565 // Get the cell or allocation site. | 565 // Get the cell or allocation site. |
| 566 __ Peek(x4, 2 * kXRegSize); | 566 __ Peek(x4, 2 * kXRegSize); |
| 567 __ Push(x4); | 567 __ Push(x4); |
| 568 __ Push(constructor); // Argument for Runtime_NewObject. | 568 __ Push(constructor); // Argument for Runtime_NewObject. |
| 569 __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 2); | 569 __ CallRuntime(Runtime::kHiddenNewObjectWithAllocationSite, 2); |
| 570 __ Mov(x4, x0); | 570 __ Mov(x4, x0); |
| 571 // If we ended up using the runtime, and we want a memento, then the | 571 // If we ended up using the runtime, and we want a memento, then the |
| 572 // runtime call made it for us, and we shouldn't do create count | 572 // runtime call made it for us, and we shouldn't do create count |
| 573 // increment. | 573 // increment. |
| 574 __ jmp(&count_incremented); | 574 __ jmp(&count_incremented); |
| 575 } else { | 575 } else { |
| 576 __ Push(constructor); // Argument for Runtime_NewObject. | 576 __ Push(constructor); // Argument for Runtime_NewObject. |
| 577 __ CallRuntime(Runtime::kNewObject, 1); | 577 __ CallRuntime(Runtime::kHiddenNewObject, 1); |
| 578 __ Mov(x4, x0); | 578 __ Mov(x4, x0); |
| 579 } | 579 } |
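When inline allocation fails, &rt_call picks one of two runtime entries, and the memento variant must jump over the later create-count increment because the runtime already did that bookkeeping. A standalone C++ sketch of the branch structure; all names are hypothetical stand-ins.

```cpp
// Plain C++ sketch of the &rt_call fallback -- not V8 API.
#include <iostream>

struct JSObject { bool has_memento; };

int memento_create_count = 0;

// Models Runtime::kHiddenNewObjectWithAllocationSite: allocates the
// object plus memento and updates the create count itself.
JSObject new_object_with_allocation_site() {
  ++memento_create_count;
  return JSObject{true};
}

// Models Runtime::kHiddenNewObject: plain allocation, no memento.
JSObject new_object() { return JSObject{false}; }

JSObject runtime_fallback(bool create_memento) {
  if (create_memento) {
    JSObject obj = new_object_with_allocation_site();
    // jmp(&count_incremented): the runtime already counted, so the
    // code after &allocated must not increment again.
    return obj;
  }
  return new_object();
}

int main() {
  runtime_fallback(true);
  runtime_fallback(false);
  std::cout << "create count: " << memento_create_count << "\n";  // prints 1
}
```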
| 580 | 580 |
| 581 // Receiver for constructor call allocated. | 581 // Receiver for constructor call allocated. |
| 582 // x4: JSObject | 582 // x4: JSObject |
| 583 __ Bind(&allocated); | 583 __ Bind(&allocated); |
| 584 | 584 |
| 585 if (create_memento) { | 585 if (create_memento) { |
| 586 __ Peek(x10, 2 * kXRegSize); | 586 __ Peek(x10, 2 * kXRegSize); |
| 587 __ JumpIfRoot(x10, Heap::kUndefinedValueRootIndex, &count_incremented); | 587 __ JumpIfRoot(x10, Heap::kUndefinedValueRootIndex, &count_incremented); |
| (...skipping 216 matching lines...) |
| 804 Generate_JSEntryTrampolineHelper(masm, false); | 804 Generate_JSEntryTrampolineHelper(masm, false); |
| 805 } | 805 } |
| 806 | 806 |
| 807 | 807 |
| 808 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { | 808 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { |
| 809 Generate_JSEntryTrampolineHelper(masm, true); | 809 Generate_JSEntryTrampolineHelper(masm, true); |
| 810 } | 810 } |
| 811 | 811 |
| 812 | 812 |
| 813 void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) { | 813 void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) { |
| 814 CallRuntimePassFunction(masm, Runtime::kCompileUnoptimized); | 814 CallRuntimePassFunction(masm, Runtime::kHiddenCompileUnoptimized); |
| 815 GenerateTailCallToReturnedCode(masm); | 815 GenerateTailCallToReturnedCode(masm); |
| 816 } | 816 } |
| 817 | 817 |
| 818 | 818 |
| 819 static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) { | 819 static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) { |
| 820 FrameScope scope(masm, StackFrame::INTERNAL); | 820 FrameScope scope(masm, StackFrame::INTERNAL); |
| 821 Register function = x1; | 821 Register function = x1; |
| 822 | 822 |
| 823 // Preserve function. At the same time, push arguments for | 823 // Preserve function. At the same time, push arguments for |
| 824 // kCompileOptimized. | 824 // kHiddenCompileOptimized. |
| 825 __ LoadObject(x10, masm->isolate()->factory()->ToBoolean(concurrent)); | 825 __ LoadObject(x10, masm->isolate()->factory()->ToBoolean(concurrent)); |
| 826 __ Push(function, function, x10); | 826 __ Push(function, function, x10); |
| 827 | 827 |
| 828 __ CallRuntime(Runtime::kCompileOptimized, 2); | 828 __ CallRuntime(Runtime::kHiddenCompileOptimized, 2); |
| 829 | 829 |
| 830 // Restore receiver. | 830 // Restore receiver. |
| 831 __ Pop(function); | 831 __ Pop(function); |
| 832 } | 832 } |
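The Push(function, function, x10) above does double duty: the first copy of function is preserved for after the call, while the second copy and the flag are the two arguments the runtime consumes. A standalone sketch of that stack discipline, with call_runtime_compile_optimized as a hypothetical stand-in for Runtime::kHiddenCompileOptimized:

```cpp
// Plain C++ sketch of the double-push trick -- not V8 API.
#include <cstdint>
#include <iostream>
#include <stack>

using Value = std::intptr_t;
std::stack<Value> js_stack;  // Models jssp.

Value call_runtime_compile_optimized() {
  // The runtime consumes exactly its two arguments...
  Value concurrent = js_stack.top(); js_stack.pop();
  Value function   = js_stack.top(); js_stack.pop();
  (void)concurrent;
  return function ^ 0x1;  // ...and returns the "code" it compiled.
}

int main() {
  Value function = 0x1234;
  Value concurrent_flag = 1;  // Models ToBoolean(concurrent).

  // Push(function, function, x10): one copy survives the call, the
  // other copy and the flag are the runtime arguments.
  js_stack.push(function);         // preserved across the call
  js_stack.push(function);         // runtime argument 1
  js_stack.push(concurrent_flag);  // runtime argument 2

  Value code = call_runtime_compile_optimized();

  // Pop(function): the preserved copy is back on top.
  Value restored = js_stack.top(); js_stack.pop();
  std::cout << "code=" << code << " function=" << restored << "\n";
}
```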
| 833 | 833 |
| 834 | 834 |
| 835 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) { | 835 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) { |
| 836 CallCompileOptimized(masm, false); | 836 CallCompileOptimized(masm, false); |
| 837 GenerateTailCallToReturnedCode(masm); | 837 GenerateTailCallToReturnedCode(masm); |
| 838 } | 838 } |
| (...skipping 89 matching lines...) |
| 928 FrameScope scope(masm, StackFrame::INTERNAL); | 928 FrameScope scope(masm, StackFrame::INTERNAL); |
| 929 | 929 |
| 930 // Preserve registers across notification, this is important for compiled | 930 // Preserve registers across notification, this is important for compiled |
| 931 // stubs that tail call the runtime on deopts passing their parameters in | 931 // stubs that tail call the runtime on deopts passing their parameters in |
| 932 // registers. | 932 // registers. |
| 933 // TODO(jbramley): Is it correct (and appropriate) to use safepoint | 933 // TODO(jbramley): Is it correct (and appropriate) to use safepoint |
| 934 // registers here? According to the comment above, we should only need to | 934 // registers here? According to the comment above, we should only need to |
| 935 // preserve the registers with parameters. | 935 // preserve the registers with parameters. |
| 936 __ PushXRegList(kSafepointSavedRegisters); | 936 __ PushXRegList(kSafepointSavedRegisters); |
| 937 // Pass the function and deoptimization type to the runtime system. | 937 // Pass the function and deoptimization type to the runtime system. |
| 938 __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles); | 938 __ CallRuntime(Runtime::kHiddenNotifyStubFailure, 0, save_doubles); |
| 939 __ PopXRegList(kSafepointSavedRegisters); | 939 __ PopXRegList(kSafepointSavedRegisters); |
| 940 } | 940 } |
| 941 | 941 |
| 942 // Ignore state (pushed by Deoptimizer::EntryGenerator::Generate). | 942 // Ignore state (pushed by Deoptimizer::EntryGenerator::Generate). |
| 943 __ Drop(1); | 943 __ Drop(1); |
| 944 | 944 |
| 945 // Jump to the miss handler. Deoptimizer::EntryGenerator::Generate loads this | 945 // Jump to the miss handler. Deoptimizer::EntryGenerator::Generate loads this |
| 946 // into lr before it jumps here. | 946 // into lr before it jumps here. |
| 947 __ Br(lr); | 947 __ Br(lr); |
| 948 } | 948 } |
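The TODO notwithstanding, the pattern here is simple: a deopting stub may have left parameters in arbitrary registers, so the helper spills the whole safepoint set around the runtime call rather than tracking which registers are actually live. A plain C++ model, with the register file reduced to an array and notify_stub_failure() standing in for Runtime::kHiddenNotifyStubFailure:

```cpp
// Plain C++ model of save-all / call / restore-all -- not V8 API.
#include <array>
#include <cstdint>
#include <iostream>

constexpr int kNumSafepointRegisters = 16;  // Size is illustrative only.
using RegisterFile = std::array<std::intptr_t, kNumSafepointRegisters>;
RegisterFile regs{};

void notify_stub_failure(RegisterFile& r) {
  // Models Runtime::kHiddenNotifyStubFailure: free to clobber anything.
  r.fill(-1);
  std::cout << "runtime notified\n";
}

void notify_stub_failure_helper() {
  RegisterFile saved = regs;  // PushXRegList(kSafepointSavedRegisters)
  notify_stub_failure(regs);
  regs = saved;               // PopXRegList: stub parameters survive
}

int main() {
  regs[3] = 42;  // A parameter some stub left in a register.
  notify_stub_failure_helper();
  std::cout << "regs[3] = " << regs[3] << "\n";  // still 42
}
```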
| 949 | 949 |
| 950 | 950 |
| 951 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) { | 951 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) { |
| 952 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs); | 952 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs); |
| 953 } | 953 } |
| 954 | 954 |
| 955 | 955 |
| 956 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) { | 956 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) { |
| 957 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs); | 957 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs); |
| 958 } | 958 } |
| 959 | 959 |
| 960 | 960 |
| 961 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, | 961 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, |
| 962 Deoptimizer::BailoutType type) { | 962 Deoptimizer::BailoutType type) { |
| 963 { | 963 { |
| 964 FrameScope scope(masm, StackFrame::INTERNAL); | 964 FrameScope scope(masm, StackFrame::INTERNAL); |
| 965 // Pass the deoptimization type to the runtime system. | 965 // Pass the deoptimization type to the runtime system. |
| 966 __ Mov(x0, Smi::FromInt(static_cast<int>(type))); | 966 __ Mov(x0, Smi::FromInt(static_cast<int>(type))); |
| 967 __ Push(x0); | 967 __ Push(x0); |
| 968 __ CallRuntime(Runtime::kNotifyDeoptimized, 1); | 968 __ CallRuntime(Runtime::kHiddenNotifyDeoptimized, 1); |
| 969 } | 969 } |
| 970 | 970 |
| 971 // Get the full codegen state from the stack and untag it. | 971 // Get the full codegen state from the stack and untag it. |
| 972 Register state = x6; | 972 Register state = x6; |
| 973 __ Peek(state, 0); | 973 __ Peek(state, 0); |
| 974 __ SmiUntag(state); | 974 __ SmiUntag(state); |
| 975 | 975 |
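Peek reads the Smi-tagged state word off the stack and SmiUntag recovers the integer. On 64-bit V8 of this vintage a Smi keeps a 32-bit payload in the upper half of the word with the tag bit clear; treat the shift below as an assumption about this configuration, not a spec.

```cpp
// Plain C++ sketch of 64-bit Smi tag/untag -- the 32-bit shift is an
// assumption about this era's arm64 configuration, not V8 API.
#include <cstdint>
#include <iostream>

constexpr int kSmiShift = 32;

std::intptr_t smi_tag(std::int32_t value) {
  return static_cast<std::intptr_t>(value) << kSmiShift;
}

std::int32_t smi_untag(std::intptr_t smi) {
  return static_cast<std::int32_t>(smi >> kSmiShift);
}

int main() {
  std::intptr_t state = smi_tag(2);       // e.g. a full-codegen state value
  std::cout << smi_untag(state) << "\n";  // prints 2
}
```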
| 976 // Switch on the state. | 976 // Switch on the state. |
| 977 Label with_tos_register, unknown_state; | 977 Label with_tos_register, unknown_state; |
| 978 __ CompareAndBranch( | 978 __ CompareAndBranch( |
| (...skipping 64 matching lines...) |
| 1043 } | 1043 } |
| 1044 | 1044 |
| 1045 | 1045 |
| 1046 void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) { | 1046 void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) { |
| 1047 // We check the stack limit as indicator that recompilation might be done. | 1047 // We check the stack limit as indicator that recompilation might be done. |
| 1048 Label ok; | 1048 Label ok; |
| 1049 __ CompareRoot(jssp, Heap::kStackLimitRootIndex); | 1049 __ CompareRoot(jssp, Heap::kStackLimitRootIndex); |
| 1050 __ B(hs, &ok); | 1050 __ B(hs, &ok); |
| 1051 { | 1051 { |
| 1052 FrameScope scope(masm, StackFrame::INTERNAL); | 1052 FrameScope scope(masm, StackFrame::INTERNAL); |
| 1053 __ CallRuntime(Runtime::kStackGuard, 0); | 1053 __ CallRuntime(Runtime::kHiddenStackGuard, 0); |
| 1054 } | 1054 } |
| 1055 __ Jump(masm->isolate()->builtins()->OnStackReplacement(), | 1055 __ Jump(masm->isolate()->builtins()->OnStackReplacement(), |
| 1056 RelocInfo::CODE_TARGET); | 1056 RelocInfo::CODE_TARGET); |
| 1057 | 1057 |
| 1058 __ Bind(&ok); | 1058 __ Bind(&ok); |
| 1059 __ Ret(); | 1059 __ Ret(); |
| 1060 } | 1060 } |
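This builtin is the post-check half of on-stack replacement: a stack pointer at or above the limit means no interrupt is pending, so the function simply returns; below it, the guard runs and control tail-jumps into OnStackReplacement instead of returning. A standalone C++ model, all names hypothetical:

```cpp
// Plain C++ model of Generate_OsrAfterStackCheck -- not V8 API.
#include <cstdint>
#include <iostream>

std::uintptr_t stack_limit = 0x1000;

void stack_guard()          { std::cout << "interrupt handled\n"; }        // Runtime::kHiddenStackGuard
void on_stack_replacement() { std::cout << "jump into optimized frame\n"; } // OnStackReplacement builtin

void osr_after_stack_check(std::uintptr_t jssp) {
  // B(hs, &ok) ... Ret(): nothing pending, return to the caller.
  if (jssp >= stack_limit) return;
  // Otherwise service the interrupt, then tail-jump to the OSR builtin.
  stack_guard();
  on_stack_replacement();
}

int main() {
  osr_after_stack_check(0x2000);  // ok path: returns immediately
  osr_after_stack_check(0x0800);  // slow path: guard, then OSR
}
```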
| 1061 | 1061 |
| 1062 | 1062 |
| 1063 void Builtins::Generate_FunctionCall(MacroAssembler* masm) { | 1063 void Builtins::Generate_FunctionCall(MacroAssembler* masm) { |
| (...skipping 489 matching lines...) |
| 1553 __ Bind(&dont_adapt_arguments); | 1553 __ Bind(&dont_adapt_arguments); |
| 1554 __ Jump(code_entry); | 1554 __ Jump(code_entry); |
| 1555 } | 1555 } |
| 1556 | 1556 |
| 1557 | 1557 |
| 1558 #undef __ | 1558 #undef __ |
| 1559 | 1559 |
| 1560 } } // namespace v8::internal | 1560 } } // namespace v8::internal |
| 1561 | 1561 |
| 1562 #endif // V8_TARGET_ARCH_ARM64 | 1562 #endif // V8_TARGET_ARCH_ARM64 |