Chromium Code Reviews

Diff: src/arm64/builtins-arm64.cc

Issue 1553703002: [runtime] TailCallRuntime and CallRuntime should use default argument counts (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@2015-12-29_TailCallRuntime_default_result_size_1_1550923002
Patch Set: Created 4 years, 11 months ago
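
The change drops the explicit argument-count operand from CallRuntime and TailCallRuntime call sites; the macro assembler instead takes the count from the runtime function's descriptor. A minimal sketch of the idea, assuming V8's Runtime::FunctionForId() lookup and the nargs field of Runtime::Function (the actual overloads in the macro-assembler headers may differ in detail):

  // Sketch only, not the literal patch: derive the argument count from the
  // runtime descriptor so call sites no longer spell it out.
  void MacroAssembler::CallRuntime(Runtime::FunctionId fid,
                                   SaveFPRegsMode save_doubles) {
    const Runtime::Function* function = Runtime::FunctionForId(fid);
    // Fixed-arity runtime functions declare their count as nargs; reading it
    // here keeps the number in one place instead of at every call site.
    CallRuntime(function, function->nargs, save_doubles);
  }

  void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid) {
    const Runtime::Function* function = Runtime::FunctionForId(fid);
    // Delegate to the arity-taking overload; per the base CL named in the
    // URL above, the result size already defaults to 1.
    TailCallRuntime(fid, function->nargs);
  }

Call sites then shrink from __ CallRuntime(Runtime::kNewObject, 2); to __ CallRuntime(Runtime::kNewObject);, the pattern repeated throughout the diff below.
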
 1  // Copyright 2013 the V8 project authors. All rights reserved.
 2  // Use of this source code is governed by a BSD-style license that can be
 3  // found in the LICENSE file.
 4
 5  #if V8_TARGET_ARCH_ARM64
 6
 7  #include "src/arm64/frames-arm64.h"
 8  #include "src/codegen.h"
 9  #include "src/debug/debug.h"
 10  #include "src/deoptimizer.h"
(...skipping 171 matching lines...)
 182    __ Bind(&to_string);
 183    {
 184      ToStringStub stub(masm->isolate());
 185      __ TailCallStub(&stub);
 186    }
 187
 188    // 3b. Convert symbol in x0 to a string.
 189    __ Bind(&symbol_descriptive_string);
 190    {
 191      __ Push(x0);
-192      __ TailCallRuntime(Runtime::kSymbolDescriptiveString, 1);
+192      __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
 193    }
 194  }
 195
 196
 197  // static
 198  void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
 199    // ----------- S t a t e -------------
 200    //  -- x0 : number of arguments
 201    //  -- x1 : constructor function
 202    //  -- x3 : new target
(...skipping 60 matching lines...)
 263      __ Str(x2, FieldMemOperand(x0, JSValue::kValueOffset));
 264      STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
 265      __ Ret();
 266    }
 267
 268    // 5. Fallback to the runtime to create new object.
 269    __ bind(&new_object);
 270    {
 271      FrameScope scope(masm, StackFrame::INTERNAL);
 272      __ Push(x2, x1, x3);  // first argument, constructor, new target
-273      __ CallRuntime(Runtime::kNewObject, 2);
+273      __ CallRuntime(Runtime::kNewObject);
 274      __ Pop(x2);
 275    }
 276    __ Str(x2, FieldMemOperand(x0, JSValue::kValueOffset));
 277    __ Ret();
 278  }
 279
 280
 281  static void CallRuntimePassFunction(MacroAssembler* masm,
 282                                      Runtime::FunctionId function_id) {
 283    // ----------- S t a t e -------------
(...skipping 180 matching lines...)
 464        // Fill the remaining fields with one pointer filler map.
 465        __ LoadRoot(filler, Heap::kOnePointerFillerMapRootIndex);
 466        __ InitializeFieldsWithFiller(write_address, next_obj, filler);
 467
 468        __ Cmp(constructon_count, Operand(Map::kSlackTrackingCounterEnd));
 469        __ B(ne, &allocated);
 470
 471        // Push the constructor, new_target and the object to the stack,
 472        // and then the initial map as an argument to the runtime call.
 473        __ Push(constructor, new_target, new_obj, init_map);
-474        __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
+474        __ CallRuntime(Runtime::kFinalizeInstanceSize);
 475        __ Pop(new_obj, new_target, constructor);
 476
 477        // Continue with JSObject being successfully allocated.
 478        __ B(&allocated);
 479
 480        __ bind(&no_inobject_slack_tracking);
 481      }
 482
 483      __ InitializeFieldsWithFiller(write_address, next_obj, filler);
 484
 485      // Continue with JSObject being successfully allocated.
 486      __ B(&allocated);
 487    }
 488
 489    // Allocate the new receiver object using the runtime call.
 490    // x1: constructor function
 491    // x3: new target
 492    __ Bind(&rt_call);
 493
 494    // Push the constructor and new_target twice, second pair as arguments
 495    // to the runtime call.
 496    __ Push(constructor, new_target, constructor, new_target);
-497    __ CallRuntime(Runtime::kNewObject, 2);
+497    __ CallRuntime(Runtime::kNewObject);
 498    __ Mov(x4, x0);
 499    __ Pop(new_target, constructor);
 500
 501    // Receiver for constructor call allocated.
 502    // x1: constructor function
 503    // x3: new target
 504    // x4: JSObject
 505    __ Bind(&allocated);
 506
 507    // Reload the number of arguments from the stack.
(...skipping 122 matching lines...)
 630
 631
 632  void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
 633    Generate_JSConstructStubHelper(masm, false, false);
 634  }
 635
 636
 637  void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
 638    FrameScope scope(masm, StackFrame::INTERNAL);
 639    __ Push(x1);
-640    __ CallRuntime(Runtime::kThrowConstructedNonConstructable, 1);
+640    __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
 641  }
 642
 643
 644  enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };
 645
 646
 647  // Clobbers x10, x15; preserves all other registers.
 648  static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
 649                                          IsTagged argc_is_tagged) {
 650    // Check the stack for overflow.
 651    // We are not trying to catch interruptions (e.g. debug break and
 652    // preemption) here, so the "real stack limit" is checked.
 653    Label enough_stack_space;
 654    __ LoadRoot(x10, Heap::kRealStackLimitRootIndex);
 655    // Make x10 the space we have left. The stack might already be overflowed
 656    // here which will cause x10 to become negative.
 657    // TODO(jbramley): Check that the stack usage here is safe.
 658    __ Sub(x10, jssp, x10);
 659    // Check if the arguments will overflow the stack.
 660    if (argc_is_tagged == kArgcIsSmiTagged) {
 661      __ Cmp(x10, Operand::UntagSmiAndScale(argc, kPointerSizeLog2));
 662    } else {
 663      DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
 664      __ Cmp(x10, Operand(argc, LSL, kPointerSizeLog2));
 665    }
 666    __ B(gt, &enough_stack_space);
-667    __ CallRuntime(Runtime::kThrowStackOverflow, 0);
+667    __ CallRuntime(Runtime::kThrowStackOverflow);
 668    // We should never return from the APPLY_OVERFLOW builtin.
 669    if (__ emit_debug_code()) {
 670      __ Unreachable();
 671    }
 672
 673    __ Bind(&enough_stack_space);
 674  }
 675
 676
 677  // Input:
(...skipping 149 matching lines...)
 827      // Load frame size from the BytecodeArray object.
 828      __ Ldr(w11, FieldMemOperand(kInterpreterBytecodeArrayRegister,
 829                                  BytecodeArray::kFrameSizeOffset));
 830
 831      // Do a stack check to ensure we don't go over the limit.
 832      Label ok;
 833      DCHECK(jssp.Is(__ StackPointer()));
 834      __ Sub(x10, jssp, Operand(x11));
 835      __ CompareRoot(x10, Heap::kRealStackLimitRootIndex);
 836      __ B(hs, &ok);
-837      __ CallRuntime(Runtime::kThrowStackOverflow, 0);
+837      __ CallRuntime(Runtime::kThrowStackOverflow);
 838      __ Bind(&ok);
 839
 840      // If ok, push undefined as the initial value for all register file entries.
 841      // Note: there should always be at least one stack slot for the return
 842      // register in the register file.
 843      Label loop_header;
 844      __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
 845      // TODO(rmcilroy): Ensure we always have an even number of registers to
 846      // allow stack to be 16 bit aligned (and remove need for jssp).
 847      __ Lsr(x11, x11, kPointerSizeLog2);
 848      __ PushMultipleTimes(x10, x11);
 849      __ Bind(&loop_header);
 850    }
 851
 852    // TODO(rmcilroy): List of things not currently dealt with here but done in
 853    // fullcodegen's prologue:
 854    //  - Support profiler (specifically profiling_counter).
 855    //  - Call ProfileEntryHookStub when isolate has a function_entry_hook.
 856    //  - Allow simulator stop operations if FLAG_stop_at is set.
 857    //  - Code aging of the BytecodeArray object.
 858
 859    // Perform stack guard check.
 860    {
 861      Label ok;
 862      __ CompareRoot(jssp, Heap::kStackLimitRootIndex);
 863      __ B(hs, &ok);
 864      __ Push(kInterpreterBytecodeArrayRegister);
-865      __ CallRuntime(Runtime::kStackGuard, 0);
+865      __ CallRuntime(Runtime::kStackGuard);
 866      __ Pop(kInterpreterBytecodeArrayRegister);
 867      __ Bind(&ok);
 868    }
 869
 870    // Load accumulator, register file, bytecode offset, dispatch table into
 871    // registers.
 872    __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
 873    __ Add(kInterpreterRegisterFileRegister, fp,
 874           Operand(InterpreterFrameConstants::kRegisterFilePointerFromFp));
 875    __ Mov(kInterpreterBytecodeOffsetRegister,
(...skipping 38 matching lines...)
 914  static void Generate_InterpreterNotifyDeoptimizedHelper(
 915      MacroAssembler* masm, Deoptimizer::BailoutType type) {
 916    // Enter an internal frame.
 917    {
 918      FrameScope scope(masm, StackFrame::INTERNAL);
 919      __ Push(kInterpreterAccumulatorRegister);  // Save accumulator register.
 920
 921      // Pass the deoptimization type to the runtime system.
 922      __ Mov(x1, Operand(Smi::FromInt(static_cast<int>(type))));
 923      __ Push(x1);
-924      __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
+924      __ CallRuntime(Runtime::kNotifyDeoptimized);
 925
 926      __ Pop(kInterpreterAccumulatorRegister);  // Restore accumulator register.
 927      // Tear down internal frame.
 928    }
 929
 930    // Drop state (we don't use these for interpreter deopts) and push PC at top
 931    // of stack (to simulate initial call to bytecode handler in interpreter entry
 932    // trampoline).
 933    __ Pop(x1);
 934    __ Drop(1);
(...skipping 173 matching lines...)
 1108      FrameScope scope(masm, StackFrame::INTERNAL);
 1109
 1110      // Preserve registers across notification, this is important for compiled
 1111      // stubs that tail call the runtime on deopts passing their parameters in
 1112      // registers.
 1113      // TODO(jbramley): Is it correct (and appropriate) to use safepoint
 1114      // registers here? According to the comment above, we should only need to
 1115      // preserve the registers with parameters.
 1116      __ PushXRegList(kSafepointSavedRegisters);
 1117      // Pass the function and deoptimization type to the runtime system.
-1118      __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
+1118      __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
 1119      __ PopXRegList(kSafepointSavedRegisters);
 1120    }
 1121
 1122    // Ignore state (pushed by Deoptimizer::EntryGenerator::Generate).
 1123    __ Drop(1);
 1124
 1125    // Jump to the miss handler. Deoptimizer::EntryGenerator::Generate loads this
 1126    // into lr before it jumps here.
 1127    __ Br(lr);
 1128  }
 1129
 1130
 1131  void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
 1132    Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
 1133  }
 1134
 1135
 1136  void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
 1137    Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
 1138  }
 1139
 1140
 1141  static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
 1142                                               Deoptimizer::BailoutType type) {
 1143    {
 1144      FrameScope scope(masm, StackFrame::INTERNAL);
 1145      // Pass the deoptimization type to the runtime system.
 1146      __ Mov(x0, Smi::FromInt(static_cast<int>(type)));
 1147      __ Push(x0);
-1148      __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
+1148      __ CallRuntime(Runtime::kNotifyDeoptimized);
 1149    }
 1150
 1151    // Get the full codegen state from the stack and untag it.
 1152    Register state = x6;
 1153    __ Peek(state, 0);
 1154    __ SmiUntag(state);
 1155
 1156    // Switch on the state.
 1157    Label with_tos_register, unknown_state;
 1158    __ CompareAndBranch(
(...skipping 136 matching lines...)
 1295    __ Bind(&set_global_proxy);
 1296    __ LoadGlobalProxy(x2);
 1297    __ Str(x2, MemOperand(jssp, x0, LSL, kPointerSizeLog2));
 1298    __ B(&valid_receiver);
 1299
 1300    // Compatible receiver check failed: throw an Illegal Invocation exception.
 1301    __ Bind(&receiver_check_failed);
 1302    // Drop the arguments (including the receiver)
 1303    __ add(x0, x0, Operand(1));
 1304    __ Drop(x0);
-1305    __ TailCallRuntime(Runtime::kThrowIllegalInvocation, 0);
+1305    __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
 1306  }
 1307
 1308
 1309  void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
 1310    // Lookup the function in the JavaScript frame.
 1311    __ Ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
 1312    {
 1313      FrameScope scope(masm, StackFrame::INTERNAL);
 1314      // Pass function as argument.
 1315      __ Push(x0);
-1316      __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
+1316      __ CallRuntime(Runtime::kCompileForOnStackReplacement);
 1317    }
 1318
 1319    // If the code object is null, just return to the unoptimized code.
 1320    Label skip;
 1321    __ CompareAndBranch(x0, Smi::FromInt(0), ne, &skip);
 1322    __ Ret();
 1323
 1324    __ Bind(&skip);
 1325
 1326    // Load deoptimization data from the code object.
(...skipping 15 matching lines...)
 1342  }
 1343
 1344
 1345  void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
 1346    // We check the stack limit as indicator that recompilation might be done.
 1347    Label ok;
 1348    __ CompareRoot(jssp, Heap::kStackLimitRootIndex);
 1349    __ B(hs, &ok);
 1350    {
 1351      FrameScope scope(masm, StackFrame::INTERNAL);
-1352      __ CallRuntime(Runtime::kStackGuard, 0);
+1352      __ CallRuntime(Runtime::kStackGuard);
 1353    }
 1354    __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
 1355            RelocInfo::CODE_TARGET);
 1356
 1357    __ Bind(&ok);
 1358    __ Ret();
 1359  }
 1360
 1361
 1362  // static
(...skipping 54 matching lines...)
 1417    __ Bind(&no_arguments);
 1418    {
 1419      __ Mov(x0, 0);
 1420      __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
 1421    }
 1422
 1423    // 4c. The receiver is not callable, throw an appropriate TypeError.
 1424    __ Bind(&receiver_not_callable);
 1425    {
 1426      __ Poke(x1, 0);
-1427      __ TailCallRuntime(Runtime::kThrowApplyNonFunction, 1);
+1427      __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
 1428    }
 1429  }
 1430
 1431
 1432  // static
 1433  void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
 1434    Register argc = x0;
 1435    Register function = x1;
 1436    Register scratch1 = x10;
 1437    Register scratch2 = x11;
(...skipping 86 matching lines...)
 1524
 1525    // 3a. Apply the target to the given argumentsList (passing undefined for
 1526    // new.target).
 1527    __ LoadRoot(x3, Heap::kUndefinedValueRootIndex);
 1528    __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
 1529
 1530    // 3b. The target is not callable, throw an appropriate TypeError.
 1531    __ Bind(&target_not_callable);
 1532    {
 1533      __ Poke(x1, 0);
-1534      __ TailCallRuntime(Runtime::kThrowApplyNonFunction, 1);
+1534      __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
 1535    }
 1536  }
 1537
 1538
 1539  void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
 1540    // ----------- S t a t e -------------
 1541    //  -- x0      : argc
 1542    //  -- sp[0]   : new.target (optional)
 1543    //  -- sp[8]   : argumentsList
 1544    //  -- sp[16]  : target
(...skipping 48 matching lines...)
 1593    __ TestAndBranchIfAllClear(x4, 1 << Map::kIsConstructor,
 1594                               &new_target_not_constructor);
 1595
 1596    // 4a. Construct the target with the given new.target and argumentsList.
 1597    __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
 1598
 1599    // 4b. The target is not a constructor, throw an appropriate TypeError.
 1600    __ Bind(&target_not_constructor);
 1601    {
 1602      __ Poke(x1, 0);
-1603      __ TailCallRuntime(Runtime::kThrowCalledNonCallable, 1);
+1603      __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
 1604    }
 1605
 1606    // 4c. The new.target is not a constructor, throw an appropriate TypeError.
 1607    __ Bind(&new_target_not_constructor);
 1608    {
 1609      __ Poke(x3, 0);
-1610      __ TailCallRuntime(Runtime::kThrowCalledNonCallable, 1);
+1610      __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
 1611    }
 1612  }
 1613
 1614
 1615  static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
 1616                                        Label* stack_overflow) {
 1617    // ----------- S t a t e -------------
 1618    //  -- x0 : actual number of arguments
 1619    //  -- x1 : function (passed through to callee)
 1620    //  -- x2 : expected number of arguments
(...skipping 68 matching lines...)
 1689
 1690      // Check if argumentsList is a fast JSArray.
 1691      __ CompareInstanceType(x2, x4, JS_ARRAY_TYPE);
 1692      __ B(eq, &create_array);
 1693
 1694      // Ask the runtime to create the list (actually a FixedArray).
 1695      __ Bind(&create_runtime);
 1696      {
 1697        FrameScope scope(masm, StackFrame::INTERNAL);
 1698        __ Push(x1, x3, x0);
-1699        __ CallRuntime(Runtime::kCreateListFromArrayLike, 1);
+1699        __ CallRuntime(Runtime::kCreateListFromArrayLike);
 1700        __ Pop(x3, x1);
 1701        __ Ldrsw(x2, UntagSmiFieldMemOperand(x0, FixedArray::kLengthOffset));
 1702      }
 1703      __ B(&done_create);
 1704
 1705      // Try to create the list from an arguments object.
 1706      __ Bind(&create_arguments);
 1707      __ Ldr(x2,
 1708             FieldMemOperand(x0, JSObject::kHeaderSize +
 1709                                     Heap::kArgumentsLengthIndex * kPointerSize));
(...skipping 27 matching lines...)
 1737      // Check the stack for overflow. We are not trying to catch interruptions
 1738      // (i.e. debug break and preemption) here, so check the "real stack limit".
 1739      Label done;
 1740      __ LoadRoot(x10, Heap::kRealStackLimitRootIndex);
 1741      // Make x10 the space we have left. The stack might already be overflowed
 1742      // here which will cause x10 to become negative.
 1743      __ Sub(x10, jssp, x10);
 1744      // Check if the arguments will overflow the stack.
 1745      __ Cmp(x10, Operand(x2, LSL, kPointerSizeLog2));
 1746      __ B(gt, &done);  // Signed comparison.
-1747      __ TailCallRuntime(Runtime::kThrowStackOverflow, 1);
+1747      __ TailCallRuntime(Runtime::kThrowStackOverflow);
 1748      __ Bind(&done);
 1749    }
 1750
 1751    // ----------- S t a t e -------------
 1752    //  -- x1    : target
 1753    //  -- x0    : args (a FixedArray built from argumentsList)
 1754    //  -- x2    : len (number of elements to push from args)
 1755    //  -- x3    : new.target (checked to be constructor or undefined)
 1756    //  -- sp[0] : thisArgument
 1757    // -----------------------------------
(...skipping 117 matching lines...)
 1875    ParameterCount actual(x0);
 1876    ParameterCount expected(x2);
 1877    __ InvokeFunctionCode(x1, no_reg, expected, actual, JUMP_FUNCTION,
 1878                          CheckDebugStepCallWrapper());
 1879
 1880    // The function is a "classConstructor", need to raise an exception.
 1881    __ bind(&class_constructor);
 1882    {
 1883      FrameScope frame(masm, StackFrame::INTERNAL);
 1884      __ Push(x1);
-1885      __ CallRuntime(Runtime::kThrowConstructorNonCallableError, 1);
+1885      __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
 1886    }
 1887  }
 1888
 1889
 1890  namespace {
 1891
 1892  void Generate_PushBoundArguments(MacroAssembler* masm) {
 1893    // ----------- S t a t e -------------
 1894    //  -- x0 : the number of arguments (not including the receiver)
 1895    //  -- x1 : target (checked to be a JSBoundFunction)
(...skipping 22 matching lines...)
 1918        // Check the stack for overflow. We are not trying to catch interruptions
 1919        // (i.e. debug break and preemption) here, so check the "real stack
 1920        // limit".
 1921        __ CompareRoot(jssp, Heap::kRealStackLimitRootIndex);
 1922        __ B(gt, &done);  // Signed comparison.
 1923        // Restore the stack pointer.
 1924        __ Drop(x4);
 1925        {
 1926          FrameScope scope(masm, StackFrame::MANUAL);
 1927          __ EnterFrame(StackFrame::INTERNAL);
-1928          __ CallRuntime(Runtime::kThrowStackOverflow, 0);
+1928          __ CallRuntime(Runtime::kThrowStackOverflow);
 1929        }
 1930        __ Bind(&done);
 1931      }
 1932
 1933      // Relocate arguments down the stack.
 1934      {
 1935        Label loop, done_loop;
 1936        __ Mov(x5, 0);
 1937        __ Bind(&loop);
 1938        __ Cmp(x5, x0);
(...skipping 91 matching lines...)
 2030    __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, x1);
 2031    __ Jump(masm->isolate()->builtins()->CallFunction(
 2032                ConvertReceiverMode::kNotNullOrUndefined),
 2033            RelocInfo::CODE_TARGET);
 2034
 2035    // 3. Call to something that is not callable.
 2036    __ bind(&non_callable);
 2037    {
 2038      FrameScope scope(masm, StackFrame::INTERNAL);
 2039      __ Push(x1);
-2040      __ CallRuntime(Runtime::kThrowCalledNonCallable, 1);
+2040      __ CallRuntime(Runtime::kThrowCalledNonCallable);
 2041    }
 2042  }
 2043
 2044
 2045  // static
 2046  void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
 2047    // ----------- S t a t e -------------
 2048    //  -- x0 : the number of arguments (not including the receiver)
 2049    //  -- x1 : the constructor to call (checked to be a JSFunction)
 2050    //  -- x3 : the new target (checked to be a constructor)
(...skipping 268 matching lines...)
 2319      // What we really care about is the required number of arguments.
 2320      DCHECK_EQ(kPointerSize, kInt64Size);
 2321      __ Ldr(scratch2.W(),
 2322             FieldMemOperand(scratch1, SharedFunctionInfo::kLengthOffset));
 2323      __ Cmp(argc_actual, Operand(scratch2, LSR, 1));
 2324      __ B(ge, &no_strong_error);
 2325
 2326      {
 2327        FrameScope frame(masm, StackFrame::MANUAL);
 2328        EnterArgumentsAdaptorFrame(masm);
-2329        __ CallRuntime(Runtime::kThrowStrongModeTooFewArguments, 0);
+2329        __ CallRuntime(Runtime::kThrowStrongModeTooFewArguments);
 2330      }
 2331
 2332      __ Bind(&no_strong_error);
 2333      EnterArgumentsAdaptorFrame(masm);
 2334      ArgumentAdaptorStackCheck(masm, &stack_overflow);
 2335
 2336      __ Lsl(scratch2, argc_expected, kPointerSizeLog2);
 2337      __ Lsl(argc_actual, argc_actual, kPointerSizeLog2);
 2338
 2339      // Adjust for fp, lr, and the receiver.
(...skipping 53 matching lines...)
 2393    __ Ret();
 2394
 2395    // Call the entry point without adapting the arguments.
 2396    __ Bind(&dont_adapt_arguments);
 2397    __ Ldr(code_entry, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
 2398    __ Jump(code_entry);
 2399
 2400    __ Bind(&stack_overflow);
 2401    {
 2402      FrameScope frame(masm, StackFrame::MANUAL);
-2403      __ CallRuntime(Runtime::kThrowStackOverflow, 0);
+2403      __ CallRuntime(Runtime::kThrowStackOverflow);
 2404      __ Unreachable();
 2405    }
 2406  }
 2407
 2408
 2409  #undef __
 2410
 2411  }  // namespace internal
 2412  }  // namespace v8
 2413
 2414  #endif  // V8_TARGET_ARCH_ARM