Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(294)

Side by Side Diff: src/arm64/builtins-arm64.cc

Issue 346413004: Remove distinction between hidden and normal runtime functions (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Fix natives fuzzing Created 6 years, 5 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
OLDNEW
1 // Copyright 2013 the V8 project authors. All rights reserved. 1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/v8.h" 5 #include "src/v8.h"
6 6
7 #if V8_TARGET_ARCH_ARM64 7 #if V8_TARGET_ARCH_ARM64
8 8
9 #include "src/codegen.h" 9 #include "src/codegen.h"
10 #include "src/debug.h" 10 #include "src/debug.h"
(...skipping 276 matching lines...) Expand 10 before | Expand all | Expand 10 after
287 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) { 287 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
288 // Checking whether the queued function is ready for install is optional, 288 // Checking whether the queued function is ready for install is optional,
289 // since we come across interrupts and stack checks elsewhere. However, not 289 // since we come across interrupts and stack checks elsewhere. However, not
290 // checking may delay installing ready functions, and always checking would be 290 // checking may delay installing ready functions, and always checking would be
291 // quite expensive. A good compromise is to first check against stack limit as 291 // quite expensive. A good compromise is to first check against stack limit as
292 // a cue for an interrupt signal. 292 // a cue for an interrupt signal.
293 Label ok; 293 Label ok;
294 __ CompareRoot(masm->StackPointer(), Heap::kStackLimitRootIndex); 294 __ CompareRoot(masm->StackPointer(), Heap::kStackLimitRootIndex);
295 __ B(hs, &ok); 295 __ B(hs, &ok);
296 296
297 CallRuntimePassFunction(masm, Runtime::kHiddenTryInstallOptimizedCode); 297 CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
298 GenerateTailCallToReturnedCode(masm); 298 GenerateTailCallToReturnedCode(masm);
299 299
300 __ Bind(&ok); 300 __ Bind(&ok);
301 GenerateTailCallToSharedCode(masm); 301 GenerateTailCallToSharedCode(masm);
302 } 302 }
303 303
304 304
305 static void Generate_JSConstructStubHelper(MacroAssembler* masm, 305 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
306 bool is_api_function, 306 bool is_api_function,
307 bool create_memento) { 307 bool create_memento) {
(...skipping 65 matching lines...) Expand 10 before | Expand all | Expand 10 after
373 __ B(eq, &allocate); 373 __ B(eq, &allocate);
374 // Decrease generous allocation count. 374 // Decrease generous allocation count.
375 __ Subs(x4, x4, Operand(1 << Map::ConstructionCount::kShift)); 375 __ Subs(x4, x4, Operand(1 << Map::ConstructionCount::kShift));
376 __ Str(x4, bit_field3); 376 __ Str(x4, bit_field3);
377 __ Cmp(constructon_count, Operand(JSFunction::kFinishSlackTracking)); 377 __ Cmp(constructon_count, Operand(JSFunction::kFinishSlackTracking));
378 __ B(ne, &allocate); 378 __ B(ne, &allocate);
379 379
380 // Push the constructor and map to the stack, and the constructor again 380 // Push the constructor and map to the stack, and the constructor again
381 // as argument to the runtime call. 381 // as argument to the runtime call.
382 __ Push(constructor, init_map, constructor); 382 __ Push(constructor, init_map, constructor);
383 __ CallRuntime(Runtime::kHiddenFinalizeInstanceSize, 1); 383 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
384 __ Pop(init_map, constructor); 384 __ Pop(init_map, constructor);
385 __ Mov(constructon_count, Operand(JSFunction::kNoSlackTracking)); 385 __ Mov(constructon_count, Operand(JSFunction::kNoSlackTracking));
386 __ Bind(&allocate); 386 __ Bind(&allocate);
387 } 387 }
388 388
389 // Now allocate the JSObject on the heap. 389 // Now allocate the JSObject on the heap.
390 Register obj_size = x3; 390 Register obj_size = x3;
391 Register new_obj = x4; 391 Register new_obj = x4;
392 __ Ldrb(obj_size, FieldMemOperand(init_map, Map::kInstanceSizeOffset)); 392 __ Ldrb(obj_size, FieldMemOperand(init_map, Map::kInstanceSizeOffset));
393 if (create_memento) { 393 if (create_memento) {
(...skipping 141 matching lines...) Expand 10 before | Expand all | Expand 10 after
535 } 535 }
536 536
537 // Allocate the new receiver object using the runtime call. 537 // Allocate the new receiver object using the runtime call.
538 __ Bind(&rt_call); 538 __ Bind(&rt_call);
539 Label count_incremented; 539 Label count_incremented;
540 if (create_memento) { 540 if (create_memento) {
541 // Get the cell or allocation site. 541 // Get the cell or allocation site.
542 __ Peek(x4, 2 * kXRegSize); 542 __ Peek(x4, 2 * kXRegSize);
543 __ Push(x4); 543 __ Push(x4);
544 __ Push(constructor); // Argument for Runtime_NewObject. 544 __ Push(constructor); // Argument for Runtime_NewObject.
545 __ CallRuntime(Runtime::kHiddenNewObjectWithAllocationSite, 2); 545 __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 2);
546 __ Mov(x4, x0); 546 __ Mov(x4, x0);
547 // If we ended up using the runtime, and we want a memento, then the 547 // If we ended up using the runtime, and we want a memento, then the
548 // runtime call made it for us, and we shouldn't do create count 548 // runtime call made it for us, and we shouldn't do create count
549 // increment. 549 // increment.
550 __ jmp(&count_incremented); 550 __ jmp(&count_incremented);
551 } else { 551 } else {
552 __ Push(constructor); // Argument for Runtime_NewObject. 552 __ Push(constructor); // Argument for Runtime_NewObject.
553 __ CallRuntime(Runtime::kHiddenNewObject, 1); 553 __ CallRuntime(Runtime::kNewObject, 1);
554 __ Mov(x4, x0); 554 __ Mov(x4, x0);
555 } 555 }
556 556
557 // Receiver for constructor call allocated. 557 // Receiver for constructor call allocated.
558 // x4: JSObject 558 // x4: JSObject
559 __ Bind(&allocated); 559 __ Bind(&allocated);
560 560
561 if (create_memento) { 561 if (create_memento) {
562 __ Peek(x10, 2 * kXRegSize); 562 __ Peek(x10, 2 * kXRegSize);
563 __ JumpIfRoot(x10, Heap::kUndefinedValueRootIndex, &count_incremented); 563 __ JumpIfRoot(x10, Heap::kUndefinedValueRootIndex, &count_incremented);
(...skipping 211 matching lines...) Expand 10 before | Expand all | Expand 10 after
775 Generate_JSEntryTrampolineHelper(masm, false); 775 Generate_JSEntryTrampolineHelper(masm, false);
776 } 776 }
777 777
778 778
779 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { 779 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
780 Generate_JSEntryTrampolineHelper(masm, true); 780 Generate_JSEntryTrampolineHelper(masm, true);
781 } 781 }
782 782
783 783
784 void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) { 784 void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) {
785 CallRuntimePassFunction(masm, Runtime::kHiddenCompileUnoptimized); 785 CallRuntimePassFunction(masm, Runtime::kCompileUnoptimized);
786 GenerateTailCallToReturnedCode(masm); 786 GenerateTailCallToReturnedCode(masm);
787 } 787 }
788 788
789 789
790 static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) { 790 static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
791 FrameScope scope(masm, StackFrame::INTERNAL); 791 FrameScope scope(masm, StackFrame::INTERNAL);
792 Register function = x1; 792 Register function = x1;
793 793
794 // Preserve function. At the same time, push arguments for 794 // Preserve function. At the same time, push arguments for
795 // kHiddenCompileOptimized. 795 // kCompileOptimized.
796 __ LoadObject(x10, masm->isolate()->factory()->ToBoolean(concurrent)); 796 __ LoadObject(x10, masm->isolate()->factory()->ToBoolean(concurrent));
797 __ Push(function, function, x10); 797 __ Push(function, function, x10);
798 798
799 __ CallRuntime(Runtime::kHiddenCompileOptimized, 2); 799 __ CallRuntime(Runtime::kCompileOptimized, 2);
800 800
801 // Restore receiver. 801 // Restore receiver.
802 __ Pop(function); 802 __ Pop(function);
803 } 803 }
804 804
805 805
806 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) { 806 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
807 CallCompileOptimized(masm, false); 807 CallCompileOptimized(masm, false);
808 GenerateTailCallToReturnedCode(masm); 808 GenerateTailCallToReturnedCode(masm);
809 } 809 }
(...skipping 89 matching lines...) Expand 10 before | Expand all | Expand 10 after
899 FrameScope scope(masm, StackFrame::INTERNAL); 899 FrameScope scope(masm, StackFrame::INTERNAL);
900 900
901 // Preserve registers across notification, this is important for compiled 901 // Preserve registers across notification, this is important for compiled
902 // stubs that tail call the runtime on deopts passing their parameters in 902 // stubs that tail call the runtime on deopts passing their parameters in
903 // registers. 903 // registers.
904 // TODO(jbramley): Is it correct (and appropriate) to use safepoint 904 // TODO(jbramley): Is it correct (and appropriate) to use safepoint
905 // registers here? According to the comment above, we should only need to 905 // registers here? According to the comment above, we should only need to
906 // preserve the registers with parameters. 906 // preserve the registers with parameters.
907 __ PushXRegList(kSafepointSavedRegisters); 907 __ PushXRegList(kSafepointSavedRegisters);
908 // Pass the function and deoptimization type to the runtime system. 908 // Pass the function and deoptimization type to the runtime system.
909 __ CallRuntime(Runtime::kHiddenNotifyStubFailure, 0, save_doubles); 909 __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
910 __ PopXRegList(kSafepointSavedRegisters); 910 __ PopXRegList(kSafepointSavedRegisters);
911 } 911 }
912 912
913 // Ignore state (pushed by Deoptimizer::EntryGenerator::Generate). 913 // Ignore state (pushed by Deoptimizer::EntryGenerator::Generate).
914 __ Drop(1); 914 __ Drop(1);
915 915
916 // Jump to the miss handler. Deoptimizer::EntryGenerator::Generate loads this 916 // Jump to the miss handler. Deoptimizer::EntryGenerator::Generate loads this
917 // into lr before it jumps here. 917 // into lr before it jumps here.
918 __ Br(lr); 918 __ Br(lr);
919 } 919 }
920 920
921 921
922 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) { 922 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
923 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs); 923 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
924 } 924 }
925 925
926 926
927 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) { 927 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
928 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs); 928 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
929 } 929 }
930 930
931 931
932 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, 932 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
933 Deoptimizer::BailoutType type) { 933 Deoptimizer::BailoutType type) {
934 { 934 {
935 FrameScope scope(masm, StackFrame::INTERNAL); 935 FrameScope scope(masm, StackFrame::INTERNAL);
936 // Pass the deoptimization type to the runtime system. 936 // Pass the deoptimization type to the runtime system.
937 __ Mov(x0, Smi::FromInt(static_cast<int>(type))); 937 __ Mov(x0, Smi::FromInt(static_cast<int>(type)));
938 __ Push(x0); 938 __ Push(x0);
939 __ CallRuntime(Runtime::kHiddenNotifyDeoptimized, 1); 939 __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
940 } 940 }
941 941
942 // Get the full codegen state from the stack and untag it. 942 // Get the full codegen state from the stack and untag it.
943 Register state = x6; 943 Register state = x6;
944 __ Peek(state, 0); 944 __ Peek(state, 0);
945 __ SmiUntag(state); 945 __ SmiUntag(state);
946 946
947 // Switch on the state. 947 // Switch on the state.
948 Label with_tos_register, unknown_state; 948 Label with_tos_register, unknown_state;
949 __ CompareAndBranch( 949 __ CompareAndBranch(
(...skipping 64 matching lines...) Expand 10 before | Expand all | Expand 10 after
1014 } 1014 }
1015 1015
1016 1016
1017 void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) { 1017 void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
1018 // We check the stack limit as indicator that recompilation might be done. 1018 // We check the stack limit as indicator that recompilation might be done.
1019 Label ok; 1019 Label ok;
1020 __ CompareRoot(jssp, Heap::kStackLimitRootIndex); 1020 __ CompareRoot(jssp, Heap::kStackLimitRootIndex);
1021 __ B(hs, &ok); 1021 __ B(hs, &ok);
1022 { 1022 {
1023 FrameScope scope(masm, StackFrame::INTERNAL); 1023 FrameScope scope(masm, StackFrame::INTERNAL);
1024 __ CallRuntime(Runtime::kHiddenStackGuard, 0); 1024 __ CallRuntime(Runtime::kStackGuard, 0);
1025 } 1025 }
1026 __ Jump(masm->isolate()->builtins()->OnStackReplacement(), 1026 __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
1027 RelocInfo::CODE_TARGET); 1027 RelocInfo::CODE_TARGET);
1028 1028
1029 __ Bind(&ok); 1029 __ Bind(&ok);
1030 __ Ret(); 1030 __ Ret();
1031 } 1031 }
1032 1032
1033 1033
1034 void Builtins::Generate_FunctionCall(MacroAssembler* masm) { 1034 void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
(...skipping 521 matching lines...) Expand 10 before | Expand all | Expand 10 after
1556 __ Unreachable(); 1556 __ Unreachable();
1557 } 1557 }
1558 } 1558 }
1559 1559
1560 1560
1561 #undef __ 1561 #undef __
1562 1562
1563 } } // namespace v8::internal 1563 } } // namespace v8::internal
1564 1564
 1565 #endif  // V8_TARGET_ARCH_ARM64 1565 #endif  // V8_TARGET_ARCH_ARM64
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698