OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_X64 | 5 #if V8_TARGET_ARCH_X64 |
6 | 6 |
7 #include "src/bootstrapper.h" | 7 #include "src/bootstrapper.h" |
8 #include "src/code-stubs.h" | 8 #include "src/code-stubs.h" |
9 #include "src/codegen.h" | 9 #include "src/codegen.h" |
10 #include "src/ic/handler-compiler.h" | 10 #include "src/ic/handler-compiler.h" |
(...skipping 473 matching lines...)
484 // and may not have contained the exponent value in the first place when the | 484 // and may not have contained the exponent value in the first place when the |
485 // input was a smi. We reset it with exponent value before bailing out. | 485 // input was a smi. We reset it with exponent value before bailing out. |
486 __ j(not_equal, &done); | 486 __ j(not_equal, &done); |
487 __ Cvtlsi2sd(double_exponent, exponent); | 487 __ Cvtlsi2sd(double_exponent, exponent); |
488 | 488 |
489 // Returning or bailing out. | 489 // Returning or bailing out. |
490 Counters* counters = isolate()->counters(); | 490 Counters* counters = isolate()->counters(); |
491 if (exponent_type() == ON_STACK) { | 491 if (exponent_type() == ON_STACK) { |
492 // The arguments are still on the stack. | 492 // The arguments are still on the stack. |
493 __ bind(&call_runtime); | 493 __ bind(&call_runtime); |
494 __ TailCallRuntime(Runtime::kMathPowRT, 2); | 494 __ TailCallRuntime(Runtime::kMathPowRT); |
495 | 495 |
496 // The stub is called from non-optimized code, which expects the result | 496 // The stub is called from non-optimized code, which expects the result |
497 // as heap number in rax. | 497 // as heap number in rax. |
498 __ bind(&done); | 498 __ bind(&done); |
499 __ AllocateHeapNumber(rax, rcx, &call_runtime); | 499 __ AllocateHeapNumber(rax, rcx, &call_runtime); |
500 __ Movsd(FieldOperand(rax, HeapNumber::kValueOffset), double_result); | 500 __ Movsd(FieldOperand(rax, HeapNumber::kValueOffset), double_result); |
501 __ IncrementCounter(counters->math_pow(), 1); | 501 __ IncrementCounter(counters->math_pow(), 1); |
502 __ ret(2 * kPointerSize); | 502 __ ret(2 * kPointerSize); |
503 } else { | 503 } else { |
504 __ bind(&call_runtime); | 504 __ bind(&call_runtime); |
(...skipping 79 matching lines...)
584 ARGUMENTS_DONT_CONTAIN_RECEIVER); | 584 ARGUMENTS_DONT_CONTAIN_RECEIVER); |
585 __ movp(rax, adaptor_args.GetArgumentOperand(0)); | 585 __ movp(rax, adaptor_args.GetArgumentOperand(0)); |
586 __ Ret(); | 586 __ Ret(); |
587 | 587 |
588 // Slow-case: Handle non-smi or out-of-bounds access to arguments | 588 // Slow-case: Handle non-smi or out-of-bounds access to arguments |
589 // by calling the runtime system. | 589 // by calling the runtime system. |
590 __ bind(&slow); | 590 __ bind(&slow); |
591 __ PopReturnAddressTo(rbx); | 591 __ PopReturnAddressTo(rbx); |
592 __ Push(rdx); | 592 __ Push(rdx); |
593 __ PushReturnAddressFrom(rbx); | 593 __ PushReturnAddressFrom(rbx); |
594 __ TailCallRuntime(Runtime::kArguments, 1); | 594 __ TailCallRuntime(Runtime::kArguments); |
595 } | 595 } |
596 | 596 |
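Note on the pattern above: every call site in this patch drops the explicit argument count from TailCallRuntime, so the arity must now come from the runtime function's own descriptor. A minimal sketch of what the single-parameter overload is assumed to look like on x64 (illustrative only; the real body lives in macro-assembler-x64.cc and may differ in detail):

    // Sketch, assuming the arity is looked up via Runtime::FunctionForId().
    void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid) {
      const Runtime::Function* function = Runtime::FunctionForId(fid);
      DCHECK_EQ(1, function->result_size);
      if (function->nargs >= 0) {
        // Functions with a fixed arity get rax set here; functions with a
        // variable arity (nargs == -1) expect the caller to have set it.
        Set(rax, function->nargs);
      }
      JumpToExternalReference(ExternalReference(fid, isolate()));
    }

With this, a count that disagrees with the pushed arguments can no longer be introduced at individual call sites such as the kArguments tail call above.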
597 | 597 |
598 void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) { | 598 void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) { |
599 // rcx : number of parameters (tagged) | 599 // rcx : number of parameters (tagged) |
600 // rdx : parameters pointer | 600 // rdx : parameters pointer |
601 // rdi : function | 601 // rdi : function |
602 // rsp[0] : return address | 602 // rsp[0] : return address |
603 // Registers used over the whole function: | 603 // Registers used over the whole function: |
604 // rbx: the mapped parameter count (untagged) | 604 // rbx: the mapped parameter count (untagged) |
(...skipping 191 matching lines...)
796 | 796 |
797 // Do the runtime call to allocate the arguments object. | 797 // Do the runtime call to allocate the arguments object. |
798 // r11 = argument count (untagged) | 798 // r11 = argument count (untagged) |
799 __ bind(&runtime); | 799 __ bind(&runtime); |
800 __ Integer32ToSmi(r11, r11); | 800 __ Integer32ToSmi(r11, r11); |
801 __ PopReturnAddressTo(rax); | 801 __ PopReturnAddressTo(rax); |
802 __ Push(rdi); // Push function. | 802 __ Push(rdi); // Push function. |
803 __ Push(rdx); // Push parameters pointer. | 803 __ Push(rdx); // Push parameters pointer. |
804 __ Push(r11); // Push parameter count. | 804 __ Push(r11); // Push parameter count. |
805 __ PushReturnAddressFrom(rax); | 805 __ PushReturnAddressFrom(rax); |
806 __ TailCallRuntime(Runtime::kNewSloppyArguments, 3); | 806 __ TailCallRuntime(Runtime::kNewSloppyArguments); |
807 } | 807 } |
808 | 808 |
809 | 809 |
810 void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) { | 810 void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) { |
811 // rcx : number of parameters (tagged) | 811 // rcx : number of parameters (tagged) |
812 // rdx : parameters pointer | 812 // rdx : parameters pointer |
813 // rdi : function | 813 // rdi : function |
814 // rsp[0] : return address | 814 // rsp[0] : return address |
815 | 815 |
816 DCHECK(rdi.is(ArgumentsAccessNewDescriptor::function())); | 816 DCHECK(rdi.is(ArgumentsAccessNewDescriptor::function())); |
(...skipping 13 matching lines...)
830 __ SmiToInteger64(rax, rcx); | 830 __ SmiToInteger64(rax, rcx); |
831 __ leap(rdx, Operand(rbx, rax, times_pointer_size, | 831 __ leap(rdx, Operand(rbx, rax, times_pointer_size, |
832 StandardFrameConstants::kCallerSPOffset)); | 832 StandardFrameConstants::kCallerSPOffset)); |
833 | 833 |
834 __ bind(&runtime); | 834 __ bind(&runtime); |
835 __ PopReturnAddressTo(rax); | 835 __ PopReturnAddressTo(rax); |
836 __ Push(rdi); // Push function. | 836 __ Push(rdi); // Push function. |
837 __ Push(rdx); // Push parameters pointer. | 837 __ Push(rdx); // Push parameters pointer. |
838 __ Push(rcx); // Push parameter count. | 838 __ Push(rcx); // Push parameter count. |
839 __ PushReturnAddressFrom(rax); | 839 __ PushReturnAddressFrom(rax); |
840 __ TailCallRuntime(Runtime::kNewSloppyArguments, 3); | 840 __ TailCallRuntime(Runtime::kNewSloppyArguments); |
841 } | 841 } |
842 | 842 |
843 | 843 |
844 void RestParamAccessStub::GenerateNew(MacroAssembler* masm) { | 844 void RestParamAccessStub::GenerateNew(MacroAssembler* masm) { |
845 // rsp[0] : return address | 845 // rsp[0] : return address |
846 // rsp[8] : language mode | 846 // rsp[8] : language mode |
847 // rsp[16] : index of rest parameter | 847 // rsp[16] : index of rest parameter |
848 // rsp[24] : number of parameters | 848 // rsp[24] : number of parameters |
849 // rsp[32] : receiver displacement | 849 // rsp[32] : receiver displacement |
850 | 850 |
851 // Check if the calling frame is an arguments adaptor frame. | 851 // Check if the calling frame is an arguments adaptor frame. |
852 Label runtime; | 852 Label runtime; |
853 __ movp(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); | 853 __ movp(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); |
854 __ movp(rcx, Operand(rdx, StandardFrameConstants::kContextOffset)); | 854 __ movp(rcx, Operand(rdx, StandardFrameConstants::kContextOffset)); |
855 __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); | 855 __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); |
856 __ j(not_equal, &runtime); | 856 __ j(not_equal, &runtime); |
857 | 857 |
858 // Patch the arguments.length and the parameters pointer. | 858 // Patch the arguments.length and the parameters pointer. |
859 StackArgumentsAccessor args(rsp, 4, ARGUMENTS_DONT_CONTAIN_RECEIVER); | 859 StackArgumentsAccessor args(rsp, 4, ARGUMENTS_DONT_CONTAIN_RECEIVER); |
860 __ movp(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset)); | 860 __ movp(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
861 __ movp(args.GetArgumentOperand(1), rcx); | 861 __ movp(args.GetArgumentOperand(1), rcx); |
862 __ SmiToInteger64(rcx, rcx); | 862 __ SmiToInteger64(rcx, rcx); |
863 __ leap(rdx, Operand(rdx, rcx, times_pointer_size, | 863 __ leap(rdx, Operand(rdx, rcx, times_pointer_size, |
864 StandardFrameConstants::kCallerSPOffset)); | 864 StandardFrameConstants::kCallerSPOffset)); |
865 __ movp(args.GetArgumentOperand(0), rdx); | 865 __ movp(args.GetArgumentOperand(0), rdx); |
866 | 866 |
867 __ bind(&runtime); | 867 __ bind(&runtime); |
868 __ TailCallRuntime(Runtime::kNewRestParam, 4); | 868 __ TailCallRuntime(Runtime::kNewRestParam); |
869 } | 869 } |
870 | 870 |
871 | 871 |
872 void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) { | 872 void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) { |
873 // Return address is on the stack. | 873 // Return address is on the stack. |
874 Label slow; | 874 Label slow; |
875 | 875 |
876 Register receiver = LoadDescriptor::ReceiverRegister(); | 876 Register receiver = LoadDescriptor::ReceiverRegister(); |
877 Register key = LoadDescriptor::NameRegister(); | 877 Register key = LoadDescriptor::NameRegister(); |
878 Register scratch = rax; | 878 Register scratch = rax; |
879 DCHECK(!scratch.is(receiver) && !scratch.is(key)); | 879 DCHECK(!scratch.is(receiver) && !scratch.is(key)); |
880 | 880 |
881 // Check that the key is an array index, that is Uint32. | 881 // Check that the key is an array index, that is Uint32. |
882 STATIC_ASSERT(kSmiValueSize <= 32); | 882 STATIC_ASSERT(kSmiValueSize <= 32); |
883 __ JumpUnlessNonNegativeSmi(key, &slow); | 883 __ JumpUnlessNonNegativeSmi(key, &slow); |
884 | 884 |
885 // Everything is fine, call runtime. | 885 // Everything is fine, call runtime. |
886 __ PopReturnAddressTo(scratch); | 886 __ PopReturnAddressTo(scratch); |
887 __ Push(receiver); // receiver | 887 __ Push(receiver); // receiver |
888 __ Push(key); // key | 888 __ Push(key); // key |
889 __ PushReturnAddressFrom(scratch); | 889 __ PushReturnAddressFrom(scratch); |
890 | 890 |
891 // Perform tail call to the entry. | 891 // Perform tail call to the entry. |
892 __ TailCallRuntime(Runtime::kLoadElementWithInterceptor, 2); | 892 __ TailCallRuntime(Runtime::kLoadElementWithInterceptor); |
893 | 893 |
894 __ bind(&slow); | 894 __ bind(&slow); |
895 PropertyAccessCompiler::TailCallBuiltin( | 895 PropertyAccessCompiler::TailCallBuiltin( |
896 masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC)); | 896 masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC)); |
897 } | 897 } |
898 | 898 |
899 | 899 |
900 void LoadIndexedStringStub::Generate(MacroAssembler* masm) { | 900 void LoadIndexedStringStub::Generate(MacroAssembler* masm) { |
901 // Return address is on the stack. | 901 // Return address is on the stack. |
902 Label miss; | 902 Label miss; |
(...skipping 113 matching lines...)
1016 __ bind(&done); | 1016 __ bind(&done); |
1017 __ ret(0); | 1017 __ ret(0); |
1018 | 1018 |
1019 // Do the runtime call to allocate the arguments object. | 1019 // Do the runtime call to allocate the arguments object. |
1020 __ bind(&runtime); | 1020 __ bind(&runtime); |
1021 __ PopReturnAddressTo(rax); | 1021 __ PopReturnAddressTo(rax); |
1022 __ Push(rdi); // Push function. | 1022 __ Push(rdi); // Push function. |
1023 __ Push(rdx); // Push parameters pointer. | 1023 __ Push(rdx); // Push parameters pointer. |
1024 __ Push(rcx); // Push parameter count. | 1024 __ Push(rcx); // Push parameter count. |
1025 __ PushReturnAddressFrom(rax); | 1025 __ PushReturnAddressFrom(rax); |
1026 __ TailCallRuntime(Runtime::kNewStrictArguments, 3); | 1026 __ TailCallRuntime(Runtime::kNewStrictArguments); |
1027 } | 1027 } |
1028 | 1028 |
1029 | 1029 |
1030 void RegExpExecStub::Generate(MacroAssembler* masm) { | 1030 void RegExpExecStub::Generate(MacroAssembler* masm) { |
1031 // Just jump directly to runtime if native RegExp is not selected at compile | 1031 // Just jump directly to runtime if native RegExp is not selected at compile |
1032 // time, or if the regexp entry in generated code is turned off by a runtime | 1032 // time, or if the regexp entry in generated code is turned off by a runtime |
1033 // switch or at compilation. | 1033 // switch or at compilation. |
1034 #ifdef V8_INTERPRETED_REGEXP | 1034 #ifdef V8_INTERPRETED_REGEXP |
1035 __ TailCallRuntime(Runtime::kRegExpExec, 4); | 1035 __ TailCallRuntime(Runtime::kRegExpExec); |
1036 #else // V8_INTERPRETED_REGEXP | 1036 #else // V8_INTERPRETED_REGEXP |
1037 | 1037 |
1038 // Stack frame on entry. | 1038 // Stack frame on entry. |
1039 // rsp[0] : return address | 1039 // rsp[0] : return address |
1040 // rsp[8] : last_match_info (expected JSArray) | 1040 // rsp[8] : last_match_info (expected JSArray) |
1041 // rsp[16] : previous index | 1041 // rsp[16] : previous index |
1042 // rsp[24] : subject string | 1042 // rsp[24] : subject string |
1043 // rsp[32] : JSRegExp object | 1043 // rsp[32] : JSRegExp object |
1044 | 1044 |
1045 enum RegExpExecStubArgumentIndices { | 1045 enum RegExpExecStubArgumentIndices { |
(...skipping 362 matching lines...)
1408 ExternalReference pending_exception_address( | 1408 ExternalReference pending_exception_address( |
1409 Isolate::kPendingExceptionAddress, isolate()); | 1409 Isolate::kPendingExceptionAddress, isolate()); |
1410 Operand pending_exception_operand = | 1410 Operand pending_exception_operand = |
1411 masm->ExternalOperand(pending_exception_address, rbx); | 1411 masm->ExternalOperand(pending_exception_address, rbx); |
1412 __ movp(rax, pending_exception_operand); | 1412 __ movp(rax, pending_exception_operand); |
1413 __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex); | 1413 __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex); |
1414 __ cmpp(rax, rdx); | 1414 __ cmpp(rax, rdx); |
1415 __ j(equal, &runtime); | 1415 __ j(equal, &runtime); |
1416 | 1416 |
1417 // For exception, throw the exception again. | 1417 // For exception, throw the exception again. |
1418 __ TailCallRuntime(Runtime::kRegExpExecReThrow, 4); | 1418 __ TailCallRuntime(Runtime::kRegExpExecReThrow); |
1419 | 1419 |
1420 // Do the runtime call to execute the regexp. | 1420 // Do the runtime call to execute the regexp. |
1421 __ bind(&runtime); | 1421 __ bind(&runtime); |
1422 __ TailCallRuntime(Runtime::kRegExpExec, 4); | 1422 __ TailCallRuntime(Runtime::kRegExpExec); |
1423 | 1423 |
1424 // Deferred code for string handling. | 1424 // Deferred code for string handling. |
1425 // (7) Not a long external string? If yes, go to (10). | 1425 // (7) Not a long external string? If yes, go to (10). |
1426 __ bind(&not_seq_nor_cons); | 1426 __ bind(&not_seq_nor_cons); |
1427 // Compare flags are still set from (3). | 1427 // Compare flags are still set from (3). |
1428 __ j(greater, &not_long_external, Label::kNear); // Go to (10). | 1428 __ j(greater, &not_long_external, Label::kNear); // Go to (10). |
1429 | 1429 |
1430 // (8) External string. Short external strings have been ruled out. | 1430 // (8) External string. Short external strings have been ruled out. |
1431 __ bind(&external_string); | 1431 __ bind(&external_string); |
1432 __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset)); | 1432 __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset)); |
(...skipping 323 matching lines...)
1756 __ bind(&runtime_call); | 1756 __ bind(&runtime_call); |
1757 | 1757 |
1758 // Push arguments below the return address to prepare jump to builtin. | 1758 // Push arguments below the return address to prepare jump to builtin. |
1759 __ PopReturnAddressTo(rcx); | 1759 __ PopReturnAddressTo(rcx); |
1760 __ Push(rdx); | 1760 __ Push(rdx); |
1761 __ Push(rax); | 1761 __ Push(rax); |
1762 | 1762 |
1763 // Figure out which native to call and setup the arguments. | 1763 // Figure out which native to call and setup the arguments. |
1764 if (cc == equal) { | 1764 if (cc == equal) { |
1765 __ PushReturnAddressFrom(rcx); | 1765 __ PushReturnAddressFrom(rcx); |
1766 __ TailCallRuntime(strict() ? Runtime::kStrictEquals : Runtime::kEquals, 2); | 1766 __ TailCallRuntime(strict() ? Runtime::kStrictEquals : Runtime::kEquals); |
1767 } else { | 1767 } else { |
1768 __ Push(Smi::FromInt(NegativeComparisonResult(cc))); | 1768 __ Push(Smi::FromInt(NegativeComparisonResult(cc))); |
1769 __ PushReturnAddressFrom(rcx); | 1769 __ PushReturnAddressFrom(rcx); |
1770 __ TailCallRuntime( | 1770 __ TailCallRuntime(is_strong(strength()) ? Runtime::kCompare_Strong |
1771 is_strong(strength()) ? Runtime::kCompare_Strong : Runtime::kCompare, | 1771 : Runtime::kCompare); |
1772 3); | |
1773 } | 1772 } |
1774 | 1773 |
1775 __ bind(&miss); | 1774 __ bind(&miss); |
1776 GenerateMiss(masm); | 1775 GenerateMiss(masm); |
1777 } | 1776 } |
1778 | 1777 |
1779 | 1778 |
1780 static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub) { | 1779 static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub) { |
1781 // rax : number of arguments to the construct function | 1780 // rax : number of arguments to the construct function |
1782 // rbx : feedback vector | 1781 // rbx : feedback vector |
(...skipping 316 matching lines...)
2099 void CallICStub::GenerateMiss(MacroAssembler* masm) { | 2098 void CallICStub::GenerateMiss(MacroAssembler* masm) { |
2100 FrameScope scope(masm, StackFrame::INTERNAL); | 2099 FrameScope scope(masm, StackFrame::INTERNAL); |
2101 | 2100 |
2102 // Push the receiver and the function and feedback info. | 2101 // Push the receiver and the function and feedback info. |
2103 __ Push(rdi); | 2102 __ Push(rdi); |
2104 __ Push(rbx); | 2103 __ Push(rbx); |
2105 __ Integer32ToSmi(rdx, rdx); | 2104 __ Integer32ToSmi(rdx, rdx); |
2106 __ Push(rdx); | 2105 __ Push(rdx); |
2107 | 2106 |
2108 // Call the entry. | 2107 // Call the entry. |
2109 __ CallRuntime(Runtime::kCallIC_Miss, 3); | 2108 __ CallRuntime(Runtime::kCallIC_Miss); |
2110 | 2109 |
2111 // Move result to rdi and exit the internal frame. | 2110 // Move result to rdi and exit the internal frame. |
2112 __ movp(rdi, rax); | 2111 __ movp(rdi, rax); |
2113 } | 2112 } |
2114 | 2113 |
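The same cleanup applies to CallRuntime in the miss handler above: the number of pushed values (rdi, rbx, rdx) is no longer repeated at the call site. A hedged sketch of the convenience overload this presumably relies on (a thin inline in macro-assembler.h; its exact form is not shown in this diff):

    // Sketch: forwards to the existing (function, nargs) overload using the
    // arity recorded in the runtime function table.
    void CallRuntime(Runtime::FunctionId fid,
                     SaveFPRegsMode save_doubles = kDontSaveFPRegs) {
      const Runtime::Function* function = Runtime::FunctionForId(fid);
      CallRuntime(function, function->nargs, save_doubles);
    }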
2115 | 2114 |
2116 bool CEntryStub::NeedsImmovableCode() { | 2115 bool CEntryStub::NeedsImmovableCode() { |
2117 return false; | 2116 return false; |
2118 } | 2117 } |
2119 | 2118 |
(...skipping 438 matching lines...)
2558 | 2557 |
2559 // Found Proxy or access check needed: Call the runtime. | 2558 // Found Proxy or access check needed: Call the runtime. |
2560 __ bind(&fast_runtime_fallback); | 2559 __ bind(&fast_runtime_fallback); |
2561 __ PopReturnAddressTo(kScratchRegister); | 2560 __ PopReturnAddressTo(kScratchRegister); |
2562 __ Push(object); | 2561 __ Push(object); |
2563 __ Push(function_prototype); | 2562 __ Push(function_prototype); |
2564 __ PushReturnAddressFrom(kScratchRegister); | 2563 __ PushReturnAddressFrom(kScratchRegister); |
2565 // Invalidate the instanceof cache. | 2564 // Invalidate the instanceof cache. |
2566 __ Move(rax, Smi::FromInt(0)); | 2565 __ Move(rax, Smi::FromInt(0)); |
2567 __ StoreRoot(rax, Heap::kInstanceofCacheFunctionRootIndex); | 2566 __ StoreRoot(rax, Heap::kInstanceofCacheFunctionRootIndex); |
2568 __ TailCallRuntime(Runtime::kHasInPrototypeChain, 2); | 2567 __ TailCallRuntime(Runtime::kHasInPrototypeChain); |
2569 | 2568 |
2570 // Slow-case: Call the %InstanceOf runtime function. | 2569 // Slow-case: Call the %InstanceOf runtime function. |
2571 __ bind(&slow_case); | 2570 __ bind(&slow_case); |
2572 __ PopReturnAddressTo(kScratchRegister); | 2571 __ PopReturnAddressTo(kScratchRegister); |
2573 __ Push(object); | 2572 __ Push(object); |
2574 __ Push(function); | 2573 __ Push(function); |
2575 __ PushReturnAddressFrom(kScratchRegister); | 2574 __ PushReturnAddressFrom(kScratchRegister); |
2576 __ TailCallRuntime(Runtime::kInstanceOf, 2); | 2575 __ TailCallRuntime(Runtime::kInstanceOf); |
2577 } | 2576 } |
2578 | 2577 |
2579 | 2578 |
2580 // ------------------------------------------------------------------------- | 2579 // ------------------------------------------------------------------------- |
2581 // StringCharCodeAtGenerator | 2580 // StringCharCodeAtGenerator |
2582 | 2581 |
2583 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) { | 2582 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) { |
2584 // If the receiver is a smi trigger the non-string case. | 2583 // If the receiver is a smi trigger the non-string case. |
2585 if (check_mode_ == RECEIVER_IS_UNKNOWN) { | 2584 if (check_mode_ == RECEIVER_IS_UNKNOWN) { |
2586 __ JumpIfSmi(object_, receiver_not_string_); | 2585 __ JumpIfSmi(object_, receiver_not_string_); |
(...skipping 38 matching lines...)
2625 index_not_number_, | 2624 index_not_number_, |
2626 DONT_DO_SMI_CHECK); | 2625 DONT_DO_SMI_CHECK); |
2627 call_helper.BeforeCall(masm); | 2626 call_helper.BeforeCall(masm); |
2628 if (embed_mode == PART_OF_IC_HANDLER) { | 2627 if (embed_mode == PART_OF_IC_HANDLER) { |
2629 __ Push(LoadWithVectorDescriptor::VectorRegister()); | 2628 __ Push(LoadWithVectorDescriptor::VectorRegister()); |
2630 __ Push(LoadDescriptor::SlotRegister()); | 2629 __ Push(LoadDescriptor::SlotRegister()); |
2631 } | 2630 } |
2632 __ Push(object_); | 2631 __ Push(object_); |
2633 __ Push(index_); // Consumed by runtime conversion function. | 2632 __ Push(index_); // Consumed by runtime conversion function. |
2634 if (index_flags_ == STRING_INDEX_IS_NUMBER) { | 2633 if (index_flags_ == STRING_INDEX_IS_NUMBER) { |
2635 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1); | 2634 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero); |
2636 } else { | 2635 } else { |
2637 DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX); | 2636 DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX); |
2638 // NumberToSmi discards numbers that are not exact integers. | 2637 // NumberToSmi discards numbers that are not exact integers. |
2639 __ CallRuntime(Runtime::kNumberToSmi, 1); | 2638 __ CallRuntime(Runtime::kNumberToSmi); |
2640 } | 2639 } |
2641 if (!index_.is(rax)) { | 2640 if (!index_.is(rax)) { |
2642 // Save the conversion result before the pop instructions below | 2641 // Save the conversion result before the pop instructions below |
2643 // have a chance to overwrite it. | 2642 // have a chance to overwrite it. |
2644 __ movp(index_, rax); | 2643 __ movp(index_, rax); |
2645 } | 2644 } |
2646 __ Pop(object_); | 2645 __ Pop(object_); |
2647 if (embed_mode == PART_OF_IC_HANDLER) { | 2646 if (embed_mode == PART_OF_IC_HANDLER) { |
2648 __ Pop(LoadDescriptor::SlotRegister()); | 2647 __ Pop(LoadDescriptor::SlotRegister()); |
2649 __ Pop(LoadWithVectorDescriptor::VectorRegister()); | 2648 __ Pop(LoadWithVectorDescriptor::VectorRegister()); |
2650 } | 2649 } |
2651 // Reload the instance type. | 2650 // Reload the instance type. |
2652 __ movp(result_, FieldOperand(object_, HeapObject::kMapOffset)); | 2651 __ movp(result_, FieldOperand(object_, HeapObject::kMapOffset)); |
2653 __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset)); | 2652 __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset)); |
2654 call_helper.AfterCall(masm); | 2653 call_helper.AfterCall(masm); |
2655 // If index is still not a smi, it must be out of range. | 2654 // If index is still not a smi, it must be out of range. |
2656 __ JumpIfNotSmi(index_, index_out_of_range_); | 2655 __ JumpIfNotSmi(index_, index_out_of_range_); |
2657 // Otherwise, return to the fast path. | 2656 // Otherwise, return to the fast path. |
2658 __ jmp(&got_smi_index_); | 2657 __ jmp(&got_smi_index_); |
2659 | 2658 |
2660 // Call runtime. We get here when the receiver is a string and the | 2659 // Call runtime. We get here when the receiver is a string and the |
2661 // index is a number, but the code of getting the actual character | 2660 // index is a number, but the code of getting the actual character |
2662 // is too complex (e.g., when the string needs to be flattened). | 2661 // is too complex (e.g., when the string needs to be flattened). |
2663 __ bind(&call_runtime_); | 2662 __ bind(&call_runtime_); |
2664 call_helper.BeforeCall(masm); | 2663 call_helper.BeforeCall(masm); |
2665 __ Push(object_); | 2664 __ Push(object_); |
2666 __ Integer32ToSmi(index_, index_); | 2665 __ Integer32ToSmi(index_, index_); |
2667 __ Push(index_); | 2666 __ Push(index_); |
2668 __ CallRuntime(Runtime::kStringCharCodeAtRT, 2); | 2667 __ CallRuntime(Runtime::kStringCharCodeAtRT); |
2669 if (!result_.is(rax)) { | 2668 if (!result_.is(rax)) { |
2670 __ movp(result_, rax); | 2669 __ movp(result_, rax); |
2671 } | 2670 } |
2672 call_helper.AfterCall(masm); | 2671 call_helper.AfterCall(masm); |
2673 __ jmp(&exit_); | 2672 __ jmp(&exit_); |
2674 | 2673 |
2675 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase); | 2674 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase); |
2676 } | 2675 } |
2677 | 2676 |
2678 | 2677 |
(...skipping 17 matching lines...)
2696 | 2695 |
2697 | 2696 |
2698 void StringCharFromCodeGenerator::GenerateSlow( | 2697 void StringCharFromCodeGenerator::GenerateSlow( |
2699 MacroAssembler* masm, | 2698 MacroAssembler* masm, |
2700 const RuntimeCallHelper& call_helper) { | 2699 const RuntimeCallHelper& call_helper) { |
2701 __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase); | 2700 __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase); |
2702 | 2701 |
2703 __ bind(&slow_case_); | 2702 __ bind(&slow_case_); |
2704 call_helper.BeforeCall(masm); | 2703 call_helper.BeforeCall(masm); |
2705 __ Push(code_); | 2704 __ Push(code_); |
2706 __ CallRuntime(Runtime::kStringCharFromCode, 1); | 2705 __ CallRuntime(Runtime::kStringCharFromCode); |
2707 if (!result_.is(rax)) { | 2706 if (!result_.is(rax)) { |
2708 __ movp(result_, rax); | 2707 __ movp(result_, rax); |
2709 } | 2708 } |
2710 call_helper.AfterCall(masm); | 2709 call_helper.AfterCall(masm); |
2711 __ jmp(&exit_); | 2710 __ jmp(&exit_); |
2712 | 2711 |
2713 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase); | 2712 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase); |
2714 } | 2713 } |
2715 | 2714 |
2716 | 2715 |
(...skipping 226 matching lines...)
2943 // rcx: result length | 2942 // rcx: result length |
2944 // rdi: first character of result | 2943 // rdi: first character of result |
2945 // r14: character of sub string start | 2944 // r14: character of sub string start |
2946 StringHelper::GenerateCopyCharacters( | 2945 StringHelper::GenerateCopyCharacters( |
2947 masm, rdi, r14, rcx, String::TWO_BYTE_ENCODING); | 2946 masm, rdi, r14, rcx, String::TWO_BYTE_ENCODING); |
2948 __ IncrementCounter(counters->sub_string_native(), 1); | 2947 __ IncrementCounter(counters->sub_string_native(), 1); |
2949 __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize); | 2948 __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize); |
2950 | 2949 |
2951 // Just jump to runtime to create the sub string. | 2950 // Just jump to runtime to create the sub string. |
2952 __ bind(&runtime); | 2951 __ bind(&runtime); |
2953 __ TailCallRuntime(Runtime::kSubString, 3); | 2952 __ TailCallRuntime(Runtime::kSubString); |
2954 | 2953 |
2955 __ bind(&single_char); | 2954 __ bind(&single_char); |
2956 // rax: string | 2955 // rax: string |
2957 // rbx: instance type | 2956 // rbx: instance type |
2958 // rcx: sub string length (smi) | 2957 // rcx: sub string length (smi) |
2959 // rdx: from index (smi) | 2958 // rdx: from index (smi) |
2960 StringCharAtGenerator generator(rax, rdx, rcx, rax, &runtime, &runtime, | 2959 StringCharAtGenerator generator(rax, rdx, rcx, rax, &runtime, &runtime, |
2961 &runtime, STRING_INDEX_IS_NUMBER, | 2960 &runtime, STRING_INDEX_IS_NUMBER, |
2962 RECEIVER_IS_STRING); | 2961 RECEIVER_IS_STRING); |
2963 generator.GenerateFast(masm); | 2962 generator.GenerateFast(masm); |
(...skipping 25 matching lines...)
2989 __ testl(FieldOperand(rax, String::kHashFieldOffset), | 2988 __ testl(FieldOperand(rax, String::kHashFieldOffset), |
2990 Immediate(String::kContainsCachedArrayIndexMask)); | 2989 Immediate(String::kContainsCachedArrayIndexMask)); |
2991 __ j(not_zero, &slow_string, Label::kNear); | 2990 __ j(not_zero, &slow_string, Label::kNear); |
2992 __ movl(rax, FieldOperand(rax, String::kHashFieldOffset)); | 2991 __ movl(rax, FieldOperand(rax, String::kHashFieldOffset)); |
2993 __ IndexFromHash(rax, rax); | 2992 __ IndexFromHash(rax, rax); |
2994 __ Ret(); | 2993 __ Ret(); |
2995 __ bind(&slow_string); | 2994 __ bind(&slow_string); |
2996 __ PopReturnAddressTo(rcx); // Pop return address. | 2995 __ PopReturnAddressTo(rcx); // Pop return address. |
2997 __ Push(rax); // Push argument. | 2996 __ Push(rax); // Push argument. |
2998 __ PushReturnAddressFrom(rcx); // Push return address. | 2997 __ PushReturnAddressFrom(rcx); // Push return address. |
2999 __ TailCallRuntime(Runtime::kStringToNumber, 1); | 2998 __ TailCallRuntime(Runtime::kStringToNumber); |
3000 __ bind(&not_string); | 2999 __ bind(&not_string); |
3001 | 3000 |
3002 Label not_oddball; | 3001 Label not_oddball; |
3003 __ CmpInstanceType(rdi, ODDBALL_TYPE); | 3002 __ CmpInstanceType(rdi, ODDBALL_TYPE); |
3004 __ j(not_equal, &not_oddball, Label::kNear); | 3003 __ j(not_equal, &not_oddball, Label::kNear); |
3005 __ movp(rax, FieldOperand(rax, Oddball::kToNumberOffset)); | 3004 __ movp(rax, FieldOperand(rax, Oddball::kToNumberOffset)); |
3006 __ Ret(); | 3005 __ Ret(); |
3007 __ bind(&not_oddball); | 3006 __ bind(&not_oddball); |
3008 | 3007 |
3009 __ PopReturnAddressTo(rcx); // Pop return address. | 3008 __ PopReturnAddressTo(rcx); // Pop return address. |
3010 __ Push(rax); // Push argument. | 3009 __ Push(rax); // Push argument. |
3011 __ PushReturnAddressFrom(rcx); // Push return address. | 3010 __ PushReturnAddressFrom(rcx); // Push return address. |
3012 __ TailCallRuntime(Runtime::kToNumber, 1); | 3011 __ TailCallRuntime(Runtime::kToNumber); |
3013 } | 3012 } |
3014 | 3013 |
3015 | 3014 |
3016 void ToLengthStub::Generate(MacroAssembler* masm) { | 3015 void ToLengthStub::Generate(MacroAssembler* masm) { |
3017 // The ToLength stub takes one argument in rax. | 3016 // The ToLength stub takes one argument in rax. |
3018 Label not_smi, positive_smi; | 3017 Label not_smi, positive_smi; |
3019 __ JumpIfNotSmi(rax, &not_smi, Label::kNear); | 3018 __ JumpIfNotSmi(rax, &not_smi, Label::kNear); |
3020 STATIC_ASSERT(kSmiTag == 0); | 3019 STATIC_ASSERT(kSmiTag == 0); |
3021 __ testp(rax, rax); | 3020 __ testp(rax, rax); |
3022 __ j(greater_equal, &positive_smi, Label::kNear); | 3021 __ j(greater_equal, &positive_smi, Label::kNear); |
3023 __ xorl(rax, rax); | 3022 __ xorl(rax, rax); |
3024 __ bind(&positive_smi); | 3023 __ bind(&positive_smi); |
3025 __ Ret(); | 3024 __ Ret(); |
3026 __ bind(&not_smi); | 3025 __ bind(&not_smi); |
3027 | 3026 |
3028 __ PopReturnAddressTo(rcx); // Pop return address. | 3027 __ PopReturnAddressTo(rcx); // Pop return address. |
3029 __ Push(rax); // Push argument. | 3028 __ Push(rax); // Push argument. |
3030 __ PushReturnAddressFrom(rcx); // Push return address. | 3029 __ PushReturnAddressFrom(rcx); // Push return address. |
3031 __ TailCallRuntime(Runtime::kToLength, 1); | 3030 __ TailCallRuntime(Runtime::kToLength); |
3032 } | 3031 } |
3033 | 3032 |
3034 | 3033 |
3035 void ToStringStub::Generate(MacroAssembler* masm) { | 3034 void ToStringStub::Generate(MacroAssembler* masm) { |
3036 // The ToString stub takes one argument in rax. | 3035 // The ToString stub takes one argument in rax. |
3037 Label is_number; | 3036 Label is_number; |
3038 __ JumpIfSmi(rax, &is_number, Label::kNear); | 3037 __ JumpIfSmi(rax, &is_number, Label::kNear); |
3039 | 3038 |
3040 Label not_string; | 3039 Label not_string; |
3041 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdi); | 3040 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdi); |
(...skipping 14 matching lines...)
3056 Label not_oddball; | 3055 Label not_oddball; |
3057 __ CmpInstanceType(rdi, ODDBALL_TYPE); | 3056 __ CmpInstanceType(rdi, ODDBALL_TYPE); |
3058 __ j(not_equal, ¬_oddball, Label::kNear); | 3057 __ j(not_equal, ¬_oddball, Label::kNear); |
3059 __ movp(rax, FieldOperand(rax, Oddball::kToStringOffset)); | 3058 __ movp(rax, FieldOperand(rax, Oddball::kToStringOffset)); |
3060 __ Ret(); | 3059 __ Ret(); |
3062 __ bind(&not_oddball); | 3061 __ bind(&not_oddball); |
3062 | 3061 |
3063 __ PopReturnAddressTo(rcx); // Pop return address. | 3062 __ PopReturnAddressTo(rcx); // Pop return address. |
3064 __ Push(rax); // Push argument. | 3063 __ Push(rax); // Push argument. |
3065 __ PushReturnAddressFrom(rcx); // Push return address. | 3064 __ PushReturnAddressFrom(rcx); // Push return address. |
3066 __ TailCallRuntime(Runtime::kToString, 1); | 3065 __ TailCallRuntime(Runtime::kToString); |
3067 } | 3066 } |
3068 | 3067 |
3069 | 3068 |
3070 void StringHelper::GenerateFlatOneByteStringEquals(MacroAssembler* masm, | 3069 void StringHelper::GenerateFlatOneByteStringEquals(MacroAssembler* masm, |
3071 Register left, | 3070 Register left, |
3072 Register right, | 3071 Register right, |
3073 Register scratch1, | 3072 Register scratch1, |
3074 Register scratch2) { | 3073 Register scratch2) { |
3075 Register length = scratch1; | 3074 Register length = scratch1; |
3076 | 3075 |
(...skipping 154 matching lines...)
3231 StringHelper::GenerateCompareFlatOneByteStrings(masm, rdx, rax, rcx, rbx, rdi, | 3230 StringHelper::GenerateCompareFlatOneByteStrings(masm, rdx, rax, rcx, rbx, rdi, |
3232 r8); | 3231 r8); |
3233 | 3232 |
3234 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) | 3233 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) |
3235 // tagged as a small integer. | 3234 // tagged as a small integer. |
3236 __ bind(&runtime); | 3235 __ bind(&runtime); |
3237 __ PopReturnAddressTo(rcx); | 3236 __ PopReturnAddressTo(rcx); |
3238 __ Push(rdx); | 3237 __ Push(rdx); |
3239 __ Push(rax); | 3238 __ Push(rax); |
3240 __ PushReturnAddressFrom(rcx); | 3239 __ PushReturnAddressFrom(rcx); |
3241 __ TailCallRuntime(Runtime::kStringCompare, 2); | 3240 __ TailCallRuntime(Runtime::kStringCompare); |
3242 } | 3241 } |
3243 | 3242 |
3244 | 3243 |
3245 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) { | 3244 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) { |
3246 // ----------- S t a t e ------------- | 3245 // ----------- S t a t e ------------- |
3247 // -- rdx : left | 3246 // -- rdx : left |
3248 // -- rax : right | 3247 // -- rax : right |
3249 // -- rsp[0] : return address | 3248 // -- rsp[0] : return address |
3250 // ----------------------------------- | 3249 // ----------------------------------- |
3251 | 3250 |
(...skipping 24 matching lines...)
3276 Label::Distance const miss_distance = | 3275 Label::Distance const miss_distance = |
3277 masm->emit_debug_code() ? Label::kFar : Label::kNear; | 3276 masm->emit_debug_code() ? Label::kFar : Label::kNear; |
3278 | 3277 |
3279 __ JumpIfSmi(rdx, &miss, miss_distance); | 3278 __ JumpIfSmi(rdx, &miss, miss_distance); |
3280 __ movp(rcx, FieldOperand(rdx, HeapObject::kMapOffset)); | 3279 __ movp(rcx, FieldOperand(rdx, HeapObject::kMapOffset)); |
3281 __ JumpIfSmi(rax, &miss, miss_distance); | 3280 __ JumpIfSmi(rax, &miss, miss_distance); |
3282 __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset)); | 3281 __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset)); |
3283 __ JumpIfNotRoot(rcx, Heap::kBooleanMapRootIndex, &miss, miss_distance); | 3282 __ JumpIfNotRoot(rcx, Heap::kBooleanMapRootIndex, &miss, miss_distance); |
3284 __ JumpIfNotRoot(rbx, Heap::kBooleanMapRootIndex, &miss, miss_distance); | 3283 __ JumpIfNotRoot(rbx, Heap::kBooleanMapRootIndex, &miss, miss_distance); |
3285 if (op() != Token::EQ_STRICT && is_strong(strength())) { | 3284 if (op() != Token::EQ_STRICT && is_strong(strength())) { |
3286 __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion, 0); | 3285 __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion); |
3287 } else { | 3286 } else { |
3288 if (!Token::IsEqualityOp(op())) { | 3287 if (!Token::IsEqualityOp(op())) { |
3289 __ movp(rax, FieldOperand(rax, Oddball::kToNumberOffset)); | 3288 __ movp(rax, FieldOperand(rax, Oddball::kToNumberOffset)); |
3290 __ AssertSmi(rax); | 3289 __ AssertSmi(rax); |
3291 __ movp(rdx, FieldOperand(rdx, Oddball::kToNumberOffset)); | 3290 __ movp(rdx, FieldOperand(rdx, Oddball::kToNumberOffset)); |
3292 __ AssertSmi(rdx); | 3291 __ AssertSmi(rdx); |
3293 __ xchgp(rax, rdx); | 3292 __ xchgp(rax, rdx); |
3294 } | 3293 } |
3295 __ subp(rax, rdx); | 3294 __ subp(rax, rdx); |
3296 __ Ret(); | 3295 __ Ret(); |
(...skipping 261 matching lines...)
3558 masm, left, right, tmp1, tmp2, tmp3, kScratchRegister); | 3557 masm, left, right, tmp1, tmp2, tmp3, kScratchRegister); |
3559 } | 3558 } |
3560 | 3559 |
3561 // Handle more complex cases in runtime. | 3560 // Handle more complex cases in runtime. |
3562 __ bind(&runtime); | 3561 __ bind(&runtime); |
3563 __ PopReturnAddressTo(tmp1); | 3562 __ PopReturnAddressTo(tmp1); |
3564 __ Push(left); | 3563 __ Push(left); |
3565 __ Push(right); | 3564 __ Push(right); |
3566 __ PushReturnAddressFrom(tmp1); | 3565 __ PushReturnAddressFrom(tmp1); |
3567 if (equality) { | 3566 if (equality) { |
3568 __ TailCallRuntime(Runtime::kStringEquals, 2); | 3567 __ TailCallRuntime(Runtime::kStringEquals); |
3569 } else { | 3568 } else { |
3570 __ TailCallRuntime(Runtime::kStringCompare, 2); | 3569 __ TailCallRuntime(Runtime::kStringCompare); |
3571 } | 3570 } |
3572 | 3571 |
3573 __ bind(&miss); | 3572 __ bind(&miss); |
3574 GenerateMiss(masm); | 3573 GenerateMiss(masm); |
3575 } | 3574 } |
3576 | 3575 |
3577 | 3576 |
3578 void CompareICStub::GenerateReceivers(MacroAssembler* masm) { | 3577 void CompareICStub::GenerateReceivers(MacroAssembler* masm) { |
3579 DCHECK_EQ(CompareICState::RECEIVER, state()); | 3578 DCHECK_EQ(CompareICState::RECEIVER, state()); |
3580 Label miss; | 3579 Label miss; |
(...skipping 24 matching lines...)
3605 __ GetWeakValue(rdi, cell); | 3604 __ GetWeakValue(rdi, cell); |
3606 __ cmpp(FieldOperand(rdx, HeapObject::kMapOffset), rdi); | 3605 __ cmpp(FieldOperand(rdx, HeapObject::kMapOffset), rdi); |
3607 __ j(not_equal, &miss, Label::kNear); | 3606 __ j(not_equal, &miss, Label::kNear); |
3608 __ cmpp(FieldOperand(rax, HeapObject::kMapOffset), rdi); | 3607 __ cmpp(FieldOperand(rax, HeapObject::kMapOffset), rdi); |
3609 __ j(not_equal, &miss, Label::kNear); | 3608 __ j(not_equal, &miss, Label::kNear); |
3610 | 3609 |
3611 if (Token::IsEqualityOp(op())) { | 3610 if (Token::IsEqualityOp(op())) { |
3612 __ subp(rax, rdx); | 3611 __ subp(rax, rdx); |
3613 __ ret(0); | 3612 __ ret(0); |
3614 } else if (is_strong(strength())) { | 3613 } else if (is_strong(strength())) { |
3615 __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion, 0); | 3614 __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion); |
3616 } else { | 3615 } else { |
3617 __ PopReturnAddressTo(rcx); | 3616 __ PopReturnAddressTo(rcx); |
3618 __ Push(rdx); | 3617 __ Push(rdx); |
3619 __ Push(rax); | 3618 __ Push(rax); |
3620 __ Push(Smi::FromInt(NegativeComparisonResult(GetCondition()))); | 3619 __ Push(Smi::FromInt(NegativeComparisonResult(GetCondition()))); |
3621 __ PushReturnAddressFrom(rcx); | 3620 __ PushReturnAddressFrom(rcx); |
3622 __ TailCallRuntime(Runtime::kCompare, 3); | 3621 __ TailCallRuntime(Runtime::kCompare); |
3623 } | 3622 } |
3624 | 3623 |
3625 __ bind(&miss); | 3624 __ bind(&miss); |
3626 GenerateMiss(masm); | 3625 GenerateMiss(masm); |
3627 } | 3626 } |
3628 | 3627 |
3629 | 3628 |
3630 void CompareICStub::GenerateMiss(MacroAssembler* masm) { | 3629 void CompareICStub::GenerateMiss(MacroAssembler* masm) { |
3631 { | 3630 { |
3632 // Call the runtime system in a fresh internal frame. | 3631 // Call the runtime system in a fresh internal frame. |
3633 FrameScope scope(masm, StackFrame::INTERNAL); | 3632 FrameScope scope(masm, StackFrame::INTERNAL); |
3634 __ Push(rdx); | 3633 __ Push(rdx); |
3635 __ Push(rax); | 3634 __ Push(rax); |
3636 __ Push(rdx); | 3635 __ Push(rdx); |
3637 __ Push(rax); | 3636 __ Push(rax); |
3638 __ Push(Smi::FromInt(op())); | 3637 __ Push(Smi::FromInt(op())); |
3639 __ CallRuntime(Runtime::kCompareIC_Miss, 3); | 3638 __ CallRuntime(Runtime::kCompareIC_Miss); |
3640 | 3639 |
3641 // Compute the entry point of the rewritten stub. | 3640 // Compute the entry point of the rewritten stub. |
3642 __ leap(rdi, FieldOperand(rax, Code::kHeaderSize)); | 3641 __ leap(rdi, FieldOperand(rax, Code::kHeaderSize)); |
3643 __ Pop(rax); | 3642 __ Pop(rax); |
3644 __ Pop(rdx); | 3643 __ Pop(rdx); |
3645 } | 3644 } |
3646 | 3645 |
3647 // Do a tail call to the rewritten stub. | 3646 // Do a tail call to the rewritten stub. |
3648 __ jmp(rdi); | 3647 __ jmp(rdi); |
3649 } | 3648 } |
(...skipping 1234 matching lines...)
4884 __ CompareRoot(result_reg, Heap::kTheHoleValueRootIndex); | 4883 __ CompareRoot(result_reg, Heap::kTheHoleValueRootIndex); |
4885 __ j(equal, &slow_case, Label::kNear); | 4884 __ j(equal, &slow_case, Label::kNear); |
4886 __ Ret(); | 4885 __ Ret(); |
4887 | 4886 |
4888 // Fallback to the runtime. | 4887 // Fallback to the runtime. |
4889 __ bind(&slow_case); | 4888 __ bind(&slow_case); |
4890 __ Integer32ToSmi(slot_reg, slot_reg); | 4889 __ Integer32ToSmi(slot_reg, slot_reg); |
4891 __ PopReturnAddressTo(kScratchRegister); | 4890 __ PopReturnAddressTo(kScratchRegister); |
4892 __ Push(slot_reg); | 4891 __ Push(slot_reg); |
4893 __ Push(kScratchRegister); | 4892 __ Push(kScratchRegister); |
4894 __ TailCallRuntime(Runtime::kLoadGlobalViaContext, 1); | 4893 __ TailCallRuntime(Runtime::kLoadGlobalViaContext); |
4895 } | 4894 } |
4896 | 4895 |
4897 | 4896 |
4898 void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) { | 4897 void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) { |
4899 Register context_reg = rsi; | 4898 Register context_reg = rsi; |
4900 Register slot_reg = rbx; | 4899 Register slot_reg = rbx; |
4901 Register value_reg = rax; | 4900 Register value_reg = rax; |
4902 Register cell_reg = r8; | 4901 Register cell_reg = r8; |
4903 Register cell_details_reg = rdx; | 4902 Register cell_details_reg = rdx; |
4904 Register cell_value_reg = r9; | 4903 Register cell_value_reg = r9; |
(...skipping 102 matching lines...)
5007 | 5006 |
5008 // Fallback to the runtime. | 5007 // Fallback to the runtime. |
5009 __ bind(&slow_case); | 5008 __ bind(&slow_case); |
5010 __ Integer32ToSmi(slot_reg, slot_reg); | 5009 __ Integer32ToSmi(slot_reg, slot_reg); |
5011 __ PopReturnAddressTo(kScratchRegister); | 5010 __ PopReturnAddressTo(kScratchRegister); |
5012 __ Push(slot_reg); | 5011 __ Push(slot_reg); |
5013 __ Push(value_reg); | 5012 __ Push(value_reg); |
5014 __ Push(kScratchRegister); | 5013 __ Push(kScratchRegister); |
5015 __ TailCallRuntime(is_strict(language_mode()) | 5014 __ TailCallRuntime(is_strict(language_mode()) |
5016 ? Runtime::kStoreGlobalViaContext_Strict | 5015 ? Runtime::kStoreGlobalViaContext_Strict |
5017 : Runtime::kStoreGlobalViaContext_Sloppy, | 5016 : Runtime::kStoreGlobalViaContext_Sloppy); |
5018 2); | |
5019 } | 5017 } |
5020 | 5018 |
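For call sites that pick between two runtime functions, as in StoreGlobalViaContextStub above, each candidate now supplies its own arity. Illustrative declarations in the runtime.h list-macro style, with the arity (2) inferred from the count the old code passed; the actual entries are not part of this diff:

    // F(Name, number of arguments, number of results)
    F(StoreGlobalViaContext_Strict, 2, 1)
    F(StoreGlobalViaContext_Sloppy, 2, 1)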
5021 | 5019 |
5022 static int Offset(ExternalReference ref0, ExternalReference ref1) { | 5020 static int Offset(ExternalReference ref0, ExternalReference ref1) { |
5023 int64_t offset = (ref0.address() - ref1.address()); | 5021 int64_t offset = (ref0.address() - ref1.address()); |
5024 // Check that fits into int. | 5022 // Check that fits into int. |
5025 DCHECK(static_cast<int>(offset) == offset); | 5023 DCHECK(static_cast<int>(offset) == offset); |
5026 return static_cast<int>(offset); | 5024 return static_cast<int>(offset); |
5027 } | 5025 } |
5028 | 5026 |
(...skipping 153 matching lines...)
5182 DCHECK_EQ(stack_space, 0); | 5180 DCHECK_EQ(stack_space, 0); |
5183 __ PopReturnAddressTo(rcx); | 5181 __ PopReturnAddressTo(rcx); |
5184 __ addq(rsp, rbx); | 5182 __ addq(rsp, rbx); |
5185 __ jmp(rcx); | 5183 __ jmp(rcx); |
5186 } else { | 5184 } else { |
5187 __ ret(stack_space * kPointerSize); | 5185 __ ret(stack_space * kPointerSize); |
5188 } | 5186 } |
5189 | 5187 |
5190 // Re-throw by promoting a scheduled exception. | 5188 // Re-throw by promoting a scheduled exception. |
5191 __ bind(&promote_scheduled_exception); | 5189 __ bind(&promote_scheduled_exception); |
5192 __ TailCallRuntime(Runtime::kPromoteScheduledException, 0); | 5190 __ TailCallRuntime(Runtime::kPromoteScheduledException); |
5193 | 5191 |
5194 // HandleScope limit has changed. Delete allocated extensions. | 5192 // HandleScope limit has changed. Delete allocated extensions. |
5195 __ bind(&delete_allocated_handles); | 5193 __ bind(&delete_allocated_handles); |
5196 __ movp(Operand(base_reg, kLimitOffset), prev_limit_reg); | 5194 __ movp(Operand(base_reg, kLimitOffset), prev_limit_reg); |
5197 __ movp(prev_limit_reg, rax); | 5195 __ movp(prev_limit_reg, rax); |
5198 __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate)); | 5196 __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate)); |
5199 __ LoadAddress(rax, | 5197 __ LoadAddress(rax, |
5200 ExternalReference::delete_handle_scope_extensions(isolate)); | 5198 ExternalReference::delete_handle_scope_extensions(isolate)); |
5201 __ call(rax); | 5199 __ call(rax); |
5202 __ movp(rax, prev_limit_reg); | 5200 __ movp(rax, prev_limit_reg); |
(...skipping 211 matching lines...)
5414 kStackSpace, nullptr, return_value_operand, NULL); | 5412 kStackSpace, nullptr, return_value_operand, NULL); |
5415 } | 5413 } |
5416 | 5414 |
5417 | 5415 |
5418 #undef __ | 5416 #undef __ |
5419 | 5417 |
5420 } // namespace internal | 5418 } // namespace internal |
5421 } // namespace v8 | 5419 } // namespace v8 |
5422 | 5420 |
5423 #endif // V8_TARGET_ARCH_X64 | 5421 #endif // V8_TARGET_ARCH_X64 |