Chromium Code Reviews

Unified Diff: src/x64/code-stubs-x64.cc

Issue 17229005: Convert UnaryOpStub to a HydrogenCodeStub (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: rebase Created 7 years, 5 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 204 matching lines...)
 
 void ToBooleanStub::InitializeInterfaceDescriptor(
     Isolate* isolate,
     CodeStubInterfaceDescriptor* descriptor) {
   static Register registers[] = { rax };
   descriptor->register_param_count_ = 1;
   descriptor->register_params_ = registers;
   descriptor->deoptimization_handler_ =
       FUNCTION_ADDR(ToBooleanIC_Miss);
   descriptor->SetMissHandler(
       ExternalReference(IC_Utility(IC::kToBooleanIC_Miss), isolate));
 }
 
 
+void UnaryOpStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  static Register registers[] = { rax };
+  descriptor->register_param_count_ = 1;
+  descriptor->register_params_ = registers;
+  descriptor->deoptimization_handler_ =
+      FUNCTION_ADDR(UnaryOpIC_Miss);
+}
+
+
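The added block above is the heart of the conversion: instead of the hand-rolled Generate* machinery deleted further down, the stub now just describes itself to the Hydrogen framework, which generates the stub body and the miss path from this descriptor. Below is a minimal standalone mock of that contract; FakeRegister, FakeDescriptor, and the mock miss handler are all invented here for illustration and are not V8 types.

    #include <cstdio>

    // Hypothetical stand-ins for V8's Register and descriptor types.
    struct FakeRegister { const char* name; };

    typedef void (*MissHandler)(void);

    struct FakeDescriptor {
      int register_param_count_;
      const FakeRegister* register_params_;
      MissHandler deoptimization_handler_;
    };

    static void UnaryOpIC_Miss_Mock() {
      std::printf("miss: call runtime, patch call site\n");
    }

    // Mirrors the shape of the descriptor initialization in the patch:
    // one register parameter (rax) plus the address of the miss handler.
    void InitializeDescriptor(FakeDescriptor* d) {
      static const FakeRegister registers[] = { {"rax"} };
      d->register_param_count_ = 1;
      d->register_params_ = registers;
      d->deoptimization_handler_ = UnaryOpIC_Miss_Mock;
    }

    int main() {
      FakeDescriptor d;
      InitializeDescriptor(&d);
      std::printf("params: %d, first: %s\n",
                  d.register_param_count_, d.register_params_[0].name);
      d.deoptimization_handler_();
    }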
 #define __ ACCESS_MASM(masm)
 
 
 void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
   // Update the static counter each time a new code stub is generated.
   Isolate* isolate = masm->isolate();
   isolate->counters()->code_stubs()->Increment();
 
   CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate);
   int param_count = descriptor->register_param_count_;
(...skipping 397 matching lines...)
     // As the then-branch, but move double-value to result before shifting.
     __ xorl(result, double_value);
     __ leal(rcx, Operand(double_exponent, -HeapNumber::kMantissaBits - 1));
     __ shll_cl(result);
   }
 
   __ bind(&done);
 }
 
 
-void UnaryOpStub::Generate(MacroAssembler* masm) {
-  switch (operand_type_) {
-    case UnaryOpIC::UNINITIALIZED:
-      GenerateTypeTransition(masm);
-      break;
-    case UnaryOpIC::SMI:
-      GenerateSmiStub(masm);
-      break;
-    case UnaryOpIC::NUMBER:
-      GenerateNumberStub(masm);
-      break;
-    case UnaryOpIC::GENERIC:
-      GenerateGenericStub(masm);
-      break;
-  }
-}
-
-
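The deleted switch above was the old stub's entry point: it dispatched on the IC's recorded operand type, and the states only ever generalize, UNINITIALIZED to SMI to NUMBER to GENERIC. A standalone sketch of that monotone state progression follows; the names and the transition function are invented for the demo (the real logic lived in UnaryOpIC), and a 32-bit check stands in for the platform-dependent smi range.

    #include <algorithm>
    #include <cassert>
    #include <cstdint>

    // Hypothetical mirror of UnaryOpIC's states, ordered from most
    // specific to most generic (names match the switch above).
    enum State { UNINITIALIZED = 0, SMI = 1, NUMBER = 2, GENERIC = 3 };

    // What state does this operand require?
    State StateFor(double operand) {
      bool is_small_int =
          operand >= INT32_MIN && operand <= INT32_MAX &&
          operand == static_cast<double>(static_cast<int32_t>(operand));
      return is_small_int ? SMI : NUMBER;
    }

    // On a miss the IC only ever generalizes: it takes the maximum of
    // the old state and what the new operand requires, never narrowing.
    State Transition(State old_state, double operand) {
      return std::max(old_state, StateFor(operand));
    }

    int main() {
      State s = UNINITIALIZED;
      s = Transition(s, 42.0);  // first operand is a smi -> SMI stub
      assert(s == SMI);
      s = Transition(s, 3.5);   // a double forces the NUMBER stub
      assert(s == NUMBER);
      s = Transition(s, 7.0);   // a later smi does not narrow it back
      assert(s == NUMBER);
    }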
-void UnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
-  __ pop(rcx);  // Save return address.
-
-  __ push(rax);  // the operand
-  __ Push(Smi::FromInt(op_));
-  __ Push(Smi::FromInt(mode_));
-  __ Push(Smi::FromInt(operand_type_));
-
-  __ push(rcx);  // Push return address.
-
-  // Patch the caller to an appropriate specialized stub and return the
-  // operation result to the caller of the stub.
-  __ TailCallExternalReference(
-      ExternalReference(IC_Utility(IC::kUnaryOp_Patch), masm->isolate()), 4, 1);
-}
-
-
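GenerateTypeTransition used a classic IC trick: pop the return address, push the operand plus three Smi-encoded pieces of stub state, push the return address back, then tail-call the runtime, which both computes the result and patches the caller to a more specialized stub. A standalone simulation of just the stack shuffle (the runtime and patching side is V8-internal; the Smi values below are invented for the demo):

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    int main() {
      // Simulated stack: grows toward the back; back() is the top.
      std::vector<uint64_t> stack;
      stack.push_back(0xDEADBEEF);  // caller's return address
      uint64_t rax = 42;            // the operand arrives in rax

      // __ pop(rcx)  -- take the return address off the top.
      uint64_t rcx = stack.back();
      stack.pop_back();

      // __ push(rax); __ Push(op_); __ Push(mode_); __ Push(operand_type_)
      stack.push_back(rax);  // the operand
      stack.push_back(1);    // op_ as a Smi (value invented for the demo)
      stack.push_back(0);    // mode_ as a Smi
      stack.push_back(0);    // operand_type_ as a Smi
      // __ push(rcx)  -- return address goes back on top.
      stack.push_back(rcx);

      // The tail call now sees 4 arguments under the return address,
      // matching TailCallExternalReference(..., 4, 1).
      std::printf("stack depth = %zu, top = %#llx\n", stack.size(),
                  static_cast<unsigned long long>(stack.back()));
    }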
-// TODO(svenpanne): Use virtual functions instead of switch.
-void UnaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
-  switch (op_) {
-    case Token::SUB:
-      GenerateSmiStubSub(masm);
-      break;
-    case Token::BIT_NOT:
-      GenerateSmiStubBitNot(masm);
-      break;
-    default:
-      UNREACHABLE();
-  }
-}
-
-
-void UnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) {
-  Label slow;
-  GenerateSmiCodeSub(masm, &slow, &slow, Label::kNear, Label::kNear);
-  __ bind(&slow);
-  GenerateTypeTransition(masm);
-}
-
-
-void UnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) {
-  Label non_smi;
-  GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear);
-  __ bind(&non_smi);
-  GenerateTypeTransition(masm);
-}
-
-
-void UnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm,
-                                     Label* non_smi,
-                                     Label* slow,
-                                     Label::Distance non_smi_near,
-                                     Label::Distance slow_near) {
-  Label done;
-  __ JumpIfNotSmi(rax, non_smi, non_smi_near);
-  __ SmiNeg(rax, rax, &done, Label::kNear);
-  __ jmp(slow, slow_near);
-  __ bind(&done);
-  __ ret(0);
-}
-
-
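The jump to the slow path after SmiNeg exists because two negations are not representable as smis: -0 (JavaScript's -0 must be a heap number) and the negation of the most negative smi, which overflows. A standalone illustration using 32-bit integers (the actual smi width is platform-dependent; SmiNeg here is a hypothetical helper, not V8's macro):

    #include <cstdint>
    #include <cstdio>

    // Mirrors why SmiNeg has a bailout: writes -value and returns true
    // only when the negation is still a representable integer.
    bool SmiNeg(int32_t value, int32_t* result) {
      if (value == 0) return false;          // -0 is not a smi in JS
      if (value == INT32_MIN) return false;  // -INT32_MIN overflows
      *result = -value;
      return true;
    }

    int main() {
      int32_t r;
      std::printf("neg 42:        %s\n",
                  SmiNeg(42, &r) ? "fast path" : "slow path");
      std::printf("neg 0:         %s\n",
                  SmiNeg(0, &r) ? "fast path" : "slow path");
      std::printf("neg INT32_MIN: %s\n",
                  SmiNeg(INT32_MIN, &r) ? "fast path" : "slow path");
    }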
-void UnaryOpStub::GenerateSmiCodeBitNot(MacroAssembler* masm,
-                                        Label* non_smi,
-                                        Label::Distance non_smi_near) {
-  __ JumpIfNotSmi(rax, non_smi, non_smi_near);
-  __ SmiNot(rax, rax);
-  __ ret(0);
-}
-
-
-// TODO(svenpanne): Use virtual functions instead of switch.
-void UnaryOpStub::GenerateNumberStub(MacroAssembler* masm) {
-  switch (op_) {
-    case Token::SUB:
-      GenerateNumberStubSub(masm);
-      break;
-    case Token::BIT_NOT:
-      GenerateNumberStubBitNot(masm);
-      break;
-    default:
-      UNREACHABLE();
-  }
-}
-
-
-void UnaryOpStub::GenerateNumberStubSub(MacroAssembler* masm) {
-  Label non_smi, slow, call_builtin;
-  GenerateSmiCodeSub(masm, &non_smi, &call_builtin, Label::kNear);
-  __ bind(&non_smi);
-  GenerateHeapNumberCodeSub(masm, &slow);
-  __ bind(&slow);
-  GenerateTypeTransition(masm);
-  __ bind(&call_builtin);
-  GenerateGenericCodeFallback(masm);
-}
-
-
-void UnaryOpStub::GenerateNumberStubBitNot(
-    MacroAssembler* masm) {
-  Label non_smi, slow;
-  GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear);
-  __ bind(&non_smi);
-  GenerateHeapNumberCodeBitNot(masm, &slow);
-  __ bind(&slow);
-  GenerateTypeTransition(masm);
-}
-
-
-void UnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm,
-                                            Label* slow) {
-  // Check if the operand is a heap number.
-  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
-                 Heap::kHeapNumberMapRootIndex);
-  __ j(not_equal, slow);
-
-  // Operand is a float, negate its value by flipping the sign bit.
-  if (mode_ == UNARY_OVERWRITE) {
-    __ Set(kScratchRegister, 0x01);
-    __ shl(kScratchRegister, Immediate(63));
-    __ xor_(FieldOperand(rax, HeapNumber::kValueOffset), kScratchRegister);
-  } else {
-    // Allocate a heap number before calculating the answer,
-    // so we don't have an untagged double around during GC.
-    Label slow_allocate_heapnumber, heapnumber_allocated;
-    __ AllocateHeapNumber(rcx, rbx, &slow_allocate_heapnumber);
-    __ jmp(&heapnumber_allocated);
-
-    __ bind(&slow_allocate_heapnumber);
-    {
-      FrameScope scope(masm, StackFrame::INTERNAL);
-      __ push(rax);
-      __ CallRuntime(Runtime::kNumberAlloc, 0);
-      __ movq(rcx, rax);
-      __ pop(rax);
-    }
-    __ bind(&heapnumber_allocated);
-    // rcx: allocated 'empty' number
-
-    // Copy the double value to the new heap number, flipping the sign.
-    __ movq(rdx, FieldOperand(rax, HeapNumber::kValueOffset));
-    __ Set(kScratchRegister, 0x01);
-    __ shl(kScratchRegister, Immediate(63));
-    __ xor_(rdx, kScratchRegister);  // Flip sign.
-    __ movq(FieldOperand(rcx, HeapNumber::kValueOffset), rdx);
-    __ movq(rax, rcx);
-  }
-  __ ret(0);
-}
-
-
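Negating a heap number never touches the FPU: an IEEE-754 double keeps its sign in bit 63, so XOR-ing that one bit is exactly negation, handling -0, NaN payloads, and infinities uniformly. Note also the two modes: UNARY_OVERWRITE flips the bit in the operand's own storage, while the other path allocates a fresh heap number first so no untagged double is live across a GC. A standalone check of the bit identity (FlipSign is a name invented here):

    #include <cassert>
    #include <cmath>
    #include <cstdint>
    #include <cstring>

    // XOR bit 63, exactly what the stub builds in kScratchRegister
    // (Set to 0x01, then shl by 63) and applies with xor_.
    double FlipSign(double x) {
      uint64_t bits;
      std::memcpy(&bits, &x, sizeof bits);
      bits ^= uint64_t{1} << 63;
      std::memcpy(&x, &bits, sizeof bits);
      return x;
    }

    int main() {
      assert(FlipSign(1.5) == -1.5);
      assert(FlipSign(-2.25) == 2.25);
      assert(std::signbit(FlipSign(0.0)));      // 0.0 becomes -0.0
      assert(FlipSign(-INFINITY) == INFINITY);  // -inf becomes +inf
    }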
-void UnaryOpStub::GenerateHeapNumberCodeBitNot(MacroAssembler* masm,
-                                               Label* slow) {
-  // Check if the operand is a heap number.
-  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
-                 Heap::kHeapNumberMapRootIndex);
-  __ j(not_equal, slow);
-
-  // Convert the heap number in rax to an untagged integer in rax.
-  IntegerConvert(masm, rax, rax);
-
-  // Do the bitwise operation and smi tag the result.
-  __ notl(rax);
-  __ Integer32ToSmi(rax, rax);
-  __ ret(0);
-}
-
-
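For BIT_NOT on a heap number, the operand is first converted to an untagged 32-bit integer per ECMAScript's ToInt32 (truncate toward zero, wrap modulo 2^32), then notl is applied and the result is re-tagged as a smi. A portable sketch of those semantics; this is an illustration only, not V8's optimized IntegerConvert:

    #include <cmath>
    #include <cstdint>
    #include <cstdio>

    // ECMAScript ToInt32: truncate toward zero, wrap modulo 2^32,
    // then reinterpret the low 32 bits as signed.
    int32_t ToInt32(double d) {
      if (std::isnan(d) || std::isinf(d)) return 0;
      double t = std::trunc(d);
      double m = std::fmod(t, 4294967296.0);  // 2^32
      if (m < 0) m += 4294967296.0;
      return static_cast<int32_t>(static_cast<uint32_t>(m));
    }

    int main() {
      // ~3.7 in JS: ToInt32(3.7) == 3, and ~3 == -4.
      std::printf("~3.7  -> %d\n", ~ToInt32(3.7));
      // Large doubles wrap: ToInt32(2^31) == -2^31, so ~ gives 2^31 - 1.
      std::printf("~2^31 -> %d\n", ~ToInt32(2147483648.0));
    }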
-// TODO(svenpanne): Use virtual functions instead of switch.
-void UnaryOpStub::GenerateGenericStub(MacroAssembler* masm) {
-  switch (op_) {
-    case Token::SUB:
-      GenerateGenericStubSub(masm);
-      break;
-    case Token::BIT_NOT:
-      GenerateGenericStubBitNot(masm);
-      break;
-    default:
-      UNREACHABLE();
-  }
-}
-
-
-void UnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) {
-  Label non_smi, slow;
-  GenerateSmiCodeSub(masm, &non_smi, &slow, Label::kNear);
-  __ bind(&non_smi);
-  GenerateHeapNumberCodeSub(masm, &slow);
-  __ bind(&slow);
-  GenerateGenericCodeFallback(masm);
-}
-
-
-void UnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) {
-  Label non_smi, slow;
-  GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear);
-  __ bind(&non_smi);
-  GenerateHeapNumberCodeBitNot(masm, &slow);
-  __ bind(&slow);
-  GenerateGenericCodeFallback(masm);
-}
-
-
-void UnaryOpStub::GenerateGenericCodeFallback(MacroAssembler* masm) {
-  // Handle the slow case by jumping to the JavaScript builtin.
-  __ pop(rcx);  // pop return address
-  __ push(rax);
-  __ push(rcx);  // push return address
-  switch (op_) {
-    case Token::SUB:
-      __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION);
-      break;
-    case Token::BIT_NOT:
-      __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION);
-      break;
-    default:
-      UNREACHABLE();
-  }
-}
-
-
-void UnaryOpStub::PrintName(StringStream* stream) {
-  const char* op_name = Token::Name(op_);
-  const char* overwrite_name = NULL;  // Make g++ happy.
-  switch (mode_) {
-    case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break;
-    case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break;
-  }
-  stream->Add("UnaryOpStub_%s_%s_%s",
-              op_name,
-              overwrite_name,
-              UnaryOpIC::GetName(operand_type_));
-}
-
-
 void BinaryOpStub::Initialize() {}
 
 
 void BinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
   __ pop(rcx);  // Save return address.
   __ push(rdx);
   __ push(rax);
   // Left and right arguments are now on top.
   __ Push(Smi::FromInt(MinorKey()));
 
(...skipping 6089 matching lines...)
   __ bind(&fast_elements_case);
   GenerateCase(masm, FAST_ELEMENTS);
 }
 
 
 #undef __
 
 } }  // namespace v8::internal
 
 #endif  // V8_TARGET_ARCH_X64