Chromium Code Reviews

Unified Diff: src/ia32/code-stubs-ia32.cc

Issue 18712002: Convert UnaryOpStub to a HydrogenCodeStub (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: address review | Created 7 years, 5 months ago
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
(...skipping 212 matching lines...)
  static Register registers[] = { eax };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(ToBooleanIC_Miss);
  descriptor->SetMissHandler(
      ExternalReference(IC_Utility(IC::kToBooleanIC_Miss), isolate));
}


+void UnaryOpStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  static Register registers[] = { eax };
+  descriptor->register_param_count_ = 1;
+  descriptor->register_params_ = registers;
+  descriptor->deoptimization_handler_ =
+      FUNCTION_ADDR(UnaryOpIC_Miss);
+}
+
+
#define __ ACCESS_MASM(masm)


void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
  // Update the static counter each time a new code stub is generated.
  Isolate* isolate = masm->isolate();
  isolate->counters()->code_stubs()->Increment();

  CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate);
  int param_count = descriptor->register_param_count_;
(...skipping 509 matching lines...)
// Result is in ecx. Trashes ebx, xmm0, and xmm1.
static void ConvertHeapNumberToInt32(MacroAssembler* masm,
                                     Register source,
                                     Label* conversion_failure) {
  __ movdbl(xmm0, FieldOperand(source, HeapNumber::kValueOffset));
  FloatingPointHelper::CheckSSE2OperandIsInt32(
      masm, conversion_failure, xmm0, ecx, ebx, xmm1);
}


-void UnaryOpStub::PrintName(StringStream* stream) {
-  const char* op_name = Token::Name(op_);
-  const char* overwrite_name = NULL;  // Make g++ happy.
-  switch (mode_) {
-    case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break;
-    case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break;
-  }
-  stream->Add("UnaryOpStub_%s_%s_%s",
-              op_name,
-              overwrite_name,
-              UnaryOpIC::GetName(operand_type_));
-}
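(For example, a Token::SUB stub in UNARY_NO_OVERWRITE mode that has only seen smi operands would print a name along the lines of "UnaryOpStub_SUB_Alloc_Smi"; the exact spellings come from Token::Name and UnaryOpIC::GetName.)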
-
-
-// TODO(svenpanne): Use virtual functions instead of switch.
-void UnaryOpStub::Generate(MacroAssembler* masm) {
-  switch (operand_type_) {
-    case UnaryOpIC::UNINITIALIZED:
-      GenerateTypeTransition(masm);
-      break;
-    case UnaryOpIC::SMI:
-      GenerateSmiStub(masm);
-      break;
-    case UnaryOpIC::NUMBER:
-      GenerateNumberStub(masm);
-      break;
-    case UnaryOpIC::GENERIC:
-      GenerateGenericStub(masm);
-      break;
-  }
-}
-
-
-void UnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
-  __ pop(ecx);  // Save return address.
-
-  __ push(eax);  // the operand
-  __ push(Immediate(Smi::FromInt(op_)));
-  __ push(Immediate(Smi::FromInt(mode_)));
-  __ push(Immediate(Smi::FromInt(operand_type_)));
-
-  __ push(ecx);  // Push return address.
-
-  // Patch the caller to an appropriate specialized stub and return the
-  // operation result to the caller of the stub.
-  __ TailCallExternalReference(
-      ExternalReference(IC_Utility(IC::kUnaryOp_Patch), masm->isolate()), 4, 1);
-}
-
-
-// TODO(svenpanne): Use virtual functions instead of switch.
-void UnaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
-  switch (op_) {
-    case Token::SUB:
-      GenerateSmiStubSub(masm);
-      break;
-    case Token::BIT_NOT:
-      GenerateSmiStubBitNot(masm);
-      break;
-    default:
-      UNREACHABLE();
-  }
-}
-
-
-void UnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) {
-  Label non_smi, undo, slow;
-  GenerateSmiCodeSub(masm, &non_smi, &undo, &slow,
-                     Label::kNear, Label::kNear, Label::kNear);
-  __ bind(&undo);
-  GenerateSmiCodeUndo(masm);
-  __ bind(&non_smi);
-  __ bind(&slow);
-  GenerateTypeTransition(masm);
-}
-
-
-void UnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) {
-  Label non_smi;
-  GenerateSmiCodeBitNot(masm, &non_smi);
-  __ bind(&non_smi);
-  GenerateTypeTransition(masm);
-}
-
-
-void UnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm,
-                                     Label* non_smi,
-                                     Label* undo,
-                                     Label* slow,
-                                     Label::Distance non_smi_near,
-                                     Label::Distance undo_near,
-                                     Label::Distance slow_near) {
-  // Check whether the value is a smi.
-  __ JumpIfNotSmi(eax, non_smi, non_smi_near);
-
-  // We can't handle -0 with smis, so use a type transition for that case.
-  __ test(eax, eax);
-  __ j(zero, slow, slow_near);
-
-  // Try optimistic subtraction '0 - value', saving operand in edx for undo.
-  __ mov(edx, eax);
-  __ Set(eax, Immediate(0));
-  __ sub(eax, edx);
-  __ j(overflow, undo, undo_near);
-  __ ret(0);
-}
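The zero test and the overflow branch above guard the two inputs plain negation cannot handle. A minimal standalone sketch of both cases, assuming the ia32 smi encoding value << 1 with tag bit 0 (illustration only, not V8 code):

    #include <cassert>
    #include <cstdint>

    int main() {
      // Case 1: negating the tagged value 0 would have to produce -0,
      // which no smi can represent, hence the slow path on zero input.
      // Case 2: the most negative smi, -2^30, is tagged as 0x80000000;
      // in 32-bit two's complement, 0 - 0x80000000 wraps back to
      // 0x80000000 and sets the overflow flag, hence the undo label.
      uint32_t tagged = 0x80000000u;   // SmiTag(-(1 << 30)), assumed encoding
      uint32_t negated = 0u - tagged;  // well-defined unsigned wraparound
      assert(negated == tagged);       // same bit pattern: the sub overflowed
      return 0;
    }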
-
-
-void UnaryOpStub::GenerateSmiCodeBitNot(
-    MacroAssembler* masm,
-    Label* non_smi,
-    Label::Distance non_smi_near) {
-  // Check whether the value is a smi.
-  __ JumpIfNotSmi(eax, non_smi, non_smi_near);
-
-  // Flip bits and revert inverted smi-tag.
-  __ not_(eax);
-  __ and_(eax, ~kSmiTagMask);
-  __ ret(0);
-}
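The two-instruction body works because of a small identity on the assumed encoding SmiTag(v) = v << 1 with kSmiTagMask = 1: ~(v << 1) == ((~v) << 1) | 1, so flipping every bit and then clearing the tag bit yields SmiTag(~v) directly, with no untag/retag round trip. A standalone sketch (illustration only, not V8 code):

    #include <cassert>
    #include <cstdint>
    #include <initializer_list>

    int main() {
      const uint32_t kSmiTagMask = 1;
      for (int32_t v : {0, 1, -1, 42, -12345}) {
        uint32_t tagged = static_cast<uint32_t>(v) << 1;   // SmiTag(v)
        uint32_t result = ~tagged & ~kSmiTagMask;          // not_ then and_
        assert(result == static_cast<uint32_t>(~v) << 1);  // == SmiTag(~v)
      }
      return 0;
    }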
-
-
-void UnaryOpStub::GenerateSmiCodeUndo(MacroAssembler* masm) {
-  __ mov(eax, edx);
-}
-
-
-// TODO(svenpanne): Use virtual functions instead of switch.
-void UnaryOpStub::GenerateNumberStub(MacroAssembler* masm) {
-  switch (op_) {
-    case Token::SUB:
-      GenerateNumberStubSub(masm);
-      break;
-    case Token::BIT_NOT:
-      GenerateNumberStubBitNot(masm);
-      break;
-    default:
-      UNREACHABLE();
-  }
-}
-
-
-void UnaryOpStub::GenerateNumberStubSub(MacroAssembler* masm) {
-  Label non_smi, undo, slow, call_builtin;
-  GenerateSmiCodeSub(masm, &non_smi, &undo, &call_builtin, Label::kNear);
-  __ bind(&non_smi);
-  GenerateHeapNumberCodeSub(masm, &slow);
-  __ bind(&undo);
-  GenerateSmiCodeUndo(masm);
-  __ bind(&slow);
-  GenerateTypeTransition(masm);
-  __ bind(&call_builtin);
-  GenerateGenericCodeFallback(masm);
-}
-
-
-void UnaryOpStub::GenerateNumberStubBitNot(
-    MacroAssembler* masm) {
-  Label non_smi, slow;
-  GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear);
-  __ bind(&non_smi);
-  GenerateHeapNumberCodeBitNot(masm, &slow);
-  __ bind(&slow);
-  GenerateTypeTransition(masm);
-}
-
-
-void UnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm,
-                                            Label* slow) {
-  __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
-  __ cmp(edx, masm->isolate()->factory()->heap_number_map());
-  __ j(not_equal, slow);
-
-  if (mode_ == UNARY_OVERWRITE) {
-    __ xor_(FieldOperand(eax, HeapNumber::kExponentOffset),
-            Immediate(HeapNumber::kSignMask));  // Flip sign.
-  } else {
-    __ mov(edx, eax);
-    // edx: operand
-
-    Label slow_allocate_heapnumber, heapnumber_allocated;
-    __ AllocateHeapNumber(eax, ebx, ecx, &slow_allocate_heapnumber);
-    __ jmp(&heapnumber_allocated, Label::kNear);
-
-    __ bind(&slow_allocate_heapnumber);
-    {
-      FrameScope scope(masm, StackFrame::INTERNAL);
-      __ push(edx);
-      __ CallRuntime(Runtime::kNumberAlloc, 0);
-      __ pop(edx);
-    }
-
-    __ bind(&heapnumber_allocated);
-    // eax: allocated 'empty' number
-    __ mov(ecx, FieldOperand(edx, HeapNumber::kExponentOffset));
-    __ xor_(ecx, HeapNumber::kSignMask);  // Flip sign.
-    __ mov(FieldOperand(eax, HeapNumber::kExponentOffset), ecx);
-    __ mov(ecx, FieldOperand(edx, HeapNumber::kMantissaOffset));
-    __ mov(FieldOperand(eax, HeapNumber::kMantissaOffset), ecx);
-  }
-  __ ret(0);
-}
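Both branches above negate by flipping one bit rather than doing arithmetic: a HeapNumber stores an IEEE-754 double, and XOR-ing kSignMask into the upper (exponent) word toggles only the sign. A standalone sketch, assuming kSignMask == 0x80000000 (illustration only, not V8 code):

    #include <cassert>
    #include <cstdint>
    #include <cstring>

    int main() {
      double value = 42.5;
      uint64_t bits;
      std::memcpy(&bits, &value, sizeof bits);
      bits ^= static_cast<uint64_t>(0x80000000u) << 32;  // flip sign bit only
      double negated;
      std::memcpy(&negated, &bits, sizeof negated);
      assert(negated == -42.5);
      return 0;
    }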
-
-
-void UnaryOpStub::GenerateHeapNumberCodeBitNot(MacroAssembler* masm,
-                                               Label* slow) {
-  __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
-  __ cmp(edx, masm->isolate()->factory()->heap_number_map());
-  __ j(not_equal, slow);
-
-  // Convert the heap number in eax to an untagged integer in ecx.
-  IntegerConvert(masm, eax, CpuFeatures::IsSupported(SSE3), slow);
-
-  // Do the bitwise operation and check if the result fits in a smi.
-  Label try_float;
-  __ not_(ecx);
-  __ cmp(ecx, 0xc0000000);
-  __ j(sign, &try_float, Label::kNear);
-
-  // Tag the result as a smi and we're done.
-  STATIC_ASSERT(kSmiTagSize == 1);
-  __ lea(eax, Operand(ecx, times_2, kSmiTag));
-  __ ret(0);
-
-  // Try to store the result in a heap number.
-  __ bind(&try_float);
-  if (mode_ == UNARY_NO_OVERWRITE) {
-    Label slow_allocate_heapnumber, heapnumber_allocated;
-    __ mov(ebx, eax);
-    __ AllocateHeapNumber(eax, edx, edi, &slow_allocate_heapnumber);
-    __ jmp(&heapnumber_allocated);
-
-    __ bind(&slow_allocate_heapnumber);
-    {
-      FrameScope scope(masm, StackFrame::INTERNAL);
-      // Push the original HeapNumber on the stack. The integer value can't
-      // be stored since it's untagged and not in the smi range (so we can't
-      // smi-tag it). We'll recalculate the value after the GC instead.
-      __ push(ebx);
-      __ CallRuntime(Runtime::kNumberAlloc, 0);
-      // New HeapNumber is in eax.
-      __ pop(edx);
-    }
-    // IntegerConvert uses ebx and edi as scratch registers.
-    // This conversion won't go slow-case.
-    IntegerConvert(masm, edx, CpuFeatures::IsSupported(SSE3), slow);
-    __ not_(ecx);
-
-    __ bind(&heapnumber_allocated);
-  }
-  if (CpuFeatures::IsSupported(SSE2)) {
-    CpuFeatureScope use_sse2(masm, SSE2);
-    __ cvtsi2sd(xmm0, ecx);
-    __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
-  } else {
-    __ push(ecx);
-    __ fild_s(Operand(esp, 0));
-    __ pop(ecx);
-    __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
-  }
-  __ ret(0);
-}
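The cmp(ecx, 0xc0000000) / j(sign, ...) pair above is a compact range test: subtracting 0xc0000000 equals adding 0x40000000 mod 2^32, and the sign bit of that sum is set exactly when the result lies outside the 31-bit smi range [-2^30, 2^30 - 1]. A standalone sketch (illustration only, not V8 code):

    #include <cassert>
    #include <cstdint>

    // Mirrors the flag produced by 'cmp ecx, 0xc0000000' followed by j(sign).
    bool FitsInSmi(uint32_t value) {
      return ((value + 0x40000000u) & 0x80000000u) == 0;
    }

    int main() {
      assert(FitsInSmi(0));
      assert(FitsInSmi(0x3fffffffu));                        //  2^30 - 1, largest smi
      assert(FitsInSmi(static_cast<uint32_t>(-(1 << 30))));  // -2^30, smallest smi
      assert(!FitsInSmi(0x40000000u));                       //  2^30 needs a HeapNumber
      assert(!FitsInSmi(0xbfffffffu));                       // -2^30 - 1 does too
      return 0;
    }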
-
-
-// TODO(svenpanne): Use virtual functions instead of switch.
-void UnaryOpStub::GenerateGenericStub(MacroAssembler* masm) {
-  switch (op_) {
-    case Token::SUB:
-      GenerateGenericStubSub(masm);
-      break;
-    case Token::BIT_NOT:
-      GenerateGenericStubBitNot(masm);
-      break;
-    default:
-      UNREACHABLE();
-  }
-}
-
-
-void UnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) {
-  Label non_smi, undo, slow;
-  GenerateSmiCodeSub(masm, &non_smi, &undo, &slow, Label::kNear);
-  __ bind(&non_smi);
-  GenerateHeapNumberCodeSub(masm, &slow);
-  __ bind(&undo);
-  GenerateSmiCodeUndo(masm);
-  __ bind(&slow);
-  GenerateGenericCodeFallback(masm);
-}
-
-
-void UnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) {
-  Label non_smi, slow;
-  GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear);
-  __ bind(&non_smi);
-  GenerateHeapNumberCodeBitNot(masm, &slow);
-  __ bind(&slow);
-  GenerateGenericCodeFallback(masm);
-}
-
-
-void UnaryOpStub::GenerateGenericCodeFallback(MacroAssembler* masm) {
-  // Handle the slow case by jumping to the corresponding JavaScript builtin.
-  __ pop(ecx);  // pop return address.
-  __ push(eax);
-  __ push(ecx);  // push return address
-  switch (op_) {
-    case Token::SUB:
-      __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION);
-      break;
-    case Token::BIT_NOT:
-      __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION);
-      break;
-    default:
-      UNREACHABLE();
-  }
-}
-
-
void BinaryOpStub::Initialize() {
  platform_specific_bit_ = CpuFeatures::IsSupported(SSE3);
}


void BinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
  __ pop(ecx);  // Save return address.
  __ push(edx);
  __ push(eax);
  // Left and right arguments are now on top.
(...skipping 6928 matching lines...)
  __ bind(&fast_elements_case);
  GenerateCase(masm, FAST_ELEMENTS);
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_IA32