Chromium Code Reviews

Diff (unified): src/ia32/code-stubs-ia32.cc

Issue 148503002: A64: Synchronize with r15545. (Closed)
Base URL: https://v8.googlecode.com/svn/branches/experimental/a64
Patch Set: Created 6 years, 10 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
 //       with the distribution.
 //     * Neither the name of Google Inc. nor the names of its
 //       contributors may be used to endorse or promote products derived
 //       from this software without specific prior written permission.
 //
 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 #include "v8.h"
 
-#if defined(V8_TARGET_ARCH_IA32)
+#if V8_TARGET_ARCH_IA32
 
 #include "bootstrapper.h"
 #include "code-stubs.h"
 #include "isolate.h"
 #include "jsregexp.h"
 #include "regexp-macro-assembler.h"
 #include "runtime.h"
 #include "stub-cache.h"
 #include "codegen.h"
 #include "runtime.h"
(...skipping 17 matching lines...)
     Isolate* isolate,
     CodeStubInterfaceDescriptor* descriptor) {
   static Register registers[] = { eax, ebx, ecx, edx };
   descriptor->register_param_count_ = 4;
   descriptor->register_params_ = registers;
   descriptor->deoptimization_handler_ =
       Runtime::FunctionForId(Runtime::kCreateObjectLiteralShallow)->entry;
 }
 
 
+void CreateAllocationSiteStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  static Register registers[] = { ebx };
+  descriptor->register_param_count_ = 1;
+  descriptor->register_params_ = registers;
+  descriptor->deoptimization_handler_ = NULL;
+}
+
+
 void KeyedLoadFastElementStub::InitializeInterfaceDescriptor(
     Isolate* isolate,
     CodeStubInterfaceDescriptor* descriptor) {
   static Register registers[] = { edx, ecx };
   descriptor->register_param_count_ = 2;
   descriptor->register_params_ = registers;
   descriptor->deoptimization_handler_ =
       FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure);
 }
 
(...skipping 145 matching lines...)
   static Register registers[] = { eax };
   descriptor->register_param_count_ = 1;
   descriptor->register_params_ = registers;
   descriptor->deoptimization_handler_ =
       FUNCTION_ADDR(ToBooleanIC_Miss);
   descriptor->SetMissHandler(
       ExternalReference(IC_Utility(IC::kToBooleanIC_Miss), isolate));
 }
 
 
+void UnaryOpStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  static Register registers[] = { eax };
+  descriptor->register_param_count_ = 1;
+  descriptor->register_params_ = registers;
+  descriptor->deoptimization_handler_ =
+      FUNCTION_ADDR(UnaryOpIC_Miss);
+}
+
+
+void StoreGlobalStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  static Register registers[] = { edx, ecx, eax };
+  descriptor->register_param_count_ = 3;
+  descriptor->register_params_ = registers;
+  descriptor->deoptimization_handler_ =
+      FUNCTION_ADDR(StoreIC_MissFromStubFailure);
+}
+
+
 #define __ ACCESS_MASM(masm)
 
 
 void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
   // Update the static counter each time a new code stub is generated.
   Isolate* isolate = masm->isolate();
   isolate->counters()->code_stubs()->Increment();
 
   CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate);
   int param_count = descriptor->register_param_count_;
(...skipping 509 matching lines...)
 // Result is in ecx. Trashes ebx, xmm0, and xmm1.
 static void ConvertHeapNumberToInt32(MacroAssembler* masm,
                                      Register source,
                                      Label* conversion_failure) {
   __ movdbl(xmm0, FieldOperand(source, HeapNumber::kValueOffset));
   FloatingPointHelper::CheckSSE2OperandIsInt32(
       masm, conversion_failure, xmm0, ecx, ebx, xmm1);
 }
 
 
-void UnaryOpStub::PrintName(StringStream* stream) {
-  const char* op_name = Token::Name(op_);
-  const char* overwrite_name = NULL;  // Make g++ happy.
-  switch (mode_) {
-    case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break;
-    case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break;
-  }
-  stream->Add("UnaryOpStub_%s_%s_%s",
-              op_name,
-              overwrite_name,
-              UnaryOpIC::GetName(operand_type_));
-}
-
-
-// TODO(svenpanne): Use virtual functions instead of switch.
-void UnaryOpStub::Generate(MacroAssembler* masm) {
-  switch (operand_type_) {
-    case UnaryOpIC::UNINITIALIZED:
-      GenerateTypeTransition(masm);
-      break;
-    case UnaryOpIC::SMI:
-      GenerateSmiStub(masm);
-      break;
-    case UnaryOpIC::NUMBER:
-      GenerateNumberStub(masm);
-      break;
-    case UnaryOpIC::GENERIC:
-      GenerateGenericStub(masm);
-      break;
-  }
-}
-
-
-void UnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
-  __ pop(ecx);  // Save return address.
-
-  __ push(eax);  // the operand
-  __ push(Immediate(Smi::FromInt(op_)));
-  __ push(Immediate(Smi::FromInt(mode_)));
-  __ push(Immediate(Smi::FromInt(operand_type_)));
-
-  __ push(ecx);  // Push return address.
-
-  // Patch the caller to an appropriate specialized stub and return the
-  // operation result to the caller of the stub.
-  __ TailCallExternalReference(
-      ExternalReference(IC_Utility(IC::kUnaryOp_Patch), masm->isolate()), 4, 1);
-}
-
-
-// TODO(svenpanne): Use virtual functions instead of switch.
-void UnaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
-  switch (op_) {
-    case Token::SUB:
-      GenerateSmiStubSub(masm);
-      break;
-    case Token::BIT_NOT:
-      GenerateSmiStubBitNot(masm);
-      break;
-    default:
-      UNREACHABLE();
-  }
-}
-
-
-void UnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) {
-  Label non_smi, undo, slow;
-  GenerateSmiCodeSub(masm, &non_smi, &undo, &slow,
-                     Label::kNear, Label::kNear, Label::kNear);
-  __ bind(&undo);
-  GenerateSmiCodeUndo(masm);
-  __ bind(&non_smi);
-  __ bind(&slow);
-  GenerateTypeTransition(masm);
-}
-
-
-void UnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) {
-  Label non_smi;
-  GenerateSmiCodeBitNot(masm, &non_smi);
-  __ bind(&non_smi);
-  GenerateTypeTransition(masm);
-}
-
-
-void UnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm,
-                                     Label* non_smi,
-                                     Label* undo,
-                                     Label* slow,
-                                     Label::Distance non_smi_near,
-                                     Label::Distance undo_near,
-                                     Label::Distance slow_near) {
-  // Check whether the value is a smi.
-  __ JumpIfNotSmi(eax, non_smi, non_smi_near);
-
-  // We can't handle -0 with smis, so use a type transition for that case.
-  __ test(eax, eax);
-  __ j(zero, slow, slow_near);
-
-  // Try optimistic subtraction '0 - value', saving operand in eax for undo.
-  __ mov(edx, eax);
-  __ Set(eax, Immediate(0));
-  __ sub(eax, edx);
-  __ j(overflow, undo, undo_near);
-  __ ret(0);
-}
-
-
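Note on the deleted smi path above: the optimistic `0 - value` trick covers every smi except the two cases the guards catch. A zero input takes the type transition, because the result -0 is not representable as a smi, and negating the most negative smi overflows, so the overflow jump lands in `undo`, which restores the operand from edx. A minimal C++ sketch of the same decision, assuming 32-bit tagged words and GCC/Clang's __builtin_sub_overflow (names here are illustrative, not from the patch):

  bool TryNegateSmi(int32_t tagged, int32_t* result) {
    if (tagged == 0) return false;  // -0 is a heap number, not a smi.
    // Negating the tagged word negates the payload and keeps tag bit 0.
    if (__builtin_sub_overflow(0, tagged, result)) return false;  // 0x80000000
    return true;  // *result is the negated, still smi-tagged value.
  }
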
-void UnaryOpStub::GenerateSmiCodeBitNot(
-    MacroAssembler* masm,
-    Label* non_smi,
-    Label::Distance non_smi_near) {
-  // Check whether the value is a smi.
-  __ JumpIfNotSmi(eax, non_smi, non_smi_near);
-
-  // Flip bits and revert inverted smi-tag.
-  __ not_(eax);
-  __ and_(eax, ~kSmiTagMask);
-  __ ret(0);
-}
-
-
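Note: the deleted BIT_NOT fast path uses an encoding identity instead of an untag/retag pair. With ia32 smis encoded as value << 1 (tag bit 0), ~tagged equals (~value << 1) | 1, so clearing the tag bit with `and ~kSmiTagMask` leaves exactly ~value, already smi-tagged. An equivalent C++ one-liner (illustrative):

  int32_t SmiBitNot(int32_t tagged) {
    return ~tagged & ~1;  // == (~(tagged >> 1)) << 1, i.e. smi-tagged ~value
  }
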
-void UnaryOpStub::GenerateSmiCodeUndo(MacroAssembler* masm) {
-  __ mov(eax, edx);
-}
-
-
-// TODO(svenpanne): Use virtual functions instead of switch.
-void UnaryOpStub::GenerateNumberStub(MacroAssembler* masm) {
-  switch (op_) {
-    case Token::SUB:
-      GenerateNumberStubSub(masm);
-      break;
-    case Token::BIT_NOT:
-      GenerateNumberStubBitNot(masm);
-      break;
-    default:
-      UNREACHABLE();
-  }
-}
-
-
-void UnaryOpStub::GenerateNumberStubSub(MacroAssembler* masm) {
-  Label non_smi, undo, slow, call_builtin;
-  GenerateSmiCodeSub(masm, &non_smi, &undo, &call_builtin, Label::kNear);
-  __ bind(&non_smi);
-  GenerateHeapNumberCodeSub(masm, &slow);
-  __ bind(&undo);
-  GenerateSmiCodeUndo(masm);
-  __ bind(&slow);
-  GenerateTypeTransition(masm);
-  __ bind(&call_builtin);
-  GenerateGenericCodeFallback(masm);
-}
-
-
-void UnaryOpStub::GenerateNumberStubBitNot(
-    MacroAssembler* masm) {
-  Label non_smi, slow;
-  GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear);
-  __ bind(&non_smi);
-  GenerateHeapNumberCodeBitNot(masm, &slow);
-  __ bind(&slow);
-  GenerateTypeTransition(masm);
-}
-
-
-void UnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm,
-                                            Label* slow) {
-  __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
-  __ cmp(edx, masm->isolate()->factory()->heap_number_map());
-  __ j(not_equal, slow);
-
-  if (mode_ == UNARY_OVERWRITE) {
-    __ xor_(FieldOperand(eax, HeapNumber::kExponentOffset),
-            Immediate(HeapNumber::kSignMask));  // Flip sign.
-  } else {
-    __ mov(edx, eax);
-    // edx: operand
-
-    Label slow_allocate_heapnumber, heapnumber_allocated;
-    __ AllocateHeapNumber(eax, ebx, ecx, &slow_allocate_heapnumber);
-    __ jmp(&heapnumber_allocated, Label::kNear);
-
-    __ bind(&slow_allocate_heapnumber);
-    {
-      FrameScope scope(masm, StackFrame::INTERNAL);
-      __ push(edx);
-      __ CallRuntime(Runtime::kNumberAlloc, 0);
-      __ pop(edx);
-    }
-
-    __ bind(&heapnumber_allocated);
-    // eax: allocated 'empty' number
-    __ mov(ecx, FieldOperand(edx, HeapNumber::kExponentOffset));
-    __ xor_(ecx, HeapNumber::kSignMask);  // Flip sign.
-    __ mov(FieldOperand(eax, HeapNumber::kExponentOffset), ecx);
-    __ mov(ecx, FieldOperand(edx, HeapNumber::kMantissaOffset));
-    __ mov(FieldOperand(eax, HeapNumber::kMantissaOffset), ecx);
-  }
-  __ ret(0);
-}
-
-
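Note: GenerateHeapNumberCodeSub above negates without any floating-point arithmetic. An IEEE-754 double is negated by flipping its sign bit, the top bit of the word V8 stores at HeapNumber::kExponentOffset (HeapNumber::kSignMask is that bit). In UNARY_OVERWRITE mode the xor is applied in place; otherwise a fresh HeapNumber is allocated (via Runtime::kNumberAlloc under a FrameScope if inline allocation fails) and both 32-bit words are copied. A self-contained sketch of the bit flip:

  #include <cstdint>
  #include <cstring>

  double NegateBySignBit(double d) {
    uint64_t bits;
    std::memcpy(&bits, &d, sizeof bits);  // bit-cast without aliasing UB
    bits ^= uint64_t{1} << 63;            // flip only the sign bit
    std::memcpy(&d, &bits, sizeof bits);
    return d;  // correct for +/-0, infinities, and NaNs as well
  }
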
-void UnaryOpStub::GenerateHeapNumberCodeBitNot(MacroAssembler* masm,
-                                               Label* slow) {
-  __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
-  __ cmp(edx, masm->isolate()->factory()->heap_number_map());
-  __ j(not_equal, slow);
-
-  // Convert the heap number in eax to an untagged integer in ecx.
-  IntegerConvert(masm, eax, CpuFeatures::IsSupported(SSE3), slow);
-
-  // Do the bitwise operation and check if the result fits in a smi.
-  Label try_float;
-  __ not_(ecx);
-  __ cmp(ecx, 0xc0000000);
-  __ j(sign, &try_float, Label::kNear);
-
-  // Tag the result as a smi and we're done.
-  STATIC_ASSERT(kSmiTagSize == 1);
-  __ lea(eax, Operand(ecx, times_2, kSmiTag));
-  __ ret(0);
-
-  // Try to store the result in a heap number.
-  __ bind(&try_float);
-  if (mode_ == UNARY_NO_OVERWRITE) {
-    Label slow_allocate_heapnumber, heapnumber_allocated;
-    __ mov(ebx, eax);
-    __ AllocateHeapNumber(eax, edx, edi, &slow_allocate_heapnumber);
-    __ jmp(&heapnumber_allocated);
-
-    __ bind(&slow_allocate_heapnumber);
-    {
-      FrameScope scope(masm, StackFrame::INTERNAL);
-      // Push the original HeapNumber on the stack. The integer value can't
-      // be stored since it's untagged and not in the smi range (so we can't
-      // smi-tag it). We'll recalculate the value after the GC instead.
-      __ push(ebx);
-      __ CallRuntime(Runtime::kNumberAlloc, 0);
-      // New HeapNumber is in eax.
-      __ pop(edx);
-    }
-    // IntegerConvert uses ebx and edi as scratch registers.
-    // This conversion won't go slow-case.
-    IntegerConvert(masm, edx, CpuFeatures::IsSupported(SSE3), slow);
-    __ not_(ecx);
-
-    __ bind(&heapnumber_allocated);
-  }
-  if (CpuFeatures::IsSupported(SSE2)) {
-    CpuFeatureScope use_sse2(masm, SSE2);
-    __ cvtsi2sd(xmm0, ecx);
-    __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
-  } else {
-    __ push(ecx);
-    __ fild_s(Operand(esp, 0));
-    __ pop(ecx);
-    __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
-  }
-  __ ret(0);
-}
-
-
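Note: two idioms in the deleted BIT_NOT number path are worth spelling out. The pair `cmp ecx, 0xc0000000` / `j sign` is a compact smi-range test: on 32-bit targets a smi carries a signed 31-bit payload, and the sign bit of x - 0xc0000000 is set exactly when x lies outside [-2^30, 2^30 - 1]. The `lea eax, [ecx*2 + kSmiTag]` then tags the result in a single instruction (kSmiTag is 0, kSmiTagSize is 1). The range test in C++ (illustrative):

  bool FitsInSmi(int32_t x) {
    // Maps the smi range [-2^30, 2^30 - 1] onto the non-negative int32 range.
    return static_cast<int32_t>(static_cast<uint32_t>(x) - 0xc0000000u) >= 0;
  }
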
-// TODO(svenpanne): Use virtual functions instead of switch.
-void UnaryOpStub::GenerateGenericStub(MacroAssembler* masm) {
-  switch (op_) {
-    case Token::SUB:
-      GenerateGenericStubSub(masm);
-      break;
-    case Token::BIT_NOT:
-      GenerateGenericStubBitNot(masm);
-      break;
-    default:
-      UNREACHABLE();
-  }
-}
-
-
-void UnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) {
-  Label non_smi, undo, slow;
-  GenerateSmiCodeSub(masm, &non_smi, &undo, &slow, Label::kNear);
-  __ bind(&non_smi);
-  GenerateHeapNumberCodeSub(masm, &slow);
-  __ bind(&undo);
-  GenerateSmiCodeUndo(masm);
-  __ bind(&slow);
-  GenerateGenericCodeFallback(masm);
-}
-
-
-void UnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) {
-  Label non_smi, slow;
-  GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear);
-  __ bind(&non_smi);
-  GenerateHeapNumberCodeBitNot(masm, &slow);
-  __ bind(&slow);
-  GenerateGenericCodeFallback(masm);
-}
-
-
-void UnaryOpStub::GenerateGenericCodeFallback(MacroAssembler* masm) {
-  // Handle the slow case by jumping to the corresponding JavaScript builtin.
-  __ pop(ecx);  // pop return address.
-  __ push(eax);
-  __ push(ecx);  // push return address
-  switch (op_) {
-    case Token::SUB:
-      __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION);
-      break;
-    case Token::BIT_NOT:
-      __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION);
-      break;
-    default:
-      UNREACHABLE();
-  }
-}
-
-
 void BinaryOpStub::Initialize() {
   platform_specific_bit_ = CpuFeatures::IsSupported(SSE3);
 }
 
 
 void BinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
   __ pop(ecx);  // Save return address.
   __ push(edx);
   __ push(eax);
   // Left and right arguments are now on top.
(...skipping 3599 matching lines...)
   // Load the cache state into ecx.
   __ mov(ecx, FieldOperand(ebx, Cell::kValueOffset));
 
   // A monomorphic cache hit or an already megamorphic state: invoke the
   // function without changing the state.
   __ cmp(ecx, edi);
   __ j(equal, &done);
   __ cmp(ecx, Immediate(TypeFeedbackCells::MegamorphicSentinel(isolate)));
   __ j(equal, &done);
 
-  // Special handling of the Array() function, which caches not only the
-  // monomorphic Array function but the initial ElementsKind with special
-  // sentinels
-  Handle<Object> terminal_kind_sentinel =
-      TypeFeedbackCells::MonomorphicArraySentinel(isolate,
-                                                  LAST_FAST_ELEMENTS_KIND);
-  __ JumpIfNotSmi(ecx, &miss);
-  __ cmp(ecx, Immediate(terminal_kind_sentinel));
-  __ j(above, &miss);
+  // If we came here, we need to see if we are the array function.
+  // If we didn't have a matching function, and we didn't find the megamorph
+  // sentinel, then we have in the cell either some other function or an
+  // AllocationSite. Do a map check on the object in ecx.
+  Handle<Map> allocation_site_map(
+      masm->isolate()->heap()->allocation_site_map(),
+      masm->isolate());
+  __ cmp(FieldOperand(ecx, 0), Immediate(allocation_site_map));
+  __ j(not_equal, &miss);
+
   // Load the global or builtins object from the current context
   __ LoadGlobalContext(ecx);
   // Make sure the function is the Array() function
   __ cmp(edi, Operand(ecx,
                       Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
   __ j(not_equal, &megamorphic);
   __ jmp(&done);
 
   __ bind(&miss);
 
(...skipping 10 matching lines...)
 
   // An uninitialized cache is patched with the function or sentinel to
   // indicate the ElementsKind if function is the Array constructor.
   __ bind(&initialize);
   __ LoadGlobalContext(ecx);
   // Make sure the function is the Array() function
   __ cmp(edi, Operand(ecx,
                       Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
   __ j(not_equal, &not_array_function);
 
-  // The target function is the Array constructor, install a sentinel value in
-  // the constructor's type info cell that will track the initial ElementsKind
-  // that should be used for the array when its constructed.
-  Handle<Object> initial_kind_sentinel =
-      TypeFeedbackCells::MonomorphicArraySentinel(isolate,
-                                                  GetInitialFastElementsKind());
-  __ mov(FieldOperand(ebx, Cell::kValueOffset),
-         Immediate(initial_kind_sentinel));
+  // The target function is the Array constructor,
+  // Create an AllocationSite if we don't already have it, store it in the cell
+  {
+    FrameScope scope(masm, StackFrame::INTERNAL);
+
+    __ push(eax);
+    __ push(edi);
+    __ push(ebx);
+
+    CreateAllocationSiteStub create_stub;
+    __ CallStub(&create_stub);
+
+    __ pop(ebx);
+    __ pop(edi);
+    __ pop(eax);
+  }
   __ jmp(&done);
 
   __ bind(&not_array_function);
   __ mov(FieldOperand(ebx, Cell::kValueOffset), edi);
   // No need for a write barrier here - cells are rescanned.
 
   __ bind(&done);
 }
 
 
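Note on the change above: the Array() special case in the call-target cache no longer stores smi ElementsKind sentinels; it stores an AllocationSite object (recognized by a map check) created by the new CreateAllocationSiteStub, with eax/edi/ebx saved around the stub call since allocation may trigger GC. A hedged pseudocode summary of the cell's state machine as this stub implements it (helper names are illustrative, not from the patch):

  void RecordCallTarget(Cell* cell, Object* target, Isolate* isolate) {
    Object* state = cell->value();
    if (state == target) return;                         // monomorphic hit
    if (state == MegamorphicSentinel(isolate)) return;   // already generic
    if (state->IsAllocationSite() &&
        target == isolate->array_function()) return;     // Array site hit
    if (state == UninitializedSentinel(isolate)) {
      cell->set_value(target == isolate->array_function()
                          ? NewAllocationSite(isolate)   // via the new stub
                          : target);                     // plain function
    } else {
      cell->set_value(MegamorphicSentinel(isolate));     // go megamorphic
    }
  }
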
(...skipping 145 matching lines...)
 }
 
 
 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
   CEntryStub::GenerateAheadOfTime(isolate);
   StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
   StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
   // It is important that the store buffer overflow stubs are generated first.
   RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate);
   ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
+  CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
 }
 
 
 void CodeStub::GenerateFPStubs(Isolate* isolate) {
   if (CpuFeatures::IsSupported(SSE2)) {
     CEntryStub save_doubles(1, kSaveFPRegs);
     // Stubs might already be in the snapshot, detect that and don't regenerate,
     // which would lead to code stub initialization state being messed up.
     Code* save_doubles_code;
     if (!save_doubles.FindCodeInCache(&save_doubles_code, isolate)) {
(...skipping 148 matching lines...)
 
 
 void CEntryStub::Generate(MacroAssembler* masm) {
   // eax: number of arguments including receiver
   // ebx: pointer to C function (C callee-saved)
   // ebp: frame pointer (restored after C call)
   // esp: stack pointer (restored after C call)
   // esi: current context (C callee-saved)
   // edi: JS function of the caller (C callee-saved)
 
+  ProfileEntryHookStub::MaybeCallEntryHook(masm);
+
   // NOTE: Invocations of builtins may return failure objects instead
   // of a proper result. The builtin entry handles this by performing
   // a garbage collection and retrying the builtin (twice).
 
   // Enter the exit frame that transitions from JavaScript to C++.
   __ EnterExitFrame(save_doubles_ == kSaveFPRegs);
 
   // eax: result parameter for PerformGC, if any (setup below)
   // ebx: pointer to builtin function (C callee-saved)
   // ebp: frame pointer (restored after C call)
(...skipping 53 matching lines...)
 
   __ bind(&throw_normal_exception);
   __ Throw(eax);
 }
 
 
 void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
   Label invoke, handler_entry, exit;
   Label not_outermost_js, not_outermost_js_2;
 
+  ProfileEntryHookStub::MaybeCallEntryHook(masm);
+
   // Set up frame.
   __ push(ebp);
   __ mov(ebp, esp);
 
   // Push marker in two places.
   int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
   __ push(Immediate(Smi::FromInt(marker)));  // context slot
   __ push(Immediate(Smi::FromInt(marker)));  // function slot
   // Save callee-saved registers (C calling conventions).
   __ push(edi);
(...skipping 2521 matching lines...)
   __ pop(ecx);
   int additional_offset = function_mode_ == JS_FUNCTION_STUB_MODE
       ? kPointerSize
       : 0;
   __ lea(esp, MemOperand(esp, ebx, times_pointer_size, additional_offset));
   __ jmp(ecx);  // Return to IC Miss stub, continuation still on stack.
 }
 
 
 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
-  if (entry_hook_ != NULL) {
+  if (masm->isolate()->function_entry_hook() != NULL) {
+    // It's always safe to call the entry hook stub, as the hook itself
+    // is not allowed to call back to V8.
+    AllowStubCallsScope allow_stub_calls(masm, true);
+
     ProfileEntryHookStub stub;
     masm->CallStub(&stub);
   }
 }
 
 
 void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
-  // Ecx is the only volatile register we must save.
-  const int kNumSavedRegisters = 1;
+  // Save volatile registers.
+  const int kNumSavedRegisters = 3;
+  __ push(eax);
   __ push(ecx);
+  __ push(edx);
 
   // Calculate and push the original stack pointer.
   __ lea(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize));
   __ push(eax);
 
   // Retrieve our return address and use it to calculate the calling
   // function's address.
   __ mov(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize));
   __ sub(eax, Immediate(Assembler::kCallInstructionLength));
   __ push(eax);
 
   // Call the entry hook.
-  int32_t hook_location = reinterpret_cast<int32_t>(&entry_hook_);
-  __ call(Operand(hook_location, RelocInfo::NONE32));
+  ASSERT(masm->isolate()->function_entry_hook() != NULL);
+  __ call(FUNCTION_ADDR(masm->isolate()->function_entry_hook()),
+          RelocInfo::RUNTIME_ENTRY);
   __ add(esp, Immediate(2 * kPointerSize));
 
   // Restore ecx.
+  __ pop(edx);
   __ pop(ecx);
+  __ pop(eax);
+
   __ ret(0);
 }
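Note: the two words pushed before the call are the arguments of V8's public entry hook: the address of the code being entered (the return address minus Assembler::kCallInstructionLength) and the original stack pointer, i.e. the slot holding the instrumented function's return address. All of eax, ecx, and edx are now preserved because a C hook may clobber any caller-saved register. A sketch of a matching hook, assuming the v8.h typedef FunctionEntryHook(uintptr_t function, uintptr_t return_addr_location) of that era:

  #include <cstdint>
  #include <cstdio>

  // Hypothetical example hook; the registration API varies across V8 versions.
  void MyEntryHook(uintptr_t function, uintptr_t return_addr_location) {
    // function: address of the entered code; return_addr_location: stack
    // slot holding the caller's return address (useful for unwinding).
    std::fprintf(stderr, "enter %#zx (ra slot %#zx)\n",
                 static_cast<size_t>(function),
                 static_cast<size_t>(return_addr_location));
  }
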
 
 
 template<class T>
 static void CreateArrayDispatch(MacroAssembler* masm) {
   int last_index = GetSequenceIndexFromFastElementsKind(
       TERMINAL_FAST_ELEMENTS_KIND);
   for (int i = 0; i <= last_index; ++i) {
     Label next;
(...skipping 32 matching lines...)
   __ test_b(edx, 1);
   Label normal_sequence;
   __ j(not_zero, &normal_sequence);
 
   // look at the first argument
   __ mov(ecx, Operand(esp, kPointerSize));
   __ test(ecx, ecx);
   __ j(zero, &normal_sequence);
 
   // We are going to create a holey array, but our kind is non-holey.
-  // Fix kind and retry
+  // Fix kind and retry (only if we have an allocation site in the cell).
   __ inc(edx);
   __ cmp(ebx, Immediate(undefined_sentinel));
   __ j(equal, &normal_sequence);
+  __ mov(ecx, FieldOperand(ebx, Cell::kValueOffset));
+  Handle<Map> allocation_site_map(
+      masm->isolate()->heap()->allocation_site_map(),
+      masm->isolate());
+  __ cmp(FieldOperand(ecx, 0), Immediate(allocation_site_map));
+  __ j(not_equal, &normal_sequence);
 
   // Save the resulting elements kind in type info
   __ SmiTag(edx);
-  __ mov(FieldOperand(ebx, kPointerSize), edx);
+  __ mov(FieldOperand(ecx, AllocationSite::kPayloadOffset), edx);
   __ SmiUntag(edx);
 
   __ bind(&normal_sequence);
   int last_index = GetSequenceIndexFromFastElementsKind(
       TERMINAL_FAST_ELEMENTS_KIND);
   for (int i = 0; i <= last_index; ++i) {
     Label next;
     ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
     __ cmp(edx, kind);
     __ j(not_equal, &next);
     ArraySingleArgumentConstructorStub stub(kind);
     __ TailCallStub(&stub);
     __ bind(&next);
   }
 
   // If we reached this point there is a problem.
   __ Abort("Unexpected ElementsKind in array constructor");
 }
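
Note: the single `inc edx` above is what converts a packed kind into its holey counterpart: in the fast ElementsKind transition sequence each holey kind directly follows its packed variant (FAST_SMI_ELEMENTS -> FAST_HOLEY_SMI_ELEMENTS, FAST_ELEMENTS -> FAST_HOLEY_ELEMENTS, FAST_DOUBLE_ELEMENTS -> FAST_HOLEY_DOUBLE_ELEMENTS), and with this patch the adjusted kind is written back only when the cell really holds an AllocationSite. The enum relationship, as an illustrative sketch:

  // Relies on the packed/holey adjacency described above.
  ElementsKind MakeHoley(ElementsKind packed_kind) {
    return static_cast<ElementsKind>(packed_kind + 1);
  }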
 
 
 template<class T>
 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
   int to_index = GetSequenceIndexFromFastElementsKind(
       TERMINAL_FAST_ELEMENTS_KIND);
   for (int i = 0; i <= to_index; ++i) {
     ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
-    T stub(kind, false);
+    T stub(kind);
     stub.GetCode(isolate)->set_is_pregenerated(true);
-    if (AllocationSiteInfo::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
-      T stub1(kind, true);
+    if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
+      T stub1(kind, CONTEXT_CHECK_REQUIRED, DISABLE_ALLOCATION_SITES);
       stub1.GetCode(isolate)->set_is_pregenerated(true);
     }
   }
 }
 
 
 void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) {
   ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
       isolate);
   ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
(...skipping 50 matching lines...)
     __ cmp(FieldOperand(ebx, 0), Immediate(cell_map));
     __ Assert(equal, "Expected property cell in register ebx");
     __ bind(&okay_here);
   }
 
   Label no_info, switch_ready;
   // Get the elements kind and case on that.
   __ cmp(ebx, Immediate(undefined_sentinel));
   __ j(equal, &no_info);
   __ mov(edx, FieldOperand(ebx, Cell::kValueOffset));
-  __ JumpIfNotSmi(edx, &no_info);
+
+  // The type cell may have undefined in its value.
+  __ cmp(edx, Immediate(undefined_sentinel));
+  __ j(equal, &no_info);
+
+  // We should have an allocation site object
+  if (FLAG_debug_code) {
+    __ cmp(FieldOperand(edx, 0),
+           Immediate(Handle<Map>(
+               masm->isolate()->heap()->allocation_site_map())));
+    __ Assert(equal, "Expected AllocationSite object in register edx");
+  }
+
+  __ mov(edx, FieldOperand(edx, AllocationSite::kPayloadOffset));
   __ SmiUntag(edx);
   __ jmp(&switch_ready);
   __ bind(&no_info);
   __ mov(edx, Immediate(GetInitialFastElementsKind()));
   __ bind(&switch_ready);
 
   if (argument_count_ == ANY) {
     Label not_zero_case, not_one_case;
     __ test(eax, eax);
     __ j(not_zero, &not_zero_case);
(...skipping 104 matching lines...)
   __ bind(&fast_elements_case);
   GenerateCase(masm, FAST_ELEMENTS);
 }
 
 
 #undef __
 
 } }  // namespace v8::internal
 
 #endif  // V8_TARGET_ARCH_IA32