| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 312 matching lines...) |
| 323 Isolate* isolate, | 323 Isolate* isolate, |
| 324 CodeStubInterfaceDescriptor* descriptor) { | 324 CodeStubInterfaceDescriptor* descriptor) { |
| 325 static Register registers[] = { eax, ebx, ecx, edx }; | 325 static Register registers[] = { eax, ebx, ecx, edx }; |
| 326 descriptor->register_param_count_ = 4; | 326 descriptor->register_param_count_ = 4; |
| 327 descriptor->register_params_ = registers; | 327 descriptor->register_params_ = registers; |
| 328 descriptor->deoptimization_handler_ = | 328 descriptor->deoptimization_handler_ = |
| 329 FUNCTION_ADDR(ElementsTransitionAndStoreIC_Miss); | 329 FUNCTION_ADDR(ElementsTransitionAndStoreIC_Miss); |
| 330 } | 330 } |
| 331 | 331 |
| 332 | 332 |
| 333 void BinaryOpStub::InitializeInterfaceDescriptor( | 333 void BinaryOpICStub::InitializeInterfaceDescriptor( |
| 334 Isolate* isolate, | 334 Isolate* isolate, |
| 335 CodeStubInterfaceDescriptor* descriptor) { | 335 CodeStubInterfaceDescriptor* descriptor) { |
| 336 static Register registers[] = { edx, eax }; | 336 static Register registers[] = { edx, eax }; |
| 337 descriptor->register_param_count_ = 2; | 337 descriptor->register_param_count_ = 2; |
| 338 descriptor->register_params_ = registers; | 338 descriptor->register_params_ = registers; |
| 339 descriptor->deoptimization_handler_ = FUNCTION_ADDR(BinaryOpIC_Miss); | 339 descriptor->deoptimization_handler_ = FUNCTION_ADDR(BinaryOpIC_Miss); |
| 340 descriptor->SetMissHandler( | 340 descriptor->SetMissHandler( |
| 341 ExternalReference(IC_Utility(IC::kBinaryOpIC_Miss), isolate)); | 341 ExternalReference(IC_Utility(IC::kBinaryOpIC_Miss), isolate)); |
| 342 } | 342 } |
| 343 | 343 |
| (...skipping 545 matching lines...) |
| 889 __ CallRuntime(RuntimeFunction(), 1); | 889 __ CallRuntime(RuntimeFunction(), 1); |
| 890 } | 890 } |
| 891 __ movsd(xmm1, FieldOperand(eax, HeapNumber::kValueOffset)); | 891 __ movsd(xmm1, FieldOperand(eax, HeapNumber::kValueOffset)); |
| 892 __ Ret(); | 892 __ Ret(); |
| 893 } | 893 } |
| 894 } | 894 } |
| 895 | 895 |
| 896 | 896 |
| 897 Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() { | 897 Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() { |
| 898 switch (type_) { | 898 switch (type_) { |
| 899 case TranscendentalCache::SIN: return Runtime::kMath_sin; | |
| 900 case TranscendentalCache::COS: return Runtime::kMath_cos; | |
| 901 case TranscendentalCache::TAN: return Runtime::kMath_tan; | |
| 902 case TranscendentalCache::LOG: return Runtime::kMath_log; | 899 case TranscendentalCache::LOG: return Runtime::kMath_log; |
| 903 default: | 900 default: |
| 904 UNIMPLEMENTED(); | 901 UNIMPLEMENTED(); |
| 905 return Runtime::kAbort; | 902 return Runtime::kAbort; |
| 906 } | 903 } |
| 907 } | 904 } |
| 908 | 905 |
| 909 | 906 |
| 910 void TranscendentalCacheStub::GenerateOperation( | 907 void TranscendentalCacheStub::GenerateOperation( |
| 911 MacroAssembler* masm, TranscendentalCache::Type type) { | 908 MacroAssembler* masm, TranscendentalCache::Type type) { |
| 912 // Only free register is edi. | 909 // Only free register is edi. |
| 913 // Input value is on FP stack, and also in ebx/edx. | 910 // Input value is on FP stack, and also in ebx/edx. |
| 914 // Input value is possibly in xmm1. | 911 // Input value is possibly in xmm1. |
| 915 // Address of result (a newly allocated HeapNumber) may be in eax. | 912 // Address of result (a newly allocated HeapNumber) may be in eax. |
| 916 if (type == TranscendentalCache::SIN || | 913 ASSERT(type == TranscendentalCache::LOG); |
| 917 type == TranscendentalCache::COS || | 914 __ fldln2(); |
| 918 type == TranscendentalCache::TAN) { | 915 __ fxch(); |
| 919 // Both fsin and fcos require arguments in the range +/-2^63 and | 916 __ fyl2x(); |
| 920 // return NaN for infinities and NaN. They can share all code except | |
| 921 // the actual fsin/fcos operation. | |
| 922 Label in_range, done; | |
| 923 // If argument is outside the range -2^63..2^63, fsin/cos doesn't | |
| 924 // work. We must reduce it to the appropriate range. | |
| 925 __ mov(edi, edx); | |
| 926 __ and_(edi, Immediate(0x7ff00000)); // Exponent only. | |
| 927 int supported_exponent_limit = | |
| 928 (63 + HeapNumber::kExponentBias) << HeapNumber::kExponentShift; | |
| 929 __ cmp(edi, Immediate(supported_exponent_limit)); | |
| 930 __ j(below, &in_range, Label::kNear); | |
| 931 // Check for infinity and NaN. Both return NaN for sin. | |
| 932 __ cmp(edi, Immediate(0x7ff00000)); | |
| 933 Label non_nan_result; | |
| 934 __ j(not_equal, &non_nan_result, Label::kNear); | |
| 935 // Input is +/-Infinity or NaN. Result is NaN. | |
| 936 __ fstp(0); | |
| 937 // NaN is represented by 0x7ff8000000000000. | |
| 938 __ push(Immediate(0x7ff80000)); | |
| 939 __ push(Immediate(0)); | |
| 940 __ fld_d(Operand(esp, 0)); | |
| 941 __ add(esp, Immediate(2 * kPointerSize)); | |
| 942 __ jmp(&done, Label::kNear); | |
| 943 | |
| 944 __ bind(&non_nan_result); | |
| 945 | |
| 946 // Use fpmod to restrict argument to the range +/-2*PI. | |
| 947 __ mov(edi, eax); // Save eax before using fnstsw_ax. | |
| 948 __ fldpi(); | |
| 949 __ fadd(0); | |
| 950 __ fld(1); | |
| 951 // FPU Stack: input, 2*pi, input. | |
| 952 { | |
| 953 Label no_exceptions; | |
| 954 __ fwait(); | |
| 955 __ fnstsw_ax(); | |
| 956 // Clear if Illegal Operand or Zero Division exceptions are set. | |
| 957 __ test(eax, Immediate(5)); | |
| 958 __ j(zero, &no_exceptions, Label::kNear); | |
| 959 __ fnclex(); | |
| 960 __ bind(&no_exceptions); | |
| 961 } | |
| 962 | |
| 963 // Compute st(0) % st(1) | |
| 964 { | |
| 965 Label partial_remainder_loop; | |
| 966 __ bind(&partial_remainder_loop); | |
| 967 __ fprem1(); | |
| 968 __ fwait(); | |
| 969 __ fnstsw_ax(); | |
| 970 __ test(eax, Immediate(0x400 /* C2 */)); | |
| 971 // If C2 is set, computation only has partial result. Loop to | |
| 972 // continue computation. | |
| 973 __ j(not_zero, &partial_remainder_loop); | |
| 974 } | |
| 975 // FPU Stack: input, 2*pi, input % 2*pi | |
| 976 __ fstp(2); | |
| 977 __ fstp(0); | |
| 978 __ mov(eax, edi); // Restore eax (allocated HeapNumber pointer). | |
| 979 | |
| 980 // FPU Stack: input % 2*pi | |
| 981 __ bind(&in_range); | |
| 982 switch (type) { | |
| 983 case TranscendentalCache::SIN: | |
| 984 __ fsin(); | |
| 985 break; | |
| 986 case TranscendentalCache::COS: | |
| 987 __ fcos(); | |
| 988 break; | |
| 989 case TranscendentalCache::TAN: | |
| 990 // FPTAN calculates tangent onto st(0) and pushes 1.0 onto the | |
| 991 // FP register stack. | |
| 992 __ fptan(); | |
| 993 __ fstp(0); // Pop FP register stack. | |
| 994 break; | |
| 995 default: | |
| 996 UNREACHABLE(); | |
| 997 } | |
| 998 __ bind(&done); | |
| 999 } else { | |
| 1000 ASSERT(type == TranscendentalCache::LOG); | |
| 1001 __ fldln2(); | |
| 1002 __ fxch(); | |
| 1003 __ fyl2x(); | |
| 1004 } | |
| 1005 } | 917 } |
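The retained LOG path relies on the x87 identity ln(x) = ln(2) * log2(x): fldln2 pushes ln 2, fxch swaps it beneath the input, and fyl2x replaces the top two stack slots with st(1) * log2(st(0)). A minimal standalone C++ sketch of that identity, using math-library calls in place of the FPU instructions (not V8 code):

```cpp
// Sketch only: the identity behind the fldln2/fxch/fyl2x sequence, where
// fyl2x computes st(1) * log2(st(0)) and pops the stack.
#include <cmath>
#include <cstdio>

int main() {
  const double ln2 = std::log(2.0);            // the constant fldln2 pushes
  const double inputs[] = {0.5, 1.0, 2.0, 10.0};
  for (double x : inputs) {
    double via_fyl2x = ln2 * std::log2(x);     // what fyl2x would leave behind
    std::printf("x=%g  ln(x)=%g  ln2*log2(x)=%g\n", x, std::log(x), via_fyl2x);
  }
  return 0;
}
```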
| 1006 | 918 |
| 1007 | 919 |
| 1008 void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm, | 920 void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm, |
| 1009 Register number) { | 921 Register number) { |
| 1010 Label load_smi, done; | 922 Label load_smi, done; |
| 1011 | 923 |
| 1012 __ JumpIfSmi(number, &load_smi, Label::kNear); | 924 __ JumpIfSmi(number, &load_smi, Label::kNear); |
| 1013 __ fld_d(FieldOperand(number, HeapNumber::kValueOffset)); | 925 __ fld_d(FieldOperand(number, HeapNumber::kValueOffset)); |
| 1014 __ jmp(&done, Label::kNear); | 926 __ jmp(&done, Label::kNear); |
| (...skipping 1930 matching lines...) |
| 2945 __ SetCallKind(ecx, CALL_AS_METHOD); | 2857 __ SetCallKind(ecx, CALL_AS_METHOD); |
| 2946 __ jmp(arguments_adaptor, RelocInfo::CODE_TARGET); | 2858 __ jmp(arguments_adaptor, RelocInfo::CODE_TARGET); |
| 2947 } | 2859 } |
| 2948 | 2860 |
| 2949 | 2861 |
| 2950 bool CEntryStub::NeedsImmovableCode() { | 2862 bool CEntryStub::NeedsImmovableCode() { |
| 2951 return false; | 2863 return false; |
| 2952 } | 2864 } |
| 2953 | 2865 |
| 2954 | 2866 |
| 2955 bool CEntryStub::IsPregenerated(Isolate* isolate) { | |
| 2956 return (!save_doubles_ || isolate->fp_stubs_generated()) && | |
| 2957 result_size_ == 1; | |
| 2958 } | |
| 2959 | |
| 2960 | |
| 2961 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { | 2867 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { |
| 2962 CEntryStub::GenerateAheadOfTime(isolate); | 2868 CEntryStub::GenerateAheadOfTime(isolate); |
| 2963 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); | 2869 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); |
| 2964 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); | 2870 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); |
| 2965 // It is important that the store buffer overflow stubs are generated first. | 2871 // It is important that the store buffer overflow stubs are generated first. |
| 2966 RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate); | |
| 2967 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); | 2872 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); |
| 2968 CreateAllocationSiteStub::GenerateAheadOfTime(isolate); | 2873 CreateAllocationSiteStub::GenerateAheadOfTime(isolate); |
| 2969 if (Serializer::enabled()) { | 2874 if (Serializer::enabled()) { |
| 2970 PlatformFeatureScope sse2(SSE2); | 2875 PlatformFeatureScope sse2(SSE2); |
| 2971 BinaryOpStub::GenerateAheadOfTime(isolate); | 2876 BinaryOpICStub::GenerateAheadOfTime(isolate); |
| 2972 } else { | 2877 } else { |
| 2973 BinaryOpStub::GenerateAheadOfTime(isolate); | 2878 BinaryOpICStub::GenerateAheadOfTime(isolate); |
| 2974 } | 2879 } |
| 2975 } | 2880 } |
| 2976 | 2881 |
| 2977 | 2882 |
| 2978 void CodeStub::GenerateFPStubs(Isolate* isolate) { | 2883 void CodeStub::GenerateFPStubs(Isolate* isolate) { |
| 2979 if (CpuFeatures::IsSupported(SSE2)) { | 2884 if (CpuFeatures::IsSupported(SSE2)) { |
| 2980 CEntryStub save_doubles(1, kSaveFPRegs); | 2885 CEntryStub save_doubles(1, kSaveFPRegs); |
| 2981 // Stubs might already be in the snapshot, detect that and don't regenerate, | 2886 // Stubs might already be in the snapshot, detect that and don't regenerate, |
| 2982 // which would lead to code stub initialization state being messed up. | 2887 // which would lead to code stub initialization state being messed up. |
| 2983 Code* save_doubles_code; | 2888 Code* save_doubles_code; |
| 2984 if (!save_doubles.FindCodeInCache(&save_doubles_code, isolate)) { | 2889 if (!save_doubles.FindCodeInCache(&save_doubles_code, isolate)) { |
| 2985 save_doubles_code = *(save_doubles.GetCode(isolate)); | 2890 save_doubles_code = *(save_doubles.GetCode(isolate)); |
| 2986 } | 2891 } |
| 2987 save_doubles_code->set_is_pregenerated(true); | |
| 2988 isolate->set_fp_stubs_generated(true); | 2892 isolate->set_fp_stubs_generated(true); |
| 2989 } | 2893 } |
| 2990 } | 2894 } |
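GenerateFPStubs only builds the save-doubles CEntryStub when it is not already in the stub cache, since it may have come in with the snapshot and regenerating it would disturb stub initialization state. A minimal sketch of that find-or-generate idiom, with placeholder types standing in for V8's Isolate and Code (assumptions, not the real API):

```cpp
// Illustrative only: the "check the cache, generate on miss" pattern.
#include <map>
#include <string>

struct Code {};                                  // stand-in for generated code

struct StubCache {
  std::map<std::string, Code> cache;             // stand-in for the isolate's cache
  bool Find(const std::string& key, Code** out) {
    auto it = cache.find(key);
    if (it == cache.end()) return false;         // not generated yet
    *out = &it->second;
    return true;
  }
  Code* Generate(const std::string& key) { return &cache[key]; }
};

Code* GetOrGenerate(StubCache* stubs, const std::string& key) {
  Code* code = nullptr;
  if (!stubs->Find(key, &code)) {
    code = stubs->Generate(key);                 // only build when missing
  }
  return code;
}
```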
| 2991 | 2895 |
| 2992 | 2896 |
| 2993 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) { | 2897 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) { |
| 2994 CEntryStub stub(1, kDontSaveFPRegs); | 2898 CEntryStub stub(1, kDontSaveFPRegs); |
| 2995 Handle<Code> code = stub.GetCode(isolate); | 2899 stub.GetCode(isolate); |
| 2996 code->set_is_pregenerated(true); | |
| 2997 } | 2900 } |
| 2998 | 2901 |
| 2999 | 2902 |
| 3000 static void JumpIfOOM(MacroAssembler* masm, | 2903 static void JumpIfOOM(MacroAssembler* masm, |
| 3001 Register value, | 2904 Register value, |
| 3002 Register scratch, | 2905 Register scratch, |
| 3003 Label* oom_label) { | 2906 Label* oom_label) { |
| 3004 __ mov(scratch, value); | 2907 __ mov(scratch, value); |
| 3005 STATIC_ASSERT(Failure::OUT_OF_MEMORY_EXCEPTION == 3); | 2908 STATIC_ASSERT(Failure::OUT_OF_MEMORY_EXCEPTION == 3); |
| 3006 STATIC_ASSERT(kFailureTag == 3); | 2909 STATIC_ASSERT(kFailureTag == 3); |
| (...skipping 2273 matching lines...) |
| 5280 __ Drop(1); | 5183 __ Drop(1); |
| 5281 __ ret(2 * kPointerSize); | 5184 __ ret(2 * kPointerSize); |
| 5282 | 5185 |
| 5283 __ bind(¬_in_dictionary); | 5186 __ bind(¬_in_dictionary); |
| 5284 __ mov(result_, Immediate(0)); | 5187 __ mov(result_, Immediate(0)); |
| 5285 __ Drop(1); | 5188 __ Drop(1); |
| 5286 __ ret(2 * kPointerSize); | 5189 __ ret(2 * kPointerSize); |
| 5287 } | 5190 } |
| 5288 | 5191 |
| 5289 | 5192 |
| 5290 struct AheadOfTimeWriteBarrierStubList { | |
| 5291 Register object, value, address; | |
| 5292 RememberedSetAction action; | |
| 5293 }; | |
| 5294 | |
| 5295 | |
| 5296 #define REG(Name) { kRegister_ ## Name ## _Code } | |
| 5297 | |
| 5298 static const AheadOfTimeWriteBarrierStubList kAheadOfTime[] = { | |
| 5299 // Used in RegExpExecStub. | |
| 5300 { REG(ebx), REG(eax), REG(edi), EMIT_REMEMBERED_SET }, | |
| 5301 // Used in CompileArrayPushCall. | |
| 5302 { REG(ebx), REG(ecx), REG(edx), EMIT_REMEMBERED_SET }, | |
| 5303 { REG(ebx), REG(edi), REG(edx), OMIT_REMEMBERED_SET }, | |
| 5304 // Used in StoreStubCompiler::CompileStoreField and | |
| 5305 // KeyedStoreStubCompiler::CompileStoreField via GenerateStoreField. | |
| 5306 { REG(edx), REG(ecx), REG(ebx), EMIT_REMEMBERED_SET }, | |
| 5307 // GenerateStoreField calls the stub with two different permutations of | |
| 5308 // registers. This is the second. | |
| 5309 { REG(ebx), REG(ecx), REG(edx), EMIT_REMEMBERED_SET }, | |
| 5310 // StoreIC::GenerateNormal via GenerateDictionaryStore | |
| 5311 { REG(ebx), REG(edi), REG(edx), EMIT_REMEMBERED_SET }, | |
| 5312 // KeyedStoreIC::GenerateGeneric. | |
| 5313 { REG(ebx), REG(edx), REG(ecx), EMIT_REMEMBERED_SET}, | |
| 5314 // KeyedStoreStubCompiler::GenerateStoreFastElement. | |
| 5315 { REG(edi), REG(ebx), REG(ecx), EMIT_REMEMBERED_SET}, | |
| 5316 { REG(edx), REG(edi), REG(ebx), EMIT_REMEMBERED_SET}, | |
| 5317 // ElementsTransitionGenerator::GenerateMapChangeElementTransition | |
| 5318 // and ElementsTransitionGenerator::GenerateSmiToDouble | |
| 5319 // and ElementsTransitionGenerator::GenerateDoubleToObject | |
| 5320 { REG(edx), REG(ebx), REG(edi), EMIT_REMEMBERED_SET}, | |
| 5321 { REG(edx), REG(ebx), REG(edi), OMIT_REMEMBERED_SET}, | |
| 5322 // ElementsTransitionGenerator::GenerateDoubleToObject | |
| 5323 { REG(eax), REG(edx), REG(esi), EMIT_REMEMBERED_SET}, | |
| 5324 { REG(edx), REG(eax), REG(edi), EMIT_REMEMBERED_SET}, | |
| 5325 // StoreArrayLiteralElementStub::Generate | |
| 5326 { REG(ebx), REG(eax), REG(ecx), EMIT_REMEMBERED_SET}, | |
| 5327 // FastNewClosureStub and StringAddStub::Generate | |
| 5328 { REG(ecx), REG(edx), REG(ebx), EMIT_REMEMBERED_SET}, | |
| 5329 // StringAddStub::Generate | |
| 5330 { REG(ecx), REG(eax), REG(ebx), EMIT_REMEMBERED_SET}, | |
| 5331 // Null termination. | |
| 5332 { REG(no_reg), REG(no_reg), REG(no_reg), EMIT_REMEMBERED_SET} | |
| 5333 }; | |
| 5334 | |
| 5335 #undef REG | |
| 5336 | |
| 5337 bool RecordWriteStub::IsPregenerated(Isolate* isolate) { | |
| 5338 for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime; | |
| 5339 !entry->object.is(no_reg); | |
| 5340 entry++) { | |
| 5341 if (object_.is(entry->object) && | |
| 5342 value_.is(entry->value) && | |
| 5343 address_.is(entry->address) && | |
| 5344 remembered_set_action_ == entry->action && | |
| 5345 save_fp_regs_mode_ == kDontSaveFPRegs) { | |
| 5346 return true; | |
| 5347 } | |
| 5348 } | |
| 5349 return false; | |
| 5350 } | |
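The deleted kAheadOfTime list is terminated by a no_reg record, and IsPregenerated scanned it until that sentinel. A standalone sketch of the sentinel-terminated table walk, with made-up integer register codes in place of V8's Register values:

```cpp
// Illustrative only: scan a static table until the sentinel entry is hit.
struct Entry {
  int object, value, address;   // register codes; -1 plays the role of no_reg
  bool emit_remembered_set;
};

static const Entry kTable[] = {
  { 3, 0, 7, true },            // shaped like { ebx, eax, edi, EMIT_REMEMBERED_SET }
  { 3, 1, 2, true },            // shaped like { ebx, ecx, edx, EMIT_REMEMBERED_SET }
  { -1, -1, -1, true },         // sentinel, like the no_reg terminator
};

bool IsListed(int object, int value, int address) {
  for (const Entry* e = kTable; e->object != -1; ++e) {
    if (e->object == object && e->value == value && e->address == address)
      return true;              // an ahead-of-time stub covers this combination
  }
  return false;
}
```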
| 5351 | |
| 5352 | |
| 5353 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime( | 5193 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime( |
| 5354 Isolate* isolate) { | 5194 Isolate* isolate) { |
| 5355 StoreBufferOverflowStub stub(kDontSaveFPRegs); | 5195 StoreBufferOverflowStub stub(kDontSaveFPRegs); |
| 5356 stub.GetCode(isolate)->set_is_pregenerated(true); | 5196 stub.GetCode(isolate); |
| 5357 if (CpuFeatures::IsSafeForSnapshot(SSE2)) { | 5197 if (CpuFeatures::IsSafeForSnapshot(SSE2)) { |
| 5358 StoreBufferOverflowStub stub2(kSaveFPRegs); | 5198 StoreBufferOverflowStub stub2(kSaveFPRegs); |
| 5359 stub2.GetCode(isolate)->set_is_pregenerated(true); | 5199 stub2.GetCode(isolate); |
| 5360 } | 5200 } |
| 5361 } | 5201 } |
| 5362 | 5202 |
| 5363 | |
| 5364 void RecordWriteStub::GenerateFixedRegStubsAheadOfTime(Isolate* isolate) { | |
| 5365 for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime; | |
| 5366 !entry->object.is(no_reg); | |
| 5367 entry++) { | |
| 5368 RecordWriteStub stub(entry->object, | |
| 5369 entry->value, | |
| 5370 entry->address, | |
| 5371 entry->action, | |
| 5372 kDontSaveFPRegs); | |
| 5373 stub.GetCode(isolate)->set_is_pregenerated(true); | |
| 5374 } | |
| 5375 } | |
| 5376 | |
| 5377 | 5203 |
| 5378 bool CodeStub::CanUseFPRegisters() { | 5204 bool CodeStub::CanUseFPRegisters() { |
| 5379 return CpuFeatures::IsSupported(SSE2); | 5205 return CpuFeatures::IsSupported(SSE2); |
| 5380 } | 5206 } |
| 5381 | 5207 |
| 5382 | 5208 |
| 5383 // Takes the input in 3 registers: address_ value_ and object_. A pointer to | 5209 // Takes the input in 3 registers: address_ value_ and object_. A pointer to |
| 5384 // the value has just been written into the object, now this stub makes sure | 5210 // the value has just been written into the object, now this stub makes sure |
| 5385 // we keep the GC informed. The word in the object where the value has been | 5211 // we keep the GC informed. The word in the object where the value has been |
| 5386 // written is in the address register. | 5212 // written is in the address register. |
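The record-write stub implements a generational write barrier: after a store, the GC must learn about any old-space object that now references a new-space value. A conceptual standalone sketch under an assumed toy heap layout (not V8's implementation):

```cpp
// Conceptual sketch only: remembered-set insertion after a pointer store.
#include <cstdint>
#include <unordered_set>

// Assumed toy layout: addresses below kOldSpaceStart are "new space".
constexpr uintptr_t kOldSpaceStart = 0x100000;

struct Heap {
  std::unordered_set<uintptr_t> remembered_set;  // slot addresses to rescan
  static bool InNewSpace(const void* p) {
    return reinterpret_cast<uintptr_t>(p) < kOldSpaceStart;
  }
};

// object: the holder; slot: the word just written (the "address" register);
// value: the pointer that was stored (the "value" register).
void RecordWrite(Heap* heap, const void* object, void** slot, const void* value) {
  if (!Heap::InNewSpace(object) && Heap::InNewSpace(value)) {
    heap->remembered_set.insert(reinterpret_cast<uintptr_t>(slot));
  }
}
```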
| (...skipping 309 matching lines...) |
| 5696 __ sub(eax, Immediate(1)); | 5522 __ sub(eax, Immediate(1)); |
| 5697 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); | 5523 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); |
| 5698 ParameterCount argument_count(eax); | 5524 ParameterCount argument_count(eax); |
| 5699 __ InvokeFunction( | 5525 __ InvokeFunction( |
| 5700 edi, argument_count, JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD); | 5526 edi, argument_count, JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD); |
| 5701 } | 5527 } |
| 5702 | 5528 |
| 5703 | 5529 |
| 5704 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { | 5530 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { |
| 5705 if (masm->isolate()->function_entry_hook() != NULL) { | 5531 if (masm->isolate()->function_entry_hook() != NULL) { |
| 5706 // It's always safe to call the entry hook stub, as the hook itself | |
| 5707 // is not allowed to call back to V8. | |
| 5708 AllowStubCallsScope allow_stub_calls(masm, true); | |
| 5709 | |
| 5710 ProfileEntryHookStub stub; | 5532 ProfileEntryHookStub stub; |
| 5711 masm->CallStub(&stub); | 5533 masm->CallStub(&stub); |
| 5712 } | 5534 } |
| 5713 } | 5535 } |
| 5714 | 5536 |
| 5715 | 5537 |
| 5716 void ProfileEntryHookStub::Generate(MacroAssembler* masm) { | 5538 void ProfileEntryHookStub::Generate(MacroAssembler* masm) { |
| 5717 // Save volatile registers. | 5539 // Save volatile registers. |
| 5718 const int kNumSavedRegisters = 3; | 5540 const int kNumSavedRegisters = 3; |
| 5719 __ push(eax); | 5541 __ push(eax); |
| (...skipping 138 matching lines...) |
| 5858 template<class T> | 5680 template<class T> |
| 5859 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) { | 5681 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) { |
| 5860 ElementsKind initial_kind = GetInitialFastElementsKind(); | 5682 ElementsKind initial_kind = GetInitialFastElementsKind(); |
| 5861 ElementsKind initial_holey_kind = GetHoleyElementsKind(initial_kind); | 5683 ElementsKind initial_holey_kind = GetHoleyElementsKind(initial_kind); |
| 5862 | 5684 |
| 5863 int to_index = GetSequenceIndexFromFastElementsKind( | 5685 int to_index = GetSequenceIndexFromFastElementsKind( |
| 5864 TERMINAL_FAST_ELEMENTS_KIND); | 5686 TERMINAL_FAST_ELEMENTS_KIND); |
| 5865 for (int i = 0; i <= to_index; ++i) { | 5687 for (int i = 0; i <= to_index; ++i) { |
| 5866 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); | 5688 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); |
| 5867 T stub(kind); | 5689 T stub(kind); |
| 5868 stub.GetCode(isolate)->set_is_pregenerated(true); | 5690 stub.GetCode(isolate); |
| 5869 if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE || | 5691 if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE || |
| 5870 (!FLAG_track_allocation_sites && | 5692 (!FLAG_track_allocation_sites && |
| 5871 (kind == initial_kind || kind == initial_holey_kind))) { | 5693 (kind == initial_kind || kind == initial_holey_kind))) { |
| 5872 T stub1(kind, CONTEXT_CHECK_REQUIRED, DISABLE_ALLOCATION_SITES); | 5694 T stub1(kind, CONTEXT_CHECK_REQUIRED, DISABLE_ALLOCATION_SITES); |
| 5873 stub1.GetCode(isolate)->set_is_pregenerated(true); | 5695 stub1.GetCode(isolate); |
| 5874 } | 5696 } |
| 5875 } | 5697 } |
| 5876 } | 5698 } |
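ArrayConstructorStubAheadOfTimeHelper walks the fast elements-kind sequence and forces code generation for every kind up front. A hedged sketch of that shape; the enum values, stub class, and GetCode signature below are placeholders, not V8's:

```cpp
// Illustrative only: pregenerate one stub per value of an enum sequence.
#include <cstdio>

enum ElementsKind { FAST_SMI_ELEMENTS, FAST_HOLEY_SMI_ELEMENTS,
                    FAST_ELEMENTS, FAST_HOLEY_ELEMENTS, TERMINAL_KIND };

struct ArrayConstructorStubSketch {
  explicit ArrayConstructorStubSketch(ElementsKind kind) : kind_(kind) {}
  void GetCode() {
    std::printf("generated stub for kind %d\n", static_cast<int>(kind_));
  }
  ElementsKind kind_;
};

template <class T>
void PregenerateForAllKinds() {
  for (int i = 0; i <= TERMINAL_KIND; ++i) {
    T stub(static_cast<ElementsKind>(i));
    stub.GetCode();               // force code generation now, ahead of time
  }
}

int main() {
  PregenerateForAllKinds<ArrayConstructorStubSketch>();
  return 0;
}
```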
| 5877 | 5699 |
| 5878 | 5700 |
| 5879 void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) { | 5701 void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) { |
| 5880 ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>( | 5702 ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>( |
| 5881 isolate); | 5703 isolate); |
| 5882 ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>( | 5704 ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>( |
| 5883 isolate); | 5705 isolate); |
| 5884 ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>( | 5706 ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>( |
| 5885 isolate); | 5707 isolate); |
| 5886 } | 5708 } |
| 5887 | 5709 |
| 5888 | 5710 |
| 5889 void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime( | 5711 void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime( |
| 5890 Isolate* isolate) { | 5712 Isolate* isolate) { |
| 5891 ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS }; | 5713 ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS }; |
| 5892 for (int i = 0; i < 2; i++) { | 5714 for (int i = 0; i < 2; i++) { |
| 5893 // For internal arrays we only need a few things | 5715 // For internal arrays we only need a few things |
| 5894 InternalArrayNoArgumentConstructorStub stubh1(kinds[i]); | 5716 InternalArrayNoArgumentConstructorStub stubh1(kinds[i]); |
| 5895 stubh1.GetCode(isolate)->set_is_pregenerated(true); | 5717 stubh1.GetCode(isolate); |
| 5896 InternalArraySingleArgumentConstructorStub stubh2(kinds[i]); | 5718 InternalArraySingleArgumentConstructorStub stubh2(kinds[i]); |
| 5897 stubh2.GetCode(isolate)->set_is_pregenerated(true); | 5719 stubh2.GetCode(isolate); |
| 5898 InternalArrayNArgumentsConstructorStub stubh3(kinds[i]); | 5720 InternalArrayNArgumentsConstructorStub stubh3(kinds[i]); |
| 5899 stubh3.GetCode(isolate)->set_is_pregenerated(true); | 5721 stubh3.GetCode(isolate); |
| 5900 } | 5722 } |
| 5901 } | 5723 } |
| 5902 | 5724 |
| 5903 | 5725 |
| 5904 void ArrayConstructorStub::GenerateDispatchToArrayStub( | 5726 void ArrayConstructorStub::GenerateDispatchToArrayStub( |
| 5905 MacroAssembler* masm, | 5727 MacroAssembler* masm, |
| 5906 AllocationSiteOverrideMode mode) { | 5728 AllocationSiteOverrideMode mode) { |
| 5907 if (argument_count_ == ANY) { | 5729 if (argument_count_ == ANY) { |
| 5908 Label not_zero_case, not_one_case; | 5730 Label not_zero_case, not_one_case; |
| 5909 __ test(eax, eax); | 5731 __ test(eax, eax); |
| (...skipping 161 matching lines...) |
| 6071 __ bind(&fast_elements_case); | 5893 __ bind(&fast_elements_case); |
| 6072 GenerateCase(masm, FAST_ELEMENTS); | 5894 GenerateCase(masm, FAST_ELEMENTS); |
| 6073 } | 5895 } |
| 6074 | 5896 |
| 6075 | 5897 |
| 6076 #undef __ | 5898 #undef __ |
| 6077 | 5899 |
| 6078 } } // namespace v8::internal | 5900 } } // namespace v8::internal |
| 6079 | 5901 |
| 6080 #endif // V8_TARGET_ARCH_IA32 | 5902 #endif // V8_TARGET_ARCH_IA32 |