| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 162 matching lines...) | |
| 173 Isolate* isolate, | 173 Isolate* isolate, |
| 174 CodeStubInterfaceDescriptor* descriptor) { | 174 CodeStubInterfaceDescriptor* descriptor) { |
| 175 static Register registers[] = { rax, rbx }; | 175 static Register registers[] = { rax, rbx }; |
| 176 descriptor->register_param_count_ = 2; | 176 descriptor->register_param_count_ = 2; |
| 177 descriptor->register_params_ = registers; | 177 descriptor->register_params_ = registers; |
| 178 descriptor->deoptimization_handler_ = | 178 descriptor->deoptimization_handler_ = |
| 179 Runtime::FunctionForId(Runtime::kTransitionElementsKind)->entry; | 179 Runtime::FunctionForId(Runtime::kTransitionElementsKind)->entry; |
| 180 } | 180 } |
| 181 | 181 |
| 182 | 182 |
| 183 void BinaryOpStub::InitializeInterfaceDescriptor( | 183 void BinaryOpICStub::InitializeInterfaceDescriptor( |
| 184 Isolate* isolate, | 184 Isolate* isolate, |
| 185 CodeStubInterfaceDescriptor* descriptor) { | 185 CodeStubInterfaceDescriptor* descriptor) { |
| 186 static Register registers[] = { rdx, rax }; | 186 static Register registers[] = { rdx, rax }; |
| 187 descriptor->register_param_count_ = 2; | 187 descriptor->register_param_count_ = 2; |
| 188 descriptor->register_params_ = registers; | 188 descriptor->register_params_ = registers; |
| 189 descriptor->deoptimization_handler_ = FUNCTION_ADDR(BinaryOpIC_Miss); | 189 descriptor->deoptimization_handler_ = FUNCTION_ADDR(BinaryOpIC_Miss); |
| 190 descriptor->SetMissHandler( | 190 descriptor->SetMissHandler( |
| 191 ExternalReference(IC_Utility(IC::kBinaryOpIC_Miss), isolate)); | 191 ExternalReference(IC_Utility(IC::kBinaryOpIC_Miss), isolate)); |
| 192 } | 192 } |
| 193 | 193 |
| (...skipping 588 matching lines...) | |
| 782 } | 782 } |
| 783 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset)); | 783 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset)); |
| 784 __ Ret(); | 784 __ Ret(); |
| 785 } | 785 } |
| 786 } | 786 } |
| 787 | 787 |
| 788 | 788 |
| 789 Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() { | 789 Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() { |
| 790 switch (type_) { | 790 switch (type_) { |
| 791 // Add more cases when necessary. | 791 // Add more cases when necessary. |
| 792 case TranscendentalCache::SIN: return Runtime::kMath_sin; | |
| 793 case TranscendentalCache::COS: return Runtime::kMath_cos; | |
| 794 case TranscendentalCache::TAN: return Runtime::kMath_tan; | |
| 795 case TranscendentalCache::LOG: return Runtime::kMath_log; | 792 case TranscendentalCache::LOG: return Runtime::kMath_log; |
| 796 default: | 793 default: |
| 797 UNIMPLEMENTED(); | 794 UNIMPLEMENTED(); |
| 798 return Runtime::kAbort; | 795 return Runtime::kAbort; |
| 799 } | 796 } |
| 800 } | 797 } |
| 801 | 798 |
| 802 | 799 |
| 803 void TranscendentalCacheStub::GenerateOperation( | 800 void TranscendentalCacheStub::GenerateOperation( |
| 804 MacroAssembler* masm, TranscendentalCache::Type type) { | 801 MacroAssembler* masm, TranscendentalCache::Type type) { |
| 805 // Registers: | 802 // Registers: |
| 806 // rax: Newly allocated HeapNumber, which must be preserved. | 803 // rax: Newly allocated HeapNumber, which must be preserved. |
| 807 // rbx: Bits of input double. Must be preserved. | 804 // rbx: Bits of input double. Must be preserved. |
| 808 // rcx: Pointer to cache entry. Must be preserved. | 805 // rcx: Pointer to cache entry. Must be preserved. |
| 809 // st(0): Input double | 806 // st(0): Input double |
| 810 Label done; | 807 ASSERT(type == TranscendentalCache::LOG); |
| 811 if (type == TranscendentalCache::SIN || | 808 __ fldln2(); |
| 812 type == TranscendentalCache::COS || | 809 __ fxch(); |
| 813 type == TranscendentalCache::TAN) { | 810 __ fyl2x(); |
| 814 // Both fsin and fcos require arguments in the range +/-2^63 and | |
| 815 // return NaN for infinities and NaN. They can share all code except | |
| 816 // the actual fsin/fcos operation. | |
| 817 Label in_range; | |
| 818 // If argument is outside the range -2^63..2^63, fsin/cos doesn't | |
| 819 // work. We must reduce it to the appropriate range. | |
| 820 __ movq(rdi, rbx); | |
| 821 // Move exponent and sign bits to low bits. | |
| 822 __ shr(rdi, Immediate(HeapNumber::kMantissaBits)); | |
| 823 // Remove sign bit. | |
| 824 __ andl(rdi, Immediate((1 << HeapNumber::kExponentBits) - 1)); | |
| 825 int supported_exponent_limit = (63 + HeapNumber::kExponentBias); | |
| 826 __ cmpl(rdi, Immediate(supported_exponent_limit)); | |
| 827 __ j(below, &in_range); | |
| 828 // Check for infinity and NaN. Both return NaN for sin. | |
| 829 __ cmpl(rdi, Immediate(0x7ff)); | |
| 830 Label non_nan_result; | |
| 831 __ j(not_equal, &non_nan_result, Label::kNear); | |
| 832 // Input is +/-Infinity or NaN. Result is NaN. | |
| 833 __ fstp(0); | |
| 834 // NaN is represented by 0x7ff8000000000000. | |
| 835 __ subq(rsp, Immediate(kPointerSize)); | |
| 836 __ movl(Operand(rsp, 4), Immediate(0x7ff80000)); | |
| 837 __ movl(Operand(rsp, 0), Immediate(0x00000000)); | |
| 838 __ fld_d(Operand(rsp, 0)); | |
| 839 __ addq(rsp, Immediate(kPointerSize)); | |
| 840 __ jmp(&done); | |
| 841 | |
| 842 __ bind(&non_nan_result); | |
| 843 | |
| 844 // Use fpmod to restrict argument to the range +/-2*PI. | |
| 845 __ movq(rdi, rax); // Save rax before using fnstsw_ax. | |
| 846 __ fldpi(); | |
| 847 __ fadd(0); | |
| 848 __ fld(1); | |
| 849 // FPU Stack: input, 2*pi, input. | |
| 850 { | |
| 851 Label no_exceptions; | |
| 852 __ fwait(); | |
| 853 __ fnstsw_ax(); | |
| 854 // Clear if Illegal Operand or Zero Division exceptions are set. | |
| 855 __ testl(rax, Immediate(5)); // #IO and #ZD flags of FPU status word. | |
| 856 __ j(zero, &no_exceptions); | |
| 857 __ fnclex(); | |
| 858 __ bind(&no_exceptions); | |
| 859 } | |
| 860 | |
| 861 // Compute st(0) % st(1) | |
| 862 { | |
| 863 Label partial_remainder_loop; | |
| 864 __ bind(&partial_remainder_loop); | |
| 865 __ fprem1(); | |
| 866 __ fwait(); | |
| 867 __ fnstsw_ax(); | |
| 868 __ testl(rax, Immediate(0x400)); // Check C2 bit of FPU status word. | |
| 869 // If C2 is set, computation only has partial result. Loop to | |
| 870 // continue computation. | |
| 871 __ j(not_zero, &partial_remainder_loop); | |
| 872 } | |
| 873 // FPU Stack: input, 2*pi, input % 2*pi | |
| 874 __ fstp(2); | |
| 875 // FPU Stack: input % 2*pi, 2*pi, | |
| 876 __ fstp(0); | |
| 877 // FPU Stack: input % 2*pi | |
| 878 __ movq(rax, rdi); // Restore rax, pointer to the new HeapNumber. | |
| 879 __ bind(&in_range); | |
| 880 switch (type) { | |
| 881 case TranscendentalCache::SIN: | |
| 882 __ fsin(); | |
| 883 break; | |
| 884 case TranscendentalCache::COS: | |
| 885 __ fcos(); | |
| 886 break; | |
| 887 case TranscendentalCache::TAN: | |
| 888 // FPTAN calculates tangent onto st(0) and pushes 1.0 onto the | |
| 889 // FP register stack. | |
| 890 __ fptan(); | |
| 891 __ fstp(0); // Pop FP register stack. | |
| 892 break; | |
| 893 default: | |
| 894 UNREACHABLE(); | |
| 895 } | |
| 896 __ bind(&done); | |
| 897 } else { | |
| 898 ASSERT(type == TranscendentalCache::LOG); | |
| 899 __ fldln2(); | |
| 900 __ fxch(); | |
| 901 __ fyl2x(); | |
| 902 } | |
| 903 } | 811 } |
| 904 | 812 |
| 905 | 813 |
| 906 void FloatingPointHelper::LoadSSE2UnknownOperands(MacroAssembler* masm, | 814 void FloatingPointHelper::LoadSSE2UnknownOperands(MacroAssembler* masm, |
| 907 Label* not_numbers) { | 815 Label* not_numbers) { |
| 908 Label load_smi_rdx, load_nonsmi_rax, load_smi_rax, load_float_rax, done; | 816 Label load_smi_rdx, load_nonsmi_rax, load_smi_rax, load_float_rax, done; |
| 909 // Load operand in rdx into xmm0, or branch to not_numbers. | 817 // Load operand in rdx into xmm0, or branch to not_numbers. |
| 910 __ LoadRoot(rcx, Heap::kHeapNumberMapRootIndex); | 818 __ LoadRoot(rcx, Heap::kHeapNumberMapRootIndex); |
| 911 __ JumpIfSmi(rdx, &load_smi_rdx); | 819 __ JumpIfSmi(rdx, &load_smi_rdx); |
| 912 __ cmpq(FieldOperand(rdx, HeapObject::kMapOffset), rcx); | 820 __ cmpq(FieldOperand(rdx, HeapObject::kMapOffset), rcx); |
| (...skipping 1861 matching lines...) | |
| 2774 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), | 2682 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), |
| 2775 RelocInfo::CODE_TARGET); | 2683 RelocInfo::CODE_TARGET); |
| 2776 } | 2684 } |
| 2777 | 2685 |
| 2778 | 2686 |
| 2779 bool CEntryStub::NeedsImmovableCode() { | 2687 bool CEntryStub::NeedsImmovableCode() { |
| 2780 return false; | 2688 return false; |
| 2781 } | 2689 } |
| 2782 | 2690 |
| 2783 | 2691 |
| 2784 bool CEntryStub::IsPregenerated(Isolate* isolate) { | |
| 2785 #ifdef _WIN64 | |
| 2786 return result_size_ == 1; | |
| 2787 #else | |
| 2788 return true; | |
| 2789 #endif | |
| 2790 } | |
| 2791 | |
| 2792 | |
| 2793 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { | 2692 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { |
| 2794 CEntryStub::GenerateAheadOfTime(isolate); | 2693 CEntryStub::GenerateAheadOfTime(isolate); |
| 2795 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); | 2694 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); |
| 2796 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); | 2695 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); |
| 2797 // It is important that the store buffer overflow stubs are generated first. | 2696 // It is important that the store buffer overflow stubs are generated first. |
| 2798 RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate); | |
| 2799 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); | 2697 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); |
| 2800 CreateAllocationSiteStub::GenerateAheadOfTime(isolate); | 2698 CreateAllocationSiteStub::GenerateAheadOfTime(isolate); |
| 2801 BinaryOpStub::GenerateAheadOfTime(isolate); | 2699 BinaryOpICStub::GenerateAheadOfTime(isolate); |
| 2802 } | 2700 } |
| 2803 | 2701 |
| 2804 | 2702 |
| 2805 void CodeStub::GenerateFPStubs(Isolate* isolate) { | 2703 void CodeStub::GenerateFPStubs(Isolate* isolate) { |
| 2806 } | 2704 } |
| 2807 | 2705 |
| 2808 | 2706 |
| 2809 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) { | 2707 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) { |
| 2810 CEntryStub stub(1, kDontSaveFPRegs); | 2708 CEntryStub stub(1, kDontSaveFPRegs); |
| 2811 stub.GetCode(isolate)->set_is_pregenerated(true); | 2709 stub.GetCode(isolate); |
| 2812 CEntryStub save_doubles(1, kSaveFPRegs); | 2710 CEntryStub save_doubles(1, kSaveFPRegs); |
| 2813 save_doubles.GetCode(isolate)->set_is_pregenerated(true); | 2711 save_doubles.GetCode(isolate); |
| 2814 } | 2712 } |
| 2815 | 2713 |
| 2816 | 2714 |
| 2817 static void JumpIfOOM(MacroAssembler* masm, | 2715 static void JumpIfOOM(MacroAssembler* masm, |
| 2818 Register value, | 2716 Register value, |
| 2819 Register scratch, | 2717 Register scratch, |
| 2820 Label* oom_label) { | 2718 Label* oom_label) { |
| 2821 __ movq(scratch, value); | 2719 __ movq(scratch, value); |
| 2822 STATIC_ASSERT(Failure::OUT_OF_MEMORY_EXCEPTION == 3); | 2720 STATIC_ASSERT(Failure::OUT_OF_MEMORY_EXCEPTION == 3); |
| 2823 STATIC_ASSERT(kFailureTag == 3); | 2721 STATIC_ASSERT(kFailureTag == 3); |
| (...skipping 2254 matching lines...) | |
| 5078 __ Drop(1); | 4976 __ Drop(1); |
| 5079 __ ret(2 * kPointerSize); | 4977 __ ret(2 * kPointerSize); |
| 5080 | 4978 |
| 5081 __ bind(¬_in_dictionary); | 4979 __ bind(¬_in_dictionary); |
| 5082 __ movq(scratch, Immediate(0)); | 4980 __ movq(scratch, Immediate(0)); |
| 5083 __ Drop(1); | 4981 __ Drop(1); |
| 5084 __ ret(2 * kPointerSize); | 4982 __ ret(2 * kPointerSize); |
| 5085 } | 4983 } |
| 5086 | 4984 |
| 5087 | 4985 |
| 5088 struct AheadOfTimeWriteBarrierStubList { | |
| 5089 Register object, value, address; | |
| 5090 RememberedSetAction action; | |
| 5091 }; | |
| 5092 | |
| 5093 | |
| 5094 #define REG(Name) { kRegister_ ## Name ## _Code } | |
| 5095 | |
| 5096 struct AheadOfTimeWriteBarrierStubList kAheadOfTime[] = { | |
| 5097 // Used in RegExpExecStub. | |
| 5098 { REG(rbx), REG(rax), REG(rdi), EMIT_REMEMBERED_SET }, | |
| 5099 // Used in CompileArrayPushCall. | |
| 5100 { REG(rbx), REG(rcx), REG(rdx), EMIT_REMEMBERED_SET }, | |
| 5101 // Used in StoreStubCompiler::CompileStoreField and | |
| 5102 // KeyedStoreStubCompiler::CompileStoreField via GenerateStoreField. | |
| 5103 { REG(rdx), REG(rcx), REG(rbx), EMIT_REMEMBERED_SET }, | |
| 5104 // GenerateStoreField calls the stub with two different permutations of | |
| 5105 // registers. This is the second. | |
| 5106 { REG(rbx), REG(rcx), REG(rdx), EMIT_REMEMBERED_SET }, | |
| 5107 // StoreIC::GenerateNormal via GenerateDictionaryStore. | |
| 5108 { REG(rbx), REG(r8), REG(r9), EMIT_REMEMBERED_SET }, | |
| 5109 // KeyedStoreIC::GenerateGeneric. | |
| 5110 { REG(rbx), REG(rdx), REG(rcx), EMIT_REMEMBERED_SET}, | |
| 5111 // KeyedStoreStubCompiler::GenerateStoreFastElement. | |
| 5112 { REG(rdi), REG(rbx), REG(rcx), EMIT_REMEMBERED_SET}, | |
| 5113 { REG(rdx), REG(rdi), REG(rbx), EMIT_REMEMBERED_SET}, | |
| 5114 // ElementsTransitionGenerator::GenerateMapChangeElementTransition | |
| 5115 // and ElementsTransitionGenerator::GenerateSmiToDouble | |
| 5116 // and ElementsTransitionGenerator::GenerateDoubleToObject | |
| 5117 { REG(rdx), REG(rbx), REG(rdi), EMIT_REMEMBERED_SET}, | |
| 5118 { REG(rdx), REG(rbx), REG(rdi), OMIT_REMEMBERED_SET}, | |
| 5119 // ElementsTransitionGenerator::GenerateSmiToDouble | |
| 5120 // and ElementsTransitionGenerator::GenerateDoubleToObject | |
| 5121 { REG(rdx), REG(r11), REG(r15), EMIT_REMEMBERED_SET}, | |
| 5122 // ElementsTransitionGenerator::GenerateDoubleToObject | |
| 5123 { REG(r11), REG(rax), REG(r15), EMIT_REMEMBERED_SET}, | |
| 5124 // StoreArrayLiteralElementStub::Generate | |
| 5125 { REG(rbx), REG(rax), REG(rcx), EMIT_REMEMBERED_SET}, | |
| 5126 // FastNewClosureStub::Generate and | |
| 5127 // StringAddStub::Generate | |
| 5128 { REG(rcx), REG(rdx), REG(rbx), EMIT_REMEMBERED_SET}, | |
| 5129 // StringAddStub::Generate | |
| 5130 { REG(rcx), REG(rax), REG(rbx), EMIT_REMEMBERED_SET}, | |
| 5131 // Null termination. | |
| 5132 { REG(no_reg), REG(no_reg), REG(no_reg), EMIT_REMEMBERED_SET} | |
| 5133 }; | |
| 5134 | |
| 5135 #undef REG | |
| 5136 | |
| 5137 bool RecordWriteStub::IsPregenerated(Isolate* isolate) { | |
| 5138 for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime; | |
| 5139 !entry->object.is(no_reg); | |
| 5140 entry++) { | |
| 5141 if (object_.is(entry->object) && | |
| 5142 value_.is(entry->value) && | |
| 5143 address_.is(entry->address) && | |
| 5144 remembered_set_action_ == entry->action && | |
| 5145 save_fp_regs_mode_ == kDontSaveFPRegs) { | |
| 5146 return true; | |
| 5147 } | |
| 5148 } | |
| 5149 return false; | |
| 5150 } | |
| 5151 | |
| 5152 | |
| 5153 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime( | 4986 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime( |
| 5154 Isolate* isolate) { | 4987 Isolate* isolate) { |
| 5155 StoreBufferOverflowStub stub1(kDontSaveFPRegs); | 4988 StoreBufferOverflowStub stub1(kDontSaveFPRegs); |
| 5156 stub1.GetCode(isolate)->set_is_pregenerated(true); | 4989 stub1.GetCode(isolate); |
| 5157 StoreBufferOverflowStub stub2(kSaveFPRegs); | 4990 StoreBufferOverflowStub stub2(kSaveFPRegs); |
| 5158 stub2.GetCode(isolate)->set_is_pregenerated(true); | 4991 stub2.GetCode(isolate); |
| 5159 } | 4992 } |
| 5160 | 4993 |
| 5161 | 4994 |
| 5162 void RecordWriteStub::GenerateFixedRegStubsAheadOfTime(Isolate* isolate) { | |
| 5163 for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime; | |
| 5164 !entry->object.is(no_reg); | |
| 5165 entry++) { | |
| 5166 RecordWriteStub stub(entry->object, | |
| 5167 entry->value, | |
| 5168 entry->address, | |
| 5169 entry->action, | |
| 5170 kDontSaveFPRegs); | |
| 5171 stub.GetCode(isolate)->set_is_pregenerated(true); | |
| 5172 } | |
| 5173 } | |
| 5174 | |
| 5175 | |
| 5176 bool CodeStub::CanUseFPRegisters() { | 4995 bool CodeStub::CanUseFPRegisters() { |
| 5177 return true; // Always have SSE2 on x64. | 4996 return true; // Always have SSE2 on x64. |
| 5178 } | 4997 } |
| 5179 | 4998 |
| 5180 | 4999 |
| 5181 // Takes the input in 3 registers: address_ value_ and object_. A pointer to | 5000 // Takes the input in 3 registers: address_ value_ and object_. A pointer to |
| 5182 // the value has just been written into the object, now this stub makes sure | 5001 // the value has just been written into the object, now this stub makes sure |
| 5183 // we keep the GC informed. The word in the object where the value has been | 5002 // we keep the GC informed. The word in the object where the value has been |
| 5184 // written is in the address register. | 5003 // written is in the address register. |
| 5185 void RecordWriteStub::Generate(MacroAssembler* masm) { | 5004 void RecordWriteStub::Generate(MacroAssembler* masm) { |
| (...skipping 303 matching lines...) | |
| 5489 __ subl(rax, Immediate(1)); | 5308 __ subl(rax, Immediate(1)); |
| 5490 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); | 5309 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); |
| 5491 ParameterCount argument_count(rax); | 5310 ParameterCount argument_count(rax); |
| 5492 __ InvokeFunction( | 5311 __ InvokeFunction( |
| 5493 rdi, argument_count, JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD); | 5312 rdi, argument_count, JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD); |
| 5494 } | 5313 } |
| 5495 | 5314 |
| 5496 | 5315 |
| 5497 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { | 5316 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { |
| 5498 if (masm->isolate()->function_entry_hook() != NULL) { | 5317 if (masm->isolate()->function_entry_hook() != NULL) { |
| 5499 // It's always safe to call the entry hook stub, as the hook itself | |
| 5500 // is not allowed to call back to V8. | |
| 5501 AllowStubCallsScope allow_stub_calls(masm, true); | |
| 5502 | |
| 5503 ProfileEntryHookStub stub; | 5318 ProfileEntryHookStub stub; |
| 5504 masm->CallStub(&stub); | 5319 masm->CallStub(&stub); |
| 5505 } | 5320 } |
| 5506 } | 5321 } |
| 5507 | 5322 |
| 5508 | 5323 |
| 5509 void ProfileEntryHookStub::Generate(MacroAssembler* masm) { | 5324 void ProfileEntryHookStub::Generate(MacroAssembler* masm) { |
| 5510 // This stub can be called from essentially anywhere, so it needs to save | 5325 // This stub can be called from essentially anywhere, so it needs to save |
| 5511 // all volatile and callee-save registers. | 5326 // all volatile and callee-save registers. |
| 5512 const size_t kNumSavedRegisters = 2; | 5327 const size_t kNumSavedRegisters = 2; |
| (...skipping 147 matching lines...) | |
| 5660 template<class T> | 5475 template<class T> |
| 5661 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) { | 5476 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) { |
| 5662 ElementsKind initial_kind = GetInitialFastElementsKind(); | 5477 ElementsKind initial_kind = GetInitialFastElementsKind(); |
| 5663 ElementsKind initial_holey_kind = GetHoleyElementsKind(initial_kind); | 5478 ElementsKind initial_holey_kind = GetHoleyElementsKind(initial_kind); |
| 5664 | 5479 |
| 5665 int to_index = GetSequenceIndexFromFastElementsKind( | 5480 int to_index = GetSequenceIndexFromFastElementsKind( |
| 5666 TERMINAL_FAST_ELEMENTS_KIND); | 5481 TERMINAL_FAST_ELEMENTS_KIND); |
| 5667 for (int i = 0; i <= to_index; ++i) { | 5482 for (int i = 0; i <= to_index; ++i) { |
| 5668 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); | 5483 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); |
| 5669 T stub(kind); | 5484 T stub(kind); |
| 5670 stub.GetCode(isolate)->set_is_pregenerated(true); | 5485 stub.GetCode(isolate); |
| 5671 if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE || | 5486 if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE || |
| 5672 (!FLAG_track_allocation_sites && | 5487 (!FLAG_track_allocation_sites && |
| 5673 (kind == initial_kind || kind == initial_holey_kind))) { | 5488 (kind == initial_kind || kind == initial_holey_kind))) { |
| 5674 T stub1(kind, CONTEXT_CHECK_REQUIRED, DISABLE_ALLOCATION_SITES); | 5489 T stub1(kind, CONTEXT_CHECK_REQUIRED, DISABLE_ALLOCATION_SITES); |
| 5675 stub1.GetCode(isolate)->set_is_pregenerated(true); | 5490 stub1.GetCode(isolate); |
| 5676 } | 5491 } |
| 5677 } | 5492 } |
| 5678 } | 5493 } |
| 5679 | 5494 |
| 5680 | 5495 |
| 5681 void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) { | 5496 void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) { |
| 5682 ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>( | 5497 ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>( |
| 5683 isolate); | 5498 isolate); |
| 5684 ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>( | 5499 ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>( |
| 5685 isolate); | 5500 isolate); |
| 5686 ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>( | 5501 ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>( |
| 5687 isolate); | 5502 isolate); |
| 5688 } | 5503 } |
| 5689 | 5504 |
| 5690 | 5505 |
| 5691 void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime( | 5506 void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime( |
| 5692 Isolate* isolate) { | 5507 Isolate* isolate) { |
| 5693 ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS }; | 5508 ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS }; |
| 5694 for (int i = 0; i < 2; i++) { | 5509 for (int i = 0; i < 2; i++) { |
| 5695 // For internal arrays we only need a few things | 5510 // For internal arrays we only need a few things |
| 5696 InternalArrayNoArgumentConstructorStub stubh1(kinds[i]); | 5511 InternalArrayNoArgumentConstructorStub stubh1(kinds[i]); |
| 5697 stubh1.GetCode(isolate)->set_is_pregenerated(true); | 5512 stubh1.GetCode(isolate); |
| 5698 InternalArraySingleArgumentConstructorStub stubh2(kinds[i]); | 5513 InternalArraySingleArgumentConstructorStub stubh2(kinds[i]); |
| 5699 stubh2.GetCode(isolate)->set_is_pregenerated(true); | 5514 stubh2.GetCode(isolate); |
| 5700 InternalArrayNArgumentsConstructorStub stubh3(kinds[i]); | 5515 InternalArrayNArgumentsConstructorStub stubh3(kinds[i]); |
| 5701 stubh3.GetCode(isolate)->set_is_pregenerated(true); | 5516 stubh3.GetCode(isolate); |
| 5702 } | 5517 } |
| 5703 } | 5518 } |
| 5704 | 5519 |
| 5705 | 5520 |
| 5706 void ArrayConstructorStub::GenerateDispatchToArrayStub( | 5521 void ArrayConstructorStub::GenerateDispatchToArrayStub( |
| 5707 MacroAssembler* masm, | 5522 MacroAssembler* masm, |
| 5708 AllocationSiteOverrideMode mode) { | 5523 AllocationSiteOverrideMode mode) { |
| 5709 if (argument_count_ == ANY) { | 5524 if (argument_count_ == ANY) { |
| 5710 Label not_zero_case, not_one_case; | 5525 Label not_zero_case, not_one_case; |
| 5711 __ testq(rax, rax); | 5526 __ testq(rax, rax); |
| (...skipping 164 matching lines...) | |
| 5876 __ bind(&fast_elements_case); | 5691 __ bind(&fast_elements_case); |
| 5877 GenerateCase(masm, FAST_ELEMENTS); | 5692 GenerateCase(masm, FAST_ELEMENTS); |
| 5878 } | 5693 } |
| 5879 | 5694 |
| 5880 | 5695 |
| 5881 #undef __ | 5696 #undef __ |
| 5882 | 5697 |
| 5883 } } // namespace v8::internal | 5698 } } // namespace v8::internal |
| 5884 | 5699 |
| 5885 #endif // V8_TARGET_ARCH_X64 | 5700 #endif // V8_TARGET_ARCH_X64 |
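
Note (editorial aside, not part of the CL): the only path retained in TranscendentalCacheStub::GenerateOperation is LOG, implemented with the x87 sequence fldln2 / fxch / fyl2x. fldln2 pushes ln(2), fxch swaps it beneath the input, and fyl2x computes st(1) * log2(st(0)), so the result is ln(2) * log2(x) = ln(x). A minimal C++ sketch of the same computation (function names are illustrative, not from the V8 sources):

```cpp
#include <cmath>
#include <cstdio>

// Illustrative equivalent of the retained fldln2 / fxch / fyl2x sequence:
// natural log computed as ln(2) * log2(x).
static double LogViaFyl2x(double x) {
  return std::log(2.0) * std::log2(x);
}

int main() {
  // Both values should agree (~2.302585 for x = 10).
  std::printf("%.6f %.6f\n", LogViaFyl2x(10.0), std::log(10.0));
  return 0;
}
```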