| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 759 matching lines...) |
| 770 | 770 |
| 771 void LCodeGen::DeoptimizeIf(Condition cc, | 771 void LCodeGen::DeoptimizeIf(Condition cc, |
| 772 LEnvironment* environment) { | 772 LEnvironment* environment) { |
| 773 Deoptimizer::BailoutType bailout_type = info()->IsStub() | 773 Deoptimizer::BailoutType bailout_type = info()->IsStub() |
| 774 ? Deoptimizer::LAZY | 774 ? Deoptimizer::LAZY |
| 775 : Deoptimizer::EAGER; | 775 : Deoptimizer::EAGER; |
| 776 DeoptimizeIf(cc, environment, bailout_type); | 776 DeoptimizeIf(cc, environment, bailout_type); |
| 777 } | 777 } |
| 778 | 778 |
| 779 | 779 |
| 780 void LCodeGen::RegisterDependentCodeForEmbeddedMaps(Handle<Code> code) { | |
| 781 ZoneList<Handle<Map> > maps(1, zone()); | |
| 782 ZoneList<Handle<JSObject> > objects(1, zone()); | |
| 783 int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); | |
| 784 for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) { | |
| 785 if (Code::IsWeakEmbeddedObject(code->kind(), it.rinfo()->target_object())) { | |
| 786 if (it.rinfo()->target_object()->IsMap()) { | |
| 787 Handle<Map> map(Map::cast(it.rinfo()->target_object())); | |
| 788 maps.Add(map, zone()); | |
| 789 } else if (it.rinfo()->target_object()->IsJSObject()) { | |
| 790 Handle<JSObject> object(JSObject::cast(it.rinfo()->target_object())); | |
| 791 objects.Add(object, zone()); | |
| 792 } | |
| 793 } | |
| 794 } | |
| 795 #ifdef VERIFY_HEAP | |
| 796 // This disables verification of weak embedded objects after full GC. | |
| 797 // AddDependentCode can cause a GC, which would observe the state where | |
| 798 // this code is not yet in the depended code lists of the embedded maps. | |
| 799 NoWeakObjectVerificationScope disable_verification_of_embedded_objects; | |
| 800 #endif | |
| 801 for (int i = 0; i < maps.length(); i++) { | |
| 802 maps.at(i)->AddDependentCode(DependentCode::kWeaklyEmbeddedGroup, code); | |
| 803 } | |
| 804 for (int i = 0; i < objects.length(); i++) { | |
| 805 AddWeakObjectToCodeDependency(isolate()->heap(), objects.at(i), code); | |
| 806 } | |
| 807 } | |
| 808 | |
| 809 | |
| 810 void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) { | 780 void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) { |
| 811 int length = deoptimizations_.length(); | 781 int length = deoptimizations_.length(); |
| 812 if (length == 0) return; | 782 if (length == 0) return; |
| 813 Handle<DeoptimizationInputData> data = | 783 Handle<DeoptimizationInputData> data = |
| 814 factory()->NewDeoptimizationInputData(length, TENURED); | 784 factory()->NewDeoptimizationInputData(length, TENURED); |
| 815 | 785 |
| 816 Handle<ByteArray> translations = | 786 Handle<ByteArray> translations = |
| 817 translations_.CreateByteArray(isolate()->factory()); | 787 translations_.CreateByteArray(isolate()->factory()); |
| 818 data->SetTranslationByteArray(*translations); | 788 data->SetTranslationByteArray(*translations); |
| 819 data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_)); | 789 data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_)); |
| (...skipping 1124 matching lines...) |
| 1944 } | 1914 } |
| 1945 } | 1915 } |
| 1946 | 1916 |
| 1947 | 1917 |
| 1948 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { | 1918 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { |
| 1949 ASSERT(ToRegister(instr->context()).is(rsi)); | 1919 ASSERT(ToRegister(instr->context()).is(rsi)); |
| 1950 ASSERT(ToRegister(instr->left()).is(rdx)); | 1920 ASSERT(ToRegister(instr->left()).is(rdx)); |
| 1951 ASSERT(ToRegister(instr->right()).is(rax)); | 1921 ASSERT(ToRegister(instr->right()).is(rax)); |
| 1952 ASSERT(ToRegister(instr->result()).is(rax)); | 1922 ASSERT(ToRegister(instr->result()).is(rax)); |
| 1953 | 1923 |
| 1954 BinaryOpStub stub(instr->op(), NO_OVERWRITE); | 1924 BinaryOpICStub stub(instr->op(), NO_OVERWRITE); |
| 1955 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 1925 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 1956 __ nop(); // Signals no inlined code. | 1926 __ nop(); // Signals no inlined code. |
| 1957 } | 1927 } |
| 1958 | 1928 |
| 1959 | 1929 |
| 1960 template<class InstrType> | 1930 template<class InstrType> |
| 1961 void LCodeGen::EmitBranch(InstrType instr, Condition cc) { | 1931 void LCodeGen::EmitBranch(InstrType instr, Condition cc) { |
| 1962 int left_block = instr->TrueDestination(chunk_); | 1932 int left_block = instr->TrueDestination(chunk_); |
| 1963 int right_block = instr->FalseDestination(chunk_); | 1933 int right_block = instr->FalseDestination(chunk_); |
| 1964 | 1934 |
| (...skipping 1805 matching lines...) |
| 3770 __ movsd(Operand(rsp, 0), input_reg); | 3740 __ movsd(Operand(rsp, 0), input_reg); |
| 3771 __ fld_d(Operand(rsp, 0)); | 3741 __ fld_d(Operand(rsp, 0)); |
| 3772 __ fyl2x(); | 3742 __ fyl2x(); |
| 3773 __ fstp_d(Operand(rsp, 0)); | 3743 __ fstp_d(Operand(rsp, 0)); |
| 3774 __ movsd(input_reg, Operand(rsp, 0)); | 3744 __ movsd(input_reg, Operand(rsp, 0)); |
| 3775 __ addq(rsp, Immediate(kDoubleSize)); | 3745 __ addq(rsp, Immediate(kDoubleSize)); |
| 3776 __ bind(&done); | 3746 __ bind(&done); |
| 3777 } | 3747 } |
| 3778 | 3748 |
| 3779 | 3749 |
| 3780 void LCodeGen::DoMathTan(LMathTan* instr) { | |
| 3781 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); | |
| 3782 // Set the context register to a GC-safe fake value. Clobbering it is | |
| 3783 // OK because this instruction is marked as a call. | |
| 3784 __ Set(rsi, 0); | |
| 3785 TranscendentalCacheStub stub(TranscendentalCache::TAN, | |
| 3786 TranscendentalCacheStub::UNTAGGED); | |
| 3787 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | |
| 3788 } | |
| 3789 | |
| 3790 | |
| 3791 void LCodeGen::DoMathCos(LMathCos* instr) { | |
| 3792 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); | |
| 3793 // Set the context register to a GC-safe fake value. Clobbering it is | |
| 3794 // OK because this instruction is marked as a call. | |
| 3795 __ Set(rsi, 0); | |
| 3796 TranscendentalCacheStub stub(TranscendentalCache::COS, | |
| 3797 TranscendentalCacheStub::UNTAGGED); | |
| 3798 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | |
| 3799 } | |
| 3800 | |
| 3801 | |
| 3802 void LCodeGen::DoMathSin(LMathSin* instr) { | |
| 3803 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); | |
| 3804 // Set the context register to a GC-safe fake value. Clobbering it is | |
| 3805 // OK because this instruction is marked as a call. | |
| 3806 __ Set(rsi, 0); | |
| 3807 TranscendentalCacheStub stub(TranscendentalCache::SIN, | |
| 3808 TranscendentalCacheStub::UNTAGGED); | |
| 3809 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | |
| 3810 } | |
| 3811 | |
| 3812 | |
| 3813 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) { | 3750 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) { |
| 3814 ASSERT(ToRegister(instr->context()).is(rsi)); | 3751 ASSERT(ToRegister(instr->context()).is(rsi)); |
| 3815 ASSERT(ToRegister(instr->function()).is(rdi)); | 3752 ASSERT(ToRegister(instr->function()).is(rdi)); |
| 3816 ASSERT(instr->HasPointerMap()); | 3753 ASSERT(instr->HasPointerMap()); |
| 3817 | 3754 |
| 3818 Handle<JSFunction> known_function = instr->hydrogen()->known_function(); | 3755 Handle<JSFunction> known_function = instr->hydrogen()->known_function(); |
| 3819 if (known_function.is_null()) { | 3756 if (known_function.is_null()) { |
| 3820 LPointerMap* pointers = instr->pointer_map(); | 3757 LPointerMap* pointers = instr->pointer_map(); |
| 3821 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt); | 3758 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt); |
| 3822 ParameterCount count(instr->arity()); | 3759 ParameterCount count(instr->arity()); |
| (...skipping 142 matching lines...) |
| 3965 Register function = ToRegister(instr->function()); | 3902 Register function = ToRegister(instr->function()); |
| 3966 Register code_object = ToRegister(instr->code_object()); | 3903 Register code_object = ToRegister(instr->code_object()); |
| 3967 __ lea(code_object, FieldOperand(code_object, Code::kHeaderSize)); | 3904 __ lea(code_object, FieldOperand(code_object, Code::kHeaderSize)); |
| 3968 __ movq(FieldOperand(function, JSFunction::kCodeEntryOffset), code_object); | 3905 __ movq(FieldOperand(function, JSFunction::kCodeEntryOffset), code_object); |
| 3969 } | 3906 } |
| 3970 | 3907 |
| 3971 | 3908 |
| 3972 void LCodeGen::DoInnerAllocatedObject(LInnerAllocatedObject* instr) { | 3909 void LCodeGen::DoInnerAllocatedObject(LInnerAllocatedObject* instr) { |
| 3973 Register result = ToRegister(instr->result()); | 3910 Register result = ToRegister(instr->result()); |
| 3974 Register base = ToRegister(instr->base_object()); | 3911 Register base = ToRegister(instr->base_object()); |
| 3975 __ lea(result, Operand(base, instr->offset())); | 3912 if (instr->offset()->IsConstantOperand()) { |
| 3913 LConstantOperand* offset = LConstantOperand::cast(instr->offset()); |
| 3914 __ lea(result, Operand(base, ToInteger32(offset))); |
| 3915 } else { |
| 3916 Register offset = ToRegister(instr->offset()); |
| 3917 __ lea(result, Operand(base, offset, times_1, 0)); |
| 3918 } |
| 3976 } | 3919 } |
| 3977 | 3920 |
| 3978 | 3921 |
| 3979 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { | 3922 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { |
| 3980 Representation representation = instr->representation(); | 3923 Representation representation = instr->representation(); |
| 3981 | 3924 |
| 3982 HObjectAccess access = instr->hydrogen()->access(); | 3925 HObjectAccess access = instr->hydrogen()->access(); |
| 3983 int offset = access.offset(); | 3926 int offset = access.offset(); |
| 3984 | 3927 |
| 3985 if (access.IsExternalMemory()) { | 3928 if (access.IsExternalMemory()) { |
| (...skipping 370 matching lines...) |
| 4356 __ j(not_equal, ¬_applicable); | 4299 __ j(not_equal, ¬_applicable); |
| 4357 if (IsSimpleMapChangeTransition(from_kind, to_kind)) { | 4300 if (IsSimpleMapChangeTransition(from_kind, to_kind)) { |
| 4358 Register new_map_reg = ToRegister(instr->new_map_temp()); | 4301 Register new_map_reg = ToRegister(instr->new_map_temp()); |
| 4359 __ movq(new_map_reg, to_map, RelocInfo::EMBEDDED_OBJECT); | 4302 __ movq(new_map_reg, to_map, RelocInfo::EMBEDDED_OBJECT); |
| 4360 __ movq(FieldOperand(object_reg, HeapObject::kMapOffset), new_map_reg); | 4303 __ movq(FieldOperand(object_reg, HeapObject::kMapOffset), new_map_reg); |
| 4361 // Write barrier. | 4304 // Write barrier. |
| 4362 ASSERT_NE(instr->temp(), NULL); | 4305 ASSERT_NE(instr->temp(), NULL); |
| 4363 __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg, | 4306 __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg, |
| 4364 ToRegister(instr->temp()), kDontSaveFPRegs); | 4307 ToRegister(instr->temp()), kDontSaveFPRegs); |
| 4365 } else { | 4308 } else { |
| 4309 ASSERT(ToRegister(instr->context()).is(rsi)); |
| 4366 PushSafepointRegistersScope scope(this); | 4310 PushSafepointRegistersScope scope(this); |
| 4367 if (!object_reg.is(rax)) { | 4311 if (!object_reg.is(rax)) { |
| 4368 __ movq(rax, object_reg); | 4312 __ movq(rax, object_reg); |
| 4369 } | 4313 } |
| 4370 LoadContextFromDeferred(instr->context()); | |
| 4371 __ Move(rbx, to_map); | 4314 __ Move(rbx, to_map); |
| 4372 TransitionElementsKindStub stub(from_kind, to_kind); | 4315 TransitionElementsKindStub stub(from_kind, to_kind); |
| 4373 __ CallStub(&stub); | 4316 __ CallStub(&stub); |
| 4374 RecordSafepointWithRegisters( | 4317 RecordSafepointWithRegisters( |
| 4375 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); | 4318 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); |
| 4376 } | 4319 } |
| 4377 __ bind(¬_applicable); | 4320 __ bind(¬_applicable); |
| 4378 } | 4321 } |
| 4379 | 4322 |
| 4380 | 4323 |
| (...skipping 1258 matching lines...) |
| 5639 FixedArray::kHeaderSize - kPointerSize)); | 5582 FixedArray::kHeaderSize - kPointerSize)); |
| 5640 __ bind(&done); | 5583 __ bind(&done); |
| 5641 } | 5584 } |
| 5642 | 5585 |
| 5643 | 5586 |
| 5644 #undef __ | 5587 #undef __ |
| 5645 | 5588 |
| 5646 } } // namespace v8::internal | 5589 } } // namespace v8::internal |
| 5647 | 5590 |
| 5648 #endif // V8_TARGET_ARCH_X64 | 5591 #endif // V8_TARGET_ARCH_X64 |