Chromium Code Reviews

Diff: src/x64/lithium-codegen-x64.cc

Issue 246643014: CodeStubs contain their corresponding Isolate* now. (part 1) (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Feedback. Rebased. Created 6 years, 7 months ago
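
The change applied throughout this file is mechanical: each CodeStub now receives the Isolate* it belongs to as its first constructor argument, instead of the isolate only being supplied later through GetCode(isolate()). A minimal sketch of the constructor convention, using simplified stand-in classes rather than the real V8 declarations:

  // Illustrative sketch only -- simplified stand-ins, not the actual V8 classes.
  class Isolate;

  class CodeStub {
   public:
    // New convention: a stub captures its Isolate* at construction time.
    explicit CodeStub(Isolate* isolate) : isolate_(isolate) {}
    Isolate* isolate() const { return isolate_; }
   private:
    Isolate* isolate_;
  };

  class FastNewContextStub : public CodeStub {
   public:
    // Call sites change from stub(heap_slots) to stub(isolate(), heap_slots).
    FastNewContextStub(Isolate* isolate, int slots)
        : CodeStub(isolate), slots_(slots) {}
    int slots() const { return slots_; }
   private:
    int slots_;
  };

The hunks below show the corresponding call-site updates in LCodeGen.
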
 // Copyright 2013 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 // * Redistributions of source code must retain the above copyright
 // notice, this list of conditions and the following disclaimer.
 // * Redistributions in binary form must reproduce the above
 // copyright notice, this list of conditions and the following
 // disclaimer in the documentation and/or other materials provided
(...skipping 201 matching lines...)
 SaveCallerDoubles();
 }
 }

 // Possibly allocate a local context.
 int heap_slots = info_->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
 if (heap_slots > 0) {
 Comment(";;; Allocate local context");
 // Argument to NewContext is the function, which is still in rdi.
 if (heap_slots <= FastNewContextStub::kMaximumSlots) {
-FastNewContextStub stub(heap_slots);
+FastNewContextStub stub(isolate(), heap_slots);
 __ CallStub(&stub);
 } else {
 __ Push(rdi);
 __ CallRuntime(Runtime::kHiddenNewFunctionContext, 1);
 }
 RecordSafepoint(Safepoint::kNoLazyDeopt);
 // Context is returned in rax. It replaces the context passed to us.
 // It's saved in the stack and kept live in rsi.
 __ movp(rsi, rax);
 __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rax);
(...skipping 749 matching lines...)
 void LCodeGen::DoParameter(LParameter* instr) {
 // Nothing to do.
 }


 void LCodeGen::DoCallStub(LCallStub* instr) {
 ASSERT(ToRegister(instr->context()).is(rsi));
 ASSERT(ToRegister(instr->result()).is(rax));
 switch (instr->hydrogen()->major_key()) {
 case CodeStub::RegExpExec: {
-RegExpExecStub stub;
+RegExpExecStub stub(isolate());
 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
 break;
 }
 case CodeStub::SubString: {
-SubStringStub stub;
+SubStringStub stub(isolate());
 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
 break;
 }
 case CodeStub::StringCompare: {
-StringCompareStub stub;
+StringCompareStub stub(isolate());
 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
 break;
 }
 default:
 UNREACHABLE();
 }
 }


 void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
(...skipping 1009 matching lines...)
 }
 }


 void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
 ASSERT(ToRegister(instr->context()).is(rsi));
 ASSERT(ToRegister(instr->left()).is(rdx));
 ASSERT(ToRegister(instr->right()).is(rax));
 ASSERT(ToRegister(instr->result()).is(rax));

-BinaryOpICStub stub(instr->op(), NO_OVERWRITE);
+BinaryOpICStub stub(isolate(), instr->op(), NO_OVERWRITE);
 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
 }


 template<class InstrType>
 void LCodeGen::EmitBranch(InstrType instr, Condition cc) {
 int left_block = instr->TrueDestination(chunk_);
 int right_block = instr->FalseDestination(chunk_);

 int next_block = GetNextEmittedBlock();
(...skipping 573 matching lines...)
 void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
 Register reg = ToRegister(instr->value());

 __ Cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map());
 EmitBranch(instr, equal);
 }


 void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
 ASSERT(ToRegister(instr->context()).is(rsi));
-InstanceofStub stub(InstanceofStub::kNoFlags);
+InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
 __ Push(ToRegister(instr->left()));
 __ Push(ToRegister(instr->right()));
 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
 Label true_value, done;
 __ testp(rax, rax);
 __ j(zero, &true_value, Label::kNear);
 __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex);
 __ jmp(&done, Label::kNear);
 __ bind(&true_value);
 __ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex);
(...skipping 65 matching lines...)
 __ bind(&done);
 }


 void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
 Label* map_check) {
 {
 PushSafepointRegistersScope scope(this);
 InstanceofStub::Flags flags = static_cast<InstanceofStub::Flags>(
 InstanceofStub::kNoFlags | InstanceofStub::kCallSiteInlineCheck);
-InstanceofStub stub(flags);
+InstanceofStub stub(isolate(), flags);

 __ Push(ToRegister(instr->value()));
 __ Push(instr->function());

 static const int kAdditionalDelta = 10;
 int delta =
 masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta;
 ASSERT(delta >= 0);
 __ PushImm32(delta);

(...skipping 1072 matching lines...)

 Register exponent = rdx;
 ASSERT(!instr->right()->IsRegister() ||
 ToRegister(instr->right()).is(exponent));
 ASSERT(!instr->right()->IsDoubleRegister() ||
 ToDoubleRegister(instr->right()).is(xmm1));
 ASSERT(ToDoubleRegister(instr->left()).is(xmm2));
 ASSERT(ToDoubleRegister(instr->result()).is(xmm3));

 if (exponent_type.IsSmi()) {
-MathPowStub stub(MathPowStub::TAGGED);
+MathPowStub stub(isolate(), MathPowStub::TAGGED);
 __ CallStub(&stub);
 } else if (exponent_type.IsTagged()) {
 Label no_deopt;
 __ JumpIfSmi(exponent, &no_deopt, Label::kNear);
 __ CmpObjectType(exponent, HEAP_NUMBER_TYPE, rcx);
 DeoptimizeIf(not_equal, instr->environment());
 __ bind(&no_deopt);
-MathPowStub stub(MathPowStub::TAGGED);
+MathPowStub stub(isolate(), MathPowStub::TAGGED);
 __ CallStub(&stub);
 } else if (exponent_type.IsInteger32()) {
-MathPowStub stub(MathPowStub::INTEGER);
+MathPowStub stub(isolate(), MathPowStub::INTEGER);
 __ CallStub(&stub);
 } else {
 ASSERT(exponent_type.IsDouble());
-MathPowStub stub(MathPowStub::DOUBLE);
+MathPowStub stub(isolate(), MathPowStub::DOUBLE);
 __ CallStub(&stub);
 }
 }


 void LCodeGen::DoMathExp(LMathExp* instr) {
 XMMRegister input = ToDoubleRegister(instr->value());
 XMMRegister result = ToDoubleRegister(instr->result());
 XMMRegister temp0 = double_scratch0();
 Register temp1 = ToRegister(instr->temp1());
(...skipping 70 matching lines...)
 }
 }


 void LCodeGen::DoCallFunction(LCallFunction* instr) {
 ASSERT(ToRegister(instr->context()).is(rsi));
 ASSERT(ToRegister(instr->function()).is(rdi));
 ASSERT(ToRegister(instr->result()).is(rax));

 int arity = instr->arity();
-CallFunctionStub stub(arity, instr->hydrogen()->function_flags());
+CallFunctionStub stub(isolate(), arity, instr->hydrogen()->function_flags());
 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
 }


 void LCodeGen::DoCallNew(LCallNew* instr) {
 ASSERT(ToRegister(instr->context()).is(rsi));
 ASSERT(ToRegister(instr->constructor()).is(rdi));
 ASSERT(ToRegister(instr->result()).is(rax));

 __ Set(rax, instr->arity());
 // No cell in ebx for construct type feedback in optimized code
 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
-CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
+CallConstructStub stub(isolate(), NO_CALL_FUNCTION_FLAGS);
 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
 }

 void LCodeGen::DoCallNewArray(LCallNewArray* instr) {
 ASSERT(ToRegister(instr->context()).is(rsi));
 ASSERT(ToRegister(instr->constructor()).is(rdi));
 ASSERT(ToRegister(instr->result()).is(rax));

 __ Set(rax, instr->arity());
 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
 ElementsKind kind = instr->hydrogen()->elements_kind();
 AllocationSiteOverrideMode override_mode =
 (AllocationSite::GetMode(kind) == TRACK_ALLOCATION_SITE)
 ? DISABLE_ALLOCATION_SITES
 : DONT_OVERRIDE;

 if (instr->arity() == 0) {
-ArrayNoArgumentConstructorStub stub(kind, override_mode);
+ArrayNoArgumentConstructorStub stub(isolate(), kind, override_mode);
 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
 } else if (instr->arity() == 1) {
 Label done;
 if (IsFastPackedElementsKind(kind)) {
 Label packed_case;
 // We might need a change here
 // look at the first argument
 __ movp(rcx, Operand(rsp, 0));
 __ testp(rcx, rcx);
 __ j(zero, &packed_case, Label::kNear);

 ElementsKind holey_kind = GetHoleyElementsKind(kind);
-ArraySingleArgumentConstructorStub stub(holey_kind, override_mode);
+ArraySingleArgumentConstructorStub stub(isolate(),
+holey_kind,
+override_mode);
 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
 __ jmp(&done, Label::kNear);
 __ bind(&packed_case);
 }

-ArraySingleArgumentConstructorStub stub(kind, override_mode);
+ArraySingleArgumentConstructorStub stub(isolate(), kind, override_mode);
 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
 __ bind(&done);
 } else {
-ArrayNArgumentsConstructorStub stub(kind, override_mode);
+ArrayNArgumentsConstructorStub stub(isolate(), kind, override_mode);
 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
 }
 }


 void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
 ASSERT(ToRegister(instr->context()).is(rsi));
 CallRuntime(instr->function(), instr->arity(), instr, instr->save_doubles());
 }

(...skipping 427 matching lines...)
 // Write barrier.
 ASSERT_NE(instr->temp(), NULL);
 __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg,
 ToRegister(instr->temp()), kDontSaveFPRegs);
 } else {
 ASSERT(object_reg.is(rax));
 ASSERT(ToRegister(instr->context()).is(rsi));
 PushSafepointRegistersScope scope(this);
 __ Move(rbx, to_map);
 bool is_js_array = from_map->instance_type() == JS_ARRAY_TYPE;
-TransitionElementsKindStub stub(from_kind, to_kind, is_js_array);
+TransitionElementsKindStub stub(isolate(), from_kind, to_kind, is_js_array);
 __ CallStub(&stub);
 RecordSafepointWithLazyDeopt(instr, RECORD_SAFEPOINT_WITH_REGISTERS, 0);
 }
 __ bind(&not_applicable);
 }


 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) {
 Register object = ToRegister(instr->object());
 Register temp = ToRegister(instr->temp());
 Label no_memento_found;
 __ TestJSArrayForAllocationMemento(object, temp, &no_memento_found);
 DeoptimizeIf(equal, instr->environment());
 __ bind(&no_memento_found);
 }


 void LCodeGen::DoStringAdd(LStringAdd* instr) {
 ASSERT(ToRegister(instr->context()).is(rsi));
 ASSERT(ToRegister(instr->left()).is(rdx));
 ASSERT(ToRegister(instr->right()).is(rax));
-StringAddStub stub(instr->hydrogen()->flags(),
+StringAddStub stub(isolate(),
+instr->hydrogen()->flags(),
 instr->hydrogen()->pretenure_flag());
 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
 }


 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
 class DeferredStringCharCodeAt V8_FINAL : public LDeferredCode {
 public:
 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr)
 : LDeferredCode(codegen), instr_(instr) { }
(...skipping 860 matching lines...)
 }
 }


 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
 ASSERT(ToRegister(instr->context()).is(rsi));
 // Use the fast case closure allocation code that allocates in new
 // space for nested functions that don't need literals cloning.
 bool pretenure = instr->hydrogen()->pretenure();
 if (!pretenure && instr->hydrogen()->has_no_literals()) {
-FastNewClosureStub stub(instr->hydrogen()->strict_mode(),
+FastNewClosureStub stub(isolate(),
+instr->hydrogen()->strict_mode(),
 instr->hydrogen()->is_generator());
 __ Move(rbx, instr->hydrogen()->shared_info());
 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
 } else {
 __ Push(rsi);
 __ Push(instr->hydrogen()->shared_info());
 __ PushRoot(pretenure ? Heap::kTrueValueRootIndex :
 Heap::kFalseValueRootIndex);
 CallRuntime(Runtime::kHiddenNewClosure, 3, instr);
 }
(...skipping 395 matching lines...)
 __ bind(deferred->exit());
 __ bind(&done);
 }


 #undef __

 } } // namespace v8::internal

 #endif // V8_TARGET_ARCH_X64