Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(2)

Side by Side Diff: src/x64/code-stubs-x64.cc

Issue 157503002: A64: Synchronize with r18444. (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/a64
Patch Set: Created 6 years, 10 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/x64/code-stubs-x64.h ('k') | src/x64/codegen-x64.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2013 the V8 project authors. All rights reserved. 1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 162 matching lines...) Expand 10 before | Expand all | Expand 10 after
173 Isolate* isolate, 173 Isolate* isolate,
174 CodeStubInterfaceDescriptor* descriptor) { 174 CodeStubInterfaceDescriptor* descriptor) {
175 static Register registers[] = { rax, rbx }; 175 static Register registers[] = { rax, rbx };
176 descriptor->register_param_count_ = 2; 176 descriptor->register_param_count_ = 2;
177 descriptor->register_params_ = registers; 177 descriptor->register_params_ = registers;
178 descriptor->deoptimization_handler_ = 178 descriptor->deoptimization_handler_ =
179 Runtime::FunctionForId(Runtime::kTransitionElementsKind)->entry; 179 Runtime::FunctionForId(Runtime::kTransitionElementsKind)->entry;
180 } 180 }
181 181
182 182
183 void BinaryOpICStub::InitializeInterfaceDescriptor(
184 Isolate* isolate,
185 CodeStubInterfaceDescriptor* descriptor) {
186 static Register registers[] = { rdx, rax };
187 descriptor->register_param_count_ = 2;
188 descriptor->register_params_ = registers;
189 descriptor->deoptimization_handler_ = FUNCTION_ADDR(BinaryOpIC_Miss);
190 descriptor->SetMissHandler(
191 ExternalReference(IC_Utility(IC::kBinaryOpIC_Miss), isolate));
192 }
193
194
195 static void InitializeArrayConstructorDescriptor( 183 static void InitializeArrayConstructorDescriptor(
196 Isolate* isolate, 184 Isolate* isolate,
197 CodeStubInterfaceDescriptor* descriptor, 185 CodeStubInterfaceDescriptor* descriptor,
198 int constant_stack_parameter_count) { 186 int constant_stack_parameter_count) {
199 // register state 187 // register state
200 // rax -- number of arguments 188 // rax -- number of arguments
201 // rdi -- function 189 // rdi -- function
202 // rbx -- type info cell with elements kind 190 // rbx -- type info cell with elements kind
203 static Register registers_variable_args[] = { rdi, rbx, rax }; 191 static Register registers_variable_args[] = { rdi, rbx, rax };
204 static Register registers_no_args[] = { rdi, rbx }; 192 static Register registers_no_args[] = { rdi, rbx };
(...skipping 127 matching lines...) Expand 10 before | Expand all | Expand 10 after
332 Isolate* isolate, 320 Isolate* isolate,
333 CodeStubInterfaceDescriptor* descriptor) { 321 CodeStubInterfaceDescriptor* descriptor) {
334 static Register registers[] = { rax, rbx, rcx, rdx }; 322 static Register registers[] = { rax, rbx, rcx, rdx };
335 descriptor->register_param_count_ = 4; 323 descriptor->register_param_count_ = 4;
336 descriptor->register_params_ = registers; 324 descriptor->register_params_ = registers;
337 descriptor->deoptimization_handler_ = 325 descriptor->deoptimization_handler_ =
338 FUNCTION_ADDR(ElementsTransitionAndStoreIC_Miss); 326 FUNCTION_ADDR(ElementsTransitionAndStoreIC_Miss);
339 } 327 }
340 328
341 329
// Sets up the interface descriptor for BinaryOpICStub: the two operands
// are passed in registers rdx and rax, and a miss falls back to the
// BinaryOpIC_Miss runtime entry.
void BinaryOpICStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  // Register convention: { rdx, rax } == { left, right } (matches the
  // state comment in BinaryOpICWithAllocationSiteStub::Generate).
  static Register registers[] = { rdx, rax };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = FUNCTION_ADDR(BinaryOpIC_Miss);
  descriptor->SetMissHandler(
      ExternalReference(IC_Utility(IC::kBinaryOpIC_Miss), isolate));
}
340
341
342 void BinaryOpWithAllocationSiteStub::InitializeInterfaceDescriptor(
343 Isolate* isolate,
344 CodeStubInterfaceDescriptor* descriptor) {
345 static Register registers[] = { rcx, rdx, rax };
346 descriptor->register_param_count_ = 3;
347 descriptor->register_params_ = registers;
348 descriptor->deoptimization_handler_ =
349 FUNCTION_ADDR(BinaryOpIC_MissWithAllocationSite);
350 }
351
352
342 void NewStringAddStub::InitializeInterfaceDescriptor( 353 void NewStringAddStub::InitializeInterfaceDescriptor(
343 Isolate* isolate, 354 Isolate* isolate,
344 CodeStubInterfaceDescriptor* descriptor) { 355 CodeStubInterfaceDescriptor* descriptor) {
345 static Register registers[] = { rdx, rax }; 356 static Register registers[] = { rdx, rax };
346 descriptor->register_param_count_ = 2; 357 descriptor->register_param_count_ = 2;
347 descriptor->register_params_ = registers; 358 descriptor->register_params_ = registers;
348 descriptor->deoptimization_handler_ = 359 descriptor->deoptimization_handler_ =
349 Runtime::FunctionForId(Runtime::kStringAdd)->entry; 360 Runtime::FunctionForId(Runtime::kStringAdd)->entry;
350 } 361 }
351 362
(...skipping 238 matching lines...) Expand 10 before | Expand all | Expand 10 after
590 if (!final_result_reg.is(result_reg)) { 601 if (!final_result_reg.is(result_reg)) {
591 ASSERT(final_result_reg.is(rcx)); 602 ASSERT(final_result_reg.is(rcx));
592 __ movl(final_result_reg, result_reg); 603 __ movl(final_result_reg, result_reg);
593 } 604 }
594 __ pop(save_reg); 605 __ pop(save_reg);
595 __ pop(scratch1); 606 __ pop(scratch1);
596 __ ret(0); 607 __ ret(0);
597 } 608 }
598 609
599 610
// Computes a transcendental function of a double (currently only LOG —
// see RuntimeFunction below), consulting the isolate's transcendental
// cache first and falling back to the runtime on a miss that cannot be
// served by the inline x87 computation.
void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
  // TAGGED case:
  //   Input:
  //     rsp[8] : argument (should be number).
  //     rsp[0] : return address.
  //   Output:
  //     rax: tagged double result.
  // UNTAGGED case:
  //   Input:
  //     rsp[0] : return address.
  //     xmm1 : untagged double input argument
  //   Output:
  //     xmm1 : untagged double result.

  Label runtime_call;
  Label runtime_call_clear_stack;
  Label skip_cache;
  const bool tagged = (argument_type_ == TAGGED);
  if (tagged) {
    Label input_not_smi, loaded;

    // Test that rax is a number.
    StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
    __ movq(rax, args.GetArgumentOperand(0));
    __ JumpIfNotSmi(rax, &input_not_smi, Label::kNear);
    // Input is a smi. Untag and load it onto the FPU stack.
    // Then load the bits of the double into rbx.
    __ SmiToInteger32(rax, rax);
    // Spill the converted double through the stack so it can be loaded
    // onto the x87 FPU stack (fld_d has no xmm source form).
    __ subq(rsp, Immediate(kDoubleSize));
    __ Cvtlsi2sd(xmm1, rax);
    __ movsd(Operand(rsp, 0), xmm1);
    __ movq(rbx, xmm1);
    __ movq(rdx, xmm1);
    __ fld_d(Operand(rsp, 0));
    __ addq(rsp, Immediate(kDoubleSize));
    __ jmp(&loaded, Label::kNear);

    __ bind(&input_not_smi);
    // Check if input is a HeapNumber.
    __ LoadRoot(rbx, Heap::kHeapNumberMapRootIndex);
    __ cmpq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
    __ j(not_equal, &runtime_call);
    // Input is a HeapNumber. Push it on the FPU stack and load its
    // bits into rbx.
    __ fld_d(FieldOperand(rax, HeapNumber::kValueOffset));
    __ MoveDouble(rbx, FieldOperand(rax, HeapNumber::kValueOffset));
    __ movq(rdx, rbx);

    __ bind(&loaded);
  } else {  // UNTAGGED.
    __ movq(rbx, xmm1);
    __ movq(rdx, xmm1);
  }

  // ST[0] == double value, if TAGGED.
  // rbx = bits of double value.
  // rdx = also bits of double value.
  // Compute hash (h is 32 bits, bits are 64 and the shifts are arithmetic):
  //   h = h0 = bits ^ (bits >> 32);
  //   h ^= h >> 16;
  //   h ^= h >> 8;
  //   h = h & (cacheSize - 1);
  // or h = (h0 ^ (h0 >> 8) ^ (h0 >> 16) ^ (h0 >> 24)) & (cacheSize - 1)
  __ sar(rdx, Immediate(32));
  __ xorl(rdx, rbx);
  __ movl(rcx, rdx);
  __ movl(rax, rdx);
  __ movl(rdi, rdx);
  __ sarl(rdx, Immediate(8));
  __ sarl(rcx, Immediate(16));
  __ sarl(rax, Immediate(24));
  __ xorl(rcx, rdx);
  __ xorl(rax, rdi);
  __ xorl(rcx, rax);
  // The hash is reduced with a mask, so the cache size must be a power
  // of two for every slot to be reachable.
  ASSERT(IsPowerOf2(TranscendentalCache::SubCache::kCacheSize));
  __ andl(rcx, Immediate(TranscendentalCache::SubCache::kCacheSize - 1));

  // ST[0] == double value.
  // rbx = bits of double value.
  // rcx = TranscendentalCache::hash(double value).
  ExternalReference cache_array =
      ExternalReference::transcendental_cache_array_address(masm->isolate());
  __ Move(rax, cache_array);
  int cache_array_index =
      type_ * sizeof(masm->isolate()->transcendental_cache()->caches_[0]);
  __ movq(rax, Operand(rax, cache_array_index));
  // rax points to the cache for the type type_.
  // If NULL, the cache hasn't been initialized yet, so go through runtime.
  __ testq(rax, rax);
  __ j(zero, &runtime_call_clear_stack);  // Only clears stack if TAGGED.
#ifdef DEBUG
  // Check that the layout of cache elements match expectations.
  {  // NOLINT - doesn't like a single brace on a line.
    TranscendentalCache::SubCache::Element test_elem[2];
    char* elem_start = reinterpret_cast<char*>(&test_elem[0]);
    char* elem2_start = reinterpret_cast<char*>(&test_elem[1]);
    char* elem_in0 = reinterpret_cast<char*>(&(test_elem[0].in[0]));
    char* elem_in1 = reinterpret_cast<char*>(&(test_elem[0].in[1]));
    char* elem_out = reinterpret_cast<char*>(&(test_elem[0].output));
    // Two uint_32's and a pointer per element.
    CHECK_EQ(2 * kIntSize + 1 * kPointerSize,
             static_cast<int>(elem2_start - elem_start));
    CHECK_EQ(0, static_cast<int>(elem_in0 - elem_start));
    CHECK_EQ(kIntSize, static_cast<int>(elem_in1 - elem_start));
    CHECK_EQ(2 * kIntSize, static_cast<int>(elem_out - elem_start));
  }
#endif
  // Find the address of the rcx'th entry in the cache, i.e., &rax[rcx*16].
  __ addl(rcx, rcx);
  __ lea(rcx, Operand(rax, rcx, times_8, 0));
  // Check if cache matches: Double value is stored in uint32_t[2] array.
  Label cache_miss;
  __ cmpq(rbx, Operand(rcx, 0));
  __ j(not_equal, &cache_miss, Label::kNear);
  // Cache hit!
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->transcendental_cache_hit(), 1);
  // Load the cached HeapNumber result (stored after the two key words).
  __ movq(rax, Operand(rcx, 2 * kIntSize));
  if (tagged) {
    __ fstp(0);  // Clear FPU stack.
    __ ret(kPointerSize);
  } else {  // UNTAGGED.
    __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
    __ Ret();
  }

  __ bind(&cache_miss);
  __ IncrementCounter(counters->transcendental_cache_miss(), 1);
  // Update cache with new value.
  if (tagged) {
    __ AllocateHeapNumber(rax, rdi, &runtime_call_clear_stack);
  } else {  // UNTAGGED.
    __ AllocateHeapNumber(rax, rdi, &skip_cache);
    // Push the untagged input onto the x87 stack, where the operation
    // below expects it.
    __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm1);
    __ fld_d(FieldOperand(rax, HeapNumber::kValueOffset));
  }
  GenerateOperation(masm, type_);
  // Store key bits and the result HeapNumber into the cache entry.
  __ movq(Operand(rcx, 0), rbx);
  __ movq(Operand(rcx, 2 * kIntSize), rax);
  __ fstp_d(FieldOperand(rax, HeapNumber::kValueOffset));
  if (tagged) {
    __ ret(kPointerSize);
  } else {  // UNTAGGED.
    __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
    __ Ret();

    // Skip cache and return answer directly, only in untagged case.
    __ bind(&skip_cache);
    // Round-trip through memory: xmm1 -> x87, compute, x87 -> xmm1.
    __ subq(rsp, Immediate(kDoubleSize));
    __ movsd(Operand(rsp, 0), xmm1);
    __ fld_d(Operand(rsp, 0));
    GenerateOperation(masm, type_);
    __ fstp_d(Operand(rsp, 0));
    __ movsd(xmm1, Operand(rsp, 0));
    __ addq(rsp, Immediate(kDoubleSize));
    // We return the value in xmm1 without adding it to the cache, but
    // we cause a scavenging GC so that future allocations will succeed.
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      // Allocate an unused object bigger than a HeapNumber.
      __ Push(Smi::FromInt(2 * kDoubleSize));
      __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace);
    }
    __ Ret();
  }

  // Call runtime, doing whatever allocation and cleanup is necessary.
  if (tagged) {
    __ bind(&runtime_call_clear_stack);
    __ fstp(0);
    __ bind(&runtime_call);
    __ TailCallExternalReference(
        ExternalReference(RuntimeFunction(), masm->isolate()), 1, 1);
  } else {  // UNTAGGED.
    __ bind(&runtime_call_clear_stack);
    __ bind(&runtime_call);
    // NOTE(review): on allocation failure this jumps backward to
    // skip_cache, which computes the result without caching — confirm
    // this is the intended degradation path.
    __ AllocateHeapNumber(rax, rdi, &skip_cache);
    __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm1);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ push(rax);
      __ CallRuntime(RuntimeFunction(), 1);
    }
    __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
    __ Ret();
  }
}
787
788
789 Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() {
790 switch (type_) {
791 // Add more cases when necessary.
792 case TranscendentalCache::LOG: return Runtime::kMath_log;
793 default:
794 UNIMPLEMENTED();
795 return Runtime::kAbort;
796 }
797 }
798
799
// Emits the x87 instruction sequence that performs the transcendental
// operation itself on st(0). Only LOG is supported: ln(x) is computed
// as ln(2) * log2(x) via fyl2x.
void TranscendentalCacheStub::GenerateOperation(
    MacroAssembler* masm, TranscendentalCache::Type type) {
  // Registers:
  //   rax: Newly allocated HeapNumber, which must be preserved.
  //   rbx: Bits of input double. Must be preserved.
  //   rcx: Pointer to cache entry. Must be preserved.
  //   st(0): Input double
  ASSERT(type == TranscendentalCache::LOG);
  __ fldln2();  // Push ln(2); the input moves to st(1).
  __ fxch();    // Swap: st(0) = input, st(1) = ln(2).
  __ fyl2x();   // st(0) = st(1) * log2(st(0)) = ln(2) * log2(x) = ln(x).
}
812
813
814 void FloatingPointHelper::LoadSSE2UnknownOperands(MacroAssembler* masm, 611 void FloatingPointHelper::LoadSSE2UnknownOperands(MacroAssembler* masm,
815 Label* not_numbers) { 612 Label* not_numbers) {
816 Label load_smi_rdx, load_nonsmi_rax, load_smi_rax, load_float_rax, done; 613 Label load_smi_rdx, load_nonsmi_rax, load_smi_rax, load_float_rax, done;
817 // Load operand in rdx into xmm0, or branch to not_numbers. 614 // Load operand in rdx into xmm0, or branch to not_numbers.
818 __ LoadRoot(rcx, Heap::kHeapNumberMapRootIndex); 615 __ LoadRoot(rcx, Heap::kHeapNumberMapRootIndex);
819 __ JumpIfSmi(rdx, &load_smi_rdx); 616 __ JumpIfSmi(rdx, &load_smi_rdx);
820 __ cmpq(FieldOperand(rdx, HeapObject::kMapOffset), rcx); 617 __ cmpq(FieldOperand(rdx, HeapObject::kMapOffset), rcx);
821 __ j(not_equal, not_numbers); // Argument in rdx is not a number. 618 __ j(not_equal, not_numbers); // Argument in rdx is not a number.
822 __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset)); 619 __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
823 // Load operand in rax into xmm1, or branch to not_numbers. 620 // Load operand in rax into xmm1, or branch to not_numbers.
(...skipping 1866 matching lines...) Expand 10 before | Expand all | Expand 10 after
2690 2487
2691 2488
2692 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { 2489 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
2693 CEntryStub::GenerateAheadOfTime(isolate); 2490 CEntryStub::GenerateAheadOfTime(isolate);
2694 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); 2491 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
2695 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); 2492 StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
2696 // It is important that the store buffer overflow stubs are generated first. 2493 // It is important that the store buffer overflow stubs are generated first.
2697 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); 2494 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
2698 CreateAllocationSiteStub::GenerateAheadOfTime(isolate); 2495 CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
2699 BinaryOpICStub::GenerateAheadOfTime(isolate); 2496 BinaryOpICStub::GenerateAheadOfTime(isolate);
2497 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
2700 } 2498 }
2701 2499
2702 2500
2703 void CodeStub::GenerateFPStubs(Isolate* isolate) { 2501 void CodeStub::GenerateFPStubs(Isolate* isolate) {
2704 } 2502 }
2705 2503
2706 2504
2707 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) { 2505 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
2708 CEntryStub stub(1, kDontSaveFPRegs); 2506 CEntryStub stub(1, kDontSaveFPRegs);
2709 stub.GetCode(isolate); 2507 stub.GetCode(isolate);
(...skipping 1618 matching lines...) Expand 10 before | Expand all | Expand 10 after
4328 const Register min_length = scratch1; 4126 const Register min_length = scratch1;
4329 4127
4330 Label compare_lengths; 4128 Label compare_lengths;
4331 // If min-length is zero, go directly to comparing lengths. 4129 // If min-length is zero, go directly to comparing lengths.
4332 __ SmiTest(min_length); 4130 __ SmiTest(min_length);
4333 __ j(zero, &compare_lengths, Label::kNear); 4131 __ j(zero, &compare_lengths, Label::kNear);
4334 4132
4335 // Compare loop. 4133 // Compare loop.
4336 Label result_not_equal; 4134 Label result_not_equal;
4337 GenerateAsciiCharsCompareLoop(masm, left, right, min_length, scratch2, 4135 GenerateAsciiCharsCompareLoop(masm, left, right, min_length, scratch2,
4338 &result_not_equal, Label::kNear); 4136 &result_not_equal,
4137 // In debug-code mode, SmiTest below might push
4138 // the target label outside the near range.
4139 Label::kFar);
4339 4140
4340 // Completed loop without finding different characters. 4141 // Completed loop without finding different characters.
4341 // Compare lengths (precomputed). 4142 // Compare lengths (precomputed).
4342 __ bind(&compare_lengths); 4143 __ bind(&compare_lengths);
4343 __ SmiTest(length_difference); 4144 __ SmiTest(length_difference);
4344 Label length_not_equal; 4145 Label length_not_equal;
4345 __ j(not_zero, &length_not_equal, Label::kNear); 4146 __ j(not_zero, &length_not_equal, Label::kNear);
4346 4147
4347 // Result is EQUAL. 4148 // Result is EQUAL.
4348 __ Move(rax, Smi::FromInt(EQUAL)); 4149 __ Move(rax, Smi::FromInt(EQUAL));
(...skipping 84 matching lines...) Expand 10 before | Expand all | Expand 10 after
4433 __ PushReturnAddressFrom(rcx); 4234 __ PushReturnAddressFrom(rcx);
4434 GenerateCompareFlatAsciiStrings(masm, rdx, rax, rcx, rbx, rdi, r8); 4235 GenerateCompareFlatAsciiStrings(masm, rdx, rax, rcx, rbx, rdi, r8);
4435 4236
4436 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) 4237 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
4437 // tagged as a small integer. 4238 // tagged as a small integer.
4438 __ bind(&runtime); 4239 __ bind(&runtime);
4439 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); 4240 __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
4440 } 4241 }
4441 4242
4442 4243
// Trampoline that supplies the (patched-in) AllocationSite in rcx and
// tail-calls BinaryOpWithAllocationSiteStub to do the actual operation.
void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rdx    : left
  //  -- rax    : right
  //  -- rsp[0] : return address
  // -----------------------------------
  Isolate* isolate = masm->isolate();

  // Load rcx with the allocation site. We stick an undefined dummy value here
  // and replace it with the real allocation site later when we instantiate this
  // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
  __ Move(rcx, handle(isolate->heap()->undefined_value()));

  // Make sure that we actually patched the allocation site.
  if (FLAG_debug_code) {
    // The value must be a non-smi heap object whose map is the
    // AllocationSite map.
    __ testb(rcx, Immediate(kSmiTagMask));
    __ Assert(zero, kExpectedAllocationSite);
    __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset),
           isolate->factory()->allocation_site_map());
    __ Assert(equal, kExpectedAllocationSite);
  }

  // Tail call into the stub that handles binary operations with allocation
  // sites.
  BinaryOpWithAllocationSiteStub stub(state_);
  __ TailCallStub(&stub);
}
4271
4272
4443 void ICCompareStub::GenerateSmis(MacroAssembler* masm) { 4273 void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
4444 ASSERT(state_ == CompareIC::SMI); 4274 ASSERT(state_ == CompareIC::SMI);
4445 Label miss; 4275 Label miss;
4446 __ JumpIfNotBothSmi(rdx, rax, &miss, Label::kNear); 4276 __ JumpIfNotBothSmi(rdx, rax, &miss, Label::kNear);
4447 4277
4448 if (GetCondition() == equal) { 4278 if (GetCondition() == equal) {
4449 // For equality we do not care about the sign of the result. 4279 // For equality we do not care about the sign of the result.
4450 __ subq(rax, rdx); 4280 __ subq(rax, rdx);
4451 } else { 4281 } else {
4452 Label done; 4282 Label done;
(...skipping 1238 matching lines...) Expand 10 before | Expand all | Expand 10 after
5691 __ bind(&fast_elements_case); 5521 __ bind(&fast_elements_case);
5692 GenerateCase(masm, FAST_ELEMENTS); 5522 GenerateCase(masm, FAST_ELEMENTS);
5693 } 5523 }
5694 5524
5695 5525
5696 #undef __ 5526 #undef __
5697 5527
5698 } } // namespace v8::internal 5528 } } // namespace v8::internal
5699 5529
5700 #endif // V8_TARGET_ARCH_X64 5530 #endif // V8_TARGET_ARCH_X64
OLDNEW
« no previous file with comments | « src/x64/code-stubs-x64.h ('k') | src/x64/codegen-x64.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698