Chromium Code Reviews

Side by Side Diff: src/x64/lithium-codegen-x64.cc

Issue 9227007: Version 3.8.6 (Closed) Base URL: http://v8.googlecode.com/svn/trunk/
Patch Set: Created 8 years, 11 months ago
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
 //       with the distribution.
(...skipping 1736 matching lines...)
   int true_block = chunk_->LookupDestination(instr->true_block_id());
   int false_block = chunk_->LookupDestination(instr->false_block_id());
 
   __ testl(FieldOperand(input, String::kHashFieldOffset),
            Immediate(String::kContainsCachedArrayIndexMask));
   EmitBranch(true_block, false_block, equal);
 }
 
 
 // Branches to a label or falls through with the answer in the z flag.
-// Trashes the temp register and possibly input (if it and temp are aliased).
+// Trashes the temp register.
 void LCodeGen::EmitClassOfTest(Label* is_true,
                                Label* is_false,
                                Handle<String> class_name,
                                Register input,
                                Register temp,
-                               Register scratch) {
+                               Register temp2) {
+  ASSERT(!input.is(temp));
+  ASSERT(!input.is(temp2));
+  ASSERT(!temp.is(temp2));
+
   __ JumpIfSmi(input, is_false);
 
   if (class_name->IsEqualTo(CStrVector("Function"))) {
     // Assuming the following assertions, we can use the same compares to test
     // for both being a function type and being in the object type range.
     STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
     STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                   FIRST_SPEC_OBJECT_TYPE + 1);
     STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                   LAST_SPEC_OBJECT_TYPE - 1);
     STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
     __ CmpObjectType(input, FIRST_SPEC_OBJECT_TYPE, temp);
     __ j(below, is_false);
     __ j(equal, is_true);
     __ CmpInstanceType(temp, LAST_SPEC_OBJECT_TYPE);
     __ j(equal, is_true);
   } else {
     // Faster code path to avoid two compares: subtract lower bound from the
     // actual type and do an unsigned compare with the width of the type
     // range.
     __ movq(temp, FieldOperand(input, HeapObject::kMapOffset));
-    __ movq(scratch, FieldOperand(temp, Map::kInstanceTypeOffset));
-    __ subb(scratch, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
-    __ cmpb(scratch,
+    __ movq(temp2, FieldOperand(temp, Map::kInstanceTypeOffset));
+    __ subb(temp2, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
+    __ cmpb(temp2,
             Immediate(static_cast<int8_t>(LAST_NONCALLABLE_SPEC_OBJECT_TYPE -
                                           FIRST_NONCALLABLE_SPEC_OBJECT_TYPE)));
     __ j(above, is_false);
   }
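Note: the subb/cmpb/j(above) sequence above is the classic trick for folding a two-sided range check into a single unsigned comparison. A minimal C++ sketch of the same logic (the constant values are hypothetical stand-ins, not the real instance-type numbers):

#include <cstdint>

// Hypothetical stand-ins for the instance-type bounds used above.
constexpr uint8_t kFirst = 0x81;  // FIRST_NONCALLABLE_SPEC_OBJECT_TYPE
constexpr uint8_t kLast = 0x8D;   // LAST_NONCALLABLE_SPEC_OBJECT_TYPE

// kFirst <= type && type <= kLast in one compare: any type below kFirst
// wraps around to a large unsigned value after the subtraction, so a
// single unsigned comparison against the range width covers both bounds.
bool InNoncallableRange(uint8_t type) {
  return static_cast<uint8_t>(type - kFirst) <= kLast - kFirst;
}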
 
   // Now we are in the FIRST-LAST_NONCALLABLE_SPEC_OBJECT_TYPE range.
   // Check if the constructor in the map is a function.
   __ movq(temp, FieldOperand(temp, Map::kConstructorOffset));
 
   // Objects with a non-function constructor have class 'Object'.
(...skipping 92 matching lines...)
   __ JumpIfSmi(object, &false_result);
 
   // This is the inlined call site instanceof cache. The two occurrences of the
   // hole value will be patched to the last map/result pair generated by the
   // instanceof stub.
   Label cache_miss;
   // Use a temp register to avoid memory operands with variable lengths.
   Register map = ToRegister(instr->TempAt(0));
   __ movq(map, FieldOperand(object, HeapObject::kMapOffset));
   __ bind(deferred->map_check());  // Label for calculating code patching.
-  __ movq(kScratchRegister, factory()->the_hole_value(),
-          RelocInfo::EMBEDDED_OBJECT);
-  __ cmpq(map, kScratchRegister);  // Patched to cached map.
+  Handle<JSGlobalPropertyCell> cache_cell =
+      factory()->NewJSGlobalPropertyCell(factory()->the_hole_value());
+  __ movq(kScratchRegister, cache_cell, RelocInfo::GLOBAL_PROPERTY_CELL);
+  __ cmpq(map, Operand(kScratchRegister, 0));
   __ j(not_equal, &cache_miss, Label::kNear);
   // Patched to load either true or false.
   __ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex);
 #ifdef DEBUG
   // Check that the code size between patch label and patch sites is invariant.
   Label end_of_patched_code;
   __ bind(&end_of_patched_code);
   ASSERT(true);
 #endif
   __ jmp(&done);
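Note: replacing the EMBEDDED_OBJECT immediate with a GLOBAL_PROPERTY_CELL load adds one level of indirection, presumably so the cached map can be updated by writing the cell's slot instead of rewriting an immediate in the instruction stream. A rough sketch of the idea (all names hypothetical, not V8 API):

// A mutable one-slot cell standing in for JSGlobalPropertyCell.
struct Cell { const void* value; };

Cell instanceof_cache = { nullptr };  // generated code embeds &instanceof_cache

// Equivalent of: movq kScratchRegister, cache_cell
//                cmpq map, [kScratchRegister]
bool CacheHit(const void* map) {
  return map == instanceof_cache.value;
}

// A cache update now touches only data, never the generated code itself.
void UpdateCache(const void* map) {
  instanceof_cache.value = map;
}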
(...skipping 716 matching lines...)
   RecordPosition(pointers->position());
 
   // Invoke function.
   __ SetCallKind(rcx, call_kind);
   if (*function == *info()->closure()) {
     __ CallSelf();
   } else {
     __ call(FieldOperand(rdi, JSFunction::kCodeEntryOffset));
   }
 
-  // Setup deoptimization.
+  // Set up deoptimization.
   RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT, 0);
 
   // Restore context.
   __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
 }
 
 
 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
   ASSERT(ToRegister(instr->result()).is(rax));
   __ LoadHeapObject(rdi, instr->function());
(...skipping 269 matching lines...)
     MathPowStub stub(MathPowStub::INTEGER);
     __ CallStub(&stub);
   } else {
     ASSERT(exponent_type.IsDouble());
     MathPowStub stub(MathPowStub::DOUBLE);
     __ CallStub(&stub);
   }
 }
 
 
+void LCodeGen::DoRandom(LRandom* instr) {
+  // Having marked this instruction as a call, we can use any
+  // registers.
+  ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
+
+  // Choose the right register for the first argument depending on
+  // the calling convention.
+#ifdef _WIN64
+  ASSERT(ToRegister(instr->InputAt(0)).is(rcx));
+  Register global_object = rcx;
+#else
+  ASSERT(ToRegister(instr->InputAt(0)).is(rdi));
+  Register global_object = rdi;
+#endif
+
+  __ PrepareCallCFunction(1);
+  __ movq(global_object,
+          FieldOperand(global_object, GlobalObject::kGlobalContextOffset));
+  __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
+
+  // Convert 32 random bits in rax to 0.(32 random bits) in a double
+  // by computing:
+  // (1.(20 0s)(32 random bits) x 2^20) - (1.0 x 2^20).
+  __ movl(rcx, Immediate(0x49800000));  // 1.0 x 2^20 as single.
+  __ movd(xmm2, rcx);
+  __ movd(xmm1, rax);
+  __ cvtss2sd(xmm2, xmm2);
+  __ xorps(xmm1, xmm2);
+  __ subsd(xmm1, xmm2);
+}
+
+
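Note: the xorps/subsd pair implements the standard bit trick for turning 32 random bits into a uniform double in [0, 1) without an int-to-double conversion. A small self-contained check of the arithmetic (assumes IEEE-754 doubles; the helper name is ours):

#include <cassert>
#include <cstdint>
#include <cstring>

double RandomBitsToDouble(uint32_t bits) {
  // 1.0 x 2^20 as a double; its low 32 bits are zero, so XOR-ing the
  // random bits into the word drops them into the low mantissa, giving
  // 1.(20 0s)(32 random bits) x 2^20 -- this is the xorps above.
  const uint64_t kTwoPow20 = 0x4130000000000000ULL;
  uint64_t word = kTwoPow20 ^ bits;
  double combined, base;
  std::memcpy(&combined, &word, sizeof combined);
  std::memcpy(&base, &kTwoPow20, sizeof base);
  return combined - base;  // the subsd: leaves bits * 2^-32, in [0, 1)
}

int main() {
  assert(RandomBitsToDouble(0x00000000u) == 0.0);
  assert(RandomBitsToDouble(0x80000000u) == 0.5);
  assert(RandomBitsToDouble(0xFFFFFFFFu) < 1.0);
}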
 void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
   ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
   TranscendentalCacheStub stub(TranscendentalCache::LOG,
                                TranscendentalCacheStub::UNTAGGED);
   CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
 }
 
 
 void LCodeGen::DoMathTan(LUnaryMathOperation* instr) {
   ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
(...skipping 564 matching lines...)
     Condition is_smi = __ CheckSmi(input);
     DeoptimizeIf(NegateCondition(is_smi), instr->environment());
   }
   __ SmiToInteger32(input, input);
 }
 
 
 void LCodeGen::EmitNumberUntagD(Register input_reg,
                                 XMMRegister result_reg,
                                 bool deoptimize_on_undefined,
+                                bool deoptimize_on_minus_zero,
                                 LEnvironment* env) {
   Label load_smi, done;
 
   // Smi check.
   __ JumpIfSmi(input_reg, &load_smi, Label::kNear);
 
   // Heap number map check.
   __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
                  Heap::kHeapNumberMapRootIndex);
   if (deoptimize_on_undefined) {
     DeoptimizeIf(not_equal, env);
   } else {
     Label heap_number;
     __ j(equal, &heap_number, Label::kNear);
 
     __ CompareRoot(input_reg, Heap::kUndefinedValueRootIndex);
     DeoptimizeIf(not_equal, env);
 
     // Convert undefined to NaN. Compute NaN as 0/0.
     __ xorps(result_reg, result_reg);
     __ divsd(result_reg, result_reg);
     __ jmp(&done, Label::kNear);
 
     __ bind(&heap_number);
   }
   // Heap number to XMM conversion.
   __ movsd(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset));
+  if (deoptimize_on_minus_zero) {
+    XMMRegister xmm_scratch = xmm0;
+    __ xorps(xmm_scratch, xmm_scratch);
+    __ ucomisd(xmm_scratch, result_reg);
+    __ j(not_equal, &done, Label::kNear);
+    __ movmskpd(kScratchRegister, result_reg);
+    __ testq(kScratchRegister, Immediate(1));
+    DeoptimizeIf(not_zero, env);
+  }
   __ jmp(&done, Label::kNear);
 
   // Smi to XMM conversion
   __ bind(&load_smi);
   __ SmiToInteger32(kScratchRegister, input_reg);
   __ cvtlsi2sd(result_reg, kScratchRegister);
   __ bind(&done);
 }
 
 
(...skipping 71 matching lines...)
   LOperand* input = instr->InputAt(0);
   ASSERT(input->IsRegister());
   LOperand* result = instr->result();
   ASSERT(result->IsDoubleRegister());
 
   Register input_reg = ToRegister(input);
   XMMRegister result_reg = ToDoubleRegister(result);
 
   EmitNumberUntagD(input_reg, result_reg,
                    instr->hydrogen()->deoptimize_on_undefined(),
+                   instr->hydrogen()->deoptimize_on_minus_zero(),
                    instr->environment());
 }
 
 
 void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
   LOperand* input = instr->InputAt(0);
   ASSERT(input->IsDoubleRegister());
   LOperand* result = instr->result();
   ASSERT(result->IsRegister());
 
(...skipping 99 matching lines...)
         isolate()->factory()->NewJSGlobalPropertyCell(target);
     __ movq(kScratchRegister, cell, RelocInfo::GLOBAL_PROPERTY_CELL);
     __ cmpq(reg, Operand(kScratchRegister, 0));
   } else {
     __ Cmp(reg, target);
   }
   DeoptimizeIf(not_equal, instr->environment());
 }
 
 
+void LCodeGen::DoCheckMapCommon(Register reg,
+                                Handle<Map> map,
+                                CompareMapMode mode,
+                                LEnvironment* env) {
+  Label success;
+  __ CompareMap(reg, map, &success, mode);
+  DeoptimizeIf(not_equal, env);
+  __ bind(&success);
+}
+
+
 void LCodeGen::DoCheckMap(LCheckMap* instr) {
   LOperand* input = instr->InputAt(0);
   ASSERT(input->IsRegister());
   Register reg = ToRegister(input);
-  __ Cmp(FieldOperand(reg, HeapObject::kMapOffset),
-         instr->hydrogen()->map());
-  DeoptimizeIf(not_equal, instr->environment());
+  Handle<Map> map = instr->hydrogen()->map();
+  DoCheckMapCommon(reg, map, instr->hydrogen()->mode(), instr->environment());
 }
 
 
 void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) {
   XMMRegister value_reg = ToDoubleRegister(instr->unclamped());
   Register result_reg = ToRegister(instr->result());
   Register temp_reg = ToRegister(instr->TempAt(0));
   __ ClampDoubleToUint8(value_reg, xmm0, result_reg, temp_reg);
 }
 
(...skipping 45 matching lines...)
   Register reg = ToRegister(instr->TempAt(0));
 
   Handle<JSObject> holder = instr->holder();
   Handle<JSObject> current_prototype = instr->prototype();
 
   // Load prototype object.
   __ LoadHeapObject(reg, current_prototype);
 
   // Check prototype maps up to the holder.
   while (!current_prototype.is_identical_to(holder)) {
-    __ Cmp(FieldOperand(reg, HeapObject::kMapOffset),
-           Handle<Map>(current_prototype->map()));
-    DeoptimizeIf(not_equal, instr->environment());
+    DoCheckMapCommon(reg, Handle<Map>(current_prototype->map()),
+                     ALLOW_ELEMENT_TRANSITION_MAPS, instr->environment());
     current_prototype =
         Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype()));
     // Load next prototype object.
     __ LoadHeapObject(reg, current_prototype);
   }
 
   // Check the holder map.
-  __ Cmp(FieldOperand(reg, HeapObject::kMapOffset),
-         Handle<Map>(current_prototype->map()));
-  DeoptimizeIf(not_equal, instr->environment());
+  DoCheckMapCommon(reg, Handle<Map>(current_prototype->map()),
+                   ALLOW_ELEMENT_TRANSITION_MAPS, instr->environment());
 }
 
 
 void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
   Heap* heap = isolate()->heap();
   ElementsKind boilerplate_elements_kind =
       instr->hydrogen()->boilerplate_elements_kind();
 
   // Deopt if the array literal boilerplate ElementsKind differs from the
   // expected one. The check isn't necessary if the boilerplate has already
   // been converted to FAST_ELEMENTS.
   if (boilerplate_elements_kind != FAST_ELEMENTS) {
     __ LoadHeapObject(rax, instr->hydrogen()->boilerplate_object());
     __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
     // Load the map's "bit field 2".
     __ movb(rbx, FieldOperand(rbx, Map::kBitField2Offset));
     // Retrieve elements_kind from bit field 2.
     __ and_(rbx, Immediate(Map::kElementsKindMask));
     __ cmpb(rbx, Immediate(boilerplate_elements_kind <<
                            Map::kElementsKindShift));
     DeoptimizeIf(not_equal, instr->environment());
   }
 
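Note: the and_/cmpb pair above compares the masked bit field directly against the expected kind pre-shifted into field position, saving a shift on the loaded byte. Equivalent logic in plain C++ (the shift/mask values are hypothetical, not the real Map layout):

#include <cstdint>

constexpr int kElementsKindShift = 3;                              // hypothetical
constexpr uint8_t kElementsKindMask = 0x1F << kElementsKindShift;  // hypothetical

// Rather than extracting the field ((bits & mask) >> shift) and comparing
// it with `expected`, mask once and compare against the pre-shifted
// constant, exactly as the generated and_/cmpb sequence does.
bool HasElementsKind(uint8_t bit_field2, int expected) {
  return (bit_field2 & kElementsKindMask) ==
         static_cast<uint8_t>(expected << kElementsKindShift);
}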
-  // Setup the parameters to the stub/runtime call.
+  // Set up the parameters to the stub/runtime call.
   __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
   __ push(FieldOperand(rax, JSFunction::kLiteralsOffset));
   __ Push(Smi::FromInt(instr->hydrogen()->literal_index()));
   // Boilerplate already exists, constant elements are never accessed.
   // Pass an empty fixed array.
   __ Push(Handle<FixedArray>(heap->empty_fixed_array()));
 
   // Pick the right runtime function or stub to call.
   int length = instr->hydrogen()->length();
   if (instr->hydrogen()->IsCopyOnWrite()) {
(...skipping 80 matching lines...)
   __ LoadHeapObject(rbx, instr->hydrogen()->boilerplate());
   EmitDeepCopy(instr->hydrogen()->boilerplate(), rax, rbx, &offset);
   ASSERT_EQ(size, offset);
 }
 
 
 void LCodeGen::DoObjectLiteralGeneric(LObjectLiteralGeneric* instr) {
   Handle<FixedArray> constant_properties =
       instr->hydrogen()->constant_properties();
 
-  // Setup the parameters to the stub/runtime call.
+  // Set up the parameters to the stub/runtime call.
   __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
   __ push(FieldOperand(rax, JSFunction::kLiteralsOffset));
   __ Push(Smi::FromInt(instr->hydrogen()->literal_index()));
   __ Push(constant_properties);
   int flags = instr->hydrogen()->fast_elements()
       ? ObjectLiteral::kFastElements
       : ObjectLiteral::kNoFlags;
   flags |= instr->hydrogen()->has_function()
       ? ObjectLiteral::kHasFunction
       : ObjectLiteral::kNoFlags;
(...skipping 359 matching lines...)
   RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
   ASSERT(osr_pc_offset_ == -1);
   osr_pc_offset_ = masm()->pc_offset();
 }
 
 #undef __
 
 } }  // namespace v8::internal
 
 #endif  // V8_TARGET_ARCH_X64