Chromium Code Reviews

Side by Side Diff: src/x64/codegen-x64.cc

Issue 3023001: Remove unnecessary formatting differences between ia32 and x64 code generator... (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: '' Created 10 years, 5 months ago
1 // Copyright 2010 the V8 project authors. All rights reserved. 1 // Copyright 2010 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 121 matching lines...)
132 owner_->set_state(this); 132 owner_->set_state(this);
133 } 133 }
134 134
135 135
136 CodeGenState::~CodeGenState() { 136 CodeGenState::~CodeGenState() {
137 ASSERT(owner_->state() == this); 137 ASSERT(owner_->state() == this);
138 owner_->set_state(previous_); 138 owner_->set_state(previous_);
139 } 139 }
140 140
141 141
142 // ----------------------------------------------------------------------------- 142 // -------------------------------------------------------------------------
143 // CodeGenerator implementation. 143 // CodeGenerator implementation.
144 144
145 CodeGenerator::CodeGenerator(MacroAssembler* masm) 145 CodeGenerator::CodeGenerator(MacroAssembler* masm)
146 : deferred_(8), 146 : deferred_(8),
147 masm_(masm), 147 masm_(masm),
148 info_(NULL), 148 info_(NULL),
149 frame_(NULL), 149 frame_(NULL),
150 allocator_(NULL), 150 allocator_(NULL),
151 state_(NULL), 151 state_(NULL),
152 loop_nesting_(0), 152 loop_nesting_(0),
153 function_return_is_shadowed_(false), 153 function_return_is_shadowed_(false),
154 in_spilled_code_(false) { 154 in_spilled_code_(false) {
155 } 155 }
156 156
157 157
158 // Calling conventions:
159 // rbp: caller's frame pointer
160 // rsp: stack pointer
161 // rdi: called JS function
162 // rsi: callee's context
163
158 void CodeGenerator::Generate(CompilationInfo* info) { 164 void CodeGenerator::Generate(CompilationInfo* info) {
159 // Record the position for debugging purposes. 165 // Record the position for debugging purposes.
160 CodeForFunctionPosition(info->function()); 166 CodeForFunctionPosition(info->function());
161 Comment cmnt(masm_, "[ function compiled by virtual frame code generator"); 167 Comment cmnt(masm_, "[ function compiled by virtual frame code generator");
162 168
163 // Initialize state. 169 // Initialize state.
164 info_ = info; 170 info_ = info;
165 ASSERT(allocator_ == NULL); 171 ASSERT(allocator_ == NULL);
166 RegisterAllocator register_allocator(this); 172 RegisterAllocator register_allocator(this);
167 allocator_ = &register_allocator; 173 allocator_ = &register_allocator;
168 ASSERT(frame_ == NULL); 174 ASSERT(frame_ == NULL);
169 frame_ = new VirtualFrame(); 175 frame_ = new VirtualFrame();
170 set_in_spilled_code(false); 176 set_in_spilled_code(false);
171 177
172 // Adjust for function-level loop nesting. 178 // Adjust for function-level loop nesting.
173 ASSERT_EQ(0, loop_nesting_); 179 ASSERT_EQ(0, loop_nesting_);
174 loop_nesting_ += info->loop_nesting(); 180 loop_nesting_ = info->loop_nesting();
175 181
176 JumpTarget::set_compiling_deferred_code(false); 182 JumpTarget::set_compiling_deferred_code(false);
177 183
178 #ifdef DEBUG 184 #ifdef DEBUG
179 if (strlen(FLAG_stop_at) > 0 && 185 if (strlen(FLAG_stop_at) > 0 &&
180 info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) { 186 info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
181 frame_->SpillAll(); 187 frame_->SpillAll();
182 __ int3(); 188 __ int3();
183 } 189 }
184 #endif 190 #endif
(...skipping 240 matching lines...)
425 // always at a function context. However it is safe to dereference be- 431 // always at a function context. However it is safe to dereference be-
426 // cause the function context of a function context is itself. Before 432 // cause the function context of a function context is itself. Before
427 // deleting this mov we should try to create a counter-example first, 433 // deleting this mov we should try to create a counter-example first,
428 // though...) 434 // though...)
429 __ movq(tmp, ContextOperand(context, Context::FCONTEXT_INDEX)); 435 __ movq(tmp, ContextOperand(context, Context::FCONTEXT_INDEX));
430 return ContextOperand(tmp, index); 436 return ContextOperand(tmp, index);
431 } 437 }
432 438
433 default: 439 default:
434 UNREACHABLE(); 440 UNREACHABLE();
435 return Operand(rsp, 0); 441 return Operand(rax);
436 } 442 }
437 } 443 }
438 444
439 445
440 Operand CodeGenerator::ContextSlotOperandCheckExtensions(Slot* slot, 446 Operand CodeGenerator::ContextSlotOperandCheckExtensions(Slot* slot,
441 Result tmp, 447 Result tmp,
442 JumpTarget* slow) { 448 JumpTarget* slow) {
443 ASSERT(slot->type() == Slot::CONTEXT); 449 ASSERT(slot->type() == Slot::CONTEXT);
444 ASSERT(tmp.is_register()); 450 ASSERT(tmp.is_register());
445 Register context = rsi; 451 Register context = rsi;
(...skipping 16 matching lines...)
462 slow->Branch(not_equal, not_taken); 468 slow->Branch(not_equal, not_taken);
463 __ movq(tmp.reg(), ContextOperand(context, Context::FCONTEXT_INDEX)); 469 __ movq(tmp.reg(), ContextOperand(context, Context::FCONTEXT_INDEX));
464 return ContextOperand(tmp.reg(), slot->index()); 470 return ContextOperand(tmp.reg(), slot->index());
465 } 471 }
466 472
467 473
468 // Emit code to load the value of an expression to the top of the 474 // Emit code to load the value of an expression to the top of the
469 // frame. If the expression is boolean-valued it may be compiled (or 475 // frame. If the expression is boolean-valued it may be compiled (or
470 // partially compiled) into control flow to the control destination. 476 // partially compiled) into control flow to the control destination.
471 // If force_control is true, control flow is forced. 477 // If force_control is true, control flow is forced.
472 void CodeGenerator::LoadCondition(Expression* x, 478 void CodeGenerator::LoadCondition(Expression* expr,
473 ControlDestination* dest, 479 ControlDestination* dest,
474 bool force_control) { 480 bool force_control) {
475 ASSERT(!in_spilled_code()); 481 ASSERT(!in_spilled_code());
476 int original_height = frame_->height(); 482 int original_height = frame_->height();
477 483
478 { CodeGenState new_state(this, dest); 484 { CodeGenState new_state(this, dest);
479 Visit(x); 485 Visit(expr);
480 486
481 // If we hit a stack overflow, we may not have actually visited 487 // If we hit a stack overflow, we may not have actually visited
482 // the expression. In that case, we ensure that we have a 488 // the expression. In that case, we ensure that we have a
483 // valid-looking frame state because we will continue to generate 489 // valid-looking frame state because we will continue to generate
484 // code as we unwind the C++ stack. 490 // code as we unwind the C++ stack.
485 // 491 //
486 // It's possible to have both a stack overflow and a valid frame 492 // It's possible to have both a stack overflow and a valid frame
487 // state (eg, a subexpression overflowed, visiting it returned 493 // state (eg, a subexpression overflowed, visiting it returned
488 // with a dummied frame state, and visiting this expression 494 // with a dummied frame state, and visiting this expression
489 // returned with a normal-looking state). 495 // returned with a normal-looking state).
490 if (HasStackOverflow() && 496 if (HasStackOverflow() &&
491 !dest->is_used() && 497 !dest->is_used() &&
492 frame_->height() == original_height) { 498 frame_->height() == original_height) {
493 dest->Goto(true); 499 dest->Goto(true);
494 } 500 }
495 } 501 }
496 502
497 if (force_control && !dest->is_used()) { 503 if (force_control && !dest->is_used()) {
498 // Convert the TOS value into flow to the control destination. 504 // Convert the TOS value into flow to the control destination.
499 // TODO(X64): Make control flow to control destinations work.
500 ToBoolean(dest); 505 ToBoolean(dest);
501 } 506 }
502 507
503 ASSERT(!(force_control && !dest->is_used())); 508 ASSERT(!(force_control && !dest->is_used()));
504 ASSERT(dest->is_used() || frame_->height() == original_height + 1); 509 ASSERT(dest->is_used() || frame_->height() == original_height + 1);
505 } 510 }
506 511
507 512
508 void CodeGenerator::LoadAndSpill(Expression* expression) { 513 void CodeGenerator::LoadAndSpill(Expression* expression) {
509 // TODO(x64): No architecture specific code. Move to shared location.
510 ASSERT(in_spilled_code()); 514 ASSERT(in_spilled_code());
511 set_in_spilled_code(false); 515 set_in_spilled_code(false);
512 Load(expression); 516 Load(expression);
513 frame_->SpillAll(); 517 frame_->SpillAll();
514 set_in_spilled_code(true); 518 set_in_spilled_code(true);
515 } 519 }
516 520
517 521
518 void CodeGenerator::Load(Expression* expr) { 522 void CodeGenerator::Load(Expression* expr) {
519 #ifdef DEBUG 523 #ifdef DEBUG
(...skipping 125 matching lines...)
645 frame_->Push(Factory::the_hole_value()); 649 frame_->Push(Factory::the_hole_value());
646 } else { 650 } else {
647 ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT); 651 ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
648 frame_->PushFunction(); 652 frame_->PushFunction();
649 frame_->PushReceiverSlotAddress(); 653 frame_->PushReceiverSlotAddress();
650 frame_->Push(Smi::FromInt(scope()->num_parameters())); 654 frame_->Push(Smi::FromInt(scope()->num_parameters()));
651 Result result = frame_->CallStub(&stub, 3); 655 Result result = frame_->CallStub(&stub, 3);
652 frame_->Push(&result); 656 frame_->Push(&result);
653 } 657 }
654 658
655
656 Variable* arguments = scope()->arguments()->var(); 659 Variable* arguments = scope()->arguments()->var();
657 Variable* shadow = scope()->arguments_shadow()->var(); 660 Variable* shadow = scope()->arguments_shadow()->var();
658 ASSERT(arguments != NULL && arguments->slot() != NULL); 661 ASSERT(arguments != NULL && arguments->slot() != NULL);
659 ASSERT(shadow != NULL && shadow->slot() != NULL); 662 ASSERT(shadow != NULL && shadow->slot() != NULL);
660 JumpTarget done; 663 JumpTarget done;
661 bool skip_arguments = false; 664 bool skip_arguments = false;
662 if (mode == LAZY_ARGUMENTS_ALLOCATION && !initial) { 665 if (mode == LAZY_ARGUMENTS_ALLOCATION && !initial) {
663 // We have to skip storing into the arguments slot if it has 666 // We have to skip storing into the arguments slot if it has
664 // already been written to. This can happen if the a function 667 // already been written to. This can happen if the a function
665 // has a local variable named 'arguments'. 668 // has a local variable named 'arguments'.
666 LoadFromSlot(scope()->arguments()->var()->slot(), NOT_INSIDE_TYPEOF); 669 LoadFromSlot(arguments->slot(), NOT_INSIDE_TYPEOF);
667 Result probe = frame_->Pop(); 670 Result probe = frame_->Pop();
668 if (probe.is_constant()) { 671 if (probe.is_constant()) {
669 // We have to skip updating the arguments object if it has been 672 // We have to skip updating the arguments object if it has
670 // assigned a proper value. 673 // been assigned a proper value.
671 skip_arguments = !probe.handle()->IsTheHole(); 674 skip_arguments = !probe.handle()->IsTheHole();
672 } else { 675 } else {
673 __ CompareRoot(probe.reg(), Heap::kTheHoleValueRootIndex); 676 __ CompareRoot(probe.reg(), Heap::kTheHoleValueRootIndex);
674 probe.Unuse(); 677 probe.Unuse();
675 done.Branch(not_equal); 678 done.Branch(not_equal);
676 } 679 }
677 } 680 }
678 if (!skip_arguments) { 681 if (!skip_arguments) {
679 StoreToSlot(arguments->slot(), NOT_CONST_INIT); 682 StoreToSlot(arguments->slot(), NOT_CONST_INIT);
680 if (mode == LAZY_ARGUMENTS_ALLOCATION) done.Bind(); 683 if (mode == LAZY_ARGUMENTS_ALLOCATION) done.Bind();
681 } 684 }
682 StoreToSlot(shadow->slot(), NOT_CONST_INIT); 685 StoreToSlot(shadow->slot(), NOT_CONST_INIT);
683 return frame_->Pop(); 686 return frame_->Pop();
684 } 687 }
685 688
686 //------------------------------------------------------------------------------ 689 //------------------------------------------------------------------------------
687 // CodeGenerator implementation of variables, lookups, and stores. 690 // CodeGenerator implementation of variables, lookups, and stores.
688 691
689 //------------------------------------------------------------------------------
690 // CodeGenerator implementation of variables, lookups, and stores.
691
692 Reference::Reference(CodeGenerator* cgen, 692 Reference::Reference(CodeGenerator* cgen,
693 Expression* expression, 693 Expression* expression,
694 bool persist_after_get) 694 bool persist_after_get)
695 : cgen_(cgen), 695 : cgen_(cgen),
696 expression_(expression), 696 expression_(expression),
697 type_(ILLEGAL), 697 type_(ILLEGAL),
698 persist_after_get_(persist_after_get) { 698 persist_after_get_(persist_after_get) {
699 cgen->LoadReference(this); 699 cgen->LoadReference(this);
700 } 700 }
701 701
(...skipping 136 matching lines...)
838 Label* operand_conversion_failure, 838 Label* operand_conversion_failure,
839 Register heap_number_map); 839 Register heap_number_map);
840 // As above, but we know the operands to be numbers. In that case, 840 // As above, but we know the operands to be numbers. In that case,
841 // conversion can't fail. 841 // conversion can't fail.
842 static void LoadNumbersAsIntegers(MacroAssembler* masm); 842 static void LoadNumbersAsIntegers(MacroAssembler* masm);
843 }; 843 };
844 844
845 845
846 const char* GenericBinaryOpStub::GetName() { 846 const char* GenericBinaryOpStub::GetName() {
847 if (name_ != NULL) return name_; 847 if (name_ != NULL) return name_;
848 const int len = 100; 848 const int kMaxNameLength = 100;
849 name_ = Bootstrapper::AllocateAutoDeletedArray(len); 849 name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
850 if (name_ == NULL) return "OOM"; 850 if (name_ == NULL) return "OOM";
851 const char* op_name = Token::Name(op_); 851 const char* op_name = Token::Name(op_);
852 const char* overwrite_name; 852 const char* overwrite_name;
853 switch (mode_) { 853 switch (mode_) {
854 case NO_OVERWRITE: overwrite_name = "Alloc"; break; 854 case NO_OVERWRITE: overwrite_name = "Alloc"; break;
855 case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break; 855 case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break;
856 case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break; 856 case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
857 default: overwrite_name = "UnknownOverwrite"; break; 857 default: overwrite_name = "UnknownOverwrite"; break;
858 } 858 }
859 859
860 OS::SNPrintF(Vector<char>(name_, len), 860 OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
861 "GenericBinaryOpStub_%s_%s%s_%s%s_%s_%s", 861 "GenericBinaryOpStub_%s_%s%s_%s%s_%s_%s",
862 op_name, 862 op_name,
863 overwrite_name, 863 overwrite_name,
864 (flags_ & NO_SMI_CODE_IN_STUB) ? "_NoSmiInStub" : "", 864 (flags_ & NO_SMI_CODE_IN_STUB) ? "_NoSmiInStub" : "",
865 args_in_registers_ ? "RegArgs" : "StackArgs", 865 args_in_registers_ ? "RegArgs" : "StackArgs",
866 args_reversed_ ? "_R" : "", 866 args_reversed_ ? "_R" : "",
867 static_operands_type_.ToString(), 867 static_operands_type_.ToString(),
868 BinaryOpIC::GetName(runtime_operands_type_)); 868 BinaryOpIC::GetName(runtime_operands_type_));
869 return name_; 869 return name_;
870 } 870 }
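Note: GetName() above composes the stub name from the operation, overwrite mode, and flags into a fixed-size buffer. A minimal standalone sketch of the same string composition using plain snprintf; the values plugged in here are made up for illustration:

#include <cstdio>

int main() {
  const int kMaxNameLength = 100;
  char name[kMaxNameLength];
  const bool no_smi_code_in_stub = true;
  const bool args_in_registers = false;
  std::snprintf(name, sizeof(name), "GenericBinaryOpStub_%s_%s%s_%s",
                "ADD",                                         // op name
                "Alloc",                                       // overwrite mode
                no_smi_code_in_stub ? "_NoSmiInStub" : "",     // flag suffix
                args_in_registers ? "RegArgs" : "StackArgs");  // argument passing
  std::puts(name);  // GenericBinaryOpStub_ADD_Alloc_NoSmiInStub_StackArgs
  return 0;
}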
(...skipping 260 matching lines...)
1131 // Use intptr_t to detect overflow of 32-bit int. 1131 // Use intptr_t to detect overflow of 32-bit int.
1132 if (Smi::IsValid(static_cast<intptr_t>(left) - right)) { 1132 if (Smi::IsValid(static_cast<intptr_t>(left) - right)) {
1133 answer_object = Smi::FromInt(left - right); 1133 answer_object = Smi::FromInt(left - right);
1134 } 1134 }
1135 break; 1135 break;
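Note on the SUB case above: subtracting two 32-bit ints can overflow the int type, so the candidate result is computed in a wider type before checking whether it fits in a Smi. A standalone sketch of the same idea for x64 (where intptr_t is 64 bits); the 31-bit Smi range used here is hypothetical, for illustration only:

#include <cstdint>

bool DifferenceFitsInSmi(int32_t left, int32_t right) {
  // Hypothetical 31-bit Smi range; the real range is platform-specific.
  const intptr_t kSmiMin = -(static_cast<intptr_t>(1) << 30);
  const intptr_t kSmiMax = (static_cast<intptr_t>(1) << 30) - 1;
  // Widening one operand makes the whole subtraction happen in intptr_t,
  // so even INT32_MIN - INT32_MAX cannot overflow here.
  intptr_t difference = static_cast<intptr_t>(left) - right;
  return difference >= kSmiMin && difference <= kSmiMax;
}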
1136 case Token::MUL: { 1136 case Token::MUL: {
1137 double answer = static_cast<double>(left) * right; 1137 double answer = static_cast<double>(left) * right;
1138 if (answer >= Smi::kMinValue && answer <= Smi::kMaxValue) { 1138 if (answer >= Smi::kMinValue && answer <= Smi::kMaxValue) {
1139 // If the product is zero and the non-zero factor is negative, 1139 // If the product is zero and the non-zero factor is negative,
1140 // the spec requires us to return floating point negative zero. 1140 // the spec requires us to return floating point negative zero.
1141 if (answer != 0 || (left + right) >= 0) { 1141 if (answer != 0 || (left >= 0 && right >= 0)) {
1142 answer_object = Smi::FromInt(static_cast<int>(answer)); 1142 answer_object = Smi::FromInt(static_cast<int>(answer));
1143 } 1143 }
1144 } 1144 }
1145 } 1145 }
1146 break; 1146 break;
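Note on the MUL case above: a zero product cannot be folded to the Smi 0 when the non-zero factor is negative, because ECMAScript requires the result -0.0, which a Smi cannot represent. A minimal standalone illustration of the IEEE 754 behaviour involved (not part of the patch):

#include <cstdio>

int main() {
  double product = 0.0 * -5.0;               // IEEE 754 gives -0.0
  std::printf("%d\n", product == 0.0);       // 1: -0.0 compares equal to 0.0
  std::printf("%d\n", 1.0 / product < 0.0);  // 1: but 1 / -0.0 is -infinity
  return 0;
}

Because -0.0 and 0.0 compare equal, the difference is only observable through operations like division, so the constant fold simply bails out here and leaves the case to the general code path.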
1147 case Token::DIV: 1147 case Token::DIV:
1148 case Token::MOD: 1148 case Token::MOD:
1149 break; 1149 break;
1150 case Token::BIT_OR: 1150 case Token::BIT_OR:
1151 answer_object = Smi::FromInt(left | right); 1151 answer_object = Smi::FromInt(left | right);
(...skipping 486 matching lines...)
1638 1638
1639 virtual void Generate(); 1639 virtual void Generate();
1640 1640
1641 private: 1641 private:
1642 Register dst_; 1642 Register dst_;
1643 Smi* value_; 1643 Smi* value_;
1644 OverwriteMode overwrite_mode_; 1644 OverwriteMode overwrite_mode_;
1645 }; 1645 };
1646 1646
1647 1647
1648
1649 void DeferredInlineSmiSub::Generate() { 1648 void DeferredInlineSmiSub::Generate() {
1650 GenericBinaryOpStub igostub(Token::SUB, overwrite_mode_, NO_SMI_CODE_IN_STUB); 1649 GenericBinaryOpStub igostub(Token::SUB, overwrite_mode_, NO_SMI_CODE_IN_STUB);
1651 igostub.GenerateCall(masm_, dst_, value_); 1650 igostub.GenerateCall(masm_, dst_, value_);
1652 if (!dst_.is(rax)) __ movq(dst_, rax); 1651 if (!dst_.is(rax)) __ movq(dst_, rax);
1653 } 1652 }
1654 1653
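Note: DeferredInlineSmiSub above is an instance of the fast-path/slow-path split used throughout this file: the inline code handles the common (smi) case and jumps to the deferred code only when its assumptions fail, and both rejoin at the exit label. A rough standalone analogue in plain C++, with stand-in names and a made-up bail-out condition:

#include <cstdio>
#include <optional>

// Fast path: stand-in for the inline smi subtraction; gives up on "large" inputs.
std::optional<int> FastSub(int value, int constant) {
  if (value > 1000 || value < -1000) return std::nullopt;  // bail to slow path
  return value - constant;
}

// Slow path: stand-in for the deferred stub call.
int SlowSub(int value, int constant) { return value - constant; }

int Sub(int value, int constant) {
  if (auto fast = FastSub(value, constant)) return *fast;  // common case, no call
  return SlowSub(value, constant);                         // deferred fallback
}

int main() {
  std::printf("%d %d\n", Sub(5, 3), Sub(100000, 3));  // fast path, then slow path
  return 0;
}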
1655 1654
1656 Result CodeGenerator::ConstantSmiBinaryOperation(BinaryOperation* expr, 1655 Result CodeGenerator::ConstantSmiBinaryOperation(BinaryOperation* expr,
1657 Result* operand, 1656 Result* operand,
1658 Handle<Object> value, 1657 Handle<Object> value,
(...skipping 44 matching lines...)
1703 } 1702 }
1704 1703
1705 case Token::SUB: { 1704 case Token::SUB: {
1706 if (reversed) { 1705 if (reversed) {
1707 Result constant_operand(value); 1706 Result constant_operand(value);
1708 answer = LikelySmiBinaryOperation(expr, &constant_operand, operand, 1707 answer = LikelySmiBinaryOperation(expr, &constant_operand, operand,
1709 overwrite_mode); 1708 overwrite_mode);
1710 } else { 1709 } else {
1711 operand->ToRegister(); 1710 operand->ToRegister();
1712 frame_->Spill(operand->reg()); 1711 frame_->Spill(operand->reg());
1712 answer = *operand;
1713 DeferredCode* deferred = new DeferredInlineSmiSub(operand->reg(), 1713 DeferredCode* deferred = new DeferredInlineSmiSub(operand->reg(),
1714 smi_value, 1714 smi_value,
1715 overwrite_mode); 1715 overwrite_mode);
1716 JumpIfNotSmiUsingTypeInfo(operand->reg(), operand->type_info(), 1716 JumpIfNotSmiUsingTypeInfo(operand->reg(), operand->type_info(),
1717 deferred); 1717 deferred);
1718 // A smi currently fits in a 32-bit Immediate. 1718 // A smi currently fits in a 32-bit Immediate.
1719 __ SmiSubConstant(operand->reg(), 1719 __ SmiSubConstant(operand->reg(),
1720 operand->reg(), 1720 operand->reg(),
1721 smi_value, 1721 smi_value,
1722 deferred->entry_label()); 1722 deferred->entry_label());
1723 deferred->BindExit(); 1723 deferred->BindExit();
1724 answer = *operand; 1724 operand->Unuse();
1725 } 1725 }
1726 break; 1726 break;
1727 } 1727 }
1728 1728
1729 case Token::SAR: 1729 case Token::SAR:
1730 if (reversed) { 1730 if (reversed) {
1731 Result constant_operand(value); 1731 Result constant_operand(value);
1732 answer = LikelySmiBinaryOperation(expr, &constant_operand, operand, 1732 answer = LikelySmiBinaryOperation(expr, &constant_operand, operand,
1733 overwrite_mode); 1733 overwrite_mode);
1734 } else { 1734 } else {
(...skipping 190 matching lines...)
1925 answer = LikelySmiBinaryOperation(expr, operand, &constant_operand, 1925 answer = LikelySmiBinaryOperation(expr, operand, &constant_operand,
1926 overwrite_mode); 1926 overwrite_mode);
1927 } 1927 }
1928 break; 1928 break;
1929 } 1929 }
1930 } 1930 }
1931 ASSERT(answer.is_valid()); 1931 ASSERT(answer.is_valid());
1932 return answer; 1932 return answer;
1933 } 1933 }
1934 1934
1935
1935 static bool CouldBeNaN(const Result& result) { 1936 static bool CouldBeNaN(const Result& result) {
1936 if (result.type_info().IsSmi()) return false; 1937 if (result.type_info().IsSmi()) return false;
1937 if (result.type_info().IsInteger32()) return false; 1938 if (result.type_info().IsInteger32()) return false;
1938 if (!result.is_constant()) return true; 1939 if (!result.is_constant()) return true;
1939 if (!result.handle()->IsHeapNumber()) return false; 1940 if (!result.handle()->IsHeapNumber()) return false;
1940 return isnan(HeapNumber::cast(*result.handle())->value()); 1941 return isnan(HeapNumber::cast(*result.handle())->value());
1941 } 1942 }
1942 1943
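Note: CouldBeNaN() above feeds the kBothCouldBeNaN / kCantBothBeNaN choice used by the comparison code below. The identity check (cmpq of the two operand registers) can only prove equality when at least one side is known not to be NaN, because NaN compares unequal to everything, including itself. A one-line standalone reminder of that IEEE 754 rule:

#include <cmath>
#include <cstdio>

int main() {
  double nan = std::nan("");
  std::printf("%d\n", nan == nan);  // 0: NaN is not equal to itself
  return 0;
}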
1943 1944
1944 // Convert from signed to unsigned comparison to match the way EFLAGS are set 1945 // Convert from signed to unsigned comparison to match the way EFLAGS are set
(...skipping 227 matching lines...)
2172 Smi::FromInt(1)); 2173 Smi::FromInt(1));
2173 __ bind(&characters_were_different); 2174 __ bind(&characters_were_different);
2174 } 2175 }
2175 temp2.Unuse(); 2176 temp2.Unuse();
2176 left_side.Unuse(); 2177 left_side.Unuse();
2177 right_side.Unuse(); 2178 right_side.Unuse();
2178 dest->Split(cc); 2179 dest->Split(cc);
2179 } 2180 }
2180 } else { 2181 } else {
2181 // Neither side is a constant Smi, constant 1-char string, or constant null. 2182 // Neither side is a constant Smi, constant 1-char string, or constant null.
2182 // If either side is a non-smi constant, skip the smi check. 2183 // If either side is a non-smi constant, or known to be a heap number,
2184 // skip the smi check.
2183 bool known_non_smi = 2185 bool known_non_smi =
2184 (left_side.is_constant() && !left_side.handle()->IsSmi()) || 2186 (left_side.is_constant() && !left_side.handle()->IsSmi()) ||
2185 (right_side.is_constant() && !right_side.handle()->IsSmi()) || 2187 (right_side.is_constant() && !right_side.handle()->IsSmi()) ||
2186 left_side.type_info().IsDouble() || 2188 left_side.type_info().IsDouble() ||
2187 right_side.type_info().IsDouble(); 2189 right_side.type_info().IsDouble();
2188 2190
2189 NaNInformation nan_info = 2191 NaNInformation nan_info =
2190 (CouldBeNaN(left_side) && CouldBeNaN(right_side)) ? 2192 (CouldBeNaN(left_side) && CouldBeNaN(right_side)) ?
2191 kBothCouldBeNaN : 2193 kBothCouldBeNaN :
2192 kCantBothBeNaN; 2194 kCantBothBeNaN;
2193 2195
2194 // Inline number comparison handling any combination of smi's and heap 2196 // Inline number comparison handling any combination of smi's and heap
2195 // numbers if: 2197 // numbers if:
2196 // code is in a loop 2198 // code is in a loop
2197 // the compare operation is different from equal 2199 // the compare operation is different from equal
2198 // compare is not a for-loop comparison 2200 // compare is not a for-loop comparison
2199 // The reason for excluding equal is that it will most likely be done 2201 // The reason for excluding equal is that it will most likely be done
2200 // with smi's (not heap numbers) and the code to comparing smi's is inlined 2202 // with smi's (not heap numbers) and the code to comparing smi's is inlined
2201 // separately. The same reason applies for for-loop comparison which will 2203 // separately. The same reason applies for for-loop comparison which will
2202 // also most likely be smi comparisons. 2204 // also most likely be smi comparisons.
2203 bool is_loop_condition = (node->AsExpression() != NULL) 2205 bool is_loop_condition = (node->AsExpression() != NULL)
2204 && node->AsExpression()->is_loop_condition(); 2206 && node->AsExpression()->is_loop_condition();
2205 bool inline_number_compare = 2207 bool inline_number_compare =
2206 loop_nesting() > 0 && cc != equal && !is_loop_condition; 2208 loop_nesting() > 0 && cc != equal && !is_loop_condition;
2207 2209
2210 // Left and right needed in registers for the following code.
2208 left_side.ToRegister(); 2211 left_side.ToRegister();
2209 right_side.ToRegister(); 2212 right_side.ToRegister();
2210 2213
2211 if (known_non_smi) { 2214 if (known_non_smi) {
2212 // Inlined equality check: 2215 // Inlined equality check:
2213 // If at least one of the objects is not NaN, then if the objects 2216 // If at least one of the objects is not NaN, then if the objects
2214 // are identical, they are equal. 2217 // are identical, they are equal.
2215 if (nan_info == kCantBothBeNaN && cc == equal) { 2218 if (nan_info == kCantBothBeNaN && cc == equal) {
2216 __ cmpq(left_side.reg(), right_side.reg()); 2219 __ cmpq(left_side.reg(), right_side.reg());
2217 dest->true_target()->Branch(equal); 2220 dest->true_target()->Branch(equal);
2218 } 2221 }
2219 2222
2220 // Inlined number comparison: 2223 // Inlined number comparison:
2221 if (inline_number_compare) { 2224 if (inline_number_compare) {
2222 GenerateInlineNumberComparison(&left_side, &right_side, cc, dest); 2225 GenerateInlineNumberComparison(&left_side, &right_side, cc, dest);
2223 } 2226 }
2224 2227
2228 // End of in-line compare, call out to the compare stub. Don't include
2229 // number comparison in the stub if it was inlined.
2225 CompareStub stub(cc, strict, nan_info, !inline_number_compare); 2230 CompareStub stub(cc, strict, nan_info, !inline_number_compare);
2226 Result answer = frame_->CallStub(&stub, &left_side, &right_side); 2231 Result answer = frame_->CallStub(&stub, &left_side, &right_side);
2227 __ testq(answer.reg(), answer.reg()); // Sets both zero and sign flag. 2232 __ testq(answer.reg(), answer.reg()); // Sets both zero and sign flag.
2228 answer.Unuse(); 2233 answer.Unuse();
2229 dest->Split(cc); 2234 dest->Split(cc);
2230 } else { 2235 } else {
2231 // Here we split control flow to the stub call and inlined cases 2236 // Here we split control flow to the stub call and inlined cases
2232 // before finally splitting it to the control destination. We use 2237 // before finally splitting it to the control destination. We use
2233 // a jump target and branching to duplicate the virtual frame at 2238 // a jump target and branching to duplicate the virtual frame at
2234 // the first split. We manually handle the off-frame references 2239 // the first split. We manually handle the off-frame references
(...skipping 10 matching lines...)
2245 if (nan_info == kCantBothBeNaN && cc == equal) { 2250 if (nan_info == kCantBothBeNaN && cc == equal) {
2246 __ cmpq(left_side.reg(), right_side.reg()); 2251 __ cmpq(left_side.reg(), right_side.reg());
2247 dest->true_target()->Branch(equal); 2252 dest->true_target()->Branch(equal);
2248 } 2253 }
2249 2254
2250 // Inlined number comparison: 2255 // Inlined number comparison:
2251 if (inline_number_compare) { 2256 if (inline_number_compare) {
2252 GenerateInlineNumberComparison(&left_side, &right_side, cc, dest); 2257 GenerateInlineNumberComparison(&left_side, &right_side, cc, dest);
2253 } 2258 }
2254 2259
2260 // End of in-line compare, call out to the compare stub. Don't include
2261 // number comparison in the stub if it was inlined.
2255 CompareStub stub(cc, strict, nan_info, !inline_number_compare); 2262 CompareStub stub(cc, strict, nan_info, !inline_number_compare);
2256 Result answer = frame_->CallStub(&stub, &left_side, &right_side); 2263 Result answer = frame_->CallStub(&stub, &left_side, &right_side);
2257 __ testq(answer.reg(), answer.reg()); // Sets both zero and sign flags. 2264 __ testq(answer.reg(), answer.reg()); // Sets both zero and sign flags.
2258 answer.Unuse(); 2265 answer.Unuse();
2259 dest->true_target()->Branch(cc); 2266 dest->true_target()->Branch(cc);
2260 dest->false_target()->Jump(); 2267 dest->false_target()->Jump();
2261 2268
2262 is_smi.Bind(); 2269 is_smi.Bind();
2263 left_side = Result(left_reg); 2270 left_side = Result(left_reg);
2264 right_side = Result(right_reg); 2271 right_side = Result(right_reg);
(...skipping 432 matching lines...)
2697 2704
2698 void CodeGenerator::CheckStack() { 2705 void CodeGenerator::CheckStack() {
2699 DeferredStackCheck* deferred = new DeferredStackCheck; 2706 DeferredStackCheck* deferred = new DeferredStackCheck;
2700 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); 2707 __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
2701 deferred->Branch(below); 2708 deferred->Branch(below);
2702 deferred->BindExit(); 2709 deferred->BindExit();
2703 } 2710 }
2704 2711
2705 2712
2706 void CodeGenerator::VisitAndSpill(Statement* statement) { 2713 void CodeGenerator::VisitAndSpill(Statement* statement) {
2707 // TODO(X64): No architecture specific code. Move to shared location.
2708 ASSERT(in_spilled_code()); 2714 ASSERT(in_spilled_code());
2709 set_in_spilled_code(false); 2715 set_in_spilled_code(false);
2710 Visit(statement); 2716 Visit(statement);
2711 if (frame_ != NULL) { 2717 if (frame_ != NULL) {
2712 frame_->SpillAll(); 2718 frame_->SpillAll();
2713 } 2719 }
2714 set_in_spilled_code(true); 2720 set_in_spilled_code(true);
2715 } 2721 }
2716 2722
2717 2723
2718 void CodeGenerator::VisitStatementsAndSpill(ZoneList<Statement*>* statements) { 2724 void CodeGenerator::VisitStatementsAndSpill(ZoneList<Statement*>* statements) {
2725 #ifdef DEBUG
2726 int original_height = frame_->height();
2727 #endif
2719 ASSERT(in_spilled_code()); 2728 ASSERT(in_spilled_code());
2720 set_in_spilled_code(false); 2729 set_in_spilled_code(false);
2721 VisitStatements(statements); 2730 VisitStatements(statements);
2722 if (frame_ != NULL) { 2731 if (frame_ != NULL) {
2723 frame_->SpillAll(); 2732 frame_->SpillAll();
2724 } 2733 }
2725 set_in_spilled_code(true); 2734 set_in_spilled_code(true);
2735
2736 ASSERT(!has_valid_frame() || frame_->height() == original_height);
2726 } 2737 }
2727 2738
2728 2739
2729 void CodeGenerator::VisitStatements(ZoneList<Statement*>* statements) { 2740 void CodeGenerator::VisitStatements(ZoneList<Statement*>* statements) {
2741 #ifdef DEBUG
2742 int original_height = frame_->height();
2743 #endif
2730 ASSERT(!in_spilled_code()); 2744 ASSERT(!in_spilled_code());
2731 for (int i = 0; has_valid_frame() && i < statements->length(); i++) { 2745 for (int i = 0; has_valid_frame() && i < statements->length(); i++) {
2732 Visit(statements->at(i)); 2746 Visit(statements->at(i));
2733 } 2747 }
2748 ASSERT(!has_valid_frame() || frame_->height() == original_height);
2734 } 2749 }
2735 2750
2736 2751
2737 void CodeGenerator::VisitBlock(Block* node) { 2752 void CodeGenerator::VisitBlock(Block* node) {
2738 ASSERT(!in_spilled_code()); 2753 ASSERT(!in_spilled_code());
2739 Comment cmnt(masm_, "[ Block"); 2754 Comment cmnt(masm_, "[ Block");
2740 CodeForStatementPosition(node); 2755 CodeForStatementPosition(node);
2741 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY); 2756 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
2742 VisitStatements(node->statements()); 2757 VisitStatements(node->statements());
2743 if (node->break_target()->is_linked()) { 2758 if (node->break_target()->is_linked()) {
(...skipping 215 matching lines...)
2959 } 2974 }
2960 2975
2961 2976
2962 void CodeGenerator::VisitReturnStatement(ReturnStatement* node) { 2977 void CodeGenerator::VisitReturnStatement(ReturnStatement* node) {
2963 ASSERT(!in_spilled_code()); 2978 ASSERT(!in_spilled_code());
2964 Comment cmnt(masm_, "[ ReturnStatement"); 2979 Comment cmnt(masm_, "[ ReturnStatement");
2965 2980
2966 CodeForStatementPosition(node); 2981 CodeForStatementPosition(node);
2967 Load(node->expression()); 2982 Load(node->expression());
2968 Result return_value = frame_->Pop(); 2983 Result return_value = frame_->Pop();
2984 masm()->WriteRecordedPositions();
2969 if (function_return_is_shadowed_) { 2985 if (function_return_is_shadowed_) {
2970 function_return_.Jump(&return_value); 2986 function_return_.Jump(&return_value);
2971 } else { 2987 } else {
2972 frame_->PrepareForReturn(); 2988 frame_->PrepareForReturn();
2973 if (function_return_.is_bound()) { 2989 if (function_return_.is_bound()) {
2974 // If the function return label is already bound we reuse the 2990 // If the function return label is already bound we reuse the
2975 // code by jumping to the return site. 2991 // code by jumping to the return site.
2976 function_return_.Jump(&return_value); 2992 function_return_.Jump(&return_value);
2977 } else { 2993 } else {
2978 function_return_.Bind(&return_value); 2994 function_return_.Bind(&return_value);
(...skipping 17 matching lines...)
2996 // Add a label for checking the size of the code used for returning. 3012 // Add a label for checking the size of the code used for returning.
2997 #ifdef DEBUG 3013 #ifdef DEBUG
2998 Label check_exit_codesize; 3014 Label check_exit_codesize;
2999 masm_->bind(&check_exit_codesize); 3015 masm_->bind(&check_exit_codesize);
3000 #endif 3016 #endif
3001 3017
3002 // Leave the frame and return popping the arguments and the 3018 // Leave the frame and return popping the arguments and the
3003 // receiver. 3019 // receiver.
3004 frame_->Exit(); 3020 frame_->Exit();
3005 masm_->ret((scope()->num_parameters() + 1) * kPointerSize); 3021 masm_->ret((scope()->num_parameters() + 1) * kPointerSize);
3022 DeleteFrame();
3023
3006 #ifdef ENABLE_DEBUGGER_SUPPORT 3024 #ifdef ENABLE_DEBUGGER_SUPPORT
3007 // Add padding that will be overwritten by a debugger breakpoint. 3025 // Add padding that will be overwritten by a debugger breakpoint.
3008 // frame_->Exit() generates "movq rsp, rbp; pop rbp; ret k" 3026 // frame_->Exit() generates "movq rsp, rbp; pop rbp; ret k"
3009 // with length 7 (3 + 1 + 3). 3027 // with length 7 (3 + 1 + 3).
3010 const int kPadding = Assembler::kJSReturnSequenceLength - 7; 3028 const int kPadding = Assembler::kJSReturnSequenceLength - 7;
3011 for (int i = 0; i < kPadding; ++i) { 3029 for (int i = 0; i < kPadding; ++i) {
3012 masm_->int3(); 3030 masm_->int3();
3013 } 3031 }
3014 // Check that the size of the code used for returning matches what is 3032 // Check that the size of the code used for returning matches what is
3015 // expected by the debugger. 3033 // expected by the debugger.
3016 ASSERT_EQ(Assembler::kJSReturnSequenceLength, 3034 ASSERT_EQ(Assembler::kJSReturnSequenceLength,
3017 masm_->SizeOfCodeGeneratedSince(&check_exit_codesize)); 3035 masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
3018 #endif 3036 #endif
3019 DeleteFrame();
3020 } 3037 }
3021 3038
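Note on the return-sequence padding above: frame_->Exit() emits "movq rsp, rbp; pop rbp; ret k", which encodes to 3 + 1 + 3 = 7 bytes, and the loop pads with int3 up to Assembler::kJSReturnSequenceLength so the debugger can later overwrite the whole sequence in place. The arithmetic, spelled out with a hypothetical sequence length (the real constant lives in the assembler):

#include <cstdio>

int main() {
  // Sizes of the three instructions emitted by frame_->Exit().
  const int kMovRspRbpSize = 3;  // movq rsp, rbp (REX.W prefix + opcode + modrm)
  const int kPopRbpSize = 1;     // pop rbp
  const int kRetSize = 3;        // ret imm16
  const int kExitSequenceSize = kMovRspRbpSize + kPopRbpSize + kRetSize;  // 7

  // Hypothetical value, for illustration only; the real constant is
  // Assembler::kJSReturnSequenceLength.
  const int kJSReturnSequenceLength = 13;
  std::printf("%d int3 padding bytes\n",
              kJSReturnSequenceLength - kExitSequenceSize);
  return 0;
}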
3022 3039
3023 void CodeGenerator::VisitWithEnterStatement(WithEnterStatement* node) { 3040 void CodeGenerator::VisitWithEnterStatement(WithEnterStatement* node) {
3024 ASSERT(!in_spilled_code()); 3041 ASSERT(!in_spilled_code());
3025 Comment cmnt(masm_, "[ WithEnterStatement"); 3042 Comment cmnt(masm_, "[ WithEnterStatement");
3026 CodeForStatementPosition(node); 3043 CodeForStatementPosition(node);
3027 Load(node->expression()); 3044 Load(node->expression());
3028 Result context; 3045 Result context;
3029 if (node->is_catch_block()) { 3046 if (node->is_catch_block()) {
(...skipping 18 matching lines...)
3048 Comment cmnt(masm_, "[ WithExitStatement"); 3065 Comment cmnt(masm_, "[ WithExitStatement");
3049 CodeForStatementPosition(node); 3066 CodeForStatementPosition(node);
3050 // Pop context. 3067 // Pop context.
3051 __ movq(rsi, ContextOperand(rsi, Context::PREVIOUS_INDEX)); 3068 __ movq(rsi, ContextOperand(rsi, Context::PREVIOUS_INDEX));
3052 // Update context local. 3069 // Update context local.
3053 frame_->SaveContextRegister(); 3070 frame_->SaveContextRegister();
3054 } 3071 }
3055 3072
3056 3073
3057 void CodeGenerator::VisitSwitchStatement(SwitchStatement* node) { 3074 void CodeGenerator::VisitSwitchStatement(SwitchStatement* node) {
3058 // TODO(X64): This code is completely generic and should be moved somewhere
3059 // where it can be shared between architectures.
3060 ASSERT(!in_spilled_code()); 3075 ASSERT(!in_spilled_code());
3061 Comment cmnt(masm_, "[ SwitchStatement"); 3076 Comment cmnt(masm_, "[ SwitchStatement");
3062 CodeForStatementPosition(node); 3077 CodeForStatementPosition(node);
3063 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY); 3078 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
3064 3079
3065 // Compile the switch value. 3080 // Compile the switch value.
3066 Load(node->tag()); 3081 Load(node->tag());
3067 3082
3068 ZoneList<CaseClause*>* cases = node->cases(); 3083 ZoneList<CaseClause*>* cases = node->cases();
3069 int length = cases->length(); 3084 int length = cases->length();
(...skipping 271 matching lines...)
3341 if (node->continue_target()->is_linked()) { 3356 if (node->continue_target()->is_linked()) {
3342 node->continue_target()->Bind(); 3357 node->continue_target()->Bind();
3343 } 3358 }
3344 if (has_valid_frame()) { 3359 if (has_valid_frame()) {
3345 // The break target is the fall-through (body is a backward 3360 // The break target is the fall-through (body is a backward
3346 // jump from here and thus an invalid fall-through). 3361 // jump from here and thus an invalid fall-through).
3347 ControlDestination dest(&body, node->break_target(), false); 3362 ControlDestination dest(&body, node->break_target(), false);
3348 LoadCondition(node->cond(), &dest, true); 3363 LoadCondition(node->cond(), &dest, true);
3349 } 3364 }
3350 } else { 3365 } else {
3351 // If we have chosen not to recompile the test at the 3366 // If we have chosen not to recompile the test at the bottom,
3352 // bottom, jump back to the one at the top. 3367 // jump back to the one at the top.
3353 if (has_valid_frame()) { 3368 if (has_valid_frame()) {
3354 node->continue_target()->Jump(); 3369 node->continue_target()->Jump();
3355 } 3370 }
3356 } 3371 }
3357 break; 3372 break;
3358 case ALWAYS_FALSE: 3373 case ALWAYS_FALSE:
3359 UNREACHABLE(); 3374 UNREACHABLE();
3360 break; 3375 break;
3361 } 3376 }
3362 3377
(...skipping 541 matching lines...)
3904 node->break_target()->Bind(); 3919 node->break_target()->Bind();
3905 frame_->Drop(5); 3920 frame_->Drop(5);
3906 3921
3907 // Exit. 3922 // Exit.
3908 exit.Bind(); 3923 exit.Bind();
3909 3924
3910 node->continue_target()->Unuse(); 3925 node->continue_target()->Unuse();
3911 node->break_target()->Unuse(); 3926 node->break_target()->Unuse();
3912 } 3927 }
3913 3928
3929
3914 void CodeGenerator::VisitTryCatchStatement(TryCatchStatement* node) { 3930 void CodeGenerator::VisitTryCatchStatement(TryCatchStatement* node) {
3915 ASSERT(!in_spilled_code()); 3931 ASSERT(!in_spilled_code());
3916 VirtualFrame::SpilledScope spilled_scope; 3932 VirtualFrame::SpilledScope spilled_scope;
3917 Comment cmnt(masm_, "[ TryCatchStatement"); 3933 Comment cmnt(masm_, "[ TryCatchStatement");
3918 CodeForStatementPosition(node); 3934 CodeForStatementPosition(node);
3919 3935
3920 JumpTarget try_block; 3936 JumpTarget try_block;
3921 JumpTarget exit; 3937 JumpTarget exit;
3922 3938
3923 try_block.Call(); 3939 try_block.Call();
(...skipping 8160 matching lines...)
12084 #undef __ 12100 #undef __
12085 12101
12086 void RecordWriteStub::Generate(MacroAssembler* masm) { 12102 void RecordWriteStub::Generate(MacroAssembler* masm) {
12087 masm->RecordWriteHelper(object_, addr_, scratch_); 12103 masm->RecordWriteHelper(object_, addr_, scratch_);
12088 masm->ret(0); 12104 masm->ret(0);
12089 } 12105 }
12090 12106
12091 } } // namespace v8::internal 12107 } } // namespace v8::internal
12092 12108
12093 #endif // V8_TARGET_ARCH_X64 12109 #endif // V8_TARGET_ARCH_X64
