| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 25 matching lines...) Expand all Loading... |
| 36 #include "full-codegen.h" | 36 #include "full-codegen.h" |
| 37 #include "parser.h" | 37 #include "parser.h" |
| 38 #include "scopes.h" | 38 #include "scopes.h" |
| 39 #include "stub-cache.h" | 39 #include "stub-cache.h" |
| 40 | 40 |
| 41 namespace v8 { | 41 namespace v8 { |
| 42 namespace internal { | 42 namespace internal { |
| 43 | 43 |
| 44 #define __ ACCESS_MASM(masm_) | 44 #define __ ACCESS_MASM(masm_) |
| 45 | 45 |
| 46 |
| 47 class JumpPatchSite BASE_EMBEDDED { |
| 48 public: |
| 49 explicit JumpPatchSite(MacroAssembler* masm) |
| 50 : masm_(masm) { |
| 51 #ifdef DEBUG |
| 52 info_emitted_ = false; |
| 53 #endif |
| 54 } |
| 55 |
| 56 ~JumpPatchSite() { |
| 57 ASSERT(patch_site_.is_bound() == info_emitted_); |
| 58 } |
| 59 |
| 60 void EmitJumpIfNotSmi(Register reg, NearLabel* target) { |
| 61 __ testb(reg, Immediate(kSmiTagMask)); |
| 62 EmitJump(not_carry, target); // Always taken before patched. |
| 63 } |
| 64 |
| 65 void EmitJumpIfSmi(Register reg, NearLabel* target) { |
| 66 __ testb(reg, Immediate(kSmiTagMask)); |
| 67 EmitJump(carry, target); // Never taken before patched. |
| 68 } |
| 69 |
| 70 void EmitPatchInfo() { |
| 71 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_); |
| 72 ASSERT(is_int8(delta_to_patch_site)); |
| 73 __ testl(rax, Immediate(delta_to_patch_site)); |
| 74 #ifdef DEBUG |
| 75 info_emitted_ = true; |
| 76 #endif |
| 77 } |
| 78 |
| 79 bool is_bound() const { return patch_site_.is_bound(); } |
| 80 |
| 81 private: |
| 82 // jc will be patched with jz, jnc will become jnz. |
| 83 void EmitJump(Condition cc, NearLabel* target) { |
| 84 ASSERT(!patch_site_.is_bound() && !info_emitted_); |
| 85 ASSERT(cc == carry || cc == not_carry); |
| 86 __ bind(&patch_site_); |
| 87 __ j(cc, target); |
| 88 } |
| 89 |
| 90 MacroAssembler* masm_; |
| 91 Label patch_site_; |
| 92 #ifdef DEBUG |
| 93 bool info_emitted_; |
| 94 #endif |
| 95 }; |
| 96 |
| 97 |
| 46 // Generate code for a JS function. On entry to the function the receiver | 98 // Generate code for a JS function. On entry to the function the receiver |
| 47 // and arguments have been pushed on the stack left to right, with the | 99 // and arguments have been pushed on the stack left to right, with the |
| 48 // return address on top of them. The actual argument count matches the | 100 // return address on top of them. The actual argument count matches the |
| 49 // formal parameter count expected by the function. | 101 // formal parameter count expected by the function. |
| 50 // | 102 // |
| 51 // The live registers are: | 103 // The live registers are: |
| 52 // o rdi: the JS function object being called (ie, ourselves) | 104 // o rdi: the JS function object being called (ie, ourselves) |
| 53 // o rsi: our context | 105 // o rsi: our context |
| 54 // o rbp: our caller's frame pointer | 106 // o rbp: our caller's frame pointer |
| 55 // o rsp: stack pointer (pointing to return address) | 107 // o rsp: stack pointer (pointing to return address) |
| (...skipping 182 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 238 // Add a label for checking the size of the code used for returning. | 290 // Add a label for checking the size of the code used for returning. |
| 239 Label check_exit_codesize; | 291 Label check_exit_codesize; |
| 240 masm_->bind(&check_exit_codesize); | 292 masm_->bind(&check_exit_codesize); |
| 241 #endif | 293 #endif |
| 242 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1); | 294 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1); |
| 243 __ RecordJSReturn(); | 295 __ RecordJSReturn(); |
| 244 // Do not use the leave instruction here because it is too short to | 296 // Do not use the leave instruction here because it is too short to |
| 245 // patch with the code required by the debugger. | 297 // patch with the code required by the debugger. |
| 246 __ movq(rsp, rbp); | 298 __ movq(rsp, rbp); |
| 247 __ pop(rbp); | 299 __ pop(rbp); |
| 248 __ ret((scope()->num_parameters() + 1) * kPointerSize); | 300 |
| 301 int arguments_bytes = (scope()->num_parameters() + 1) * kPointerSize; |
| 302 __ Ret(arguments_bytes, rcx); |
| 303 |
| 249 #ifdef ENABLE_DEBUGGER_SUPPORT | 304 #ifdef ENABLE_DEBUGGER_SUPPORT |
| 250 // Add padding that will be overwritten by a debugger breakpoint. We | 305 // Add padding that will be overwritten by a debugger breakpoint. We |
| 251 // have just generated "movq rsp, rbp; pop rbp; ret k" with length 7 | 306 // have just generated at least 7 bytes: "movq rsp, rbp; pop rbp; ret k" |
| 252 // (3 + 1 + 3). | 307 // (3 + 1 + 3). |
| 253 const int kPadding = Assembler::kJSReturnSequenceLength - 7; | 308 const int kPadding = Assembler::kJSReturnSequenceLength - 7; |
| 254 for (int i = 0; i < kPadding; ++i) { | 309 for (int i = 0; i < kPadding; ++i) { |
| 255 masm_->int3(); | 310 masm_->int3(); |
| 256 } | 311 } |
| 257 // Check that the size of the code used for returning matches what is | 312 // Check that the size of the code used for returning is large enough |
| 258 // expected by the debugger. | 313 // for the debugger's requirements. |
| 259 ASSERT_EQ(Assembler::kJSReturnSequenceLength, | 314 ASSERT(Assembler::kJSReturnSequenceLength <= |
| 260 masm_->SizeOfCodeGeneratedSince(&check_exit_codesize)); | 315 masm_->SizeOfCodeGeneratedSince(&check_exit_codesize)); |
| 261 #endif | 316 #endif |
| 262 } | 317 } |
| 263 } | 318 } |
| 264 | 319 |
| 265 | 320 |
| 266 FullCodeGenerator::ConstantOperand FullCodeGenerator::GetConstantOperand( | 321 FullCodeGenerator::ConstantOperand FullCodeGenerator::GetConstantOperand( |
| 267 Token::Value op, Expression* left, Expression* right) { | 322 Token::Value op, Expression* left, Expression* right) { |
| 268 ASSERT(ShouldInlineSmiCase(op)); | 323 ASSERT(ShouldInlineSmiCase(op)); |
| 269 return kNoConstants; | 324 return kNoConstants; |
| 270 } | 325 } |
| (...skipping 381 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 652 __ Push(Smi::FromInt(0)); // no initial value! | 707 __ Push(Smi::FromInt(0)); // no initial value! |
| 653 } | 708 } |
| 654 __ CallRuntime(Runtime::kDeclareContextSlot, 4); | 709 __ CallRuntime(Runtime::kDeclareContextSlot, 4); |
| 655 break; | 710 break; |
| 656 } | 711 } |
| 657 } | 712 } |
| 658 | 713 |
| 659 } else if (prop != NULL) { | 714 } else if (prop != NULL) { |
| 660 if (function != NULL || mode == Variable::CONST) { | 715 if (function != NULL || mode == Variable::CONST) { |
| 661 // We are declaring a function or constant that rewrites to a | 716 // We are declaring a function or constant that rewrites to a |
| 662 // property. Use (keyed) IC to set the initial value. | 717 // property. Use (keyed) IC to set the initial value. We |
| 663 VisitForStackValue(prop->obj()); | 718 // cannot visit the rewrite because it's shared and we risk |
| 719 // recording duplicate AST IDs for bailouts from optimized code. |
| 720 ASSERT(prop->obj()->AsVariableProxy() != NULL); |
| 721 { AccumulatorValueContext for_object(this); |
| 722 EmitVariableLoad(prop->obj()->AsVariableProxy()->var()); |
| 723 } |
| 664 if (function != NULL) { | 724 if (function != NULL) { |
| 665 VisitForStackValue(prop->key()); | 725 __ push(rax); |
| 666 VisitForAccumulatorValue(function); | 726 VisitForAccumulatorValue(function); |
| 667 __ pop(rcx); | 727 __ pop(rdx); |
| 668 } else { | 728 } else { |
| 669 VisitForAccumulatorValue(prop->key()); | 729 __ movq(rdx, rax); |
| 670 __ movq(rcx, result_register()); | 730 __ LoadRoot(rax, Heap::kTheHoleValueRootIndex); |
| 671 __ LoadRoot(result_register(), Heap::kTheHoleValueRootIndex); | |
| 672 } | 731 } |
| 673 __ pop(rdx); | 732 ASSERT(prop->key()->AsLiteral() != NULL && |
| 733 prop->key()->AsLiteral()->handle()->IsSmi()); |
| 734 __ Move(rcx, prop->key()->AsLiteral()->handle()); |
| 674 | 735 |
| 675 Handle<Code> ic(isolate()->builtins()->builtin( | 736 Handle<Code> ic(isolate()->builtins()->builtin( |
| 676 Builtins::KeyedStoreIC_Initialize)); | 737 Builtins::KeyedStoreIC_Initialize)); |
| 677 EmitCallIC(ic, RelocInfo::CODE_TARGET); | 738 EmitCallIC(ic, RelocInfo::CODE_TARGET); |
| 678 } | 739 } |
| 679 } | 740 } |
| 680 } | 741 } |
| 681 | 742 |
| 682 | 743 |
| 683 void FullCodeGenerator::VisitDeclaration(Declaration* decl) { | 744 void FullCodeGenerator::VisitDeclaration(Declaration* decl) { |
| (...skipping 38 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 722 Comment cmnt(masm_, "[ Case comparison"); | 783 Comment cmnt(masm_, "[ Case comparison"); |
| 723 __ bind(&next_test); | 784 __ bind(&next_test); |
| 724 next_test.Unuse(); | 785 next_test.Unuse(); |
| 725 | 786 |
| 726 // Compile the label expression. | 787 // Compile the label expression. |
| 727 VisitForAccumulatorValue(clause->label()); | 788 VisitForAccumulatorValue(clause->label()); |
| 728 | 789 |
| 729 // Perform the comparison as if via '==='. | 790 // Perform the comparison as if via '==='. |
| 730 __ movq(rdx, Operand(rsp, 0)); // Switch value. | 791 __ movq(rdx, Operand(rsp, 0)); // Switch value. |
| 731 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT); | 792 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT); |
| 793 JumpPatchSite patch_site(masm_); |
| 732 if (inline_smi_code) { | 794 if (inline_smi_code) { |
| 733 Label slow_case; | 795 NearLabel slow_case; |
| 734 __ JumpIfNotBothSmi(rdx, rax, &slow_case); | 796 __ movq(rcx, rdx); |
| 735 __ SmiCompare(rdx, rax); | 797 __ or_(rcx, rax); |
| 798 patch_site.EmitJumpIfNotSmi(rcx, &slow_case); |
| 799 |
| 800 __ cmpq(rdx, rax); |
| 736 __ j(not_equal, &next_test); | 801 __ j(not_equal, &next_test); |
| 737 __ Drop(1); // Switch value is no longer needed. | 802 __ Drop(1); // Switch value is no longer needed. |
| 738 __ jmp(clause->body_target()->entry_label()); | 803 __ jmp(clause->body_target()->entry_label()); |
| 739 __ bind(&slow_case); | 804 __ bind(&slow_case); |
| 740 } | 805 } |
| 741 | 806 |
| 742 CompareFlags flags = inline_smi_code | 807 // Record position before stub call for type feedback. |
| 743 ? NO_SMI_COMPARE_IN_STUB | 808 SetSourcePosition(clause->position()); |
| 744 : NO_COMPARE_FLAGS; | 809 Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT); |
| 745 CompareStub stub(equal, true, flags); | 810 EmitCallIC(ic, &patch_site); |
| 746 __ CallStub(&stub); | 811 |
| 747 __ testq(rax, rax); | 812 __ testq(rax, rax); |
| 748 __ j(not_equal, &next_test); | 813 __ j(not_equal, &next_test); |
| 749 __ Drop(1); // Switch value is no longer needed. | 814 __ Drop(1); // Switch value is no longer needed. |
| 750 __ jmp(clause->body_target()->entry_label()); | 815 __ jmp(clause->body_target()->entry_label()); |
| 751 } | 816 } |
| 752 | 817 |
| 753 // Discard the test value and jump to the default if present, otherwise to | 818 // Discard the test value and jump to the default if present, otherwise to |
| 754 // the end of the statement. | 819 // the end of the statement. |
| 755 __ bind(&next_test); | 820 __ bind(&next_test); |
| 756 __ Drop(1); // Switch value is no longer needed. | 821 __ Drop(1); // Switch value is no longer needed. |
| (...skipping 769 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1526 Token::Value op, | 1591 Token::Value op, |
| 1527 OverwriteMode mode, | 1592 OverwriteMode mode, |
| 1528 Expression* left, | 1593 Expression* left, |
| 1529 Expression* right, | 1594 Expression* right, |
| 1530 ConstantOperand constant) { | 1595 ConstantOperand constant) { |
| 1531 ASSERT(constant == kNoConstants); // Only handled case. | 1596 ASSERT(constant == kNoConstants); // Only handled case. |
| 1532 | 1597 |
| 1533 // Do combined smi check of the operands. Left operand is on the | 1598 // Do combined smi check of the operands. Left operand is on the |
| 1534 // stack (popped into rdx). Right operand is in rax but moved into | 1599 // stack (popped into rdx). Right operand is in rax but moved into |
| 1535 // rcx to make the shifts easier. | 1600 // rcx to make the shifts easier. |
| 1536 Label done, stub_call, smi_case; | 1601 NearLabel done, stub_call, smi_case; |
| 1537 __ pop(rdx); | 1602 __ pop(rdx); |
| 1538 __ movq(rcx, rax); | 1603 __ movq(rcx, rax); |
| 1539 Condition smi = masm()->CheckBothSmi(rdx, rax); | 1604 __ or_(rax, rdx); |
| 1540 __ j(smi, &smi_case); | 1605 JumpPatchSite patch_site(masm_); |
| 1606 patch_site.EmitJumpIfSmi(rax, &smi_case); |
| 1541 | 1607 |
| 1542 __ bind(&stub_call); | 1608 __ bind(&stub_call); |
| 1543 GenericBinaryOpStub stub(op, mode, NO_SMI_CODE_IN_STUB, TypeInfo::Unknown()); | 1609 __ movq(rax, rcx); |
| 1544 if (stub.ArgsInRegistersSupported()) { | 1610 TypeRecordingBinaryOpStub stub(op, mode); |
| 1545 stub.GenerateCall(masm_, rdx, rcx); | 1611 EmitCallIC(stub.GetCode(), &patch_site); |
| 1546 } else { | |
| 1547 __ push(rdx); | |
| 1548 __ push(rcx); | |
| 1549 __ CallStub(&stub); | |
| 1550 } | |
| 1551 __ jmp(&done); | 1612 __ jmp(&done); |
| 1552 | 1613 |
| 1553 __ bind(&smi_case); | 1614 __ bind(&smi_case); |
| 1554 switch (op) { | 1615 switch (op) { |
| 1555 case Token::SAR: | 1616 case Token::SAR: |
| 1556 __ SmiShiftArithmeticRight(rax, rdx, rcx); | 1617 __ SmiShiftArithmeticRight(rax, rdx, rcx); |
| 1557 break; | 1618 break; |
| 1558 case Token::SHL: | 1619 case Token::SHL: |
| 1559 __ SmiShiftLeft(rax, rdx, rcx); | 1620 __ SmiShiftLeft(rax, rdx, rcx); |
| 1560 break; | 1621 break; |
| (...skipping 23 matching lines...) Expand all Loading... |
| 1584 break; | 1645 break; |
| 1585 } | 1646 } |
| 1586 | 1647 |
| 1587 __ bind(&done); | 1648 __ bind(&done); |
| 1588 context()->Plug(rax); | 1649 context()->Plug(rax); |
| 1589 } | 1650 } |
| 1590 | 1651 |
| 1591 | 1652 |
| 1592 void FullCodeGenerator::EmitBinaryOp(Token::Value op, | 1653 void FullCodeGenerator::EmitBinaryOp(Token::Value op, |
| 1593 OverwriteMode mode) { | 1654 OverwriteMode mode) { |
| 1594 GenericBinaryOpStub stub(op, mode, NO_GENERIC_BINARY_FLAGS); | 1655 TypeRecordingBinaryOpStub stub(op, mode); |
| 1595 if (stub.ArgsInRegistersSupported()) { | 1656 __ pop(rdx); |
| 1596 __ pop(rdx); | 1657 __ CallStub(&stub); |
| 1597 stub.GenerateCall(masm_, rdx, rax); | |
| 1598 } else { | |
| 1599 __ push(result_register()); | |
| 1600 __ CallStub(&stub); | |
| 1601 } | |
| 1602 context()->Plug(rax); | 1658 context()->Plug(rax); |
| 1603 } | 1659 } |
| 1604 | 1660 |
| 1605 | 1661 |
| 1606 void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) { | 1662 void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) { |
| 1607 // Invalid left-hand sides are rewritten to have a 'throw | 1663 // Invalid left-hand sides are rewritten to have a 'throw |
| 1608 // ReferenceError' on the left-hand side. | 1664 // ReferenceError' on the left-hand side. |
| 1609 if (!expr->IsValidLeftHandSide()) { | 1665 if (!expr->IsValidLeftHandSide()) { |
| 1610 VisitForEffect(expr); | 1666 VisitForEffect(expr); |
| 1611 return; | 1667 return; |
| (...skipping 333 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1945 | 2001 |
| 1946 // Push copy of the first argument or undefined if it doesn't exist. | 2002 // Push copy of the first argument or undefined if it doesn't exist. |
| 1947 if (arg_count > 0) { | 2003 if (arg_count > 0) { |
| 1948 __ push(Operand(rsp, arg_count * kPointerSize)); | 2004 __ push(Operand(rsp, arg_count * kPointerSize)); |
| 1949 } else { | 2005 } else { |
| 1950 __ PushRoot(Heap::kUndefinedValueRootIndex); | 2006 __ PushRoot(Heap::kUndefinedValueRootIndex); |
| 1951 } | 2007 } |
| 1952 | 2008 |
| 1953 // Push the receiver of the enclosing function and do runtime call. | 2009 // Push the receiver of the enclosing function and do runtime call. |
| 1954 __ push(Operand(rbp, (2 + scope()->num_parameters()) * kPointerSize)); | 2010 __ push(Operand(rbp, (2 + scope()->num_parameters()) * kPointerSize)); |
| 1955 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 3); | 2011 // Push the strict mode flag. |
| 2012 __ Push(Smi::FromInt(strict_mode_flag())); |
| 2013 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 4); |
| 1956 | 2014 |
| 1957 // The runtime call returns a pair of values in rax (function) and | 2015 // The runtime call returns a pair of values in rax (function) and |
| 1958 // rdx (receiver). Touch up the stack with the right values. | 2016 // rdx (receiver). Touch up the stack with the right values. |
| 1959 __ movq(Operand(rsp, (arg_count + 0) * kPointerSize), rdx); | 2017 __ movq(Operand(rsp, (arg_count + 0) * kPointerSize), rdx); |
| 1960 __ movq(Operand(rsp, (arg_count + 1) * kPointerSize), rax); | 2018 __ movq(Operand(rsp, (arg_count + 1) * kPointerSize), rax); |
| 1961 } | 2019 } |
| 1962 // Record source position for debugger. | 2020 // Record source position for debugger. |
| 1963 SetSourcePosition(expr->position()); | 2021 SetSourcePosition(expr->position()); |
| 1964 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP; | 2022 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP; |
| 1965 CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE); | 2023 CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE); |
| (...skipping 1111 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3077 context()->Plug(rax); | 3135 context()->Plug(rax); |
| 3078 break; | 3136 break; |
| 3079 } | 3137 } |
| 3080 | 3138 |
| 3081 case Token::ADD: { | 3139 case Token::ADD: { |
| 3082 Comment cmt(masm_, "[ UnaryOperation (ADD)"); | 3140 Comment cmt(masm_, "[ UnaryOperation (ADD)"); |
| 3083 VisitForAccumulatorValue(expr->expression()); | 3141 VisitForAccumulatorValue(expr->expression()); |
| 3084 Label no_conversion; | 3142 Label no_conversion; |
| 3085 Condition is_smi = masm_->CheckSmi(result_register()); | 3143 Condition is_smi = masm_->CheckSmi(result_register()); |
| 3086 __ j(is_smi, &no_conversion); | 3144 __ j(is_smi, &no_conversion); |
| 3087 __ push(result_register()); | 3145 ToNumberStub convert_stub; |
| 3088 __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION); | 3146 __ CallStub(&convert_stub); |
| 3089 __ bind(&no_conversion); | 3147 __ bind(&no_conversion); |
| 3090 context()->Plug(result_register()); | 3148 context()->Plug(result_register()); |
| 3091 break; | 3149 break; |
| 3092 } | 3150 } |
| 3093 | 3151 |
| 3094 case Token::SUB: { | 3152 case Token::SUB: { |
| 3095 Comment cmt(masm_, "[ UnaryOperation (SUB)"); | 3153 Comment cmt(masm_, "[ UnaryOperation (SUB)"); |
| 3096 bool can_overwrite = expr->expression()->ResultOverwriteAllowed(); | 3154 bool can_overwrite = expr->expression()->ResultOverwriteAllowed(); |
| 3097 UnaryOverwriteMode overwrite = | 3155 UnaryOverwriteMode overwrite = |
| 3098 can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE; | 3156 can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE; |
| (...skipping 95 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3194 | 3252 |
| 3195 // We need a second deoptimization point after loading the value | 3253 // We need a second deoptimization point after loading the value |
| 3196 // in case evaluating the property load may have a side effect. | 3254 // in case evaluating the property load may have a side effect. |
| 3197 PrepareForBailout(expr->increment(), TOS_REG); | 3255 PrepareForBailout(expr->increment(), TOS_REG); |
| 3198 | 3256 |
| 3199 // Call ToNumber only if operand is not a smi. | 3257 // Call ToNumber only if operand is not a smi. |
| 3200 NearLabel no_conversion; | 3258 NearLabel no_conversion; |
| 3201 Condition is_smi; | 3259 Condition is_smi; |
| 3202 is_smi = masm_->CheckSmi(rax); | 3260 is_smi = masm_->CheckSmi(rax); |
| 3203 __ j(is_smi, &no_conversion); | 3261 __ j(is_smi, &no_conversion); |
| 3204 __ push(rax); | 3262 ToNumberStub convert_stub; |
| 3205 __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION); | 3263 __ CallStub(&convert_stub); |
| 3206 __ bind(&no_conversion); | 3264 __ bind(&no_conversion); |
| 3207 | 3265 |
| 3208 // Save result for postfix expressions. | 3266 // Save result for postfix expressions. |
| 3209 if (expr->is_postfix()) { | 3267 if (expr->is_postfix()) { |
| 3210 if (!context()->IsEffect()) { | 3268 if (!context()->IsEffect()) { |
| 3211 // Save the result on the stack. If we have a named or keyed property | 3269 // Save the result on the stack. If we have a named or keyed property |
| 3212 // we store the result under the receiver that is currently on top | 3270 // we store the result under the receiver that is currently on top |
| 3213 // of the stack. | 3271 // of the stack. |
| 3214 switch (assign_type) { | 3272 switch (assign_type) { |
| 3215 case VARIABLE: | 3273 case VARIABLE: |
| 3216 __ push(rax); | 3274 __ push(rax); |
| 3217 break; | 3275 break; |
| 3218 case NAMED_PROPERTY: | 3276 case NAMED_PROPERTY: |
| 3219 __ movq(Operand(rsp, kPointerSize), rax); | 3277 __ movq(Operand(rsp, kPointerSize), rax); |
| 3220 break; | 3278 break; |
| 3221 case KEYED_PROPERTY: | 3279 case KEYED_PROPERTY: |
| 3222 __ movq(Operand(rsp, 2 * kPointerSize), rax); | 3280 __ movq(Operand(rsp, 2 * kPointerSize), rax); |
| 3223 break; | 3281 break; |
| 3224 } | 3282 } |
| 3225 } | 3283 } |
| 3226 } | 3284 } |
| 3227 | 3285 |
| 3228 // Inline smi case if we are in a loop. | 3286 // Inline smi case if we are in a loop. |
| 3229 Label stub_call, done; | 3287 NearLabel stub_call, done; |
| 3288 JumpPatchSite patch_site(masm_); |
| 3289 |
| 3230 if (ShouldInlineSmiCase(expr->op())) { | 3290 if (ShouldInlineSmiCase(expr->op())) { |
| 3231 if (expr->op() == Token::INC) { | 3291 if (expr->op() == Token::INC) { |
| 3232 __ SmiAddConstant(rax, rax, Smi::FromInt(1)); | 3292 __ SmiAddConstant(rax, rax, Smi::FromInt(1)); |
| 3233 } else { | 3293 } else { |
| 3234 __ SmiSubConstant(rax, rax, Smi::FromInt(1)); | 3294 __ SmiSubConstant(rax, rax, Smi::FromInt(1)); |
| 3235 } | 3295 } |
| 3236 __ j(overflow, &stub_call); | 3296 __ j(overflow, &stub_call); |
| 3237 // We could eliminate this smi check if we split the code at | 3297 // We could eliminate this smi check if we split the code at |
| 3238 // the first smi check before calling ToNumber. | 3298 // the first smi check before calling ToNumber. |
| 3239 is_smi = masm_->CheckSmi(rax); | 3299 patch_site.EmitJumpIfSmi(rax, &done); |
| 3240 __ j(is_smi, &done); | 3300 |
| 3241 __ bind(&stub_call); | 3301 __ bind(&stub_call); |
| 3242 // Call stub. Undo operation first. | 3302 // Call stub. Undo operation first. |
| 3243 if (expr->op() == Token::INC) { | 3303 if (expr->op() == Token::INC) { |
| 3244 __ SmiSubConstant(rax, rax, Smi::FromInt(1)); | 3304 __ SmiSubConstant(rax, rax, Smi::FromInt(1)); |
| 3245 } else { | 3305 } else { |
| 3246 __ SmiAddConstant(rax, rax, Smi::FromInt(1)); | 3306 __ SmiAddConstant(rax, rax, Smi::FromInt(1)); |
| 3247 } | 3307 } |
| 3248 } | 3308 } |
| 3249 | 3309 |
| 3250 // Record position before stub call. | 3310 // Record position before stub call. |
| 3251 SetSourcePosition(expr->position()); | 3311 SetSourcePosition(expr->position()); |
| 3252 | 3312 |
| 3253 // Call stub for +1/-1. | 3313 // Call stub for +1/-1. |
| 3254 GenericBinaryOpStub stub(expr->binary_op(), | 3314 TypeRecordingBinaryOpStub stub(expr->binary_op(), NO_OVERWRITE); |
| 3255 NO_OVERWRITE, | 3315 if (expr->op() == Token::INC) { |
| 3256 NO_GENERIC_BINARY_FLAGS); | 3316 __ Move(rdx, Smi::FromInt(1)); |
| 3257 stub.GenerateCall(masm_, rax, Smi::FromInt(1)); | 3317 } else { |
| 3318 __ movq(rdx, rax); |
| 3319 __ Move(rax, Smi::FromInt(1)); |
| 3320 } |
| 3321 EmitCallIC(stub.GetCode(), &patch_site); |
| 3258 __ bind(&done); | 3322 __ bind(&done); |
| 3259 | 3323 |
| 3260 // Store the value returned in rax. | 3324 // Store the value returned in rax. |
| 3261 switch (assign_type) { | 3325 switch (assign_type) { |
| 3262 case VARIABLE: | 3326 case VARIABLE: |
| 3263 if (expr->is_postfix()) { | 3327 if (expr->is_postfix()) { |
| 3264 // Perform the assignment as if via '='. | 3328 // Perform the assignment as if via '='. |
| 3265 { EffectContext context(this); | 3329 { EffectContext context(this); |
| 3266 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), | 3330 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), |
| 3267 Token::ASSIGN); | 3331 Token::ASSIGN); |
| (...skipping 252 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3520 cc = greater_equal; | 3584 cc = greater_equal; |
| 3521 __ pop(rdx); | 3585 __ pop(rdx); |
| 3522 break; | 3586 break; |
| 3523 case Token::IN: | 3587 case Token::IN: |
| 3524 case Token::INSTANCEOF: | 3588 case Token::INSTANCEOF: |
| 3525 default: | 3589 default: |
| 3526 UNREACHABLE(); | 3590 UNREACHABLE(); |
| 3527 } | 3591 } |
| 3528 | 3592 |
| 3529 bool inline_smi_code = ShouldInlineSmiCase(op); | 3593 bool inline_smi_code = ShouldInlineSmiCase(op); |
| 3594 JumpPatchSite patch_site(masm_); |
| 3530 if (inline_smi_code) { | 3595 if (inline_smi_code) { |
| 3531 Label slow_case; | 3596 NearLabel slow_case; |
| 3532 __ JumpIfNotBothSmi(rax, rdx, &slow_case); | 3597 __ movq(rcx, rdx); |
| 3533 __ SmiCompare(rdx, rax); | 3598 __ or_(rcx, rax); |
| 3599 patch_site.EmitJumpIfNotSmi(rcx, &slow_case); |
| 3600 __ cmpq(rdx, rax); |
| 3534 Split(cc, if_true, if_false, NULL); | 3601 Split(cc, if_true, if_false, NULL); |
| 3535 __ bind(&slow_case); | 3602 __ bind(&slow_case); |
| 3536 } | 3603 } |
| 3537 | 3604 |
| 3538 CompareFlags flags = inline_smi_code | 3605 // Record position and call the compare IC. |
| 3539 ? NO_SMI_COMPARE_IN_STUB | 3606 SetSourcePosition(expr->position()); |
| 3540 : NO_COMPARE_FLAGS; | 3607 Handle<Code> ic = CompareIC::GetUninitialized(op); |
| 3541 CompareStub stub(cc, strict, flags); | 3608 EmitCallIC(ic, &patch_site); |
| 3542 __ CallStub(&stub); | |
| 3543 | 3609 |
| 3544 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); | 3610 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
| 3545 __ testq(rax, rax); | 3611 __ testq(rax, rax); |
| 3546 Split(cc, if_true, if_false, fall_through); | 3612 Split(cc, if_true, if_false, fall_through); |
| 3547 } | 3613 } |
| 3548 } | 3614 } |
| 3549 | 3615 |
| 3550 // Convert the result of the comparison into one expected for this | 3616 // Convert the result of the comparison into one expected for this |
| 3551 // expression's context. | 3617 // expression's context. |
| 3552 context()->Plug(if_true, if_false); | 3618 context()->Plug(if_true, if_false); |
| (...skipping 42 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3595 | 3661 |
| 3596 | 3662 |
| 3597 Register FullCodeGenerator::context_register() { | 3663 Register FullCodeGenerator::context_register() { |
| 3598 return rsi; | 3664 return rsi; |
| 3599 } | 3665 } |
| 3600 | 3666 |
| 3601 | 3667 |
| 3602 void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) { | 3668 void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) { |
| 3603 ASSERT(mode == RelocInfo::CODE_TARGET || | 3669 ASSERT(mode == RelocInfo::CODE_TARGET || |
| 3604 mode == RelocInfo::CODE_TARGET_CONTEXT); | 3670 mode == RelocInfo::CODE_TARGET_CONTEXT); |
| 3671 switch (ic->kind()) { |
| 3672 case Code::LOAD_IC: |
| 3673 __ IncrementCounter(COUNTERS->named_load_full(), 1); |
| 3674 break; |
| 3675 case Code::KEYED_LOAD_IC: |
| 3676 __ IncrementCounter(COUNTERS->keyed_load_full(), 1); |
| 3677 break; |
| 3678 case Code::STORE_IC: |
| 3679 __ IncrementCounter(COUNTERS->named_store_full(), 1); |
| 3680 break; |
| 3681 case Code::KEYED_STORE_IC: |
| 3682 __ IncrementCounter(COUNTERS->keyed_store_full(), 1); |
| 3683 default: |
| 3684 break; |
| 3685 } |
| 3686 |
| 3605 __ call(ic, mode); | 3687 __ call(ic, mode); |
| 3606 | 3688 |
| 3607 // Crankshaft doesn't need patching of inlined loads and stores. | 3689 // Crankshaft doesn't need patching of inlined loads and stores. |
| 3608 if (V8::UseCrankshaft()) return; | 3690 // When compiling the snapshot we need to produce code that works |
| 3691 // with and without Crankshaft. |
| 3692 if (V8::UseCrankshaft() && !Serializer::enabled()) { |
| 3693 return; |
| 3694 } |
| 3609 | 3695 |
| 3610 // If we're calling a (keyed) load or store stub, we have to mark | 3696 // If we're calling a (keyed) load or store stub, we have to mark |
| 3611 // the call as containing no inlined code so we will not attempt to | 3697 // the call as containing no inlined code so we will not attempt to |
| 3612 // patch it. | 3698 // patch it. |
| 3613 switch (ic->kind()) { | 3699 switch (ic->kind()) { |
| 3614 case Code::LOAD_IC: | 3700 case Code::LOAD_IC: |
| 3615 case Code::KEYED_LOAD_IC: | 3701 case Code::KEYED_LOAD_IC: |
| 3616 case Code::STORE_IC: | 3702 case Code::STORE_IC: |
| 3617 case Code::KEYED_STORE_IC: | 3703 case Code::KEYED_STORE_IC: |
| 3618 __ nop(); // Signals no inlined code. | 3704 __ nop(); // Signals no inlined code. |
| 3619 break; | 3705 break; |
| 3620 default: | 3706 default: |
| 3621 // Do nothing. | 3707 // Do nothing. |
| 3622 break; | 3708 break; |
| 3623 } | 3709 } |
| 3624 } | 3710 } |
| 3625 | 3711 |
| 3626 | 3712 |
| 3713 void FullCodeGenerator::EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site) { |
| 3714 __ call(ic, RelocInfo::CODE_TARGET); |
| 3715 if (patch_site != NULL && patch_site->is_bound()) { |
| 3716 patch_site->EmitPatchInfo(); |
| 3717 } else { |
| 3718 __ nop(); // Signals no inlined code. |
| 3719 } |
| 3720 } |
| 3721 |
| 3722 |
| 3627 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) { | 3723 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) { |
| 3628 ASSERT(IsAligned(frame_offset, kPointerSize)); | 3724 ASSERT(IsAligned(frame_offset, kPointerSize)); |
| 3629 __ movq(Operand(rbp, frame_offset), value); | 3725 __ movq(Operand(rbp, frame_offset), value); |
| 3630 } | 3726 } |
| 3631 | 3727 |
| 3632 | 3728 |
| 3633 void FullCodeGenerator::LoadContextField(Register dst, int context_index) { | 3729 void FullCodeGenerator::LoadContextField(Register dst, int context_index) { |
| 3634 __ movq(dst, ContextOperand(rsi, context_index)); | 3730 __ movq(dst, ContextOperand(rsi, context_index)); |
| 3635 } | 3731 } |
| 3636 | 3732 |
| (...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3668 __ ret(0); | 3764 __ ret(0); |
| 3669 } | 3765 } |
| 3670 | 3766 |
| 3671 | 3767 |
| 3672 #undef __ | 3768 #undef __ |
| 3673 | 3769 |
| 3674 | 3770 |
| 3675 } } // namespace v8::internal | 3771 } } // namespace v8::internal |
| 3676 | 3772 |
| 3677 #endif // V8_TARGET_ARCH_X64 | 3773 #endif // V8_TARGET_ARCH_X64 |
| OLD | NEW |