| OLD | NEW |
| 1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| 11 // with the distribution. | 11 // with the distribution. |
| (...skipping 24 matching lines...) |
| 36 #include "full-codegen.h" | 36 #include "full-codegen.h" |
| 37 #include "parser.h" | 37 #include "parser.h" |
| 38 #include "scopes.h" | 38 #include "scopes.h" |
| 39 #include "stub-cache.h" | 39 #include "stub-cache.h" |
| 40 | 40 |
| 41 namespace v8 { | 41 namespace v8 { |
| 42 namespace internal { | 42 namespace internal { |
| 43 | 43 |
| 44 #define __ ACCESS_MASM(masm_) | 44 #define __ ACCESS_MASM(masm_) |
| 45 | 45 |
| 46 |
| 47 class JumpPatchSite BASE_EMBEDDED { |
| 48 public: |
| 49 explicit JumpPatchSite(MacroAssembler* masm) |
| 50 : masm_(masm) { |
| 51 #ifdef DEBUG |
| 52 info_emitted_ = false; |
| 53 #endif |
| 54 } |
| 55 |
| 56 ~JumpPatchSite() { |
| 57 ASSERT(patch_site_.is_bound() == info_emitted_); |
| 58 } |
| 59 |
| 60 void EmitJumpIfNotSmi(Register reg, NearLabel* target) { |
| 61 __ testb(reg, Immediate(kSmiTagMask)); |
| 62 EmitJump(not_carry, target); // Always taken before patched. |
| 63 } |
| 64 |
| 65 void EmitJumpIfSmi(Register reg, NearLabel* target) { |
| 66 __ testb(reg, Immediate(kSmiTagMask)); |
| 67 EmitJump(carry, target); // Never taken before patched. |
| 68 } |
| 69 |
| 70 void EmitPatchInfo() { |
| 71 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_); |
| 72 ASSERT(is_int8(delta_to_patch_site)); |
| 73 __ testl(rax, Immediate(delta_to_patch_site)); |
| 74 #ifdef DEBUG |
| 75 info_emitted_ = true; |
| 76 #endif |
| 77 } |
| 78 |
| 79 bool is_bound() const { return patch_site_.is_bound(); } |
| 80 |
| 81 private: |
| 82 // jc will be patched with jz, jnc will become jnz. |
| 83 void EmitJump(Condition cc, NearLabel* target) { |
| 84 ASSERT(!patch_site_.is_bound() && !info_emitted_); |
| 85 ASSERT(cc == carry || cc == not_carry); |
| 86 __ bind(&patch_site_); |
| 87 __ j(cc, target); |
| 88 } |
| 89 |
| 90 MacroAssembler* masm_; |
| 91 Label patch_site_; |
| 92 #ifdef DEBUG |
| 93 bool info_emitted_; |
| 94 #endif |
| 95 }; |
| 96 |
| 97 |
| 46 // Generate code for a JS function. On entry to the function the receiver | 98 // Generate code for a JS function. On entry to the function the receiver |
| 47 // and arguments have been pushed on the stack left to right, with the | 99 // and arguments have been pushed on the stack left to right, with the |
| 48 // return address on top of them. The actual argument count matches the | 100 // return address on top of them. The actual argument count matches the |
| 49 // formal parameter count expected by the function. | 101 // formal parameter count expected by the function. |
| 50 // | 102 // |
| 51 // The live registers are: | 103 // The live registers are: |
| 52 // o rdi: the JS function object being called (ie, ourselves) | 104 // o rdi: the JS function object being called (ie, ourselves) |
| 53 // o rsi: our context | 105 // o rsi: our context |
| 54 // o rbp: our caller's frame pointer | 106 // o rbp: our caller's frame pointer |
| 55 // o rsp: stack pointer (pointing to return address) | 107 // o rsp: stack pointer (pointing to return address) |
| (...skipping 147 matching lines...) |
| 203 } | 255 } |
| 204 | 256 |
| 205 | 257 |
| 206 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) { | 258 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) { |
| 207 Comment cmnt(masm_, "[ Stack check"); | 259 Comment cmnt(masm_, "[ Stack check"); |
| 208 NearLabel ok; | 260 NearLabel ok; |
| 209 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); | 261 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); |
| 210 __ j(above_equal, &ok); | 262 __ j(above_equal, &ok); |
| 211 StackCheckStub stub; | 263 StackCheckStub stub; |
| 212 __ CallStub(&stub); | 264 __ CallStub(&stub); |
| 265 // Record a mapping of this PC offset to the OSR id. This is used to find |
| 266 // the AST id from the unoptimized code in order to use it as a key into |
| 267 // the deoptimization input data found in the optimized code. |
| 268 RecordStackCheck(stmt->OsrEntryId()); |
| 269 |
| 213 __ bind(&ok); | 270 __ bind(&ok); |
| 214 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); | 271 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); |
| 272 // Record a mapping of the OSR id to this PC. This is used if the OSR |
| 273 // entry becomes the target of a bailout. We don't expect it to be, but |
| 274 // we want it to work if it is. |
| 215 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS); | 275 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS); |
| 216 RecordStackCheck(stmt->OsrEntryId()); | |
| 217 } | 276 } |
| 218 | 277 |
| 219 | 278 |
| 220 void FullCodeGenerator::EmitReturnSequence() { | 279 void FullCodeGenerator::EmitReturnSequence() { |
| 221 Comment cmnt(masm_, "[ Return sequence"); | 280 Comment cmnt(masm_, "[ Return sequence"); |
| 222 if (return_label_.is_bound()) { | 281 if (return_label_.is_bound()) { |
| 223 __ jmp(&return_label_); | 282 __ jmp(&return_label_); |
| 224 } else { | 283 } else { |
| 225 __ bind(&return_label_); | 284 __ bind(&return_label_); |
| 226 if (FLAG_trace) { | 285 if (FLAG_trace) { |
| 227 __ push(rax); | 286 __ push(rax); |
| 228 __ CallRuntime(Runtime::kTraceExit, 1); | 287 __ CallRuntime(Runtime::kTraceExit, 1); |
| 229 } | 288 } |
| 230 #ifdef DEBUG | 289 #ifdef DEBUG |
| 231 // Add a label for checking the size of the code used for returning. | 290 // Add a label for checking the size of the code used for returning. |
| 232 Label check_exit_codesize; | 291 Label check_exit_codesize; |
| 233 masm_->bind(&check_exit_codesize); | 292 masm_->bind(&check_exit_codesize); |
| 234 #endif | 293 #endif |
| 235 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1); | 294 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1); |
| 236 __ RecordJSReturn(); | 295 __ RecordJSReturn(); |
| 237 // Do not use the leave instruction here because it is too short to | 296 // Do not use the leave instruction here because it is too short to |
| 238 // patch with the code required by the debugger. | 297 // patch with the code required by the debugger. |
| 239 __ movq(rsp, rbp); | 298 __ movq(rsp, rbp); |
| 240 __ pop(rbp); | 299 __ pop(rbp); |
| 241 __ ret((scope()->num_parameters() + 1) * kPointerSize); | 300 |
| 301 int arguments_bytes = (scope()->num_parameters() + 1) * kPointerSize; |
| 302 __ Ret(arguments_bytes, rcx); |
| 303 |
| 242 #ifdef ENABLE_DEBUGGER_SUPPORT | 304 #ifdef ENABLE_DEBUGGER_SUPPORT |
| 243 // Add padding that will be overwritten by a debugger breakpoint. We | 305 // Add padding that will be overwritten by a debugger breakpoint. We |
| 244 // have just generated "movq rsp, rbp; pop rbp; ret k" with length 7 | 306 // have just generated at least 7 bytes: "movq rsp, rbp; pop rbp; ret k" |
| 245 // (3 + 1 + 3). | 307 // (3 + 1 + 3). |
| 246 const int kPadding = Assembler::kJSReturnSequenceLength - 7; | 308 const int kPadding = Assembler::kJSReturnSequenceLength - 7; |
| 247 for (int i = 0; i < kPadding; ++i) { | 309 for (int i = 0; i < kPadding; ++i) { |
| 248 masm_->int3(); | 310 masm_->int3(); |
| 249 } | 311 } |
| 250 // Check that the size of the code used for returning matches what is | 312 // Check that the size of the code used for returning is large enough |
| 251 // expected by the debugger. | 313 // for the debugger's requirements. |
| 252 ASSERT_EQ(Assembler::kJSReturnSequenceLength, | 314 ASSERT(Assembler::kJSReturnSequenceLength <= |
| 253 masm_->SizeOfCodeGeneratedSince(&check_exit_codesize)); | 315 masm_->SizeOfCodeGeneratedSince(&check_exit_codesize)); |
| 254 #endif | 316 #endif |
| 255 } | 317 } |
| 256 } | 318 } |
| 257 | 319 |
| 258 | 320 |
| 259 FullCodeGenerator::ConstantOperand FullCodeGenerator::GetConstantOperand( | 321 FullCodeGenerator::ConstantOperand FullCodeGenerator::GetConstantOperand( |
| 260 Token::Value op, Expression* left, Expression* right) { | 322 Token::Value op, Expression* left, Expression* right) { |
| 261 ASSERT(ShouldInlineSmiCase(op)); | 323 ASSERT(ShouldInlineSmiCase(op)); |
| 262 return kNoConstants; | 324 return kNoConstants; |
| 263 } | 325 } |
| (...skipping 188 matching lines...) |
| 452 | 514 |
| 453 | 515 |
| 454 void FullCodeGenerator::StackValueContext::Plug(bool flag) const { | 516 void FullCodeGenerator::StackValueContext::Plug(bool flag) const { |
| 455 Heap::RootListIndex value_root_index = | 517 Heap::RootListIndex value_root_index = |
| 456 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex; | 518 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex; |
| 457 __ PushRoot(value_root_index); | 519 __ PushRoot(value_root_index); |
| 458 } | 520 } |
| 459 | 521 |
| 460 | 522 |
| 461 void FullCodeGenerator::TestContext::Plug(bool flag) const { | 523 void FullCodeGenerator::TestContext::Plug(bool flag) const { |
| 462 codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL); | 524 codegen()->PrepareForBailoutBeforeSplit(TOS_REG, |
| 525 true, |
| 526 true_label_, |
| 527 false_label_); |
| 463 if (flag) { | 528 if (flag) { |
| 464 if (true_label_ != fall_through_) __ jmp(true_label_); | 529 if (true_label_ != fall_through_) __ jmp(true_label_); |
| 465 } else { | 530 } else { |
| 466 if (false_label_ != fall_through_) __ jmp(false_label_); | 531 if (false_label_ != fall_through_) __ jmp(false_label_); |
| 467 } | 532 } |
| 468 } | 533 } |
| 469 | 534 |
| 470 | 535 |
| 471 void FullCodeGenerator::DoTest(Label* if_true, | 536 void FullCodeGenerator::DoTest(Label* if_true, |
| 472 Label* if_false, | 537 Label* if_false, |
| (...skipping 75 matching lines...) |
| 548 int offset = FixedArray::kHeaderSize + dst->index() * kPointerSize; | 613 int offset = FixedArray::kHeaderSize + dst->index() * kPointerSize; |
| 549 __ RecordWrite(scratch1, offset, src, scratch2, kDontSaveFPRegs); | 614 __ RecordWrite(scratch1, offset, src, scratch2, kDontSaveFPRegs); |
| 550 } | 615 } |
| 551 } | 616 } |
| 552 | 617 |
| 553 | 618 |
| 554 void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state, | 619 void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state, |
| 555 bool should_normalize, | 620 bool should_normalize, |
| 556 Label* if_true, | 621 Label* if_true, |
| 557 Label* if_false) { | 622 Label* if_false) { |
| 623 // Only prepare for bailouts before splits if we're in a test |
| 624 // context. Otherwise, we let the Visit function deal with the |
| 625 // preparation to avoid preparing with the same AST id twice. |
| 626 if (!context()->IsTest() || !info_->IsOptimizable()) return; |
| 627 |
| 628 NearLabel skip; |
| 629 if (should_normalize) __ jmp(&skip); |
| 630 |
| 631 ForwardBailoutStack* current = forward_bailout_stack_; |
| 632 while (current != NULL) { |
| 633 PrepareForBailout(current->expr(), state); |
| 634 current = current->parent(); |
| 635 } |
| 636 |
| 637 if (should_normalize) { |
| 638 __ CompareRoot(rax, Heap::kTrueValueRootIndex); |
| 639 Split(equal, if_true, if_false, NULL); |
| 640 __ bind(&skip); |
| 641 } |
| 558 } | 642 } |
| 559 | 643 |
| 560 | 644 |
| 561 void FullCodeGenerator::EmitDeclaration(Variable* variable, | 645 void FullCodeGenerator::EmitDeclaration(Variable* variable, |
| 562 Variable::Mode mode, | 646 Variable::Mode mode, |
| 563 FunctionLiteral* function) { | 647 FunctionLiteral* function) { |
| 564 Comment cmnt(masm_, "[ Declaration"); | 648 Comment cmnt(masm_, "[ Declaration"); |
| 565 ASSERT(variable != NULL); // Must have been resolved. | 649 ASSERT(variable != NULL); // Must have been resolved. |
| 566 Slot* slot = variable->AsSlot(); | 650 Slot* slot = variable->AsSlot(); |
| 567 Property* prop = variable->AsProperty(); | 651 Property* prop = variable->AsProperty(); |
| (...skipping 55 matching lines...) |
| 623 __ Push(Smi::FromInt(0)); // no initial value! | 707 __ Push(Smi::FromInt(0)); // no initial value! |
| 624 } | 708 } |
| 625 __ CallRuntime(Runtime::kDeclareContextSlot, 4); | 709 __ CallRuntime(Runtime::kDeclareContextSlot, 4); |
| 626 break; | 710 break; |
| 627 } | 711 } |
| 628 } | 712 } |
| 629 | 713 |
| 630 } else if (prop != NULL) { | 714 } else if (prop != NULL) { |
| 631 if (function != NULL || mode == Variable::CONST) { | 715 if (function != NULL || mode == Variable::CONST) { |
| 632 // We are declaring a function or constant that rewrites to a | 716 // We are declaring a function or constant that rewrites to a |
| 633 // property. Use (keyed) IC to set the initial value. | 717 // property. Use (keyed) IC to set the initial value. We |
| 634 VisitForStackValue(prop->obj()); | 718 // cannot visit the rewrite because it's shared and we risk |
| 719 // recording duplicate AST IDs for bailouts from optimized code. |
| 720 ASSERT(prop->obj()->AsVariableProxy() != NULL); |
| 721 { AccumulatorValueContext for_object(this); |
| 722 EmitVariableLoad(prop->obj()->AsVariableProxy()->var()); |
| 723 } |
| 635 if (function != NULL) { | 724 if (function != NULL) { |
| 636 VisitForStackValue(prop->key()); | 725 __ push(rax); |
| 637 VisitForAccumulatorValue(function); | 726 VisitForAccumulatorValue(function); |
| 638 __ pop(rcx); | 727 __ pop(rdx); |
| 639 } else { | 728 } else { |
| 640 VisitForAccumulatorValue(prop->key()); | 729 __ movq(rdx, rax); |
| 641 __ movq(rcx, result_register()); | 730 __ LoadRoot(rax, Heap::kTheHoleValueRootIndex); |
| 642 __ LoadRoot(result_register(), Heap::kTheHoleValueRootIndex); | |
| 643 } | 731 } |
| 644 __ pop(rdx); | 732 ASSERT(prop->key()->AsLiteral() != NULL && |
| 733 prop->key()->AsLiteral()->handle()->IsSmi()); |
| 734 __ Move(rcx, prop->key()->AsLiteral()->handle()); |
| 645 | 735 |
| 646 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize)); | 736 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize)); |
| 647 EmitCallIC(ic, RelocInfo::CODE_TARGET); | 737 EmitCallIC(ic, RelocInfo::CODE_TARGET); |
| 648 } | 738 } |
| 649 } | 739 } |
| 650 } | 740 } |
| 651 | 741 |
| 652 | 742 |
| 653 void FullCodeGenerator::VisitDeclaration(Declaration* decl) { | 743 void FullCodeGenerator::VisitDeclaration(Declaration* decl) { |
| 654 EmitDeclaration(decl->proxy()->var(), decl->mode(), decl->fun()); | 744 EmitDeclaration(decl->proxy()->var(), decl->mode(), decl->fun()); |
| 655 } | 745 } |
| 656 | 746 |
| 657 | 747 |
| 658 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) { | 748 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) { |
| 659 // Call the runtime to declare the globals. | 749 // Call the runtime to declare the globals. |
| 660 __ push(rsi); // The context is the first argument. | 750 __ push(rsi); // The context is the first argument. |
| 661 __ Push(pairs); | 751 __ Push(pairs); |
| 662 __ Push(Smi::FromInt(is_eval() ? 1 : 0)); | 752 __ Push(Smi::FromInt(is_eval() ? 1 : 0)); |
| 663 __ CallRuntime(Runtime::kDeclareGlobals, 3); | 753 __ CallRuntime(Runtime::kDeclareGlobals, 3); |
| 664 // Return value is ignored. | 754 // Return value is ignored. |
| 665 } | 755 } |
| 666 | 756 |
| 667 | 757 |
| 668 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) { | 758 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) { |
| 669 Comment cmnt(masm_, "[ SwitchStatement"); | 759 Comment cmnt(masm_, "[ SwitchStatement"); |
| 670 Breakable nested_statement(this, stmt); | 760 Breakable nested_statement(this, stmt); |
| 671 SetStatementPosition(stmt); | 761 SetStatementPosition(stmt); |
| 762 |
| 672 // Keep the switch value on the stack until a case matches. | 763 // Keep the switch value on the stack until a case matches. |
| 673 VisitForStackValue(stmt->tag()); | 764 VisitForStackValue(stmt->tag()); |
| 765 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); |
| 674 | 766 |
| 675 ZoneList<CaseClause*>* clauses = stmt->cases(); | 767 ZoneList<CaseClause*>* clauses = stmt->cases(); |
| 676 CaseClause* default_clause = NULL; // Can occur anywhere in the list. | 768 CaseClause* default_clause = NULL; // Can occur anywhere in the list. |
| 677 | 769 |
| 678 Label next_test; // Recycled for each test. | 770 Label next_test; // Recycled for each test. |
| 679 // Compile all the tests with branches to their bodies. | 771 // Compile all the tests with branches to their bodies. |
| 680 for (int i = 0; i < clauses->length(); i++) { | 772 for (int i = 0; i < clauses->length(); i++) { |
| 681 CaseClause* clause = clauses->at(i); | 773 CaseClause* clause = clauses->at(i); |
| 774 clause->body_target()->entry_label()->Unuse(); |
| 775 |
| 682 // The default is not a test, but remember it as final fall through. | 776 // The default is not a test, but remember it as final fall through. |
| 683 if (clause->is_default()) { | 777 if (clause->is_default()) { |
| 684 default_clause = clause; | 778 default_clause = clause; |
| 685 continue; | 779 continue; |
| 686 } | 780 } |
| 687 | 781 |
| 688 Comment cmnt(masm_, "[ Case comparison"); | 782 Comment cmnt(masm_, "[ Case comparison"); |
| 689 __ bind(&next_test); | 783 __ bind(&next_test); |
| 690 next_test.Unuse(); | 784 next_test.Unuse(); |
| 691 | 785 |
| 692 // Compile the label expression. | 786 // Compile the label expression. |
| 693 VisitForAccumulatorValue(clause->label()); | 787 VisitForAccumulatorValue(clause->label()); |
| 694 | 788 |
| 695 // Perform the comparison as if via '==='. | 789 // Perform the comparison as if via '==='. |
| 696 __ movq(rdx, Operand(rsp, 0)); // Switch value. | 790 __ movq(rdx, Operand(rsp, 0)); // Switch value. |
| 697 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT); | 791 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT); |
| 792 JumpPatchSite patch_site(masm_); |
| 698 if (inline_smi_code) { | 793 if (inline_smi_code) { |
| 699 Label slow_case; | 794 NearLabel slow_case; |
| 700 __ JumpIfNotBothSmi(rdx, rax, &slow_case); | 795 __ movq(rcx, rdx); |
| 701 __ SmiCompare(rdx, rax); | 796 __ or_(rcx, rax); |
| 797 patch_site.EmitJumpIfNotSmi(rcx, &slow_case); |
| 798 |
| 799 __ cmpq(rdx, rax); |
| 702 __ j(not_equal, &next_test); | 800 __ j(not_equal, &next_test); |
| 703 __ Drop(1); // Switch value is no longer needed. | 801 __ Drop(1); // Switch value is no longer needed. |
| 704 __ jmp(clause->body_target()->entry_label()); | 802 __ jmp(clause->body_target()->entry_label()); |
| 705 __ bind(&slow_case); | 803 __ bind(&slow_case); |
| 706 } | 804 } |
| 707 | 805 |
| 708 CompareFlags flags = inline_smi_code | 806 // Record position before stub call for type feedback. |
| 709 ? NO_SMI_COMPARE_IN_STUB | 807 SetSourcePosition(clause->position()); |
| 710 : NO_COMPARE_FLAGS; | 808 Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT); |
| 711 CompareStub stub(equal, true, flags); | 809 EmitCallIC(ic, &patch_site); |
| 712 __ CallStub(&stub); | 810 |
| 713 __ testq(rax, rax); | 811 __ testq(rax, rax); |
| 714 __ j(not_equal, &next_test); | 812 __ j(not_equal, &next_test); |
| 715 __ Drop(1); // Switch value is no longer needed. | 813 __ Drop(1); // Switch value is no longer needed. |
| 716 __ jmp(clause->body_target()->entry_label()); | 814 __ jmp(clause->body_target()->entry_label()); |
| 717 } | 815 } |
| 718 | 816 |
| 719 // Discard the test value and jump to the default if present, otherwise to | 817 // Discard the test value and jump to the default if present, otherwise to |
| 720 // the end of the statement. | 818 // the end of the statement. |
| 721 __ bind(&next_test); | 819 __ bind(&next_test); |
| 722 __ Drop(1); // Switch value is no longer needed. | 820 __ Drop(1); // Switch value is no longer needed. |
| 723 if (default_clause == NULL) { | 821 if (default_clause == NULL) { |
| 724 __ jmp(nested_statement.break_target()); | 822 __ jmp(nested_statement.break_target()); |
| 725 } else { | 823 } else { |
| 726 __ jmp(default_clause->body_target()->entry_label()); | 824 __ jmp(default_clause->body_target()->entry_label()); |
| 727 } | 825 } |
| 728 | 826 |
| 729 // Compile all the case bodies. | 827 // Compile all the case bodies. |
| 730 for (int i = 0; i < clauses->length(); i++) { | 828 for (int i = 0; i < clauses->length(); i++) { |
| 731 Comment cmnt(masm_, "[ Case body"); | 829 Comment cmnt(masm_, "[ Case body"); |
| 732 CaseClause* clause = clauses->at(i); | 830 CaseClause* clause = clauses->at(i); |
| 733 __ bind(clause->body_target()->entry_label()); | 831 __ bind(clause->body_target()->entry_label()); |
| 734 VisitStatements(clause->statements()); | 832 VisitStatements(clause->statements()); |
| 735 } | 833 } |
| 736 | 834 |
| 737 __ bind(nested_statement.break_target()); | 835 __ bind(nested_statement.break_target()); |
| 836 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS); |
| 738 } | 837 } |
| 739 | 838 |
| 740 | 839 |
| 741 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { | 840 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { |
| 742 Comment cmnt(masm_, "[ ForInStatement"); | 841 Comment cmnt(masm_, "[ ForInStatement"); |
| 743 SetStatementPosition(stmt); | 842 SetStatementPosition(stmt); |
| 744 | 843 |
| 745 Label loop, exit; | 844 Label loop, exit; |
| 746 ForIn loop_statement(this, stmt); | 845 ForIn loop_statement(this, stmt); |
| 747 increment_loop_depth(); | 846 increment_loop_depth(); |
| (...skipping 469 matching lines...) |
| 1217 ASSERT(!CompileTimeValue::IsCompileTimeValue(value)); | 1316 ASSERT(!CompileTimeValue::IsCompileTimeValue(value)); |
| 1218 // Fall through. | 1317 // Fall through. |
| 1219 case ObjectLiteral::Property::COMPUTED: | 1318 case ObjectLiteral::Property::COMPUTED: |
| 1220 if (key->handle()->IsSymbol()) { | 1319 if (key->handle()->IsSymbol()) { |
| 1221 VisitForAccumulatorValue(value); | 1320 VisitForAccumulatorValue(value); |
| 1222 __ Move(rcx, key->handle()); | 1321 __ Move(rcx, key->handle()); |
| 1223 __ movq(rdx, Operand(rsp, 0)); | 1322 __ movq(rdx, Operand(rsp, 0)); |
| 1224 if (property->emit_store()) { | 1323 if (property->emit_store()) { |
| 1225 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize)); | 1324 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize)); |
| 1226 EmitCallIC(ic, RelocInfo::CODE_TARGET); | 1325 EmitCallIC(ic, RelocInfo::CODE_TARGET); |
| 1326 PrepareForBailoutForId(key->id(), NO_REGISTERS); |
| 1227 } | 1327 } |
| 1228 break; | 1328 break; |
| 1229 } | 1329 } |
| 1230 // Fall through. | 1330 // Fall through. |
| 1231 case ObjectLiteral::Property::PROTOTYPE: | 1331 case ObjectLiteral::Property::PROTOTYPE: |
| 1232 __ push(Operand(rsp, 0)); // Duplicate receiver. | 1332 __ push(Operand(rsp, 0)); // Duplicate receiver. |
| 1233 VisitForStackValue(key); | 1333 VisitForStackValue(key); |
| 1234 VisitForStackValue(value); | 1334 VisitForStackValue(value); |
| 1235 if (property->emit_store()) { | 1335 if (property->emit_store()) { |
| 1236 __ CallRuntime(Runtime::kSetProperty, 3); | 1336 __ CallRuntime(Runtime::kSetProperty, 3); |
| (...skipping 67 matching lines...) |
| 1304 VisitForAccumulatorValue(subexpr); | 1404 VisitForAccumulatorValue(subexpr); |
| 1305 | 1405 |
| 1306 // Store the subexpression value in the array's elements. | 1406 // Store the subexpression value in the array's elements. |
| 1307 __ movq(rbx, Operand(rsp, 0)); // Copy of array literal. | 1407 __ movq(rbx, Operand(rsp, 0)); // Copy of array literal. |
| 1308 __ movq(rbx, FieldOperand(rbx, JSObject::kElementsOffset)); | 1408 __ movq(rbx, FieldOperand(rbx, JSObject::kElementsOffset)); |
| 1309 int offset = FixedArray::kHeaderSize + (i * kPointerSize); | 1409 int offset = FixedArray::kHeaderSize + (i * kPointerSize); |
| 1310 __ movq(FieldOperand(rbx, offset), result_register()); | 1410 __ movq(FieldOperand(rbx, offset), result_register()); |
| 1311 | 1411 |
| 1312 // Update the write barrier for the array store. | 1412 // Update the write barrier for the array store. |
| 1313 __ RecordWrite(rbx, offset, result_register(), rcx, kDontSaveFPRegs); | 1413 __ RecordWrite(rbx, offset, result_register(), rcx, kDontSaveFPRegs); |
| 1414 |
| 1415 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS); |
| 1314 } | 1416 } |
| 1315 | 1417 |
| 1316 if (result_saved) { | 1418 if (result_saved) { |
| 1317 context()->PlugTOS(); | 1419 context()->PlugTOS(); |
| 1318 } else { | 1420 } else { |
| 1319 context()->Plug(rax); | 1421 context()->Plug(rax); |
| 1320 } | 1422 } |
| 1321 } | 1423 } |
| 1322 | 1424 |
| 1323 | 1425 |
| (...skipping 24 matching lines...) |
| 1348 break; | 1450 break; |
| 1349 case NAMED_PROPERTY: | 1451 case NAMED_PROPERTY: |
| 1350 if (expr->is_compound()) { | 1452 if (expr->is_compound()) { |
| 1351 // We need the receiver both on the stack and in the accumulator. | 1453 // We need the receiver both on the stack and in the accumulator. |
| 1352 VisitForAccumulatorValue(property->obj()); | 1454 VisitForAccumulatorValue(property->obj()); |
| 1353 __ push(result_register()); | 1455 __ push(result_register()); |
| 1354 } else { | 1456 } else { |
| 1355 VisitForStackValue(property->obj()); | 1457 VisitForStackValue(property->obj()); |
| 1356 } | 1458 } |
| 1357 break; | 1459 break; |
| 1358 case KEYED_PROPERTY: | 1460 case KEYED_PROPERTY: { |
| 1359 if (expr->is_compound()) { | 1461 if (expr->is_compound()) { |
| 1360 VisitForStackValue(property->obj()); | 1462 if (property->is_arguments_access()) { |
| 1361 VisitForAccumulatorValue(property->key()); | 1463 VariableProxy* obj_proxy = property->obj()->AsVariableProxy(); |
| 1464 MemOperand slot_operand = |
| 1465 EmitSlotSearch(obj_proxy->var()->AsSlot(), rcx); |
| 1466 __ push(slot_operand); |
| 1467 __ Move(rax, property->key()->AsLiteral()->handle()); |
| 1468 } else { |
| 1469 VisitForStackValue(property->obj()); |
| 1470 VisitForAccumulatorValue(property->key()); |
| 1471 } |
| 1362 __ movq(rdx, Operand(rsp, 0)); | 1472 __ movq(rdx, Operand(rsp, 0)); |
| 1363 __ push(rax); | 1473 __ push(rax); |
| 1364 } else { | 1474 } else { |
| 1365 VisitForStackValue(property->obj()); | 1475 if (property->is_arguments_access()) { |
| 1366 VisitForStackValue(property->key()); | 1476 VariableProxy* obj_proxy = property->obj()->AsVariableProxy(); |
| 1477 MemOperand slot_operand = |
| 1478 EmitSlotSearch(obj_proxy->var()->AsSlot(), rcx); |
| 1479 __ push(slot_operand); |
| 1480 __ Push(property->key()->AsLiteral()->handle()); |
| 1481 } else { |
| 1482 VisitForStackValue(property->obj()); |
| 1483 VisitForStackValue(property->key()); |
| 1484 } |
| 1367 } | 1485 } |
| 1368 break; | 1486 break; |
| 1487 } |
| 1369 } | 1488 } |
| 1370 | 1489 |
| 1371 if (expr->is_compound()) { | 1490 if (expr->is_compound()) { |
| 1372 { AccumulatorValueContext context(this); | 1491 { AccumulatorValueContext context(this); |
| 1373 switch (assign_type) { | 1492 switch (assign_type) { |
| 1374 case VARIABLE: | 1493 case VARIABLE: |
| 1375 EmitVariableLoad(expr->target()->AsVariableProxy()->var()); | 1494 EmitVariableLoad(expr->target()->AsVariableProxy()->var()); |
| 1376 break; | 1495 break; |
| 1377 case NAMED_PROPERTY: | 1496 case NAMED_PROPERTY: |
| 1378 EmitNamedPropertyLoad(property); | 1497 EmitNamedPropertyLoad(property); |
| 1379 break; | 1498 break; |
| 1380 case KEYED_PROPERTY: | 1499 case KEYED_PROPERTY: |
| 1381 EmitKeyedPropertyLoad(property); | 1500 EmitKeyedPropertyLoad(property); |
| 1382 break; | 1501 break; |
| 1383 } | 1502 } |
| 1384 } | 1503 } |
| 1385 | 1504 |
| 1505 // For property compound assignments we need another deoptimization |
| 1506 // point after the property load. |
| 1507 if (property != NULL) { |
| 1508 PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG); |
| 1509 } |
| 1510 |
| 1386 Token::Value op = expr->binary_op(); | 1511 Token::Value op = expr->binary_op(); |
| 1387 ConstantOperand constant = ShouldInlineSmiCase(op) | 1512 ConstantOperand constant = ShouldInlineSmiCase(op) |
| 1388 ? GetConstantOperand(op, expr->target(), expr->value()) | 1513 ? GetConstantOperand(op, expr->target(), expr->value()) |
| 1389 : kNoConstants; | 1514 : kNoConstants; |
| 1390 ASSERT(constant == kRightConstant || constant == kNoConstants); | 1515 ASSERT(constant == kRightConstant || constant == kNoConstants); |
| 1391 if (constant == kNoConstants) { | 1516 if (constant == kNoConstants) { |
| 1392 __ push(rax); // Left operand goes on the stack. | 1517 __ push(rax); // Left operand goes on the stack. |
| 1393 VisitForAccumulatorValue(expr->value()); | 1518 VisitForAccumulatorValue(expr->value()); |
| 1394 } | 1519 } |
| 1395 | 1520 |
| 1396 OverwriteMode mode = expr->value()->ResultOverwriteAllowed() | 1521 OverwriteMode mode = expr->value()->ResultOverwriteAllowed() |
| 1397 ? OVERWRITE_RIGHT | 1522 ? OVERWRITE_RIGHT |
| 1398 : NO_OVERWRITE; | 1523 : NO_OVERWRITE; |
| 1399 SetSourcePosition(expr->position() + 1); | 1524 SetSourcePosition(expr->position() + 1); |
| 1400 AccumulatorValueContext context(this); | 1525 AccumulatorValueContext context(this); |
| 1401 if (ShouldInlineSmiCase(op)) { | 1526 if (ShouldInlineSmiCase(op)) { |
| 1402 EmitInlineSmiBinaryOp(expr, | 1527 EmitInlineSmiBinaryOp(expr, |
| 1403 op, | 1528 op, |
| 1404 mode, | 1529 mode, |
| 1405 expr->target(), | 1530 expr->target(), |
| 1406 expr->value(), | 1531 expr->value(), |
| 1407 constant); | 1532 constant); |
| 1408 } else { | 1533 } else { |
| 1409 EmitBinaryOp(op, mode); | 1534 EmitBinaryOp(op, mode); |
| 1410 } | 1535 } |
| 1536 // Deoptimization point in case the binary operation may have side effects. |
| 1537 PrepareForBailout(expr->binary_operation(), TOS_REG); |
| 1411 } else { | 1538 } else { |
| 1412 VisitForAccumulatorValue(expr->value()); | 1539 VisitForAccumulatorValue(expr->value()); |
| 1413 } | 1540 } |
| 1414 | 1541 |
| 1415 // Record source position before possible IC call. | 1542 // Record source position before possible IC call. |
| 1416 SetSourcePosition(expr->position()); | 1543 SetSourcePosition(expr->position()); |
| 1417 | 1544 |
| 1418 // Store the value. | 1545 // Store the value. |
| 1419 switch (assign_type) { | 1546 switch (assign_type) { |
| 1420 case VARIABLE: | 1547 case VARIABLE: |
| 1421 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(), | 1548 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(), |
| 1422 expr->op()); | 1549 expr->op()); |
| 1550 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); |
| 1423 context()->Plug(rax); | 1551 context()->Plug(rax); |
| 1424 break; | 1552 break; |
| 1425 case NAMED_PROPERTY: | 1553 case NAMED_PROPERTY: |
| 1426 EmitNamedPropertyAssignment(expr); | 1554 EmitNamedPropertyAssignment(expr); |
| 1427 break; | 1555 break; |
| 1428 case KEYED_PROPERTY: | 1556 case KEYED_PROPERTY: |
| 1429 EmitKeyedPropertyAssignment(expr); | 1557 EmitKeyedPropertyAssignment(expr); |
| 1430 break; | 1558 break; |
| 1431 } | 1559 } |
| 1432 } | 1560 } |
| (...skipping 19 matching lines...) |
| 1452 Token::Value op, | 1580 Token::Value op, |
| 1453 OverwriteMode mode, | 1581 OverwriteMode mode, |
| 1454 Expression* left, | 1582 Expression* left, |
| 1455 Expression* right, | 1583 Expression* right, |
| 1456 ConstantOperand constant) { | 1584 ConstantOperand constant) { |
| 1457 ASSERT(constant == kNoConstants); // Only handled case. | 1585 ASSERT(constant == kNoConstants); // Only handled case. |
| 1458 | 1586 |
| 1459 // Do combined smi check of the operands. Left operand is on the | 1587 // Do combined smi check of the operands. Left operand is on the |
| 1460 // stack (popped into rdx). Right operand is in rax but moved into | 1588 // stack (popped into rdx). Right operand is in rax but moved into |
| 1461 // rcx to make the shifts easier. | 1589 // rcx to make the shifts easier. |
| 1462 Label done, stub_call, smi_case; | 1590 NearLabel done, stub_call, smi_case; |
| 1463 __ pop(rdx); | 1591 __ pop(rdx); |
| 1464 __ movq(rcx, rax); | 1592 __ movq(rcx, rax); |
| 1465 Condition smi = masm()->CheckBothSmi(rdx, rax); | 1593 __ or_(rax, rdx); |
| 1466 __ j(smi, &smi_case); | 1594 JumpPatchSite patch_site(masm_); |
| 1595 patch_site.EmitJumpIfSmi(rax, &smi_case); |
| 1467 | 1596 |
| 1468 __ bind(&stub_call); | 1597 __ bind(&stub_call); |
| 1469 GenericBinaryOpStub stub(op, mode, NO_SMI_CODE_IN_STUB, TypeInfo::Unknown()); | 1598 __ movq(rax, rcx); |
| 1470 if (stub.ArgsInRegistersSupported()) { | 1599 TypeRecordingBinaryOpStub stub(op, mode); |
| 1471 stub.GenerateCall(masm_, rdx, rcx); | 1600 EmitCallIC(stub.GetCode(), &patch_site); |
| 1472 } else { | |
| 1473 __ push(rdx); | |
| 1474 __ push(rcx); | |
| 1475 __ CallStub(&stub); | |
| 1476 } | |
| 1477 __ jmp(&done); | 1601 __ jmp(&done); |
| 1478 | 1602 |
| 1479 __ bind(&smi_case); | 1603 __ bind(&smi_case); |
| 1480 switch (op) { | 1604 switch (op) { |
| 1481 case Token::SAR: | 1605 case Token::SAR: |
| 1482 __ SmiShiftArithmeticRight(rax, rdx, rcx); | 1606 __ SmiShiftArithmeticRight(rax, rdx, rcx); |
| 1483 break; | 1607 break; |
| 1484 case Token::SHL: | 1608 case Token::SHL: |
| 1485 __ SmiShiftLeft(rax, rdx, rcx); | 1609 __ SmiShiftLeft(rax, rdx, rcx); |
| 1486 break; | 1610 break; |
| (...skipping 23 matching lines...) |
| 1510 break; | 1634 break; |
| 1511 } | 1635 } |
| 1512 | 1636 |
| 1513 __ bind(&done); | 1637 __ bind(&done); |
| 1514 context()->Plug(rax); | 1638 context()->Plug(rax); |
| 1515 } | 1639 } |
| 1516 | 1640 |
| 1517 | 1641 |
| 1518 void FullCodeGenerator::EmitBinaryOp(Token::Value op, | 1642 void FullCodeGenerator::EmitBinaryOp(Token::Value op, |
| 1519 OverwriteMode mode) { | 1643 OverwriteMode mode) { |
| 1520 GenericBinaryOpStub stub(op, mode, NO_GENERIC_BINARY_FLAGS); | 1644 __ pop(rdx); |
| 1521 if (stub.ArgsInRegistersSupported()) { | 1645 TypeRecordingBinaryOpStub stub(op, mode); |
| 1522 __ pop(rdx); | 1646 EmitCallIC(stub.GetCode(), NULL); // NULL signals no inlined smi code. |
| 1523 stub.GenerateCall(masm_, rdx, rax); | |
| 1524 } else { | |
| 1525 __ push(result_register()); | |
| 1526 __ CallStub(&stub); | |
| 1527 } | |
| 1528 context()->Plug(rax); | 1647 context()->Plug(rax); |
| 1529 } | 1648 } |
| 1530 | 1649 |
| 1531 | 1650 |
| 1532 void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_id) { | 1651 void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) { |
| 1533 // Invalid left-hand sides are rewritten to have a 'throw | 1652 // Invalid left-hand sides are rewritten to have a 'throw |
| 1534 // ReferenceError' on the left-hand side. | 1653 // ReferenceError' on the left-hand side. |
| 1535 if (!expr->IsValidLeftHandSide()) { | 1654 if (!expr->IsValidLeftHandSide()) { |
| 1536 VisitForEffect(expr); | 1655 VisitForEffect(expr); |
| 1537 return; | 1656 return; |
| 1538 } | 1657 } |
| 1539 | 1658 |
| 1540 // Left-hand side can only be a property, a global or a (parameter or local) | 1659 // Left-hand side can only be a property, a global or a (parameter or local) |
| 1541 // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY. | 1660 // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY. |
| 1542 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY }; | 1661 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY }; |
| (...skipping 17 matching lines...) |
| 1560 VisitForAccumulatorValue(prop->obj()); | 1679 VisitForAccumulatorValue(prop->obj()); |
| 1561 __ movq(rdx, rax); | 1680 __ movq(rdx, rax); |
| 1562 __ pop(rax); // Restore value. | 1681 __ pop(rax); // Restore value. |
| 1563 __ Move(rcx, prop->key()->AsLiteral()->handle()); | 1682 __ Move(rcx, prop->key()->AsLiteral()->handle()); |
| 1564 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize)); | 1683 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize)); |
| 1565 EmitCallIC(ic, RelocInfo::CODE_TARGET); | 1684 EmitCallIC(ic, RelocInfo::CODE_TARGET); |
| 1566 break; | 1685 break; |
| 1567 } | 1686 } |
| 1568 case KEYED_PROPERTY: { | 1687 case KEYED_PROPERTY: { |
| 1569 __ push(rax); // Preserve value. | 1688 __ push(rax); // Preserve value. |
| 1570 VisitForStackValue(prop->obj()); | 1689 if (prop->is_synthetic()) { |
| 1571 VisitForAccumulatorValue(prop->key()); | 1690 ASSERT(prop->obj()->AsVariableProxy() != NULL); |
| 1572 __ movq(rcx, rax); | 1691 ASSERT(prop->key()->AsLiteral() != NULL); |
| 1573 __ pop(rdx); | 1692 { AccumulatorValueContext for_object(this); |
| 1574 __ pop(rax); | 1693 EmitVariableLoad(prop->obj()->AsVariableProxy()->var()); |
| 1694 } |
| 1695 __ movq(rdx, rax); |
| 1696 __ Move(rcx, prop->key()->AsLiteral()->handle()); |
| 1697 } else { |
| 1698 VisitForStackValue(prop->obj()); |
| 1699 VisitForAccumulatorValue(prop->key()); |
| 1700 __ movq(rcx, rax); |
| 1701 __ pop(rdx); |
| 1702 } |
| 1703 __ pop(rax); // Restore value. |
| 1575 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize)); | 1704 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize)); |
| 1576 EmitCallIC(ic, RelocInfo::CODE_TARGET); | 1705 EmitCallIC(ic, RelocInfo::CODE_TARGET); |
| 1577 break; | 1706 break; |
| 1578 } | 1707 } |
| 1579 } | 1708 } |
| 1709 PrepareForBailoutForId(bailout_ast_id, TOS_REG); |
| 1580 context()->Plug(rax); | 1710 context()->Plug(rax); |
| 1581 } | 1711 } |
| 1582 | 1712 |
| 1583 | 1713 |
| 1584 void FullCodeGenerator::EmitVariableAssignment(Variable* var, | 1714 void FullCodeGenerator::EmitVariableAssignment(Variable* var, |
| 1585 Token::Value op) { | 1715 Token::Value op) { |
| 1586 // Left-hand sides that rewrite to explicit property accesses do not reach | 1716 // Left-hand sides that rewrite to explicit property accesses do not reach |
| 1587 // here. | 1717 // here. |
| 1588 ASSERT(var != NULL); | 1718 ASSERT(var != NULL); |
| 1589 ASSERT(var->is_global() || var->AsSlot() != NULL); | 1719 ASSERT(var->is_global() || var->AsSlot() != NULL); |
| 1590 | 1720 |
| 1591 if (var->is_global()) { | 1721 if (var->is_global()) { |
| 1592 ASSERT(!var->is_this()); | 1722 ASSERT(!var->is_this()); |
| 1593 // Assignment to a global variable. Use inline caching for the | 1723 // Assignment to a global variable. Use inline caching for the |
| 1594 // assignment. Right-hand-side value is passed in rax, variable name in | 1724 // assignment. Right-hand-side value is passed in rax, variable name in |
| 1595 // rcx, and the global object on the stack. | 1725 // rcx, and the global object on the stack. |
| 1596 __ Move(rcx, var->name()); | 1726 __ Move(rcx, var->name()); |
| 1597 __ movq(rdx, GlobalObjectOperand()); | 1727 __ movq(rdx, GlobalObjectOperand()); |
| 1598 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize)); | 1728 Handle<Code> ic(Builtins::builtin(is_strict() |
| 1599 EmitCallIC(ic, RelocInfo::CODE_TARGET); | 1729 ? Builtins::StoreIC_Initialize_Strict |
| 1730 : Builtins::StoreIC_Initialize)); |
| 1731 EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT); |
| 1600 | 1732 |
| 1601 } else if (var->mode() != Variable::CONST || op == Token::INIT_CONST) { | 1733 } else if (var->mode() != Variable::CONST || op == Token::INIT_CONST) { |
| 1602 // Perform the assignment for non-const variables and for initialization | 1734 // Perform the assignment for non-const variables and for initialization |
| 1603 // of const variables. Const assignments are simply skipped. | 1735 // of const variables. Const assignments are simply skipped. |
| 1604 Label done; | 1736 Label done; |
| 1605 Slot* slot = var->AsSlot(); | 1737 Slot* slot = var->AsSlot(); |
| 1606 switch (slot->type()) { | 1738 switch (slot->type()) { |
| 1607 case Slot::PARAMETER: | 1739 case Slot::PARAMETER: |
| 1608 case Slot::LOCAL: | 1740 case Slot::LOCAL: |
| 1609 if (op == Token::INIT_CONST) { | 1741 if (op == Token::INIT_CONST) { |
| (...skipping 72 matching lines...) |
| 1682 EmitCallIC(ic, RelocInfo::CODE_TARGET); | 1814 EmitCallIC(ic, RelocInfo::CODE_TARGET); |
| 1683 | 1815 |
| 1684 // If the assignment ends an initialization block, revert to fast case. | 1816 // If the assignment ends an initialization block, revert to fast case. |
| 1685 if (expr->ends_initialization_block()) { | 1817 if (expr->ends_initialization_block()) { |
| 1686 __ push(rax); // Result of assignment, saved even if not needed. | 1818 __ push(rax); // Result of assignment, saved even if not needed. |
| 1687 __ push(Operand(rsp, kPointerSize)); // Receiver is under value. | 1819 __ push(Operand(rsp, kPointerSize)); // Receiver is under value. |
| 1688 __ CallRuntime(Runtime::kToFastProperties, 1); | 1820 __ CallRuntime(Runtime::kToFastProperties, 1); |
| 1689 __ pop(rax); | 1821 __ pop(rax); |
| 1690 __ Drop(1); | 1822 __ Drop(1); |
| 1691 } | 1823 } |
| 1824 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); |
| 1692 context()->Plug(rax); | 1825 context()->Plug(rax); |
| 1693 } | 1826 } |
| 1694 | 1827 |
| 1695 | 1828 |
| 1696 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) { | 1829 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) { |
| 1697 // Assignment to a property, using a keyed store IC. | 1830 // Assignment to a property, using a keyed store IC. |
| 1698 | 1831 |
| 1699 // If the assignment starts a block of assignments to the same object, | 1832 // If the assignment starts a block of assignments to the same object, |
| 1700 // change to slow case to avoid the quadratic behavior of repeatedly | 1833 // change to slow case to avoid the quadratic behavior of repeatedly |
| 1701 // adding fast properties. | 1834 // adding fast properties. |
| (...skipping 18 matching lines...) |
| 1720 | 1853 |
| 1721 // If the assignment ends an initialization block, revert to fast case. | 1854 // If the assignment ends an initialization block, revert to fast case. |
| 1722 if (expr->ends_initialization_block()) { | 1855 if (expr->ends_initialization_block()) { |
| 1723 __ pop(rdx); | 1856 __ pop(rdx); |
| 1724 __ push(rax); // Result of assignment, saved even if not needed. | 1857 __ push(rax); // Result of assignment, saved even if not needed. |
| 1725 __ push(rdx); | 1858 __ push(rdx); |
| 1726 __ CallRuntime(Runtime::kToFastProperties, 1); | 1859 __ CallRuntime(Runtime::kToFastProperties, 1); |
| 1727 __ pop(rax); | 1860 __ pop(rax); |
| 1728 } | 1861 } |
| 1729 | 1862 |
| 1863 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); |
| 1730 context()->Plug(rax); | 1864 context()->Plug(rax); |
| 1731 } | 1865 } |
| 1732 | 1866 |
| 1733 | 1867 |
| 1734 void FullCodeGenerator::VisitProperty(Property* expr) { | 1868 void FullCodeGenerator::VisitProperty(Property* expr) { |
| 1735 Comment cmnt(masm_, "[ Property"); | 1869 Comment cmnt(masm_, "[ Property"); |
| 1736 Expression* key = expr->key(); | 1870 Expression* key = expr->key(); |
| 1737 | 1871 |
| 1738 if (key->IsPropertyName()) { | 1872 if (key->IsPropertyName()) { |
| 1739 VisitForAccumulatorValue(expr->obj()); | 1873 VisitForAccumulatorValue(expr->obj()); |
| (...skipping 20 matching lines...) |
| 1760 VisitForStackValue(args->at(i)); | 1894 VisitForStackValue(args->at(i)); |
| 1761 } | 1895 } |
| 1762 __ Move(rcx, name); | 1896 __ Move(rcx, name); |
| 1763 } | 1897 } |
| 1764 // Record source position for debugger. | 1898 // Record source position for debugger. |
| 1765 SetSourcePosition(expr->position()); | 1899 SetSourcePosition(expr->position()); |
| 1766 // Call the IC initialization code. | 1900 // Call the IC initialization code. |
| 1767 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP; | 1901 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP; |
| 1768 Handle<Code> ic = StubCache::ComputeCallInitialize(arg_count, in_loop); | 1902 Handle<Code> ic = StubCache::ComputeCallInitialize(arg_count, in_loop); |
| 1769 EmitCallIC(ic, mode); | 1903 EmitCallIC(ic, mode); |
| 1904 RecordJSReturnSite(expr); |
| 1770 // Restore context register. | 1905 // Restore context register. |
| 1771 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | 1906 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
| 1772 context()->Plug(rax); | 1907 context()->Plug(rax); |
| 1773 } | 1908 } |
| 1774 | 1909 |
| 1775 | 1910 |
| 1776 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr, | 1911 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr, |
| 1777 Expression* key, | 1912 Expression* key, |
| 1778 RelocInfo::Mode mode) { | 1913 RelocInfo::Mode mode) { |
| 1779 // Load the key. | 1914 // Load the key. |
| (...skipping 13 matching lines...) |
| 1793 VisitForStackValue(args->at(i)); | 1928 VisitForStackValue(args->at(i)); |
| 1794 } | 1929 } |
| 1795 } | 1930 } |
| 1796 // Record source position for debugger. | 1931 // Record source position for debugger. |
| 1797 SetSourcePosition(expr->position()); | 1932 SetSourcePosition(expr->position()); |
| 1798 // Call the IC initialization code. | 1933 // Call the IC initialization code. |
| 1799 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP; | 1934 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP; |
| 1800 Handle<Code> ic = StubCache::ComputeKeyedCallInitialize(arg_count, in_loop); | 1935 Handle<Code> ic = StubCache::ComputeKeyedCallInitialize(arg_count, in_loop); |
| 1801 __ movq(rcx, Operand(rsp, (arg_count + 1) * kPointerSize)); // Key. | 1936 __ movq(rcx, Operand(rsp, (arg_count + 1) * kPointerSize)); // Key. |
| 1802 EmitCallIC(ic, mode); | 1937 EmitCallIC(ic, mode); |
| 1938 RecordJSReturnSite(expr); |
| 1803 // Restore context register. | 1939 // Restore context register. |
| 1804 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | 1940 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
| 1805 context()->DropAndPlug(1, rax); // Drop the key still on the stack. | 1941 context()->DropAndPlug(1, rax); // Drop the key still on the stack. |
| 1806 } | 1942 } |
| 1807 | 1943 |
| 1808 | 1944 |
| 1809 void FullCodeGenerator::EmitCallWithStub(Call* expr) { | 1945 void FullCodeGenerator::EmitCallWithStub(Call* expr) { |
| 1810 // Code common for calls using the call stub. | 1946 // Code common for calls using the call stub. |
| 1811 ZoneList<Expression*>* args = expr->arguments(); | 1947 ZoneList<Expression*>* args = expr->arguments(); |
| 1812 int arg_count = args->length(); | 1948 int arg_count = args->length(); |
| 1813 { PreservePositionScope scope(masm()->positions_recorder()); | 1949 { PreservePositionScope scope(masm()->positions_recorder()); |
| 1814 for (int i = 0; i < arg_count; i++) { | 1950 for (int i = 0; i < arg_count; i++) { |
| 1815 VisitForStackValue(args->at(i)); | 1951 VisitForStackValue(args->at(i)); |
| 1816 } | 1952 } |
| 1817 } | 1953 } |
| 1818 // Record source position for debugger. | 1954 // Record source position for debugger. |
| 1819 SetSourcePosition(expr->position()); | 1955 SetSourcePosition(expr->position()); |
| 1820 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP; | 1956 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP; |
| 1821 CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE); | 1957 CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE); |
| 1822 __ CallStub(&stub); | 1958 __ CallStub(&stub); |
| 1959 RecordJSReturnSite(expr); |
| 1823 // Restore context register. | 1960 // Restore context register. |
| 1824 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | 1961 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
| 1825 // Discard the function left on TOS. | 1962 // Discard the function left on TOS. |
| 1826 context()->DropAndPlug(1, rax); | 1963 context()->DropAndPlug(1, rax); |
| 1827 } | 1964 } |
| 1828 | 1965 |
| 1829 | 1966 |
| 1830 void FullCodeGenerator::VisitCall(Call* expr) { | 1967 void FullCodeGenerator::VisitCall(Call* expr) { |
| 1968 #ifdef DEBUG |
| 1969 // We want to verify that RecordJSReturnSite gets called on all paths |
| 1970 // through this function. Avoid early returns. |
| 1971 expr->return_is_recorded_ = false; |
| 1972 #endif |
| 1973 |
| 1831 Comment cmnt(masm_, "[ Call"); | 1974 Comment cmnt(masm_, "[ Call"); |
| 1832 Expression* fun = expr->expression(); | 1975 Expression* fun = expr->expression(); |
| 1833 Variable* var = fun->AsVariableProxy()->AsVariable(); | 1976 Variable* var = fun->AsVariableProxy()->AsVariable(); |
| 1834 | 1977 |
| 1835 if (var != NULL && var->is_possibly_eval()) { | 1978 if (var != NULL && var->is_possibly_eval()) { |
| 1836 // In a call to eval, we first call %ResolvePossiblyDirectEval to | 1979 // In a call to eval, we first call %ResolvePossiblyDirectEval to |
| 1837 // resolve the function we need to call and the receiver of the | 1980 // resolve the function we need to call and the receiver of the |
| 1838 // call. The we call the resolved function using the given | 1981 // call. Then we call the resolved function using the given |
| 1839 // arguments. | 1982 // arguments. |
| 1840 ZoneList<Expression*>* args = expr->arguments(); | 1983 ZoneList<Expression*>* args = expr->arguments(); |
| 1841 int arg_count = args->length(); | 1984 int arg_count = args->length(); |
| 1842 { PreservePositionScope pos_scope(masm()->positions_recorder()); | 1985 { PreservePositionScope pos_scope(masm()->positions_recorder()); |
| 1843 VisitForStackValue(fun); | 1986 VisitForStackValue(fun); |
| 1844 __ PushRoot(Heap::kUndefinedValueRootIndex); // Reserved receiver slot. | 1987 __ PushRoot(Heap::kUndefinedValueRootIndex); // Reserved receiver slot. |
| 1845 | 1988 |
| 1846 // Push the arguments. | 1989 // Push the arguments. |
| 1847 for (int i = 0; i < arg_count; i++) { | 1990 for (int i = 0; i < arg_count; i++) { |
| 1848 VisitForStackValue(args->at(i)); | 1991 VisitForStackValue(args->at(i)); |
| 1849 } | 1992 } |
| 1850 | 1993 |
| 1851 // Push copy of the function - found below the arguments. | 1994 // Push copy of the function - found below the arguments. |
| 1852 __ push(Operand(rsp, (arg_count + 1) * kPointerSize)); | 1995 __ push(Operand(rsp, (arg_count + 1) * kPointerSize)); |
| 1853 | 1996 |
| 1854 // Push copy of the first argument or undefined if it doesn't exist. | 1997 // Push copy of the first argument or undefined if it doesn't exist. |
| 1855 if (arg_count > 0) { | 1998 if (arg_count > 0) { |
| 1856 __ push(Operand(rsp, arg_count * kPointerSize)); | 1999 __ push(Operand(rsp, arg_count * kPointerSize)); |
| 1857 } else { | 2000 } else { |
| 1858 __ PushRoot(Heap::kUndefinedValueRootIndex); | 2001 __ PushRoot(Heap::kUndefinedValueRootIndex); |
| 1859 } | 2002 } |
| 1860 | 2003 |
| 1861 // Push the receiver of the enclosing function and do runtime call. | 2004 // Push the receiver of the enclosing function and do runtime call. |
| 1862 __ push(Operand(rbp, (2 + scope()->num_parameters()) * kPointerSize)); | 2005 __ push(Operand(rbp, (2 + scope()->num_parameters()) * kPointerSize)); |
| 1863 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 3); | 2006 // Push the strict mode flag. |
| 2007 __ Push(Smi::FromInt(strict_mode_flag())); |
| 2008 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 4); |
| 1864 | 2009 |
| 1865 // The runtime call returns a pair of values in rax (function) and | 2010 // The runtime call returns a pair of values in rax (function) and |
| 1866 // rdx (receiver). Touch up the stack with the right values. | 2011 // rdx (receiver). Touch up the stack with the right values. |
| 1867 __ movq(Operand(rsp, (arg_count + 0) * kPointerSize), rdx); | 2012 __ movq(Operand(rsp, (arg_count + 0) * kPointerSize), rdx); |
| 1868 __ movq(Operand(rsp, (arg_count + 1) * kPointerSize), rax); | 2013 __ movq(Operand(rsp, (arg_count + 1) * kPointerSize), rax); |
| 1869 } | 2014 } |
| 1870 // Record source position for debugger. | 2015 // Record source position for debugger. |
| 1871 SetSourcePosition(expr->position()); | 2016 SetSourcePosition(expr->position()); |
| 1872 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP; | 2017 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP; |
| 1873 CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE); | 2018 CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE); |
| 1874 __ CallStub(&stub); | 2019 __ CallStub(&stub); |
| 2020 RecordJSReturnSite(expr); |
| 1875 // Restore context register. | 2021 // Restore context register. |
| 1876 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | 2022 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
| 1877 context()->DropAndPlug(1, rax); | 2023 context()->DropAndPlug(1, rax); |
| 1878 } else if (var != NULL && !var->is_this() && var->is_global()) { | 2024 } else if (var != NULL && !var->is_this() && var->is_global()) { |
| 1879 // Call to a global variable. | 2025 // Call to a global variable. |
| 1880 // Push global object as receiver for the call IC lookup. | 2026 // Push global object as receiver for the call IC lookup. |
| 1881 __ push(GlobalObjectOperand()); | 2027 __ push(GlobalObjectOperand()); |
| 1882 EmitCallWithIC(expr, var->name(), RelocInfo::CODE_TARGET_CONTEXT); | 2028 EmitCallWithIC(expr, var->name(), RelocInfo::CODE_TARGET_CONTEXT); |
| 1883 } else if (var != NULL && var->AsSlot() != NULL && | 2029 } else if (var != NULL && var->AsSlot() != NULL && |
| 1884 var->AsSlot()->type() == Slot::LOOKUP) { | 2030 var->AsSlot()->type() == Slot::LOOKUP) { |
| 1885 // Call to a lookup slot (dynamically introduced variable). | 2031 // Call to a lookup slot (dynamically introduced variable). |
| 1886 Label slow, done; | 2032 Label slow, done; |
| 1887 | 2033 |
| 1888 { PreservePositionScope scope(masm()->positions_recorder()); | 2034 { PreservePositionScope scope(masm()->positions_recorder()); |
| 1889 // Generate code for loading from variables potentially shadowed | 2035 // Generate code for loading from variables potentially shadowed |
| 1890 // by eval-introduced variables. | 2036 // by eval-introduced variables. |
| 1891 EmitDynamicLoadFromSlotFastCase(var->AsSlot(), | 2037 EmitDynamicLoadFromSlotFastCase(var->AsSlot(), |
| 1892 NOT_INSIDE_TYPEOF, | 2038 NOT_INSIDE_TYPEOF, |
| 1893 &slow, | 2039 &slow, |
| 1894 &done); | 2040 &done); |
| 1895 | 2041 |
| 1896 __ bind(&slow); | 2042 __ bind(&slow); |
| 1897 // Call the runtime to find the function to call (returned in rax) | 2043 } |
| 1898 // and the object holding it (returned in rdx). | 2044 // Call the runtime to find the function to call (returned in rax) |
| 1899 __ push(context_register()); | 2045 // and the object holding it (returned in rdx). |
| 1900 __ Push(var->name()); | 2046 __ push(context_register()); |
| 1901 __ CallRuntime(Runtime::kLoadContextSlot, 2); | 2047 __ Push(var->name()); |
| 1902 __ push(rax); // Function. | 2048 __ CallRuntime(Runtime::kLoadContextSlot, 2); |
| 1903 __ push(rdx); // Receiver. | 2049 __ push(rax); // Function. |
| 2050 __ push(rdx); // Receiver. |
| 1904 | 2051 |
| 1905 // If fast case code has been generated, emit code to push the | 2052 // If fast case code has been generated, emit code to push the |
| 1906 // function and receiver and have the slow path jump around this | 2053 // function and receiver and have the slow path jump around this |
| 1907 // code. | 2054 // code. |
| 1908 if (done.is_linked()) { | 2055 if (done.is_linked()) { |
| 1909 NearLabel call; | 2056 NearLabel call; |
| 1910 __ jmp(&call); | 2057 __ jmp(&call); |
| 1911 __ bind(&done); | 2058 __ bind(&done); |
| 1912 // Push function. | 2059 // Push function. |
| 1913 __ push(rax); | 2060 __ push(rax); |
| 1914 // Push global receiver. | 2061 // Push global receiver. |
| 1915 __ movq(rbx, GlobalObjectOperand()); | 2062 __ movq(rbx, GlobalObjectOperand()); |
| 1916 __ push(FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset)); | 2063 __ push(FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset)); |
| 1917 __ bind(&call); | 2064 __ bind(&call); |
| 1918 } | |
| 1919 } | 2065 } |
| 1920 | 2066 |
| 1921 EmitCallWithStub(expr); | 2067 EmitCallWithStub(expr); |
| 1922 | |
| 1923 } else if (fun->AsProperty() != NULL) { | 2068 } else if (fun->AsProperty() != NULL) { |
| 1924 // Call to an object property. | 2069 // Call to an object property. |
| 1925 Property* prop = fun->AsProperty(); | 2070 Property* prop = fun->AsProperty(); |
| 1926 Literal* key = prop->key()->AsLiteral(); | 2071 Literal* key = prop->key()->AsLiteral(); |
| 1927 if (key != NULL && key->handle()->IsSymbol()) { | 2072 if (key != NULL && key->handle()->IsSymbol()) { |
| 1928 // Call to a named property, use call IC. | 2073 // Call to a named property, use call IC. |
| 1929 { PreservePositionScope scope(masm()->positions_recorder()); | 2074 { PreservePositionScope scope(masm()->positions_recorder()); |
| 1930 VisitForStackValue(prop->obj()); | 2075 VisitForStackValue(prop->obj()); |
| 1931 } | 2076 } |
| 1932 EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET); | 2077 EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET); |
| 1933 } else { | 2078 } else { |
| 1934 // Call to a keyed property. | 2079 // Call to a keyed property. |
| 1935 // For a synthetic property use keyed load IC followed by function call, | 2080 // For a synthetic property use keyed load IC followed by function call, |
| 1936 // for a regular property use KeyedCallIC. | 2081 // for a regular property use EmitKeyedCallWithIC. |
| 1937 { PreservePositionScope scope(masm()->positions_recorder()); | |
| 1938 VisitForStackValue(prop->obj()); | |
| 1939 } | |
| 1940 if (prop->is_synthetic()) { | 2082 if (prop->is_synthetic()) { |
| 1941 { PreservePositionScope scope(masm()->positions_recorder()); | 2083 // Do not visit the object and key subexpressions (they are shared |
| 1942 VisitForAccumulatorValue(prop->key()); | 2084 // by all occurrences of the same rewritten parameter). |
| 1943 __ movq(rdx, Operand(rsp, 0)); | 2085 ASSERT(prop->obj()->AsVariableProxy() != NULL); |
| 1944 } | 2086 ASSERT(prop->obj()->AsVariableProxy()->var()->AsSlot() != NULL); |
| 2087 Slot* slot = prop->obj()->AsVariableProxy()->var()->AsSlot(); |
| 2088 MemOperand operand = EmitSlotSearch(slot, rdx); |
| 2089 __ movq(rdx, operand); |
| 2090 |
| 2091 ASSERT(prop->key()->AsLiteral() != NULL); |
| 2092 ASSERT(prop->key()->AsLiteral()->handle()->IsSmi()); |
| 2093 __ Move(rax, prop->key()->AsLiteral()->handle()); |
| 2094 |
| 1945 // Record source code position for IC call. | 2095 // Record source code position for IC call. |
| 1946 SetSourcePosition(prop->position()); | 2096 SetSourcePosition(prop->position()); |
| 2097 |
| 1947 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize)); | 2098 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize)); |
| 1948 EmitCallIC(ic, RelocInfo::CODE_TARGET); | 2099 EmitCallIC(ic, RelocInfo::CODE_TARGET); |
| 1949 // Pop receiver. | |
| 1950 __ pop(rbx); | |
| 1951 // Push result (function). | 2100 // Push result (function). |
| 1952 __ push(rax); | 2101 __ push(rax); |
| 1953 // Push receiver object on stack. | 2102 // Push global receiver. |
| 1954 __ movq(rcx, GlobalObjectOperand()); | 2103 __ movq(rcx, GlobalObjectOperand()); |
| 1955 __ push(FieldOperand(rcx, GlobalObject::kGlobalReceiverOffset)); | 2104 __ push(FieldOperand(rcx, GlobalObject::kGlobalReceiverOffset)); |
| 1956 EmitCallWithStub(expr); | 2105 EmitCallWithStub(expr); |
| 1957 } else { | 2106 } else { |
| 2107 { PreservePositionScope scope(masm()->positions_recorder()); |
| 2108 VisitForStackValue(prop->obj()); |
| 2109 } |
| 1958 EmitKeyedCallWithIC(expr, prop->key(), RelocInfo::CODE_TARGET); | 2110 EmitKeyedCallWithIC(expr, prop->key(), RelocInfo::CODE_TARGET); |
| 1959 } | 2111 } |
| 1960 } | 2112 } |
| 1961 } else { | 2113 } else { |
| 1962 // Call to some other expression. If the expression is an anonymous | 2114 // Call to some other expression. If the expression is an anonymous |
| 1963 // function literal not called in a loop, mark it as one that should | 2115 // function literal not called in a loop, mark it as one that should |
| 1964 // also use the fast code generator. | 2116 // also use the full code generator. |
| 1965 FunctionLiteral* lit = fun->AsFunctionLiteral(); | 2117 FunctionLiteral* lit = fun->AsFunctionLiteral(); |
| 1966 if (lit != NULL && | 2118 if (lit != NULL && |
| 1967 lit->name()->Equals(Heap::empty_string()) && | 2119 lit->name()->Equals(Heap::empty_string()) && |
| 1968 loop_depth() == 0) { | 2120 loop_depth() == 0) { |
| 1969 lit->set_try_full_codegen(true); | 2121 lit->set_try_full_codegen(true); |
| 1970 } | 2122 } |
| 1971 { PreservePositionScope scope(masm()->positions_recorder()); | 2123 { PreservePositionScope scope(masm()->positions_recorder()); |
| 1972 VisitForStackValue(fun); | 2124 VisitForStackValue(fun); |
| 1973 } | 2125 } |
| 1974 // Load global receiver object. | 2126 // Load global receiver object. |
| 1975 __ movq(rbx, GlobalObjectOperand()); | 2127 __ movq(rbx, GlobalObjectOperand()); |
| 1976 __ push(FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset)); | 2128 __ push(FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset)); |
| 1977 // Emit function call. | 2129 // Emit function call. |
| 1978 EmitCallWithStub(expr); | 2130 EmitCallWithStub(expr); |
| 1979 } | 2131 } |
| 2132 |
| 2133 #ifdef DEBUG |
| 2134 // RecordJSReturnSite should have been called. |
| 2135 ASSERT(expr->return_is_recorded_); |
| 2136 #endif |
| 1980 } | 2137 } |
| 1981 | 2138 |
| 1982 | 2139 |
| 1983 void FullCodeGenerator::VisitCallNew(CallNew* expr) { | 2140 void FullCodeGenerator::VisitCallNew(CallNew* expr) { |
| 1984 Comment cmnt(masm_, "[ CallNew"); | 2141 Comment cmnt(masm_, "[ CallNew"); |
| 1985 // According to ECMA-262, section 11.2.2, page 44, the function | 2142 // According to ECMA-262, section 11.2.2, page 44, the function |
| 1986 // expression in new calls must be evaluated before the | 2143 // expression in new calls must be evaluated before the |
| 1987 // arguments. | 2144 // arguments. |
| 1988 | 2145 |
| 1989 // Push constructor on the stack. If it's not a function it's used as | 2146 // Push constructor on the stack. If it's not a function it's used as |
| (...skipping 27 matching lines...) |
| 2017 | 2174 |
| 2018 VisitForAccumulatorValue(args->at(0)); | 2175 VisitForAccumulatorValue(args->at(0)); |
| 2019 | 2176 |
| 2020 Label materialize_true, materialize_false; | 2177 Label materialize_true, materialize_false; |
| 2021 Label* if_true = NULL; | 2178 Label* if_true = NULL; |
| 2022 Label* if_false = NULL; | 2179 Label* if_false = NULL; |
| 2023 Label* fall_through = NULL; | 2180 Label* fall_through = NULL; |
| 2024 context()->PrepareTest(&materialize_true, &materialize_false, | 2181 context()->PrepareTest(&materialize_true, &materialize_false, |
| 2025 &if_true, &if_false, &fall_through); | 2182 &if_true, &if_false, &fall_through); |
| 2026 | 2183 |
| 2184 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
| 2027 __ JumpIfSmi(rax, if_true); | 2185 __ JumpIfSmi(rax, if_true); |
| 2028 __ jmp(if_false); | 2186 __ jmp(if_false); |
| 2029 | 2187 |
| 2030 context()->Plug(if_true, if_false); | 2188 context()->Plug(if_true, if_false); |
| 2031 } | 2189 } |
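Note: the JumpIfSmi / CheckSmi tests used throughout these %_Is* predicates all reduce to inspecting the low tag bit of a tagged word. A minimal standalone sketch of that scheme, with illustrative constants rather than V8's real headers: on x64, smis keep the low 32 bits (including the tag bit) zero, while heap-object pointers carry a 1 in bit 0.

```cpp
#include <cassert>
#include <cstdint>

// Illustrative tagging constants; the real definitions live in V8's headers.
constexpr uint64_t kSmiTagMask = 1;  // only bit 0 is inspected
constexpr int kSmiShift = 32;        // x64 smis occupy the upper 32 bits

inline bool IsSmi(uint64_t tagged) { return (tagged & kSmiTagMask) == 0; }

inline uint64_t MakeSmi(int32_t value) {
  return static_cast<uint64_t>(static_cast<uint32_t>(value)) << kSmiShift;
}

int main() {
  uint64_t smi = MakeSmi(42);
  uint64_t heap_object = 0x7f0000001231;  // fake aligned pointer with tag bit 1 set
  assert(IsSmi(smi));
  assert(!IsSmi(heap_object));
  return 0;
}
```

EmitIsNonNegativeSmi additionally rejects negative values; how CheckNonNegativeSmi folds the sign test into the check is a MacroAssembler detail not shown here.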
| 2032 | 2190 |
| 2033 | 2191 |
| 2034 void FullCodeGenerator::EmitIsNonNegativeSmi(ZoneList<Expression*>* args) { | 2192 void FullCodeGenerator::EmitIsNonNegativeSmi(ZoneList<Expression*>* args) { |
| 2035 ASSERT(args->length() == 1); | 2193 ASSERT(args->length() == 1); |
| 2036 | 2194 |
| 2037 VisitForAccumulatorValue(args->at(0)); | 2195 VisitForAccumulatorValue(args->at(0)); |
| 2038 | 2196 |
| 2039 Label materialize_true, materialize_false; | 2197 Label materialize_true, materialize_false; |
| 2040 Label* if_true = NULL; | 2198 Label* if_true = NULL; |
| 2041 Label* if_false = NULL; | 2199 Label* if_false = NULL; |
| 2042 Label* fall_through = NULL; | 2200 Label* fall_through = NULL; |
| 2043 context()->PrepareTest(&materialize_true, &materialize_false, | 2201 context()->PrepareTest(&materialize_true, &materialize_false, |
| 2044 &if_true, &if_false, &fall_through); | 2202 &if_true, &if_false, &fall_through); |
| 2045 | 2203 |
| 2204 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
| 2046 Condition non_negative_smi = masm()->CheckNonNegativeSmi(rax); | 2205 Condition non_negative_smi = masm()->CheckNonNegativeSmi(rax); |
| 2047 Split(non_negative_smi, if_true, if_false, fall_through); | 2206 Split(non_negative_smi, if_true, if_false, fall_through); |
| 2048 | 2207 |
| 2049 context()->Plug(if_true, if_false); | 2208 context()->Plug(if_true, if_false); |
| 2050 } | 2209 } |
| 2051 | 2210 |
| 2052 | 2211 |
| 2053 void FullCodeGenerator::EmitIsObject(ZoneList<Expression*>* args) { | 2212 void FullCodeGenerator::EmitIsObject(ZoneList<Expression*>* args) { |
| 2054 ASSERT(args->length() == 1); | 2213 ASSERT(args->length() == 1); |
| 2055 | 2214 |
| (...skipping 11 matching lines...) |
| 2067 __ j(equal, if_true); | 2226 __ j(equal, if_true); |
| 2068 __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset)); | 2227 __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset)); |
| 2069 // Undetectable objects behave like undefined when tested with typeof. | 2228 // Undetectable objects behave like undefined when tested with typeof. |
| 2070 __ testb(FieldOperand(rbx, Map::kBitFieldOffset), | 2229 __ testb(FieldOperand(rbx, Map::kBitFieldOffset), |
| 2071 Immediate(1 << Map::kIsUndetectable)); | 2230 Immediate(1 << Map::kIsUndetectable)); |
| 2072 __ j(not_zero, if_false); | 2231 __ j(not_zero, if_false); |
| 2073 __ movzxbq(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset)); | 2232 __ movzxbq(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset)); |
| 2074 __ cmpq(rbx, Immediate(FIRST_JS_OBJECT_TYPE)); | 2233 __ cmpq(rbx, Immediate(FIRST_JS_OBJECT_TYPE)); |
| 2075 __ j(below, if_false); | 2234 __ j(below, if_false); |
| 2076 __ cmpq(rbx, Immediate(LAST_JS_OBJECT_TYPE)); | 2235 __ cmpq(rbx, Immediate(LAST_JS_OBJECT_TYPE)); |
| 2236 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
| 2077 Split(below_equal, if_true, if_false, fall_through); | 2237 Split(below_equal, if_true, if_false, fall_through); |
| 2078 | 2238 |
| 2079 context()->Plug(if_true, if_false); | 2239 context()->Plug(if_true, if_false); |
| 2080 } | 2240 } |
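Note: EmitIsObject, EmitIsSpecObject, EmitIsFunction, EmitIsArray and EmitIsRegExp all follow the same shape: load the object's map, compare its instance type against a single value or a range, and (for the object test) reject undetectable objects via a map bit field. A hedged sketch of that shape, ignoring the special-casing of null that EmitIsObject performs first; the enum values, bit position and field layout below are invented for illustration, not V8's objects.h.

```cpp
#include <cstdint>

// Invented values for illustration only.
enum InstanceType : uint8_t {
  FIRST_JS_OBJECT_TYPE = 0x80,
  JS_FUNCTION_TYPE     = 0x90,
  JS_ARRAY_TYPE        = 0x91,
  JS_REGEXP_TYPE       = 0x92,
  LAST_JS_OBJECT_TYPE  = 0xff,
};

constexpr uint8_t kIsUndetectable = 1 << 5;  // assumed bit position

struct Map {
  InstanceType instance_type;
  uint8_t bit_field;  // holds the undetectable bit, among others
};

struct HeapObject {
  const Map* map;
};

// "Is a JS object": undetectable objects are excluded because they must
// behave like undefined, then the instance type is range-checked.
inline bool IsJSObject(const HeapObject* obj) {
  if (obj->map->bit_field & kIsUndetectable) return false;
  uint8_t type = obj->map->instance_type;
  return type >= FIRST_JS_OBJECT_TYPE && type <= LAST_JS_OBJECT_TYPE;
}

// "Is a function/array/regexp": a plain equality test on the instance type.
inline bool IsJSFunction(const HeapObject* obj) {
  return obj->map->instance_type == JS_FUNCTION_TYPE;
}

int main() {
  Map fn_map{JS_FUNCTION_TYPE, 0};
  HeapObject fn{&fn_map};
  return (IsJSFunction(&fn) && IsJSObject(&fn)) ? 0 : 1;
}
```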
| 2081 | 2241 |
| 2082 | 2242 |
| 2083 void FullCodeGenerator::EmitIsSpecObject(ZoneList<Expression*>* args) { | 2243 void FullCodeGenerator::EmitIsSpecObject(ZoneList<Expression*>* args) { |
| 2084 ASSERT(args->length() == 1); | 2244 ASSERT(args->length() == 1); |
| 2085 | 2245 |
| 2086 VisitForAccumulatorValue(args->at(0)); | 2246 VisitForAccumulatorValue(args->at(0)); |
| 2087 | 2247 |
| 2088 Label materialize_true, materialize_false; | 2248 Label materialize_true, materialize_false; |
| 2089 Label* if_true = NULL; | 2249 Label* if_true = NULL; |
| 2090 Label* if_false = NULL; | 2250 Label* if_false = NULL; |
| 2091 Label* fall_through = NULL; | 2251 Label* fall_through = NULL; |
| 2092 context()->PrepareTest(&materialize_true, &materialize_false, | 2252 context()->PrepareTest(&materialize_true, &materialize_false, |
| 2093 &if_true, &if_false, &fall_through); | 2253 &if_true, &if_false, &fall_through); |
| 2094 | 2254 |
| 2095 __ JumpIfSmi(rax, if_false); | 2255 __ JumpIfSmi(rax, if_false); |
| 2096 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rbx); | 2256 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rbx); |
| 2257 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
| 2097 Split(above_equal, if_true, if_false, fall_through); | 2258 Split(above_equal, if_true, if_false, fall_through); |
| 2098 | 2259 |
| 2099 context()->Plug(if_true, if_false); | 2260 context()->Plug(if_true, if_false); |
| 2100 } | 2261 } |
| 2101 | 2262 |
| 2102 | 2263 |
| 2103 void FullCodeGenerator::EmitIsUndetectableObject(ZoneList<Expression*>* args) { | 2264 void FullCodeGenerator::EmitIsUndetectableObject(ZoneList<Expression*>* args) { |
| 2104 ASSERT(args->length() == 1); | 2265 ASSERT(args->length() == 1); |
| 2105 | 2266 |
| 2106 VisitForAccumulatorValue(args->at(0)); | 2267 VisitForAccumulatorValue(args->at(0)); |
| 2107 | 2268 |
| 2108 Label materialize_true, materialize_false; | 2269 Label materialize_true, materialize_false; |
| 2109 Label* if_true = NULL; | 2270 Label* if_true = NULL; |
| 2110 Label* if_false = NULL; | 2271 Label* if_false = NULL; |
| 2111 Label* fall_through = NULL; | 2272 Label* fall_through = NULL; |
| 2112 context()->PrepareTest(&materialize_true, &materialize_false, | 2273 context()->PrepareTest(&materialize_true, &materialize_false, |
| 2113 &if_true, &if_false, &fall_through); | 2274 &if_true, &if_false, &fall_through); |
| 2114 | 2275 |
| 2115 __ JumpIfSmi(rax, if_false); | 2276 __ JumpIfSmi(rax, if_false); |
| 2116 __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset)); | 2277 __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset)); |
| 2117 __ testb(FieldOperand(rbx, Map::kBitFieldOffset), | 2278 __ testb(FieldOperand(rbx, Map::kBitFieldOffset), |
| 2118 Immediate(1 << Map::kIsUndetectable)); | 2279 Immediate(1 << Map::kIsUndetectable)); |
| 2280 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
| 2119 Split(not_zero, if_true, if_false, fall_through); | 2281 Split(not_zero, if_true, if_false, fall_through); |
| 2120 | 2282 |
| 2121 context()->Plug(if_true, if_false); | 2283 context()->Plug(if_true, if_false); |
| 2122 } | 2284 } |
| 2123 | 2285 |
| 2124 | 2286 |
| 2125 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf( | 2287 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf( |
| 2126 ZoneList<Expression*>* args) { | 2288 ZoneList<Expression*>* args) { |
| 2127 ASSERT(args->length() == 1); | 2289 ASSERT(args->length() == 1); |
| 2128 | 2290 |
| 2129 VisitForAccumulatorValue(args->at(0)); | 2291 VisitForAccumulatorValue(args->at(0)); |
| 2130 | 2292 |
| 2131 Label materialize_true, materialize_false; | 2293 Label materialize_true, materialize_false; |
| 2132 Label* if_true = NULL; | 2294 Label* if_true = NULL; |
| 2133 Label* if_false = NULL; | 2295 Label* if_false = NULL; |
| 2134 Label* fall_through = NULL; | 2296 Label* fall_through = NULL; |
| 2135 context()->PrepareTest(&materialize_true, &materialize_false, | 2297 context()->PrepareTest(&materialize_true, &materialize_false, |
| 2136 &if_true, &if_false, &fall_through); | 2298 &if_true, &if_false, &fall_through); |
| 2137 | 2299 |
| 2138 // Just indicate false, as %_IsStringWrapperSafeForDefaultValueOf() is only | 2300 // Just indicate false, as %_IsStringWrapperSafeForDefaultValueOf() is only |
| 2139 // used in a few functions in runtime.js which should not normally be hit by | 2301 // used in a few functions in runtime.js which should not normally be hit by |
| 2140 // this compiler. | 2302 // this compiler. |
| 2303 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
| 2141 __ jmp(if_false); | 2304 __ jmp(if_false); |
| 2142 context()->Plug(if_true, if_false); | 2305 context()->Plug(if_true, if_false); |
| 2143 } | 2306 } |
| 2144 | 2307 |
| 2145 | 2308 |
| 2146 void FullCodeGenerator::EmitIsFunction(ZoneList<Expression*>* args) { | 2309 void FullCodeGenerator::EmitIsFunction(ZoneList<Expression*>* args) { |
| 2147 ASSERT(args->length() == 1); | 2310 ASSERT(args->length() == 1); |
| 2148 | 2311 |
| 2149 VisitForAccumulatorValue(args->at(0)); | 2312 VisitForAccumulatorValue(args->at(0)); |
| 2150 | 2313 |
| 2151 Label materialize_true, materialize_false; | 2314 Label materialize_true, materialize_false; |
| 2152 Label* if_true = NULL; | 2315 Label* if_true = NULL; |
| 2153 Label* if_false = NULL; | 2316 Label* if_false = NULL; |
| 2154 Label* fall_through = NULL; | 2317 Label* fall_through = NULL; |
| 2155 context()->PrepareTest(&materialize_true, &materialize_false, | 2318 context()->PrepareTest(&materialize_true, &materialize_false, |
| 2156 &if_true, &if_false, &fall_through); | 2319 &if_true, &if_false, &fall_through); |
| 2157 | 2320 |
| 2158 __ JumpIfSmi(rax, if_false); | 2321 __ JumpIfSmi(rax, if_false); |
| 2159 __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx); | 2322 __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx); |
| 2323 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
| 2160 Split(equal, if_true, if_false, fall_through); | 2324 Split(equal, if_true, if_false, fall_through); |
| 2161 | 2325 |
| 2162 context()->Plug(if_true, if_false); | 2326 context()->Plug(if_true, if_false); |
| 2163 } | 2327 } |
| 2164 | 2328 |
| 2165 | 2329 |
| 2166 void FullCodeGenerator::EmitIsArray(ZoneList<Expression*>* args) { | 2330 void FullCodeGenerator::EmitIsArray(ZoneList<Expression*>* args) { |
| 2167 ASSERT(args->length() == 1); | 2331 ASSERT(args->length() == 1); |
| 2168 | 2332 |
| 2169 VisitForAccumulatorValue(args->at(0)); | 2333 VisitForAccumulatorValue(args->at(0)); |
| 2170 | 2334 |
| 2171 Label materialize_true, materialize_false; | 2335 Label materialize_true, materialize_false; |
| 2172 Label* if_true = NULL; | 2336 Label* if_true = NULL; |
| 2173 Label* if_false = NULL; | 2337 Label* if_false = NULL; |
| 2174 Label* fall_through = NULL; | 2338 Label* fall_through = NULL; |
| 2175 context()->PrepareTest(&materialize_true, &materialize_false, | 2339 context()->PrepareTest(&materialize_true, &materialize_false, |
| 2176 &if_true, &if_false, &fall_through); | 2340 &if_true, &if_false, &fall_through); |
| 2177 | 2341 |
| 2178 __ JumpIfSmi(rax, if_false); | 2342 __ JumpIfSmi(rax, if_false); |
| 2179 __ CmpObjectType(rax, JS_ARRAY_TYPE, rbx); | 2343 __ CmpObjectType(rax, JS_ARRAY_TYPE, rbx); |
| 2344 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
| 2180 Split(equal, if_true, if_false, fall_through); | 2345 Split(equal, if_true, if_false, fall_through); |
| 2181 | 2346 |
| 2182 context()->Plug(if_true, if_false); | 2347 context()->Plug(if_true, if_false); |
| 2183 } | 2348 } |
| 2184 | 2349 |
| 2185 | 2350 |
| 2186 void FullCodeGenerator::EmitIsRegExp(ZoneList<Expression*>* args) { | 2351 void FullCodeGenerator::EmitIsRegExp(ZoneList<Expression*>* args) { |
| 2187 ASSERT(args->length() == 1); | 2352 ASSERT(args->length() == 1); |
| 2188 | 2353 |
| 2189 VisitForAccumulatorValue(args->at(0)); | 2354 VisitForAccumulatorValue(args->at(0)); |
| 2190 | 2355 |
| 2191 Label materialize_true, materialize_false; | 2356 Label materialize_true, materialize_false; |
| 2192 Label* if_true = NULL; | 2357 Label* if_true = NULL; |
| 2193 Label* if_false = NULL; | 2358 Label* if_false = NULL; |
| 2194 Label* fall_through = NULL; | 2359 Label* fall_through = NULL; |
| 2195 context()->PrepareTest(&materialize_true, &materialize_false, | 2360 context()->PrepareTest(&materialize_true, &materialize_false, |
| 2196 &if_true, &if_false, &fall_through); | 2361 &if_true, &if_false, &fall_through); |
| 2197 | 2362 |
| 2198 __ JumpIfSmi(rax, if_false); | 2363 __ JumpIfSmi(rax, if_false); |
| 2199 __ CmpObjectType(rax, JS_REGEXP_TYPE, rbx); | 2364 __ CmpObjectType(rax, JS_REGEXP_TYPE, rbx); |
| 2365 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
| 2200 Split(equal, if_true, if_false, fall_through); | 2366 Split(equal, if_true, if_false, fall_through); |
| 2201 | 2367 |
| 2202 context()->Plug(if_true, if_false); | 2368 context()->Plug(if_true, if_false); |
| 2203 } | 2369 } |
| 2204 | 2370 |
| 2205 | 2371 |
| 2206 | 2372 |
| 2207 void FullCodeGenerator::EmitIsConstructCall(ZoneList<Expression*>* args) { | 2373 void FullCodeGenerator::EmitIsConstructCall(ZoneList<Expression*>* args) { |
| 2208 ASSERT(args->length() == 0); | 2374 ASSERT(args->length() == 0); |
| 2209 | 2375 |
| (...skipping 11 matching lines...) |
| 2221 Label check_frame_marker; | 2387 Label check_frame_marker; |
| 2222 __ SmiCompare(Operand(rax, StandardFrameConstants::kContextOffset), | 2388 __ SmiCompare(Operand(rax, StandardFrameConstants::kContextOffset), |
| 2223 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); | 2389 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); |
| 2224 __ j(not_equal, &check_frame_marker); | 2390 __ j(not_equal, &check_frame_marker); |
| 2225 __ movq(rax, Operand(rax, StandardFrameConstants::kCallerFPOffset)); | 2391 __ movq(rax, Operand(rax, StandardFrameConstants::kCallerFPOffset)); |
| 2226 | 2392 |
| 2227 // Check the marker in the calling frame. | 2393 // Check the marker in the calling frame. |
| 2228 __ bind(&check_frame_marker); | 2394 __ bind(&check_frame_marker); |
| 2229 __ SmiCompare(Operand(rax, StandardFrameConstants::kMarkerOffset), | 2395 __ SmiCompare(Operand(rax, StandardFrameConstants::kMarkerOffset), |
| 2230 Smi::FromInt(StackFrame::CONSTRUCT)); | 2396 Smi::FromInt(StackFrame::CONSTRUCT)); |
| 2397 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
| 2231 Split(equal, if_true, if_false, fall_through); | 2398 Split(equal, if_true, if_false, fall_through); |
| 2232 | 2399 |
| 2233 context()->Plug(if_true, if_false); | 2400 context()->Plug(if_true, if_false); |
| 2234 } | 2401 } |
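Note: EmitIsConstructCall answers the question purely from the stack. It loads the caller's frame pointer, skips one frame if the caller turns out to be an arguments adaptor (detected via the context slot), and then compares that frame's marker word against the CONSTRUCT marker. A simplified sketch of the walk; the struct layout and marker values are assumptions for illustration, not V8's StandardFrameConstants.

```cpp
#include <cassert>
#include <cstdint>

// Assumed frame layout and markers, for illustration only.
enum FrameMarker : intptr_t { NONE = 0, ARGUMENTS_ADAPTOR = 1, CONSTRUCT = 2 };

struct Frame {
  Frame* caller_fp;       // saved frame pointer of the calling frame
  intptr_t context_slot;  // holds ARGUMENTS_ADAPTOR for adaptor frames
  intptr_t marker_slot;   // identifies special frame types
};

bool CalledAsConstructor(const Frame* current) {
  const Frame* caller = current->caller_fp;
  // Skip up one more frame if the immediate caller is an arguments adaptor.
  if (caller->context_slot == ARGUMENTS_ADAPTOR) caller = caller->caller_fp;
  return caller->marker_slot == CONSTRUCT;
}

int main() {
  Frame construct_frame{nullptr, 0, CONSTRUCT};
  Frame adaptor{&construct_frame, ARGUMENTS_ADAPTOR, NONE};
  Frame callee{&adaptor, 0, NONE};
  assert(CalledAsConstructor(&callee));
  return 0;
}
```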
| 2235 | 2402 |
| 2236 | 2403 |
| 2237 void FullCodeGenerator::EmitObjectEquals(ZoneList<Expression*>* args) { | 2404 void FullCodeGenerator::EmitObjectEquals(ZoneList<Expression*>* args) { |
| 2238 ASSERT(args->length() == 2); | 2405 ASSERT(args->length() == 2); |
| 2239 | 2406 |
| 2240 // Load the two objects into registers and perform the comparison. | 2407 // Load the two objects into registers and perform the comparison. |
| 2241 VisitForStackValue(args->at(0)); | 2408 VisitForStackValue(args->at(0)); |
| 2242 VisitForAccumulatorValue(args->at(1)); | 2409 VisitForAccumulatorValue(args->at(1)); |
| 2243 | 2410 |
| 2244 Label materialize_true, materialize_false; | 2411 Label materialize_true, materialize_false; |
| 2245 Label* if_true = NULL; | 2412 Label* if_true = NULL; |
| 2246 Label* if_false = NULL; | 2413 Label* if_false = NULL; |
| 2247 Label* fall_through = NULL; | 2414 Label* fall_through = NULL; |
| 2248 context()->PrepareTest(&materialize_true, &materialize_false, | 2415 context()->PrepareTest(&materialize_true, &materialize_false, |
| 2249 &if_true, &if_false, &fall_through); | 2416 &if_true, &if_false, &fall_through); |
| 2250 | 2417 |
| 2251 __ pop(rbx); | 2418 __ pop(rbx); |
| 2252 __ cmpq(rax, rbx); | 2419 __ cmpq(rax, rbx); |
| 2420 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
| 2253 Split(equal, if_true, if_false, fall_through); | 2421 Split(equal, if_true, if_false, fall_through); |
| 2254 | 2422 |
| 2255 context()->Plug(if_true, if_false); | 2423 context()->Plug(if_true, if_false); |
| 2256 } | 2424 } |
| 2257 | 2425 |
| 2258 | 2426 |
| 2259 void FullCodeGenerator::EmitArguments(ZoneList<Expression*>* args) { | 2427 void FullCodeGenerator::EmitArguments(ZoneList<Expression*>* args) { |
| 2260 ASSERT(args->length() == 1); | 2428 ASSERT(args->length() == 1); |
| 2261 | 2429 |
| 2262 // ArgumentsAccessStub expects the key in rdx and the formal | 2430 // ArgumentsAccessStub expects the key in rdx and the formal |
| (...skipping 553 matching lines...) |
| 2816 | 2984 |
| 2817 Label materialize_true, materialize_false; | 2985 Label materialize_true, materialize_false; |
| 2818 Label* if_true = NULL; | 2986 Label* if_true = NULL; |
| 2819 Label* if_false = NULL; | 2987 Label* if_false = NULL; |
| 2820 Label* fall_through = NULL; | 2988 Label* fall_through = NULL; |
| 2821 context()->PrepareTest(&materialize_true, &materialize_false, | 2989 context()->PrepareTest(&materialize_true, &materialize_false, |
| 2822 &if_true, &if_false, &fall_through); | 2990 &if_true, &if_false, &fall_through); |
| 2823 | 2991 |
| 2824 __ testl(FieldOperand(rax, String::kHashFieldOffset), | 2992 __ testl(FieldOperand(rax, String::kHashFieldOffset), |
| 2825 Immediate(String::kContainsCachedArrayIndexMask)); | 2993 Immediate(String::kContainsCachedArrayIndexMask)); |
| 2994 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
| 2826 __ j(zero, if_true); | 2995 __ j(zero, if_true); |
| 2827 __ jmp(if_false); | 2996 __ jmp(if_false); |
| 2828 | 2997 |
| 2829 context()->Plug(if_true, if_false); | 2998 context()->Plug(if_true, if_false); |
| 2830 } | 2999 } |
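Note: the %_HasCachedArrayIndex test compiles down to a single mask test on the string's hash field: when the reserved bits are all clear, the field is carrying a cached array index rather than a plain hash, which is why the code above jumps to if_true on zero. A small sketch under assumed constants; the real mask and field layout belong to V8's String class.

```cpp
#include <cassert>
#include <cstdint>

// Assumed mask for illustration; V8 defines the real hash-field layout.
constexpr uint32_t kContainsCachedArrayIndexMask = 3u << 30;

inline bool HasCachedArrayIndex(uint32_t hash_field) {
  // All reserved bits clear <=> the field stores a cached array index.
  return (hash_field & kContainsCachedArrayIndexMask) == 0;
}

int main() {
  assert(HasCachedArrayIndex(0x0000002a));   // low bits only: index cached
  assert(!HasCachedArrayIndex(0xc000002a));  // reserved bits set: plain hash
  return 0;
}
```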
| 2831 | 3000 |
| 2832 | 3001 |
| 2833 void FullCodeGenerator::EmitGetCachedArrayIndex(ZoneList<Expression*>* args) { | 3002 void FullCodeGenerator::EmitGetCachedArrayIndex(ZoneList<Expression*>* args) { |
| 2834 ASSERT(args->length() == 1); | 3003 ASSERT(args->length() == 1); |
| 2835 | 3004 |
| (...skipping 49 matching lines...) |
| 2885 context()->Plug(rax); | 3054 context()->Plug(rax); |
| 2886 } | 3055 } |
| 2887 | 3056 |
| 2888 | 3057 |
| 2889 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { | 3058 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { |
| 2890 switch (expr->op()) { | 3059 switch (expr->op()) { |
| 2891 case Token::DELETE: { | 3060 case Token::DELETE: { |
| 2892 Comment cmnt(masm_, "[ UnaryOperation (DELETE)"); | 3061 Comment cmnt(masm_, "[ UnaryOperation (DELETE)"); |
| 2893 Property* prop = expr->expression()->AsProperty(); | 3062 Property* prop = expr->expression()->AsProperty(); |
| 2894 Variable* var = expr->expression()->AsVariableProxy()->AsVariable(); | 3063 Variable* var = expr->expression()->AsVariableProxy()->AsVariable(); |
| 2895 if (prop == NULL && var == NULL) { | 3064 |
| 3065 if (prop != NULL) { |
| 3066 if (prop->is_synthetic()) { |
| 3067 // Result of deleting parameters is false, even when they rewrite |
| 3068 // to accesses on the arguments object. |
| 3069 context()->Plug(false); |
| 3070 } else { |
| 3071 VisitForStackValue(prop->obj()); |
| 3072 VisitForStackValue(prop->key()); |
| 3073 __ Push(Smi::FromInt(strict_mode_flag())); |
| 3074 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); |
| 3075 context()->Plug(rax); |
| 3076 } |
| 3077 } else if (var != NULL) { |
| 3078 // Delete of an unqualified identifier is disallowed in strict mode |
| 3079 // so this code can only be reached in non-strict mode. |
| 3080 ASSERT(strict_mode_flag() == kNonStrictMode); |
| 3081 if (var->is_global()) { |
| 3082 __ push(GlobalObjectOperand()); |
| 3083 __ Push(var->name()); |
| 3084 __ Push(Smi::FromInt(kNonStrictMode)); |
| 3085 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); |
| 3086 context()->Plug(rax); |
| 3087 } else if (var->AsSlot() != NULL && |
| 3088 var->AsSlot()->type() != Slot::LOOKUP) { |
| 3089 // Result of deleting non-global, non-dynamic variables is false. |
| 3090 // The subexpression does not have side effects. |
| 3091 context()->Plug(false); |
| 3092 } else { |
| 3093 // Non-global variable. Call the runtime to try to delete from the |
| 3094 // context where the variable was introduced. |
| 3095 __ push(context_register()); |
| 3096 __ Push(var->name()); |
| 3097 __ CallRuntime(Runtime::kDeleteContextSlot, 2); |
| 3098 context()->Plug(rax); |
| 3099 } |
| 3100 } else { |
| 2896 // Result of deleting non-property, non-variable reference is true. | 3101 // Result of deleting non-property, non-variable reference is true. |
| 2897 // The subexpression may have side effects. | 3102 // The subexpression may have side effects. |
| 2898 VisitForEffect(expr->expression()); | 3103 VisitForEffect(expr->expression()); |
| 2899 context()->Plug(true); | 3104 context()->Plug(true); |
| 2900 } else if (var != NULL && | |
| 2901 !var->is_global() && | |
| 2902 var->AsSlot() != NULL && | |
| 2903 var->AsSlot()->type() != Slot::LOOKUP) { | |
| 2904 // Result of deleting non-global, non-dynamic variables is false. | |
| 2905 // The subexpression does not have side effects. | |
| 2906 context()->Plug(false); | |
| 2907 } else { | |
| 2908 // Property or variable reference. Call the delete builtin with | |
| 2909 // object and property name as arguments. | |
| 2910 if (prop != NULL) { | |
| 2911 VisitForStackValue(prop->obj()); | |
| 2912 VisitForStackValue(prop->key()); | |
| 2913 } else if (var->is_global()) { | |
| 2914 __ push(GlobalObjectOperand()); | |
| 2915 __ Push(var->name()); | |
| 2916 } else { | |
| 2917 // Non-global variable. Call the runtime to look up the context | |
| 2918 // where the variable was introduced. | |
| 2919 __ push(context_register()); | |
| 2920 __ Push(var->name()); | |
| 2921 __ CallRuntime(Runtime::kLookupContext, 2); | |
| 2922 __ push(rax); | |
| 2923 __ Push(var->name()); | |
| 2924 } | |
| 2925 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); | |
| 2926 context()->Plug(rax); | |
| 2927 } | 3105 } |
| 2928 break; | 3106 break; |
| 2929 } | 3107 } |
| 2930 | 3108 |
| 2931 case Token::VOID: { | 3109 case Token::VOID: { |
| 2932 Comment cmnt(masm_, "[ UnaryOperation (VOID)"); | 3110 Comment cmnt(masm_, "[ UnaryOperation (VOID)"); |
| 2933 VisitForEffect(expr->expression()); | 3111 VisitForEffect(expr->expression()); |
| 2934 context()->Plug(Heap::kUndefinedValueRootIndex); | 3112 context()->Plug(Heap::kUndefinedValueRootIndex); |
| 2935 break; | 3113 break; |
| 2936 } | 3114 } |
| 2937 | 3115 |
| 2938 case Token::NOT: { | 3116 case Token::NOT: { |
| 2939 Comment cmnt(masm_, "[ UnaryOperation (NOT)"); | 3117 Comment cmnt(masm_, "[ UnaryOperation (NOT)"); |
| 2940 Label materialize_true, materialize_false; | 3118 Label materialize_true, materialize_false; |
| 2941 Label* if_true = NULL; | 3119 Label* if_true = NULL; |
| 2942 Label* if_false = NULL; | 3120 Label* if_false = NULL; |
| 2943 Label* fall_through = NULL; | 3121 Label* fall_through = NULL; |
| 2944 // Notice that the labels are swapped. | 3122 // Notice that the labels are swapped. |
| 2945 context()->PrepareTest(&materialize_true, &materialize_false, | 3123 context()->PrepareTest(&materialize_true, &materialize_false, |
| 2946 &if_false, &if_true, &fall_through); | 3124 &if_false, &if_true, &fall_through); |
| 3125 if (context()->IsTest()) ForwardBailoutToChild(expr); |
| 2947 VisitForControl(expr->expression(), if_true, if_false, fall_through); | 3126 VisitForControl(expr->expression(), if_true, if_false, fall_through); |
| 2948 context()->Plug(if_false, if_true); // Labels swapped. | 3127 context()->Plug(if_false, if_true); // Labels swapped. |
| 2949 break; | 3128 break; |
| 2950 } | 3129 } |
| 2951 | 3130 |
| 2952 case Token::TYPEOF: { | 3131 case Token::TYPEOF: { |
| 2953 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)"); | 3132 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)"); |
| 2954 { StackValueContext context(this); | 3133 { StackValueContext context(this); |
| 2955 VisitForTypeofValue(expr->expression()); | 3134 VisitForTypeofValue(expr->expression()); |
| 2956 } | 3135 } |
| 2957 __ CallRuntime(Runtime::kTypeof, 1); | 3136 __ CallRuntime(Runtime::kTypeof, 1); |
| 2958 context()->Plug(rax); | 3137 context()->Plug(rax); |
| 2959 break; | 3138 break; |
| 2960 } | 3139 } |
| 2961 | 3140 |
| 2962 case Token::ADD: { | 3141 case Token::ADD: { |
| 2963 Comment cmt(masm_, "[ UnaryOperation (ADD)"); | 3142 Comment cmt(masm_, "[ UnaryOperation (ADD)"); |
| 2964 VisitForAccumulatorValue(expr->expression()); | 3143 VisitForAccumulatorValue(expr->expression()); |
| 2965 Label no_conversion; | 3144 Label no_conversion; |
| 2966 Condition is_smi = masm_->CheckSmi(result_register()); | 3145 Condition is_smi = masm_->CheckSmi(result_register()); |
| 2967 __ j(is_smi, &no_conversion); | 3146 __ j(is_smi, &no_conversion); |
| 2968 __ push(result_register()); | 3147 ToNumberStub convert_stub; |
| 2969 __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION); | 3148 __ CallStub(&convert_stub); |
| 2970 __ bind(&no_conversion); | 3149 __ bind(&no_conversion); |
| 2971 context()->Plug(result_register()); | 3150 context()->Plug(result_register()); |
| 2972 break; | 3151 break; |
| 2973 } | 3152 } |
| 2974 | 3153 |
| 2975 case Token::SUB: { | 3154 case Token::SUB: { |
| 2976 Comment cmt(masm_, "[ UnaryOperation (SUB)"); | 3155 Comment cmt(masm_, "[ UnaryOperation (SUB)"); |
| 2977 bool can_overwrite = expr->expression()->ResultOverwriteAllowed(); | 3156 bool can_overwrite = expr->expression()->ResultOverwriteAllowed(); |
| 2978 UnaryOverwriteMode overwrite = | 3157 UnaryOverwriteMode overwrite = |
| 2979 can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE; | 3158 can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE; |
| (...skipping 70 matching lines...) |
| 3050 } else { | 3229 } else { |
| 3051 // Reserve space for result of postfix operation. | 3230 // Reserve space for result of postfix operation. |
| 3052 if (expr->is_postfix() && !context()->IsEffect()) { | 3231 if (expr->is_postfix() && !context()->IsEffect()) { |
| 3053 __ Push(Smi::FromInt(0)); | 3232 __ Push(Smi::FromInt(0)); |
| 3054 } | 3233 } |
| 3055 if (assign_type == NAMED_PROPERTY) { | 3234 if (assign_type == NAMED_PROPERTY) { |
| 3056 VisitForAccumulatorValue(prop->obj()); | 3235 VisitForAccumulatorValue(prop->obj()); |
| 3057 __ push(rax); // Copy of receiver, needed for later store. | 3236 __ push(rax); // Copy of receiver, needed for later store. |
| 3058 EmitNamedPropertyLoad(prop); | 3237 EmitNamedPropertyLoad(prop); |
| 3059 } else { | 3238 } else { |
| 3060 VisitForStackValue(prop->obj()); | 3239 if (prop->is_arguments_access()) { |
| 3061 VisitForAccumulatorValue(prop->key()); | 3240 VariableProxy* obj_proxy = prop->obj()->AsVariableProxy(); |
| 3241 MemOperand slot_operand = |
| 3242 EmitSlotSearch(obj_proxy->var()->AsSlot(), rcx); |
| 3243 __ push(slot_operand); |
| 3244 __ Move(rax, prop->key()->AsLiteral()->handle()); |
| 3245 } else { |
| 3246 VisitForStackValue(prop->obj()); |
| 3247 VisitForAccumulatorValue(prop->key()); |
| 3248 } |
| 3062 __ movq(rdx, Operand(rsp, 0)); // Leave receiver on stack | 3249 __ movq(rdx, Operand(rsp, 0)); // Leave receiver on stack |
| 3063 __ push(rax); // Copy of key, needed for later store. | 3250 __ push(rax); // Copy of key, needed for later store. |
| 3064 EmitKeyedPropertyLoad(prop); | 3251 EmitKeyedPropertyLoad(prop); |
| 3065 } | 3252 } |
| 3066 } | 3253 } |
| 3067 | 3254 |
| 3255 // We need a second deoptimization point after loading the value |
| 3256 // in case evaluating the property load may have a side effect. |
| 3257 PrepareForBailout(expr->increment(), TOS_REG); |
| 3258 |
| 3068 // Call ToNumber only if operand is not a smi. | 3259 // Call ToNumber only if operand is not a smi. |
| 3069 NearLabel no_conversion; | 3260 NearLabel no_conversion; |
| 3070 Condition is_smi; | 3261 Condition is_smi; |
| 3071 is_smi = masm_->CheckSmi(rax); | 3262 is_smi = masm_->CheckSmi(rax); |
| 3072 __ j(is_smi, &no_conversion); | 3263 __ j(is_smi, &no_conversion); |
| 3073 __ push(rax); | 3264 ToNumberStub convert_stub; |
| 3074 __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION); | 3265 __ CallStub(&convert_stub); |
| 3075 __ bind(&no_conversion); | 3266 __ bind(&no_conversion); |
| 3076 | 3267 |
| 3077 // Save result for postfix expressions. | 3268 // Save result for postfix expressions. |
| 3078 if (expr->is_postfix()) { | 3269 if (expr->is_postfix()) { |
| 3079 if (!context()->IsEffect()) { | 3270 if (!context()->IsEffect()) { |
| 3080 // Save the result on the stack. If we have a named or keyed property | 3271 // Save the result on the stack. If we have a named or keyed property |
| 3081 // we store the result under the receiver that is currently on top | 3272 // we store the result under the receiver that is currently on top |
| 3082 // of the stack. | 3273 // of the stack. |
| 3083 switch (assign_type) { | 3274 switch (assign_type) { |
| 3084 case VARIABLE: | 3275 case VARIABLE: |
| 3085 __ push(rax); | 3276 __ push(rax); |
| 3086 break; | 3277 break; |
| 3087 case NAMED_PROPERTY: | 3278 case NAMED_PROPERTY: |
| 3088 __ movq(Operand(rsp, kPointerSize), rax); | 3279 __ movq(Operand(rsp, kPointerSize), rax); |
| 3089 break; | 3280 break; |
| 3090 case KEYED_PROPERTY: | 3281 case KEYED_PROPERTY: |
| 3091 __ movq(Operand(rsp, 2 * kPointerSize), rax); | 3282 __ movq(Operand(rsp, 2 * kPointerSize), rax); |
| 3092 break; | 3283 break; |
| 3093 } | 3284 } |
| 3094 } | 3285 } |
| 3095 } | 3286 } |
| 3096 | 3287 |
| 3097 // Inline smi case if we are in a loop. | 3288 // Inline smi case if we are in a loop. |
| 3098 Label stub_call, done; | 3289 NearLabel stub_call, done; |
| 3290 JumpPatchSite patch_site(masm_); |
| 3291 |
| 3099 if (ShouldInlineSmiCase(expr->op())) { | 3292 if (ShouldInlineSmiCase(expr->op())) { |
| 3100 if (expr->op() == Token::INC) { | 3293 if (expr->op() == Token::INC) { |
| 3101 __ SmiAddConstant(rax, rax, Smi::FromInt(1)); | 3294 __ SmiAddConstant(rax, rax, Smi::FromInt(1)); |
| 3102 } else { | 3295 } else { |
| 3103 __ SmiSubConstant(rax, rax, Smi::FromInt(1)); | 3296 __ SmiSubConstant(rax, rax, Smi::FromInt(1)); |
| 3104 } | 3297 } |
| 3105 __ j(overflow, &stub_call); | 3298 __ j(overflow, &stub_call); |
| 3106 // We could eliminate this smi check if we split the code at | 3299 // We could eliminate this smi check if we split the code at |
| 3107 // the first smi check before calling ToNumber. | 3300 // the first smi check before calling ToNumber. |
| 3108 is_smi = masm_->CheckSmi(rax); | 3301 patch_site.EmitJumpIfSmi(rax, &done); |
| 3109 __ j(is_smi, &done); | 3302 |
| 3110 __ bind(&stub_call); | 3303 __ bind(&stub_call); |
| 3111 // Call stub. Undo operation first. | 3304 // Call stub. Undo operation first. |
| 3112 if (expr->op() == Token::INC) { | 3305 if (expr->op() == Token::INC) { |
| 3113 __ SmiSubConstant(rax, rax, Smi::FromInt(1)); | 3306 __ SmiSubConstant(rax, rax, Smi::FromInt(1)); |
| 3114 } else { | 3307 } else { |
| 3115 __ SmiAddConstant(rax, rax, Smi::FromInt(1)); | 3308 __ SmiAddConstant(rax, rax, Smi::FromInt(1)); |
| 3116 } | 3309 } |
| 3117 } | 3310 } |
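Note: the inline fast path above adds or subtracts one directly on the tagged smi and only falls back to the TypeRecordingBinaryOpStub when the arithmetic overflows or the value turns out not to be a smi. The same bail-on-overflow shape in portable C++, as a sketch of the idea rather than of V8's smi arithmetic (uses the GCC/Clang overflow builtin):

```cpp
#include <cassert>
#include <cstdint>

// Returns true and writes the result on the fast path; returns false when the
// caller must take the generic (stub) path instead.
bool TryIncrementSmi(int32_t value, int32_t* result) {
  return !__builtin_add_overflow(value, 1, result);
}

int main() {
  int32_t out;
  assert(TryIncrementSmi(41, &out) && out == 42);
  assert(!TryIncrementSmi(INT32_MAX, &out));  // overflow: take the stub path
  return 0;
}
```

In the generated code the add happens in place on rax, which is why the slow path first undoes the +1/-1 before handing the original operand to the stub.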
| 3118 | 3311 |
| 3119 // Record position before stub call. | 3312 // Record position before stub call. |
| 3120 SetSourcePosition(expr->position()); | 3313 SetSourcePosition(expr->position()); |
| 3121 | 3314 |
| 3122 // Call stub for +1/-1. | 3315 // Call stub for +1/-1. |
| 3123 GenericBinaryOpStub stub(expr->binary_op(), | 3316 TypeRecordingBinaryOpStub stub(expr->binary_op(), NO_OVERWRITE); |
| 3124 NO_OVERWRITE, | 3317 if (expr->op() == Token::INC) { |
| 3125 NO_GENERIC_BINARY_FLAGS); | 3318 __ Move(rdx, Smi::FromInt(1)); |
| 3126 stub.GenerateCall(masm_, rax, Smi::FromInt(1)); | 3319 } else { |
| 3320 __ movq(rdx, rax); |
| 3321 __ Move(rax, Smi::FromInt(1)); |
| 3322 } |
| 3323 EmitCallIC(stub.GetCode(), &patch_site); |
| 3127 __ bind(&done); | 3324 __ bind(&done); |
| 3128 | 3325 |
| 3129 // Store the value returned in rax. | 3326 // Store the value returned in rax. |
| 3130 switch (assign_type) { | 3327 switch (assign_type) { |
| 3131 case VARIABLE: | 3328 case VARIABLE: |
| 3132 if (expr->is_postfix()) { | 3329 if (expr->is_postfix()) { |
| 3133 // Perform the assignment as if via '='. | 3330 // Perform the assignment as if via '='. |
| 3134 { EffectContext context(this); | 3331 { EffectContext context(this); |
| 3135 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), | 3332 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), |
| 3136 Token::ASSIGN); | 3333 Token::ASSIGN); |
| 3334 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); |
| 3137 context.Plug(rax); | 3335 context.Plug(rax); |
| 3138 } | 3336 } |
| 3139 // For all contexts except kEffect: We have the result on | 3337 // For all contexts except kEffect: We have the result on |
| 3140 // top of the stack. | 3338 // top of the stack. |
| 3141 if (!context()->IsEffect()) { | 3339 if (!context()->IsEffect()) { |
| 3142 context()->PlugTOS(); | 3340 context()->PlugTOS(); |
| 3143 } | 3341 } |
| 3144 } else { | 3342 } else { |
| 3145 // Perform the assignment as if via '='. | 3343 // Perform the assignment as if via '='. |
| 3146 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), | 3344 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), |
| 3147 Token::ASSIGN); | 3345 Token::ASSIGN); |
| 3346 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); |
| 3148 context()->Plug(rax); | 3347 context()->Plug(rax); |
| 3149 } | 3348 } |
| 3150 break; | 3349 break; |
| 3151 case NAMED_PROPERTY: { | 3350 case NAMED_PROPERTY: { |
| 3152 __ Move(rcx, prop->key()->AsLiteral()->handle()); | 3351 __ Move(rcx, prop->key()->AsLiteral()->handle()); |
| 3153 __ pop(rdx); | 3352 __ pop(rdx); |
| 3154 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize)); | 3353 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize)); |
| 3155 EmitCallIC(ic, RelocInfo::CODE_TARGET); | 3354 EmitCallIC(ic, RelocInfo::CODE_TARGET); |
| 3355 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); |
| 3156 if (expr->is_postfix()) { | 3356 if (expr->is_postfix()) { |
| 3157 if (!context()->IsEffect()) { | 3357 if (!context()->IsEffect()) { |
| 3158 context()->PlugTOS(); | 3358 context()->PlugTOS(); |
| 3159 } | 3359 } |
| 3160 } else { | 3360 } else { |
| 3161 context()->Plug(rax); | 3361 context()->Plug(rax); |
| 3162 } | 3362 } |
| 3163 break; | 3363 break; |
| 3164 } | 3364 } |
| 3165 case KEYED_PROPERTY: { | 3365 case KEYED_PROPERTY: { |
| 3166 __ pop(rcx); | 3366 __ pop(rcx); |
| 3167 __ pop(rdx); | 3367 __ pop(rdx); |
| 3168 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize)); | 3368 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize)); |
| 3169 EmitCallIC(ic, RelocInfo::CODE_TARGET); | 3369 EmitCallIC(ic, RelocInfo::CODE_TARGET); |
| 3370 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); |
| 3170 if (expr->is_postfix()) { | 3371 if (expr->is_postfix()) { |
| 3171 if (!context()->IsEffect()) { | 3372 if (!context()->IsEffect()) { |
| 3172 context()->PlugTOS(); | 3373 context()->PlugTOS(); |
| 3173 } | 3374 } |
| 3174 } else { | 3375 } else { |
| 3175 context()->Plug(rax); | 3376 context()->Plug(rax); |
| 3176 } | 3377 } |
| 3177 break; | 3378 break; |
| 3178 } | 3379 } |
| 3179 } | 3380 } |
| 3180 } | 3381 } |
| 3181 | 3382 |
| 3182 | 3383 |
| 3183 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) { | 3384 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) { |
| 3184 VariableProxy* proxy = expr->AsVariableProxy(); | 3385 VariableProxy* proxy = expr->AsVariableProxy(); |
| 3185 ASSERT(!context()->IsEffect()); | 3386 ASSERT(!context()->IsEffect()); |
| 3186 ASSERT(!context()->IsTest()); | 3387 ASSERT(!context()->IsTest()); |
| 3187 | 3388 |
| 3188 if (proxy != NULL && !proxy->var()->is_this() && proxy->var()->is_global()) { | 3389 if (proxy != NULL && !proxy->var()->is_this() && proxy->var()->is_global()) { |
| 3189 Comment cmnt(masm_, "Global variable"); | 3390 Comment cmnt(masm_, "Global variable"); |
| 3190 __ Move(rcx, proxy->name()); | 3391 __ Move(rcx, proxy->name()); |
| 3191 __ movq(rax, GlobalObjectOperand()); | 3392 __ movq(rax, GlobalObjectOperand()); |
| 3192 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize)); | 3393 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize)); |
| 3193 // Use a regular load, not a contextual load, to avoid a reference | 3394 // Use a regular load, not a contextual load, to avoid a reference |
| 3194 // error. | 3395 // error. |
| 3195 EmitCallIC(ic, RelocInfo::CODE_TARGET); | 3396 EmitCallIC(ic, RelocInfo::CODE_TARGET); |
| 3397 PrepareForBailout(expr, TOS_REG); |
| 3196 context()->Plug(rax); | 3398 context()->Plug(rax); |
| 3197 } else if (proxy != NULL && | 3399 } else if (proxy != NULL && |
| 3198 proxy->var()->AsSlot() != NULL && | 3400 proxy->var()->AsSlot() != NULL && |
| 3199 proxy->var()->AsSlot()->type() == Slot::LOOKUP) { | 3401 proxy->var()->AsSlot()->type() == Slot::LOOKUP) { |
| 3200 Label done, slow; | 3402 Label done, slow; |
| 3201 | 3403 |
| 3202 // Generate code for loading from variables potentially shadowed | 3404 // Generate code for loading from variables potentially shadowed |
| 3203 // by eval-introduced variables. | 3405 // by eval-introduced variables. |
| 3204 Slot* slot = proxy->var()->AsSlot(); | 3406 Slot* slot = proxy->var()->AsSlot(); |
| 3205 EmitDynamicLoadFromSlotFastCase(slot, INSIDE_TYPEOF, &slow, &done); | 3407 EmitDynamicLoadFromSlotFastCase(slot, INSIDE_TYPEOF, &slow, &done); |
| 3206 | 3408 |
| 3207 __ bind(&slow); | 3409 __ bind(&slow); |
| 3208 __ push(rsi); | 3410 __ push(rsi); |
| 3209 __ Push(proxy->name()); | 3411 __ Push(proxy->name()); |
| 3210 __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2); | 3412 __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2); |
| 3413 PrepareForBailout(expr, TOS_REG); |
| 3211 __ bind(&done); | 3414 __ bind(&done); |
| 3212 | 3415 |
| 3213 context()->Plug(rax); | 3416 context()->Plug(rax); |
| 3214 } else { | 3417 } else { |
| 3215 // This expression cannot throw a reference error at the top level. | 3418 // This expression cannot throw a reference error at the top level. |
| 3216 Visit(expr); | 3419 context()->HandleExpression(expr); |
| 3217 } | 3420 } |
| 3218 } | 3421 } |
| 3219 | 3422 |
| 3220 | 3423 |
| 3221 bool FullCodeGenerator::TryLiteralCompare(Token::Value op, | 3424 bool FullCodeGenerator::TryLiteralCompare(Token::Value op, |
| 3222 Expression* left, | 3425 Expression* left, |
| 3223 Expression* right, | 3426 Expression* right, |
| 3224 Label* if_true, | 3427 Label* if_true, |
| 3225 Label* if_false, | 3428 Label* if_false, |
| 3226 Label* fall_through) { | 3429 Label* fall_through) { |
| 3227 if (op != Token::EQ && op != Token::EQ_STRICT) return false; | 3430 if (op != Token::EQ && op != Token::EQ_STRICT) return false; |
| 3228 | 3431 |
| 3229 // Check for the pattern: typeof <expression> == <string literal>. | 3432 // Check for the pattern: typeof <expression> == <string literal>. |
| 3230 Literal* right_literal = right->AsLiteral(); | 3433 Literal* right_literal = right->AsLiteral(); |
| 3231 if (right_literal == NULL) return false; | 3434 if (right_literal == NULL) return false; |
| 3232 Handle<Object> right_literal_value = right_literal->handle(); | 3435 Handle<Object> right_literal_value = right_literal->handle(); |
| 3233 if (!right_literal_value->IsString()) return false; | 3436 if (!right_literal_value->IsString()) return false; |
| 3234 UnaryOperation* left_unary = left->AsUnaryOperation(); | 3437 UnaryOperation* left_unary = left->AsUnaryOperation(); |
| 3235 if (left_unary == NULL || left_unary->op() != Token::TYPEOF) return false; | 3438 if (left_unary == NULL || left_unary->op() != Token::TYPEOF) return false; |
| 3236 Handle<String> check = Handle<String>::cast(right_literal_value); | 3439 Handle<String> check = Handle<String>::cast(right_literal_value); |
| 3237 | 3440 |
| 3238 { AccumulatorValueContext context(this); | 3441 { AccumulatorValueContext context(this); |
| 3239 VisitForTypeofValue(left_unary->expression()); | 3442 VisitForTypeofValue(left_unary->expression()); |
| 3240 } | 3443 } |
| 3444 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
| 3241 | 3445 |
| 3242 if (check->Equals(Heap::number_symbol())) { | 3446 if (check->Equals(Heap::number_symbol())) { |
| 3243 Condition is_smi = masm_->CheckSmi(rax); | 3447 Condition is_smi = masm_->CheckSmi(rax); |
| 3244 __ j(is_smi, if_true); | 3448 __ j(is_smi, if_true); |
| 3245 __ movq(rax, FieldOperand(rax, HeapObject::kMapOffset)); | 3449 __ movq(rax, FieldOperand(rax, HeapObject::kMapOffset)); |
| 3246 __ CompareRoot(rax, Heap::kHeapNumberMapRootIndex); | 3450 __ CompareRoot(rax, Heap::kHeapNumberMapRootIndex); |
| 3247 Split(equal, if_true, if_false, fall_through); | 3451 Split(equal, if_true, if_false, fall_through); |
| 3248 } else if (check->Equals(Heap::string_symbol())) { | 3452 } else if (check->Equals(Heap::string_symbol())) { |
| 3249 Condition is_smi = masm_->CheckSmi(rax); | 3453 Condition is_smi = masm_->CheckSmi(rax); |
| 3250 __ j(is_smi, if_false); | 3454 __ j(is_smi, if_false); |
| (...skipping 73 matching lines...) |
| 3324 if (TryLiteralCompare(op, left, right, if_true, if_false, fall_through)) { | 3528 if (TryLiteralCompare(op, left, right, if_true, if_false, fall_through)) { |
| 3325 context()->Plug(if_true, if_false); | 3529 context()->Plug(if_true, if_false); |
| 3326 return; | 3530 return; |
| 3327 } | 3531 } |
| 3328 | 3532 |
| 3329 VisitForStackValue(expr->left()); | 3533 VisitForStackValue(expr->left()); |
| 3330 switch (op) { | 3534 switch (op) { |
| 3331 case Token::IN: | 3535 case Token::IN: |
| 3332 VisitForStackValue(expr->right()); | 3536 VisitForStackValue(expr->right()); |
| 3333 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION); | 3537 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION); |
| 3538 PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL); |
| 3334 __ CompareRoot(rax, Heap::kTrueValueRootIndex); | 3539 __ CompareRoot(rax, Heap::kTrueValueRootIndex); |
| 3335 Split(equal, if_true, if_false, fall_through); | 3540 Split(equal, if_true, if_false, fall_through); |
| 3336 break; | 3541 break; |
| 3337 | 3542 |
| 3338 case Token::INSTANCEOF: { | 3543 case Token::INSTANCEOF: { |
| 3339 VisitForStackValue(expr->right()); | 3544 VisitForStackValue(expr->right()); |
| 3340 InstanceofStub stub(InstanceofStub::kNoFlags); | 3545 InstanceofStub stub(InstanceofStub::kNoFlags); |
| 3341 __ CallStub(&stub); | 3546 __ CallStub(&stub); |
| 3547 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
| 3342 __ testq(rax, rax); | 3548 __ testq(rax, rax); |
| 3343 // The stub returns 0 for true. | 3549 // The stub returns 0 for true. |
| 3344 Split(zero, if_true, if_false, fall_through); | 3550 Split(zero, if_true, if_false, fall_through); |
| 3345 break; | 3551 break; |
| 3346 } | 3552 } |
| 3347 | 3553 |
| 3348 default: { | 3554 default: { |
| 3349 VisitForAccumulatorValue(expr->right()); | 3555 VisitForAccumulatorValue(expr->right()); |
| 3350 Condition cc = no_condition; | 3556 Condition cc = no_condition; |
| 3351 bool strict = false; | 3557 bool strict = false; |
| (...skipping 25 matching lines...) |
| 3377 cc = greater_equal; | 3583 cc = greater_equal; |
| 3378 __ pop(rdx); | 3584 __ pop(rdx); |
| 3379 break; | 3585 break; |
| 3380 case Token::IN: | 3586 case Token::IN: |
| 3381 case Token::INSTANCEOF: | 3587 case Token::INSTANCEOF: |
| 3382 default: | 3588 default: |
| 3383 UNREACHABLE(); | 3589 UNREACHABLE(); |
| 3384 } | 3590 } |
| 3385 | 3591 |
| 3386 bool inline_smi_code = ShouldInlineSmiCase(op); | 3592 bool inline_smi_code = ShouldInlineSmiCase(op); |
| 3593 JumpPatchSite patch_site(masm_); |
| 3387 if (inline_smi_code) { | 3594 if (inline_smi_code) { |
| 3388 Label slow_case; | 3595 NearLabel slow_case; |
| 3389 __ JumpIfNotBothSmi(rax, rdx, &slow_case); | 3596 __ movq(rcx, rdx); |
| 3390 __ SmiCompare(rdx, rax); | 3597 __ or_(rcx, rax); |
| 3598 patch_site.EmitJumpIfNotSmi(rcx, &slow_case); |
| 3599 __ cmpq(rdx, rax); |
| 3391 Split(cc, if_true, if_false, NULL); | 3600 Split(cc, if_true, if_false, NULL); |
| 3392 __ bind(&slow_case); | 3601 __ bind(&slow_case); |
| 3393 } | 3602 } |
| 3394 | 3603 |
| 3395 CompareFlags flags = inline_smi_code | 3604 // Record position and call the compare IC. |
| 3396 ? NO_SMI_COMPARE_IN_STUB | 3605 SetSourcePosition(expr->position()); |
| 3397 : NO_COMPARE_FLAGS; | 3606 Handle<Code> ic = CompareIC::GetUninitialized(op); |
| 3398 CompareStub stub(cc, strict, flags); | 3607 EmitCallIC(ic, &patch_site); |
| 3399 __ CallStub(&stub); | 3608 |
| 3609 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
| 3400 __ testq(rax, rax); | 3610 __ testq(rax, rax); |
| 3401 Split(cc, if_true, if_false, fall_through); | 3611 Split(cc, if_true, if_false, fall_through); |
| 3402 } | 3612 } |
| 3403 } | 3613 } |
| 3404 | 3614 |
| 3405 // Convert the result of the comparison into one expected for this | 3615 // Convert the result of the comparison into one expected for this |
| 3406 // expression's context. | 3616 // expression's context. |
| 3407 context()->Plug(if_true, if_false); | 3617 context()->Plug(if_true, if_false); |
| 3408 } | 3618 } |
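Note: the inlined comparison above checks that both operands are smis with a single test: OR-ing the two tagged words sets the tag bit iff at least one of them is a heap object. A standalone sketch of that trick, with the same illustrative tagging constants as the earlier sketch:

```cpp
#include <cassert>
#include <cstdint>

constexpr uint64_t kSmiTagMask = 1;  // illustrative; bit 0 is the tag

inline bool BothSmi(uint64_t a, uint64_t b) {
  // The tag bit of (a | b) is clear only if it is clear in both operands.
  return ((a | b) & kSmiTagMask) == 0;
}

int main() {
  uint64_t smi_a = uint64_t{7} << 32, smi_b = uint64_t{9} << 32;
  uint64_t heap_obj = 0x100001;  // fake tagged pointer
  assert(BothSmi(smi_a, smi_b));
  assert(!BothSmi(smi_a, heap_obj));
  return 0;
}
```

When the test fails, control reaches the CompareIC, which can later rewrite the jump recorded at the patch site once it has collected type feedback.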
| 3409 | 3619 |
| 3410 | 3620 |
| 3411 void FullCodeGenerator::VisitCompareToNull(CompareToNull* expr) { | 3621 void FullCodeGenerator::VisitCompareToNull(CompareToNull* expr) { |
| 3412 Comment cmnt(masm_, "[ CompareToNull"); | 3622 Comment cmnt(masm_, "[ CompareToNull"); |
| 3413 Label materialize_true, materialize_false; | 3623 Label materialize_true, materialize_false; |
| 3414 Label* if_true = NULL; | 3624 Label* if_true = NULL; |
| 3415 Label* if_false = NULL; | 3625 Label* if_false = NULL; |
| 3416 Label* fall_through = NULL; | 3626 Label* fall_through = NULL; |
| 3417 context()->PrepareTest(&materialize_true, &materialize_false, | 3627 context()->PrepareTest(&materialize_true, &materialize_false, |
| 3418 &if_true, &if_false, &fall_through); | 3628 &if_true, &if_false, &fall_through); |
| 3419 | 3629 |
| 3420 VisitForAccumulatorValue(expr->expression()); | 3630 VisitForAccumulatorValue(expr->expression()); |
| 3631 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
| 3421 __ CompareRoot(rax, Heap::kNullValueRootIndex); | 3632 __ CompareRoot(rax, Heap::kNullValueRootIndex); |
| 3422 if (expr->is_strict()) { | 3633 if (expr->is_strict()) { |
| 3423 Split(equal, if_true, if_false, fall_through); | 3634 Split(equal, if_true, if_false, fall_through); |
| 3424 } else { | 3635 } else { |
| 3425 __ j(equal, if_true); | 3636 __ j(equal, if_true); |
| 3426 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex); | 3637 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex); |
| 3427 __ j(equal, if_true); | 3638 __ j(equal, if_true); |
| 3428 Condition is_smi = masm_->CheckSmi(rax); | 3639 Condition is_smi = masm_->CheckSmi(rax); |
| 3429 __ j(is_smi, if_false); | 3640 __ j(is_smi, if_false); |
| 3430 // It can be an undetectable object. | 3641 // It can be an undetectable object. |
| (...skipping 18 matching lines...) |
| 3449 | 3660 |
| 3450 | 3661 |
| 3451 Register FullCodeGenerator::context_register() { | 3662 Register FullCodeGenerator::context_register() { |
| 3452 return rsi; | 3663 return rsi; |
| 3453 } | 3664 } |
| 3454 | 3665 |
| 3455 | 3666 |
| 3456 void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) { | 3667 void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) { |
| 3457 ASSERT(mode == RelocInfo::CODE_TARGET || | 3668 ASSERT(mode == RelocInfo::CODE_TARGET || |
| 3458 mode == RelocInfo::CODE_TARGET_CONTEXT); | 3669 mode == RelocInfo::CODE_TARGET_CONTEXT); |
| 3670 switch (ic->kind()) { |
| 3671 case Code::LOAD_IC: |
| 3672 __ IncrementCounter(&Counters::named_load_full, 1); |
| 3673 break; |
| 3674 case Code::KEYED_LOAD_IC: |
| 3675 __ IncrementCounter(&Counters::keyed_load_full, 1); |
| 3676 break; |
| 3677 case Code::STORE_IC: |
| 3678 __ IncrementCounter(&Counters::named_store_full, 1); |
| 3679 break; |
| 3680 case Code::KEYED_STORE_IC: |
| 3681 __ IncrementCounter(&Counters::keyed_store_full, 1); |
| 3682 default: |
| 3683 break; |
| 3684 } |
| 3685 |
| 3459 __ call(ic, mode); | 3686 __ call(ic, mode); |
| 3460 | 3687 |
| 3461 // Crankshaft doesn't need patching of inlined loads and stores. | 3688 // Crankshaft doesn't need patching of inlined loads and stores. |
| 3462 if (V8::UseCrankshaft()) return; | 3689 // When compiling the snapshot we need to produce code that works |
| 3690 // with and without Crankshaft. |
| 3691 if (V8::UseCrankshaft() && !Serializer::enabled()) { |
| 3692 return; |
| 3693 } |
| 3463 | 3694 |
| 3464 // If we're calling a (keyed) load or store stub, we have to mark | 3695 // If we're calling a (keyed) load or store stub, we have to mark |
| 3465 // the call as containing no inlined code so we will not attempt to | 3696 // the call as containing no inlined code so we will not attempt to |
| 3466 // patch it. | 3697 // patch it. |
| 3467 switch (ic->kind()) { | 3698 switch (ic->kind()) { |
| 3468 case Code::LOAD_IC: | 3699 case Code::LOAD_IC: |
| 3469 case Code::KEYED_LOAD_IC: | 3700 case Code::KEYED_LOAD_IC: |
| 3470 case Code::STORE_IC: | 3701 case Code::STORE_IC: |
| 3471 case Code::KEYED_STORE_IC: | 3702 case Code::KEYED_STORE_IC: |
| 3472 __ nop(); // Signals no inlined code. | 3703 __ nop(); // Signals no inlined code. |
| 3473 break; | 3704 break; |
| 3474 default: | 3705 default: |
| 3475 // Do nothing. | 3706 // Do nothing. |
| 3476 break; | 3707 break; |
| 3477 } | 3708 } |
| 3478 } | 3709 } |
| 3479 | 3710 |
| 3480 | 3711 |
| 3712 void FullCodeGenerator::EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site) { |
| 3713 __ call(ic, RelocInfo::CODE_TARGET); |
| 3714 if (patch_site != NULL && patch_site->is_bound()) { |
| 3715 patch_site->EmitPatchInfo(); |
| 3716 } else { |
| 3717 __ nop(); // Signals no inlined code. |
| 3718 } |
| 3719 } |
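Note: the two EmitCallIC overloads differ only in what they plant immediately after the call: either a test-eax instruction whose immediate records how far back the patchable jump sits, or a single nop meaning there is nothing to patch. A hedged sketch of how a patcher could read that marker back; the opcodes are real x86 (0x90 = nop, 0xA9 = test eax, imm32), but the decision logic is simplified and the helper itself is hypothetical.

```cpp
#include <cstdint>

// Hypothetical decoder, for illustration only. Assumes the recorded delta
// fits in the low byte of the little-endian test immediate.
bool ReadPatchDelta(const uint8_t* after_call, int* delta_out) {
  if (after_call[0] == 0x90) return false;  // nop: no inlined code to patch
  if (after_call[0] != 0xA9) return false;  // not a patch-info marker
  *delta_out = after_call[1];               // distance back to the patch site
  return true;
}
```

The real patching logic lives on the IC/runtime side; the sketch only shows why the trailing nop is a meaningful signal rather than padding.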
| 3720 |
| 3721 |
| 3481 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) { | 3722 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) { |
| 3482 ASSERT(IsAligned(frame_offset, kPointerSize)); | 3723 ASSERT(IsAligned(frame_offset, kPointerSize)); |
| 3483 __ movq(Operand(rbp, frame_offset), value); | 3724 __ movq(Operand(rbp, frame_offset), value); |
| 3484 } | 3725 } |
| 3485 | 3726 |
| 3486 | 3727 |
| 3487 void FullCodeGenerator::LoadContextField(Register dst, int context_index) { | 3728 void FullCodeGenerator::LoadContextField(Register dst, int context_index) { |
| 3488 __ movq(dst, ContextOperand(rsi, context_index)); | 3729 __ movq(dst, ContextOperand(rsi, context_index)); |
| 3489 } | 3730 } |
| 3490 | 3731 |
| (...skipping 31 matching lines...) |
| 3522 __ ret(0); | 3763 __ ret(0); |
| 3523 } | 3764 } |
| 3524 | 3765 |
| 3525 | 3766 |
| 3526 #undef __ | 3767 #undef __ |
| 3527 | 3768 |
| 3528 | 3769 |
| 3529 } } // namespace v8::internal | 3770 } } // namespace v8::internal |
| 3530 | 3771 |
| 3531 #endif // V8_TARGET_ARCH_X64 | 3772 #endif // V8_TARGET_ARCH_X64 |