| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 35 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 46 return GeneratePrologue() && | 46 return GeneratePrologue() && |
| 47 GenerateBody() && | 47 GenerateBody() && |
| 48 GenerateDeferredCode() && | 48 GenerateDeferredCode() && |
| 49 GenerateSafepointTable(); | 49 GenerateSafepointTable(); |
| 50 } | 50 } |
| 51 | 51 |
| 52 | 52 |
| 53 void LCodeGen::FinishCode(Handle<Code> code) { | 53 void LCodeGen::FinishCode(Handle<Code> code) { |
| 54 ASSERT(is_done()); | 54 ASSERT(is_done()); |
| 55 code->set_stack_slots(StackSlotCount()); | 55 code->set_stack_slots(StackSlotCount()); |
| 56 code->set_safepoint_table_start(safepoints_.GetCodeOffset()); | 56 code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); |
| 57 PopulateDeoptimizationData(code); | 57 PopulateDeoptimizationData(code); |
| 58 } | 58 } |
| 59 | 59 |
| 60 | 60 |
| 61 void LCodeGen::Abort(const char* format, ...) { | 61 void LCodeGen::Abort(const char* format, ...) { |
| 62 if (FLAG_trace_bailout) { | 62 if (FLAG_trace_bailout) { |
| 63 SmartPointer<char> debug_name = graph()->debug_name()->ToCString(); | 63 SmartPointer<char> debug_name = graph()->debug_name()->ToCString(); |
| 64 PrintF("Aborting LCodeGen in @\"%s\": ", *debug_name); | 64 PrintF("Aborting LCodeGen in @\"%s\": ", *debug_name); |
| 65 va_list arguments; | 65 va_list arguments; |
| 66 va_start(arguments, format); | 66 va_start(arguments, format); |
| (...skipping 114 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 181 | 181 |
| 182 // Deferred code is the last part of the instruction sequence. Mark | 182 // Deferred code is the last part of the instruction sequence. Mark |
| 183 // the generated code as done unless we bailed out. | 183 // the generated code as done unless we bailed out. |
| 184 if (!is_aborted()) status_ = DONE; | 184 if (!is_aborted()) status_ = DONE; |
| 185 return !is_aborted(); | 185 return !is_aborted(); |
| 186 } | 186 } |
| 187 | 187 |
| 188 | 188 |
| 189 bool LCodeGen::GenerateSafepointTable() { | 189 bool LCodeGen::GenerateSafepointTable() { |
| 190 ASSERT(is_done()); | 190 ASSERT(is_done()); |
| 191 // Ensure that there is space at the end of the code to write a number |
| 192 // of jump instructions, as well as to afford writing a call near the end |
| 193 // of the code. |
| 194 // The jumps are used when there isn't room in the code stream to write |
| 195 // a long call instruction. Instead it writes a shorter call to a |
| 196 // jump instruction in the same code object. |
| 197 // The calls are used when lazy deoptimizing a function and calls to a |
| 198 // deoptimization function. |
| 199 int short_deopts = safepoints_.CountShortDeoptimizationIntervals( |
| 200 static_cast<unsigned>(MacroAssembler::kJumpInstructionLength)); |
| 201 int byte_count = (short_deopts) * MacroAssembler::kJumpInstructionLength; |
| 202 while (byte_count-- > 0) { |
| 203 __ int3(); |
| 204 } |
| 191 safepoints_.Emit(masm(), StackSlotCount()); | 205 safepoints_.Emit(masm(), StackSlotCount()); |
| 192 return !is_aborted(); | 206 return !is_aborted(); |
| 193 } | 207 } |
| 194 | 208 |
| 195 | 209 |
| 196 Register LCodeGen::ToRegister(int index) const { | 210 Register LCodeGen::ToRegister(int index) const { |
| 197 return Register::FromAllocationIndex(index); | 211 return Register::FromAllocationIndex(index); |
| 198 } | 212 } |
| 199 | 213 |
| 200 | 214 |
| (...skipping 134 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 335 translation->StoreLiteral(src_index); | 349 translation->StoreLiteral(src_index); |
| 336 } else { | 350 } else { |
| 337 UNREACHABLE(); | 351 UNREACHABLE(); |
| 338 } | 352 } |
| 339 } | 353 } |
| 340 | 354 |
| 341 | 355 |
| 342 void LCodeGen::CallCode(Handle<Code> code, | 356 void LCodeGen::CallCode(Handle<Code> code, |
| 343 RelocInfo::Mode mode, | 357 RelocInfo::Mode mode, |
| 344 LInstruction* instr) { | 358 LInstruction* instr) { |
| 345 if (instr != NULL) { | 359 ASSERT(instr != NULL); |
| 346 LPointerMap* pointers = instr->pointer_map(); | 360 LPointerMap* pointers = instr->pointer_map(); |
| 347 RecordPosition(pointers->position()); | 361 RecordPosition(pointers->position()); |
| 348 __ call(code, mode); | 362 __ call(code, mode); |
| 349 RegisterLazyDeoptimization(instr); | 363 RegisterLazyDeoptimization(instr); |
| 350 } else { | |
| 351 LPointerMap no_pointers(0); | |
| 352 RecordPosition(no_pointers.position()); | |
| 353 __ call(code, mode); | |
| 354 RecordSafepoint(&no_pointers, Safepoint::kNoDeoptimizationIndex); | |
| 355 } | |
| 356 | 364 |
| 357 // Signal that we don't inline smi code before these stubs in the | 365 // Signal that we don't inline smi code before these stubs in the |
| 358 // optimizing code generator. | 366 // optimizing code generator. |
| 359 if (code->kind() == Code::TYPE_RECORDING_BINARY_OP_IC || | 367 if (code->kind() == Code::TYPE_RECORDING_BINARY_OP_IC || |
| 360 code->kind() == Code::COMPARE_IC) { | 368 code->kind() == Code::COMPARE_IC) { |
| 361 __ nop(); | 369 __ nop(); |
| 362 } | 370 } |
| 363 } | 371 } |
| 364 | 372 |
| 365 | 373 |
| 366 void LCodeGen::CallRuntime(const Runtime::Function* function, | 374 void LCodeGen::CallRuntime(const Runtime::Function* function, |
| 367 int num_arguments, | 375 int num_arguments, |
| 368 LInstruction* instr) { | 376 LInstruction* instr) { |
| 369 Abort("Unimplemented: %s", "CallRuntime"); | 377 ASSERT(instr != NULL); |
| 378 ASSERT(instr->HasPointerMap()); |
| 379 LPointerMap* pointers = instr->pointer_map(); |
| 380 RecordPosition(pointers->position()); |
| 381 |
| 382 __ CallRuntime(function, num_arguments); |
| 383 RegisterLazyDeoptimization(instr); |
| 370 } | 384 } |
| 371 | 385 |
| 372 | 386 |
| 373 void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr) { | 387 void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr) { |
| 374 // Create the environment to bailout to. If the call has side effects | 388 // Create the environment to bailout to. If the call has side effects |
| 375 // execution has to continue after the call otherwise execution can continue | 389 // execution has to continue after the call otherwise execution can continue |
| 376 // from a previous bailout point repeating the call. | 390 // from a previous bailout point repeating the call. |
| 377 LEnvironment* deoptimization_environment; | 391 LEnvironment* deoptimization_environment; |
| 378 if (instr->HasDeoptimizationEnvironment()) { | 392 if (instr->HasDeoptimizationEnvironment()) { |
| 379 deoptimization_environment = instr->deoptimization_environment(); | 393 deoptimization_environment = instr->deoptimization_environment(); |
| (...skipping 108 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 488 for (int i = 0, length = inlined_closures->length(); | 502 for (int i = 0, length = inlined_closures->length(); |
| 489 i < length; | 503 i < length; |
| 490 i++) { | 504 i++) { |
| 491 DefineDeoptimizationLiteral(inlined_closures->at(i)); | 505 DefineDeoptimizationLiteral(inlined_closures->at(i)); |
| 492 } | 506 } |
| 493 | 507 |
| 494 inlined_function_count_ = deoptimization_literals_.length(); | 508 inlined_function_count_ = deoptimization_literals_.length(); |
| 495 } | 509 } |
| 496 | 510 |
| 497 | 511 |
| 512 void LCodeGen::RecordSafepoint( |
| 513 LPointerMap* pointers, |
| 514 Safepoint::Kind kind, |
| 515 int arguments, |
| 516 int deoptimization_index) { |
| 517 const ZoneList<LOperand*>* operands = pointers->operands(); |
| 518 |
| 519 Safepoint safepoint = safepoints_.DefineSafepoint(masm(), |
| 520 kind, arguments, deoptimization_index); |
| 521 for (int i = 0; i < operands->length(); i++) { |
| 522 LOperand* pointer = operands->at(i); |
| 523 if (pointer->IsStackSlot()) { |
| 524 safepoint.DefinePointerSlot(pointer->index()); |
| 525 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) { |
| 526 safepoint.DefinePointerRegister(ToRegister(pointer)); |
| 527 } |
| 528 } |
| 529 if (kind & Safepoint::kWithRegisters) { |
| 530 // Register rsi always contains a pointer to the context. |
| 531 safepoint.DefinePointerRegister(rsi); |
| 532 } |
| 533 } |
| 534 |
| 535 |
| 498 void LCodeGen::RecordSafepoint(LPointerMap* pointers, | 536 void LCodeGen::RecordSafepoint(LPointerMap* pointers, |
| 499 int deoptimization_index) { | 537 int deoptimization_index) { |
| 500 const ZoneList<LOperand*>* operands = pointers->operands(); | 538 RecordSafepoint(pointers, Safepoint::kSimple, 0, deoptimization_index); |
| 501 Safepoint safepoint = safepoints_.DefineSafepoint(masm(), | |
| 502 deoptimization_index); | |
| 503 for (int i = 0; i < operands->length(); i++) { | |
| 504 LOperand* pointer = operands->at(i); | |
| 505 if (pointer->IsStackSlot()) { | |
| 506 safepoint.DefinePointerSlot(pointer->index()); | |
| 507 } | |
| 508 } | |
| 509 } | 539 } |
| 510 | 540 |
| 511 | 541 |
| 512 void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers, | 542 void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers, |
| 513 int arguments, | 543 int arguments, |
| 514 int deoptimization_index) { | 544 int deoptimization_index) { |
| 515 const ZoneList<LOperand*>* operands = pointers->operands(); | 545 RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments, |
| 516 Safepoint safepoint = | 546 deoptimization_index); |
| 517 safepoints_.DefineSafepointWithRegisters( | |
| 518 masm(), arguments, deoptimization_index); | |
| 519 for (int i = 0; i < operands->length(); i++) { | |
| 520 LOperand* pointer = operands->at(i); | |
| 521 if (pointer->IsStackSlot()) { | |
| 522 safepoint.DefinePointerSlot(pointer->index()); | |
| 523 } else if (pointer->IsRegister()) { | |
| 524 safepoint.DefinePointerRegister(ToRegister(pointer)); | |
| 525 } | |
| 526 } | |
| 527 // Register rsi always contains a pointer to the context. | |
| 528 safepoint.DefinePointerRegister(rsi); | |
| 529 } | 547 } |
| 530 | 548 |
| 531 | 549 |
| 532 void LCodeGen::RecordPosition(int position) { | 550 void LCodeGen::RecordPosition(int position) { |
| 533 if (!FLAG_debug_info || position == RelocInfo::kNoPosition) return; | 551 if (!FLAG_debug_info || position == RelocInfo::kNoPosition) return; |
| 534 masm()->positions_recorder()->RecordPosition(position); | 552 masm()->positions_recorder()->RecordPosition(position); |
| 535 } | 553 } |
| 536 | 554 |
| 537 | 555 |
| 538 void LCodeGen::DoLabel(LLabel* label) { | 556 void LCodeGen::DoLabel(LLabel* label) { |
| (...skipping 29 matching lines...) Expand all Loading... |
| 568 } | 586 } |
| 569 } | 587 } |
| 570 | 588 |
| 571 | 589 |
| 572 void LCodeGen::DoParameter(LParameter* instr) { | 590 void LCodeGen::DoParameter(LParameter* instr) { |
| 573 // Nothing to do. | 591 // Nothing to do. |
| 574 } | 592 } |
| 575 | 593 |
| 576 | 594 |
| 577 void LCodeGen::DoCallStub(LCallStub* instr) { | 595 void LCodeGen::DoCallStub(LCallStub* instr) { |
| 578 Abort("Unimplemented: %s", "DoCallStub"); | 596 ASSERT(ToRegister(instr->result()).is(rax)); |
| 597 switch (instr->hydrogen()->major_key()) { |
| 598 case CodeStub::RegExpConstructResult: { |
| 599 RegExpConstructResultStub stub; |
| 600 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
| 601 break; |
| 602 } |
| 603 case CodeStub::RegExpExec: { |
| 604 RegExpExecStub stub; |
| 605 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
| 606 break; |
| 607 } |
| 608 case CodeStub::SubString: { |
| 609 SubStringStub stub; |
| 610 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
| 611 break; |
| 612 } |
| 613 case CodeStub::StringCharAt: { |
| 614 // TODO(1116): Add StringCharAt stub to x64. |
| 615 Abort("Unimplemented: %s", "StringCharAt Stub"); |
| 616 break; |
| 617 } |
| 618 case CodeStub::MathPow: { |
| 619 // TODO(1115): Add MathPow stub to x64. |
| 620 Abort("Unimplemented: %s", "MathPow Stub"); |
| 621 break; |
| 622 } |
| 623 case CodeStub::NumberToString: { |
| 624 NumberToStringStub stub; |
| 625 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
| 626 break; |
| 627 } |
| 628 case CodeStub::StringAdd: { |
| 629 StringAddStub stub(NO_STRING_ADD_FLAGS); |
| 630 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
| 631 break; |
| 632 } |
| 633 case CodeStub::StringCompare: { |
| 634 StringCompareStub stub; |
| 635 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
| 636 break; |
| 637 } |
| 638 case CodeStub::TranscendentalCache: { |
| 639 TranscendentalCacheStub stub(instr->transcendental_type()); |
| 640 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
| 641 break; |
| 642 } |
| 643 default: |
| 644 UNREACHABLE(); |
| 645 } |
| 579 } | 646 } |
| 580 | 647 |
| 581 | 648 |
| 582 void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) { | 649 void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) { |
| 583 // Nothing to do. | 650 // Nothing to do. |
| 584 } | 651 } |
| 585 | 652 |
| 586 | 653 |
| 587 void LCodeGen::DoModI(LModI* instr) { | 654 void LCodeGen::DoModI(LModI* instr) { |
| 588 Abort("Unimplemented: %s", "DoModI"); | 655 Abort("Unimplemented: %s", "DoModI"); |
| 589 } | 656 } |
| 590 | 657 |
| 591 | 658 |
| 592 void LCodeGen::DoDivI(LDivI* instr) { | 659 void LCodeGen::DoDivI(LDivI* instr) { |
| 593 Abort("Unimplemented: %s", "DoDivI");} | 660 LOperand* right = instr->InputAt(1); |
| 661 ASSERT(ToRegister(instr->result()).is(rax)); |
| 662 ASSERT(ToRegister(instr->InputAt(0)).is(rax)); |
| 663 ASSERT(!ToRegister(instr->InputAt(1)).is(rax)); |
| 664 ASSERT(!ToRegister(instr->InputAt(1)).is(rdx)); |
| 665 |
| 666 Register left_reg = rax; |
| 667 |
| 668 // Check for x / 0. |
| 669 Register right_reg = ToRegister(right); |
| 670 if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) { |
| 671 __ testl(right_reg, right_reg); |
| 672 DeoptimizeIf(zero, instr->environment()); |
| 673 } |
| 674 |
| 675 // Check for (0 / -x) that will produce negative zero. |
| 676 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 677 NearLabel left_not_zero; |
| 678 __ testl(left_reg, left_reg); |
| 679 __ j(not_zero, &left_not_zero); |
| 680 __ testl(right_reg, right_reg); |
| 681 DeoptimizeIf(sign, instr->environment()); |
| 682 __ bind(&left_not_zero); |
| 683 } |
| 684 |
| 685 // Check for (-kMinInt / -1). |
| 686 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { |
| 687 NearLabel left_not_min_int; |
| 688 __ cmpl(left_reg, Immediate(kMinInt)); |
| 689 __ j(not_zero, &left_not_min_int); |
| 690 __ cmpl(right_reg, Immediate(-1)); |
| 691 DeoptimizeIf(zero, instr->environment()); |
| 692 __ bind(&left_not_min_int); |
| 693 } |
| 694 |
| 695 // Sign extend to rdx. |
| 696 __ cdq(); |
| 697 __ idivl(right_reg); |
| 698 |
| 699 // Deoptimize if remainder is not 0. |
| 700 __ testl(rdx, rdx); |
| 701 DeoptimizeIf(not_zero, instr->environment()); |
| 702 } |
| 594 | 703 |
| 595 | 704 |
| 596 void LCodeGen::DoMulI(LMulI* instr) { | 705 void LCodeGen::DoMulI(LMulI* instr) { |
| 597 Abort("Unimplemented: %s", "DoMultI");} | 706 Register left = ToRegister(instr->InputAt(0)); |
| 707 LOperand* right = instr->InputAt(1); |
| 708 |
| 709 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 710 __ movl(kScratchRegister, left); |
| 711 } |
| 712 |
| 713 if (right->IsConstantOperand()) { |
| 714 int right_value = ToInteger32(LConstantOperand::cast(right)); |
| 715 __ imull(left, left, Immediate(right_value)); |
| 716 } else if (right->IsStackSlot()) { |
| 717 __ imull(left, ToOperand(right)); |
| 718 } else { |
| 719 __ imull(left, ToRegister(right)); |
| 720 } |
| 721 |
| 722 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { |
| 723 DeoptimizeIf(overflow, instr->environment()); |
| 724 } |
| 725 |
| 726 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 727 // Bail out if the result is supposed to be negative zero. |
| 728 NearLabel done; |
| 729 __ testl(left, left); |
| 730 __ j(not_zero, &done); |
| 731 if (right->IsConstantOperand()) { |
| 732 if (ToInteger32(LConstantOperand::cast(right)) <= 0) { |
| 733 DeoptimizeIf(no_condition, instr->environment()); |
| 734 } |
| 735 } else if (right->IsStackSlot()) { |
| 736 __ or_(kScratchRegister, ToOperand(right)); |
| 737 DeoptimizeIf(sign, instr->environment()); |
| 738 } else { |
| 739 // Test the non-zero operand for negative sign. |
| 740 __ or_(kScratchRegister, ToRegister(right)); |
| 741 DeoptimizeIf(sign, instr->environment()); |
| 742 } |
| 743 __ bind(&done); |
| 744 } |
| 745 } |
| 598 | 746 |
| 599 | 747 |
| 600 void LCodeGen::DoBitI(LBitI* instr) { | 748 void LCodeGen::DoBitI(LBitI* instr) { |
| 601 Abort("Unimplemented: %s", "DoBitI");} | 749 LOperand* left = instr->InputAt(0); |
| 750 LOperand* right = instr->InputAt(1); |
| 751 ASSERT(left->Equals(instr->result())); |
| 752 ASSERT(left->IsRegister()); |
| 753 |
| 754 if (right->IsConstantOperand()) { |
| 755 int right_operand = ToInteger32(LConstantOperand::cast(right)); |
| 756 switch (instr->op()) { |
| 757 case Token::BIT_AND: |
| 758 __ andl(ToRegister(left), Immediate(right_operand)); |
| 759 break; |
| 760 case Token::BIT_OR: |
| 761 __ orl(ToRegister(left), Immediate(right_operand)); |
| 762 break; |
| 763 case Token::BIT_XOR: |
| 764 __ xorl(ToRegister(left), Immediate(right_operand)); |
| 765 break; |
| 766 default: |
| 767 UNREACHABLE(); |
| 768 break; |
| 769 } |
| 770 } else if (right->IsStackSlot()) { |
| 771 switch (instr->op()) { |
| 772 case Token::BIT_AND: |
| 773 __ andl(ToRegister(left), ToOperand(right)); |
| 774 break; |
| 775 case Token::BIT_OR: |
| 776 __ orl(ToRegister(left), ToOperand(right)); |
| 777 break; |
| 778 case Token::BIT_XOR: |
| 779 __ xorl(ToRegister(left), ToOperand(right)); |
| 780 break; |
| 781 default: |
| 782 UNREACHABLE(); |
| 783 break; |
| 784 } |
| 785 } else { |
| 786 ASSERT(right->IsRegister()); |
| 787 switch (instr->op()) { |
| 788 case Token::BIT_AND: |
| 789 __ andl(ToRegister(left), ToRegister(right)); |
| 790 break; |
| 791 case Token::BIT_OR: |
| 792 __ orl(ToRegister(left), ToRegister(right)); |
| 793 break; |
| 794 case Token::BIT_XOR: |
| 795 __ xorl(ToRegister(left), ToRegister(right)); |
| 796 break; |
| 797 default: |
| 798 UNREACHABLE(); |
| 799 break; |
| 800 } |
| 801 } |
| 802 } |
| 602 | 803 |
| 603 | 804 |
| 604 void LCodeGen::DoShiftI(LShiftI* instr) { | 805 void LCodeGen::DoShiftI(LShiftI* instr) { |
| 605 Abort("Unimplemented: %s", "DoShiftI"); | 806 LOperand* left = instr->InputAt(0); |
| 606 } | 807 LOperand* right = instr->InputAt(1); |
| 607 | 808 ASSERT(left->Equals(instr->result())); |
| 608 | 809 ASSERT(left->IsRegister()); |
| 810 if (right->IsRegister()) { |
| 811 ASSERT(ToRegister(right).is(rcx)); |
| 812 |
| 813 switch (instr->op()) { |
| 814 case Token::SAR: |
| 815 __ sarl_cl(ToRegister(left)); |
| 816 break; |
| 817 case Token::SHR: |
| 818 __ shrl_cl(ToRegister(left)); |
| 819 if (instr->can_deopt()) { |
| 820 __ testl(ToRegister(left), ToRegister(left)); |
| 821 DeoptimizeIf(negative, instr->environment()); |
| 822 } |
| 823 break; |
| 824 case Token::SHL: |
| 825 __ shll_cl(ToRegister(left)); |
| 826 break; |
| 827 default: |
| 828 UNREACHABLE(); |
| 829 break; |
| 830 } |
| 831 } else { |
| 832 int value = ToInteger32(LConstantOperand::cast(right)); |
| 833 uint8_t shift_count = static_cast<uint8_t>(value & 0x1F); |
| 834 switch (instr->op()) { |
| 835 case Token::SAR: |
| 836 if (shift_count != 0) { |
| 837 __ sarl(ToRegister(left), Immediate(shift_count)); |
| 838 } |
| 839 break; |
| 840 case Token::SHR: |
| 841 if (shift_count == 0 && instr->can_deopt()) { |
| 842 __ testl(ToRegister(left), ToRegister(left)); |
| 843 DeoptimizeIf(negative, instr->environment()); |
| 844 } else { |
| 845 __ shrl(ToRegister(left), Immediate(shift_count)); |
| 846 } |
| 847 break; |
| 848 case Token::SHL: |
| 849 if (shift_count != 0) { |
| 850 __ shll(ToRegister(left), Immediate(shift_count)); |
| 851 } |
| 852 break; |
| 853 default: |
| 854 UNREACHABLE(); |
| 855 break; |
| 856 } |
| 857 } |
| 858 } |
| 859 |
| 860 |
| 609 void LCodeGen::DoSubI(LSubI* instr) { | 861 void LCodeGen::DoSubI(LSubI* instr) { |
| 610 LOperand* left = instr->InputAt(0); | 862 LOperand* left = instr->InputAt(0); |
| 611 LOperand* right = instr->InputAt(1); | 863 LOperand* right = instr->InputAt(1); |
| 612 ASSERT(left->Equals(instr->result())); | 864 ASSERT(left->Equals(instr->result())); |
| 613 | 865 |
| 614 if (right->IsConstantOperand()) { | 866 if (right->IsConstantOperand()) { |
| 615 __ subl(ToRegister(left), | 867 __ subl(ToRegister(left), |
| 616 Immediate(ToInteger32(LConstantOperand::cast(right)))); | 868 Immediate(ToInteger32(LConstantOperand::cast(right)))); |
| 617 } else if (right->IsRegister()) { | 869 } else if (right->IsRegister()) { |
| 618 __ subl(ToRegister(left), ToRegister(right)); | 870 __ subl(ToRegister(left), ToRegister(right)); |
| (...skipping 30 matching lines...) Expand all Loading... |
| 649 } else { | 901 } else { |
| 650 uint64_t int_val = BitCast<uint64_t, double>(v); | 902 uint64_t int_val = BitCast<uint64_t, double>(v); |
| 651 __ Set(tmp, int_val); | 903 __ Set(tmp, int_val); |
| 652 __ movd(res, tmp); | 904 __ movd(res, tmp); |
| 653 } | 905 } |
| 654 } | 906 } |
| 655 } | 907 } |
| 656 | 908 |
| 657 | 909 |
| 658 void LCodeGen::DoConstantT(LConstantT* instr) { | 910 void LCodeGen::DoConstantT(LConstantT* instr) { |
| 659 ASSERT(instr->result()->IsRegister()); | 911 ASSERT(instr->result()->IsRegister()); |
| 660 __ Move(ToRegister(instr->result()), instr->value()); | 912 __ Move(ToRegister(instr->result()), instr->value()); |
| 661 } | 913 } |
| 662 | 914 |
| 663 | 915 |
| 664 void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) { | 916 void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) { |
| 665 Abort("Unimplemented: %s", "DoJSArrayLength"); | 917 Register result = ToRegister(instr->result()); |
| 918 Register array = ToRegister(instr->InputAt(0)); |
| 919 __ movq(result, FieldOperand(array, JSArray::kLengthOffset)); |
| 666 } | 920 } |
| 667 | 921 |
| 668 | 922 |
| 669 void LCodeGen::DoFixedArrayLength(LFixedArrayLength* instr) { | 923 void LCodeGen::DoFixedArrayLength(LFixedArrayLength* instr) { |
| 670 Abort("Unimplemented: %s", "DoFixedArrayLength"); | 924 Register result = ToRegister(instr->result()); |
| 925 Register array = ToRegister(instr->InputAt(0)); |
| 926 __ movq(result, FieldOperand(array, FixedArray::kLengthOffset)); |
| 671 } | 927 } |
| 672 | 928 |
| 673 | 929 |
| 674 void LCodeGen::DoValueOf(LValueOf* instr) { | 930 void LCodeGen::DoValueOf(LValueOf* instr) { |
| 675 Abort("Unimplemented: %s", "DoValueOf"); | 931 Abort("Unimplemented: %s", "DoValueOf"); |
| 676 } | 932 } |
| 677 | 933 |
| 678 | 934 |
| 679 void LCodeGen::DoBitNotI(LBitNotI* instr) { | 935 void LCodeGen::DoBitNotI(LBitNotI* instr) { |
| 680 Abort("Unimplemented: %s", "DoBitNotI"); | 936 LOperand* input = instr->InputAt(0); |
| 937 ASSERT(input->Equals(instr->result())); |
| 938 __ not_(ToRegister(input)); |
| 681 } | 939 } |
| 682 | 940 |
| 683 | 941 |
| 684 void LCodeGen::DoThrow(LThrow* instr) { | 942 void LCodeGen::DoThrow(LThrow* instr) { |
| 685 Abort("Unimplemented: %s", "DoThrow"); | 943 __ push(ToRegister(instr->InputAt(0))); |
| 944 CallRuntime(Runtime::kThrow, 1, instr); |
| 945 |
| 946 if (FLAG_debug_code) { |
| 947 Comment("Unreachable code."); |
| 948 __ int3(); |
| 949 } |
| 686 } | 950 } |
| 687 | 951 |
| 688 | 952 |
| 689 void LCodeGen::DoAddI(LAddI* instr) { | 953 void LCodeGen::DoAddI(LAddI* instr) { |
| 690 LOperand* left = instr->InputAt(0); | 954 LOperand* left = instr->InputAt(0); |
| 691 LOperand* right = instr->InputAt(1); | 955 LOperand* right = instr->InputAt(1); |
| 692 ASSERT(left->Equals(instr->result())); | 956 ASSERT(left->Equals(instr->result())); |
| 693 | 957 |
| 694 if (right->IsConstantOperand()) { | 958 if (right->IsConstantOperand()) { |
| 695 __ addl(ToRegister(left), | 959 __ addl(ToRegister(left), |
| (...skipping 13 matching lines...) Expand all Loading... |
| 709 void LCodeGen::DoArithmeticD(LArithmeticD* instr) { | 973 void LCodeGen::DoArithmeticD(LArithmeticD* instr) { |
| 710 Abort("Unimplemented: %s", "DoArithmeticD"); | 974 Abort("Unimplemented: %s", "DoArithmeticD"); |
| 711 } | 975 } |
| 712 | 976 |
| 713 | 977 |
| 714 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { | 978 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { |
| 715 ASSERT(ToRegister(instr->InputAt(0)).is(rdx)); | 979 ASSERT(ToRegister(instr->InputAt(0)).is(rdx)); |
| 716 ASSERT(ToRegister(instr->InputAt(1)).is(rax)); | 980 ASSERT(ToRegister(instr->InputAt(1)).is(rax)); |
| 717 ASSERT(ToRegister(instr->result()).is(rax)); | 981 ASSERT(ToRegister(instr->result()).is(rax)); |
| 718 | 982 |
| 719 GenericBinaryOpStub stub(instr->op(), NO_OVERWRITE, NO_GENERIC_BINARY_FLAGS); | 983 TypeRecordingBinaryOpStub stub(instr->op(), NO_OVERWRITE); |
| 720 stub.SetArgsInRegisters(); | |
| 721 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 984 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
| 722 } | 985 } |
| 723 | 986 |
| 724 | 987 |
| 725 int LCodeGen::GetNextEmittedBlock(int block) { | 988 int LCodeGen::GetNextEmittedBlock(int block) { |
| 726 for (int i = block + 1; i < graph()->blocks()->length(); ++i) { | 989 for (int i = block + 1; i < graph()->blocks()->length(); ++i) { |
| 727 LLabel* label = chunk_->GetLabel(i); | 990 LLabel* label = chunk_->GetLabel(i); |
| 728 if (!label->HasReplacement()) return i; | 991 if (!label->HasReplacement()) return i; |
| 729 } | 992 } |
| 730 return -1; | 993 return -1; |
| (...skipping 97 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 828 __ jmp(deferred_stack_check->entry()); | 1091 __ jmp(deferred_stack_check->entry()); |
| 829 deferred_stack_check->SetExit(chunk_->GetAssemblyLabel(block)); | 1092 deferred_stack_check->SetExit(chunk_->GetAssemblyLabel(block)); |
| 830 } else { | 1093 } else { |
| 831 __ jmp(chunk_->GetAssemblyLabel(block)); | 1094 __ jmp(chunk_->GetAssemblyLabel(block)); |
| 832 } | 1095 } |
| 833 } | 1096 } |
| 834 } | 1097 } |
| 835 | 1098 |
| 836 | 1099 |
| 837 void LCodeGen::DoDeferredStackCheck(LGoto* instr) { | 1100 void LCodeGen::DoDeferredStackCheck(LGoto* instr) { |
| 838 Abort("Unimplemented: %s", "DoDeferredStackCheck"); | 1101 __ Pushad(); |
| 1102 __ CallRuntimeSaveDoubles(Runtime::kStackGuard); |
| 1103 RecordSafepointWithRegisters( |
| 1104 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex); |
| 1105 __ Popad(); |
| 839 } | 1106 } |
| 840 | 1107 |
| 841 | 1108 |
| 842 void LCodeGen::DoGoto(LGoto* instr) { | 1109 void LCodeGen::DoGoto(LGoto* instr) { |
| 843 class DeferredStackCheck: public LDeferredCode { | 1110 class DeferredStackCheck: public LDeferredCode { |
| 844 public: | 1111 public: |
| 845 DeferredStackCheck(LCodeGen* codegen, LGoto* instr) | 1112 DeferredStackCheck(LCodeGen* codegen, LGoto* instr) |
| 846 : LDeferredCode(codegen), instr_(instr) { } | 1113 : LDeferredCode(codegen), instr_(instr) { } |
| 847 virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); } | 1114 virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); } |
| 848 private: | 1115 private: |
| (...skipping 38 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 887 | 1154 |
| 888 void LCodeGen::EmitCmpI(LOperand* left, LOperand* right) { | 1155 void LCodeGen::EmitCmpI(LOperand* left, LOperand* right) { |
| 889 if (right->IsConstantOperand()) { | 1156 if (right->IsConstantOperand()) { |
| 890 int32_t value = ToInteger32(LConstantOperand::cast(right)); | 1157 int32_t value = ToInteger32(LConstantOperand::cast(right)); |
| 891 if (left->IsRegister()) { | 1158 if (left->IsRegister()) { |
| 892 __ cmpl(ToRegister(left), Immediate(value)); | 1159 __ cmpl(ToRegister(left), Immediate(value)); |
| 893 } else { | 1160 } else { |
| 894 __ cmpl(ToOperand(left), Immediate(value)); | 1161 __ cmpl(ToOperand(left), Immediate(value)); |
| 895 } | 1162 } |
| 896 } else if (right->IsRegister()) { | 1163 } else if (right->IsRegister()) { |
| 897 __ cmpq(ToRegister(left), ToRegister(right)); | 1164 __ cmpl(ToRegister(left), ToRegister(right)); |
| 898 } else { | 1165 } else { |
| 899 __ cmpq(ToRegister(left), ToOperand(right)); | 1166 __ cmpl(ToRegister(left), ToOperand(right)); |
| 900 } | 1167 } |
| 901 } | 1168 } |
| 902 | 1169 |
| 903 | 1170 |
| 904 void LCodeGen::DoCmpID(LCmpID* instr) { | 1171 void LCodeGen::DoCmpID(LCmpID* instr) { |
| 905 LOperand* left = instr->InputAt(0); | 1172 LOperand* left = instr->InputAt(0); |
| 906 LOperand* right = instr->InputAt(1); | 1173 LOperand* right = instr->InputAt(1); |
| 907 LOperand* result = instr->result(); | 1174 LOperand* result = instr->result(); |
| 908 | 1175 |
| 909 NearLabel unordered; | 1176 NearLabel unordered; |
| (...skipping 275 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1185 } | 1452 } |
| 1186 | 1453 |
| 1187 | 1454 |
| 1188 void LCodeGen::DoHasCachedArrayIndex(LHasCachedArrayIndex* instr) { | 1455 void LCodeGen::DoHasCachedArrayIndex(LHasCachedArrayIndex* instr) { |
| 1189 Abort("Unimplemented: %s", "DoHasCachedArrayIndex"); | 1456 Abort("Unimplemented: %s", "DoHasCachedArrayIndex"); |
| 1190 } | 1457 } |
| 1191 | 1458 |
| 1192 | 1459 |
| 1193 void LCodeGen::DoHasCachedArrayIndexAndBranch( | 1460 void LCodeGen::DoHasCachedArrayIndexAndBranch( |
| 1194 LHasCachedArrayIndexAndBranch* instr) { | 1461 LHasCachedArrayIndexAndBranch* instr) { |
| 1195 Register input = ToRegister(instr->InputAt(0)); | 1462 Register input = ToRegister(instr->InputAt(0)); |
| 1196 | 1463 |
| 1197 int true_block = chunk_->LookupDestination(instr->true_block_id()); | 1464 int true_block = chunk_->LookupDestination(instr->true_block_id()); |
| 1198 int false_block = chunk_->LookupDestination(instr->false_block_id()); | 1465 int false_block = chunk_->LookupDestination(instr->false_block_id()); |
| 1199 | 1466 |
| 1200 __ testl(FieldOperand(input, String::kHashFieldOffset), | 1467 __ testl(FieldOperand(input, String::kHashFieldOffset), |
| 1201 Immediate(String::kContainsCachedArrayIndexMask)); | 1468 Immediate(String::kContainsCachedArrayIndexMask)); |
| 1202 EmitBranch(true_block, false_block, not_equal); | 1469 EmitBranch(true_block, false_block, not_equal); |
| 1203 } | 1470 } |
| 1204 | 1471 |
| 1205 | 1472 |
| (...skipping 170 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1376 | 1643 |
| 1377 void LCodeGen::DoReturn(LReturn* instr) { | 1644 void LCodeGen::DoReturn(LReturn* instr) { |
| 1378 if (FLAG_trace) { | 1645 if (FLAG_trace) { |
| 1379 // Preserve the return value on the stack and rely on the runtime | 1646 // Preserve the return value on the stack and rely on the runtime |
| 1380 // call to return the value in the same register. | 1647 // call to return the value in the same register. |
| 1381 __ push(rax); | 1648 __ push(rax); |
| 1382 __ CallRuntime(Runtime::kTraceExit, 1); | 1649 __ CallRuntime(Runtime::kTraceExit, 1); |
| 1383 } | 1650 } |
| 1384 __ movq(rsp, rbp); | 1651 __ movq(rsp, rbp); |
| 1385 __ pop(rbp); | 1652 __ pop(rbp); |
| 1386 __ ret((ParameterCount() + 1) * kPointerSize); | 1653 __ Ret((ParameterCount() + 1) * kPointerSize, rcx); |
| 1387 } | 1654 } |
| 1388 | 1655 |
| 1389 | 1656 |
| 1390 void LCodeGen::DoLoadGlobal(LLoadGlobal* instr) { | 1657 void LCodeGen::DoLoadGlobal(LLoadGlobal* instr) { |
| 1391 Abort("Unimplemented: %s", "DoLoadGlobal"); | 1658 Register result = ToRegister(instr->result()); |
| 1659 if (result.is(rax)) { |
| 1660 __ load_rax(instr->hydrogen()->cell().location(), |
| 1661 RelocInfo::GLOBAL_PROPERTY_CELL); |
| 1662 } else { |
| 1663 __ movq(result, instr->hydrogen()->cell(), RelocInfo::GLOBAL_PROPERTY_CELL); |
| 1664 __ movq(result, Operand(result, 0)); |
| 1665 } |
| 1666 if (instr->hydrogen()->check_hole_value()) { |
| 1667 __ CompareRoot(result, Heap::kTheHoleValueRootIndex); |
| 1668 DeoptimizeIf(equal, instr->environment()); |
| 1669 } |
| 1392 } | 1670 } |
| 1393 | 1671 |
| 1394 | 1672 |
| 1395 void LCodeGen::DoStoreGlobal(LStoreGlobal* instr) { | 1673 void LCodeGen::DoStoreGlobal(LStoreGlobal* instr) { |
| 1396 Abort("Unimplemented: %s", "DoStoreGlobal"); | 1674 Register value = ToRegister(instr->InputAt(0)); |
| 1675 Register temp = ToRegister(instr->TempAt(0)); |
| 1676 ASSERT(!value.is(temp)); |
| 1677 bool check_hole = instr->hydrogen()->check_hole_value(); |
| 1678 if (!check_hole && value.is(rax)) { |
| 1679 __ store_rax(instr->hydrogen()->cell().location(), |
| 1680 RelocInfo::GLOBAL_PROPERTY_CELL); |
| 1681 return; |
| 1682 } |
| 1683 // If the cell we are storing to contains the hole it could have |
| 1684 // been deleted from the property dictionary. In that case, we need |
| 1685 // to update the property details in the property dictionary to mark |
| 1686 // it as no longer deleted. We deoptimize in that case. |
| 1687 __ movq(temp, instr->hydrogen()->cell(), RelocInfo::GLOBAL_PROPERTY_CELL); |
| 1688 if (check_hole) { |
| 1689 __ CompareRoot(Operand(temp, 0), Heap::kTheHoleValueRootIndex); |
| 1690 DeoptimizeIf(equal, instr->environment()); |
| 1691 } |
| 1692 __ movq(Operand(temp, 0), value); |
| 1397 } | 1693 } |
| 1398 | 1694 |
| 1399 | 1695 |
| 1400 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) { | 1696 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) { |
| 1401 Abort("Unimplemented: %s", "DoLoadContextSlot"); | 1697 Abort("Unimplemented: %s", "DoLoadContextSlot"); |
| 1402 } | 1698 } |
| 1403 | 1699 |
| 1404 | 1700 |
| 1405 void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) { | 1701 void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) { |
| 1406 Abort("Unimplemented: %s", "DoLoadNamedField"); | 1702 Register object = ToRegister(instr->InputAt(0)); |
| 1703 Register result = ToRegister(instr->result()); |
| 1704 if (instr->hydrogen()->is_in_object()) { |
| 1705 __ movq(result, FieldOperand(object, instr->hydrogen()->offset())); |
| 1706 } else { |
| 1707 __ movq(result, FieldOperand(object, JSObject::kPropertiesOffset)); |
| 1708 __ movq(result, FieldOperand(result, instr->hydrogen()->offset())); |
| 1709 } |
| 1407 } | 1710 } |
| 1408 | 1711 |
| 1409 | 1712 |
| 1410 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) { | 1713 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) { |
| 1411 Abort("Unimplemented: %s", "DoLoadNamedGeneric"); | 1714 ASSERT(ToRegister(instr->object()).is(rax)); |
| 1715 ASSERT(ToRegister(instr->result()).is(rax)); |
| 1716 |
| 1717 __ Move(rcx, instr->name()); |
| 1718 Handle<Code> ic(isolate()->builtins()->builtin(Builtins::LoadIC_Initialize)); |
| 1719 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
| 1412 } | 1720 } |
| 1413 | 1721 |
| 1414 | 1722 |
| 1415 void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) { | 1723 void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) { |
| 1416 Abort("Unimplemented: %s", "DoLoadFunctionPrototype"); | 1724 Abort("Unimplemented: %s", "DoLoadFunctionPrototype"); |
| 1417 } | 1725 } |
| 1418 | 1726 |
| 1419 | 1727 |
| 1420 void LCodeGen::DoLoadElements(LLoadElements* instr) { | 1728 void LCodeGen::DoLoadElements(LLoadElements* instr) { |
| 1421 Abort("Unimplemented: %s", "DoLoadElements"); | 1729 ASSERT(instr->result()->Equals(instr->InputAt(0))); |
| 1730 Register reg = ToRegister(instr->InputAt(0)); |
| 1731 __ movq(reg, FieldOperand(reg, JSObject::kElementsOffset)); |
| 1732 if (FLAG_debug_code) { |
| 1733 NearLabel done; |
| 1734 __ Cmp(FieldOperand(reg, HeapObject::kMapOffset), |
| 1735 FACTORY->fixed_array_map()); |
| 1736 __ j(equal, &done); |
| 1737 __ Cmp(FieldOperand(reg, HeapObject::kMapOffset), |
| 1738 FACTORY->fixed_cow_array_map()); |
| 1739 __ Check(equal, "Check for fast elements failed."); |
| 1740 __ bind(&done); |
| 1741 } |
| 1422 } | 1742 } |
| 1423 | 1743 |
| 1424 | 1744 |
| 1425 void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) { | 1745 void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) { |
| 1426 Abort("Unimplemented: %s", "DoAccessArgumentsAt"); | 1746 Abort("Unimplemented: %s", "DoAccessArgumentsAt"); |
| 1427 } | 1747 } |
| 1428 | 1748 |
| 1429 | 1749 |
| 1430 void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) { | 1750 void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) { |
| 1431 Abort("Unimplemented: %s", "DoLoadKeyedFastElement"); | 1751 Register elements = ToRegister(instr->elements()); |
| 1752 Register key = ToRegister(instr->key()); |
| 1753 Register result = ToRegister(instr->result()); |
| 1754 ASSERT(result.is(elements)); |
| 1755 |
| 1756 // Load the result. |
| 1757 __ movq(result, FieldOperand(elements, |
| 1758 key, |
| 1759 times_pointer_size, |
| 1760 FixedArray::kHeaderSize)); |
| 1761 |
| 1762 // Check for the hole value. |
| 1763 __ Cmp(result, FACTORY->the_hole_value()); |
| 1764 DeoptimizeIf(equal, instr->environment()); |
| 1432 } | 1765 } |
| 1433 | 1766 |
| 1434 | 1767 |
| 1435 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) { | 1768 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) { |
| 1436 Abort("Unimplemented: %s", "DoLoadKeyedGeneric"); | 1769 Abort("Unimplemented: %s", "DoLoadKeyedGeneric"); |
| 1437 } | 1770 } |
| 1438 | 1771 |
| 1439 | 1772 |
| 1440 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { | 1773 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { |
| 1441 Abort("Unimplemented: %s", "DoArgumentsElements"); | 1774 Abort("Unimplemented: %s", "DoArgumentsElements"); |
| 1442 } | 1775 } |
| 1443 | 1776 |
| 1444 | 1777 |
| 1445 void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) { | 1778 void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) { |
| 1446 Abort("Unimplemented: %s", "DoArgumentsLength"); | 1779 Abort("Unimplemented: %s", "DoArgumentsLength"); |
| 1447 } | 1780 } |
| 1448 | 1781 |
| 1449 | 1782 |
| 1450 void LCodeGen::DoApplyArguments(LApplyArguments* instr) { | 1783 void LCodeGen::DoApplyArguments(LApplyArguments* instr) { |
| 1451 Abort("Unimplemented: %s", "DoApplyArguments"); | 1784 Abort("Unimplemented: %s", "DoApplyArguments"); |
| 1452 } | 1785 } |
| 1453 | 1786 |
| 1454 | 1787 |
| 1455 void LCodeGen::DoPushArgument(LPushArgument* instr) { | 1788 void LCodeGen::DoPushArgument(LPushArgument* instr) { |
| 1456 Abort("Unimplemented: %s", "DoPushArgument"); | 1789 LOperand* argument = instr->InputAt(0); |
| 1790 if (argument->IsConstantOperand()) { |
| 1791 LConstantOperand* const_op = LConstantOperand::cast(argument); |
| 1792 Handle<Object> literal = chunk_->LookupLiteral(const_op); |
| 1793 Representation r = chunk_->LookupLiteralRepresentation(const_op); |
| 1794 if (r.IsInteger32()) { |
| 1795 ASSERT(literal->IsNumber()); |
| 1796 __ push(Immediate(static_cast<int32_t>(literal->Number()))); |
| 1797 } else if (r.IsDouble()) { |
| 1798 Abort("unsupported double immediate"); |
| 1799 } else { |
| 1800 ASSERT(r.IsTagged()); |
| 1801 __ Push(literal); |
| 1802 } |
| 1803 } else if (argument->IsRegister()) { |
| 1804 __ push(ToRegister(argument)); |
| 1805 } else { |
| 1806 ASSERT(!argument->IsDoubleRegister()); |
| 1807 __ push(ToOperand(argument)); |
| 1808 } |
| 1457 } | 1809 } |
| 1458 | 1810 |
| 1459 | 1811 |
| 1460 void LCodeGen::DoGlobalObject(LGlobalObject* instr) { | 1812 void LCodeGen::DoGlobalObject(LGlobalObject* instr) { |
| 1461 Register result = ToRegister(instr->result()); | 1813 Register result = ToRegister(instr->result()); |
| 1462 __ movq(result, GlobalObjectOperand()); | 1814 __ movq(result, GlobalObjectOperand()); |
| 1463 } | 1815 } |
| 1464 | 1816 |
| 1465 | 1817 |
| 1466 void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) { | 1818 void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) { |
| 1467 Abort("Unimplemented: %s", "DoGlobalReceiver"); | 1819 Register result = ToRegister(instr->result()); |
| 1820 __ movq(result, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX))); |
| 1821 __ movq(result, FieldOperand(result, GlobalObject::kGlobalReceiverOffset)); |
| 1468 } | 1822 } |
| 1469 | 1823 |
| 1470 | 1824 |
| 1471 void LCodeGen::CallKnownFunction(Handle<JSFunction> function, | 1825 void LCodeGen::CallKnownFunction(Handle<JSFunction> function, |
| 1472 int arity, | 1826 int arity, |
| 1473 LInstruction* instr) { | 1827 LInstruction* instr) { |
| 1474 Abort("Unimplemented: %s", "CallKnownFunction"); | 1828 // Change context if needed. |
| 1829 bool change_context = |
| 1830 (graph()->info()->closure()->context() != function->context()) || |
| 1831 scope()->contains_with() || |
| 1832 (scope()->num_heap_slots() > 0); |
| 1833 if (change_context) { |
| 1834 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); |
| 1835 } |
| 1836 |
| 1837 // Set rax to arguments count if adaption is not needed. Assumes that rax |
| 1838 // is available to write to at this point. |
| 1839 if (!function->NeedsArgumentsAdaption()) { |
| 1840 __ Set(rax, arity); |
| 1841 } |
| 1842 |
| 1843 LPointerMap* pointers = instr->pointer_map(); |
| 1844 RecordPosition(pointers->position()); |
| 1845 |
| 1846 // Invoke function. |
| 1847 if (*function == *graph()->info()->closure()) { |
| 1848 __ CallSelf(); |
| 1849 } else { |
| 1850 __ call(FieldOperand(rdi, JSFunction::kCodeEntryOffset)); |
| 1851 } |
| 1852 |
| 1853 // Setup deoptimization. |
| 1854 RegisterLazyDeoptimization(instr); |
| 1855 |
| 1856 // Restore context. |
| 1857 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
| 1475 } | 1858 } |
| 1476 | 1859 |
| 1477 | 1860 |
| 1478 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { | 1861 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { |
| 1479 Abort("Unimplemented: %s", "DoCallConstantFunction"); | 1862 ASSERT(ToRegister(instr->result()).is(rax)); |
| 1863 __ Move(rdi, instr->function()); |
| 1864 CallKnownFunction(instr->function(), instr->arity(), instr); |
| 1480 } | 1865 } |
| 1481 | 1866 |
| 1482 | 1867 |
| 1483 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) { | 1868 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) { |
| 1484 Abort("Unimplemented: %s", "DoDeferredMathAbsTaggedHeapNumber"); | 1869 Abort("Unimplemented: %s", "DoDeferredMathAbsTaggedHeapNumber"); |
| 1485 } | 1870 } |
| 1486 | 1871 |
| 1487 | 1872 |
| 1488 void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) { | 1873 void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) { |
| 1489 Abort("Unimplemented: %s", "DoMathAbs"); | 1874 Abort("Unimplemented: %s", "DoMathAbs"); |
| (...skipping 59 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1549 Abort("Unimplemented: %s", "DoCallFunction"); | 1934 Abort("Unimplemented: %s", "DoCallFunction"); |
| 1550 } | 1935 } |
| 1551 | 1936 |
| 1552 | 1937 |
| 1553 void LCodeGen::DoCallGlobal(LCallGlobal* instr) { | 1938 void LCodeGen::DoCallGlobal(LCallGlobal* instr) { |
| 1554 Abort("Unimplemented: %s", "DoCallGlobal"); | 1939 Abort("Unimplemented: %s", "DoCallGlobal"); |
| 1555 } | 1940 } |
| 1556 | 1941 |
| 1557 | 1942 |
| 1558 void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) { | 1943 void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) { |
| 1559 Abort("Unimplemented: %s", "DoCallKnownGlobal"); | 1944 ASSERT(ToRegister(instr->result()).is(rax)); |
| 1945 __ Move(rdi, instr->target()); |
| 1946 CallKnownFunction(instr->target(), instr->arity(), instr); |
| 1560 } | 1947 } |
| 1561 | 1948 |
| 1562 | 1949 |
| 1563 void LCodeGen::DoCallNew(LCallNew* instr) { | 1950 void LCodeGen::DoCallNew(LCallNew* instr) { |
| 1564 Abort("Unimplemented: %s", "DoCallNew"); | 1951 ASSERT(ToRegister(instr->InputAt(0)).is(rdi)); |
| 1952 ASSERT(ToRegister(instr->result()).is(rax)); |
| 1953 |
| 1954 Handle<Code> builtin(isolate()->builtins()->builtin( |
| 1955 Builtins::JSConstructCall)); |
| 1956 __ Set(rax, instr->arity()); |
| 1957 CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr); |
| 1565 } | 1958 } |
| 1566 | 1959 |
| 1567 | 1960 |
| 1568 void LCodeGen::DoCallRuntime(LCallRuntime* instr) { | 1961 void LCodeGen::DoCallRuntime(LCallRuntime* instr) { |
| 1569 Abort("Unimplemented: %s", "DoCallRuntime"); | 1962 Abort("Unimplemented: %s", "DoCallRuntime"); |
| 1570 } | 1963 } |
| 1571 | 1964 |
| 1572 | 1965 |
| 1573 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { | 1966 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { |
| 1574 Abort("Unimplemented: %s", "DoStoreNamedField"); | 1967 Register object = ToRegister(instr->object()); |
| 1968 Register value = ToRegister(instr->value()); |
| 1969 int offset = instr->offset(); |
| 1970 |
| 1971 if (!instr->transition().is_null()) { |
| 1972 __ Move(FieldOperand(object, HeapObject::kMapOffset), instr->transition()); |
| 1973 } |
| 1974 |
| 1975 // Do the store. |
| 1976 if (instr->is_in_object()) { |
| 1977 __ movq(FieldOperand(object, offset), value); |
| 1978 if (instr->needs_write_barrier()) { |
| 1979 Register temp = ToRegister(instr->TempAt(0)); |
| 1980 // Update the write barrier for the object for in-object properties. |
| 1981 __ RecordWrite(object, offset, value, temp); |
| 1982 } |
| 1983 } else { |
| 1984 Register temp = ToRegister(instr->TempAt(0)); |
| 1985 __ movq(temp, FieldOperand(object, JSObject::kPropertiesOffset)); |
| 1986 __ movq(FieldOperand(temp, offset), value); |
| 1987 if (instr->needs_write_barrier()) { |
| 1988 // Update the write barrier for the properties array. |
| 1989 // object is used as a scratch register. |
| 1990 __ RecordWrite(temp, offset, value, object); |
| 1991 } |
| 1992 } |
| 1575 } | 1993 } |
| 1576 | 1994 |
| 1577 | 1995 |
| 1578 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) { | 1996 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) { |
| 1579 Abort("Unimplemented: %s", "DoStoreNamedGeneric"); | 1997 Abort("Unimplemented: %s", "DoStoreNamedGeneric"); |
| 1580 } | 1998 } |
| 1581 | 1999 |
| 1582 | 2000 |
| 1583 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) { | 2001 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) { |
| 1584 Abort("Unimplemented: %s", "DoBoundsCheck"); | 2002 if (instr->length()->IsRegister()) { |
| 2003 __ cmpq(ToRegister(instr->index()), ToRegister(instr->length())); |
| 2004 } else { |
| 2005 __ cmpq(ToRegister(instr->index()), ToOperand(instr->length())); |
| 2006 } |
| 2007 DeoptimizeIf(above_equal, instr->environment()); |
| 1585 } | 2008 } |
| 1586 | 2009 |
| 1587 | 2010 |
| 1588 void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) { | 2011 void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) { |
| 1589 Abort("Unimplemented: %s", "DoStoreKeyedFastElement"); | 2012 Register value = ToRegister(instr->value()); |
| 2013 Register elements = ToRegister(instr->object()); |
| 2014 Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg; |
| 2015 |
| 2016 // Do the store. |
| 2017 if (instr->key()->IsConstantOperand()) { |
| 2018 ASSERT(!instr->hydrogen()->NeedsWriteBarrier()); |
| 2019 LConstantOperand* const_operand = LConstantOperand::cast(instr->key()); |
| 2020 int offset = |
| 2021 ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize; |
| 2022 __ movq(FieldOperand(elements, offset), value); |
| 2023 } else { |
| 2024 __ movq(FieldOperand(elements, |
| 2025 key, |
| 2026 times_pointer_size, |
| 2027 FixedArray::kHeaderSize), |
| 2028 value); |
| 2029 } |
| 2030 |
| 2031 if (instr->hydrogen()->NeedsWriteBarrier()) { |
| 2032 // Compute address of modified element and store it into key register. |
| 2033 __ lea(key, FieldOperand(elements, |
| 2034 key, |
| 2035 times_pointer_size, |
| 2036 FixedArray::kHeaderSize)); |
| 2037 __ RecordWrite(elements, key, value); |
| 2038 } |
| 1590 } | 2039 } |
| 1591 | 2040 |
| 1592 | 2041 |
| 1593 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) { | 2042 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) { |
| 1594 Abort("Unimplemented: %s", "DoStoreKeyedGeneric"); | 2043 Abort("Unimplemented: %s", "DoStoreKeyedGeneric"); |
| 1595 } | 2044 } |
| 1596 | 2045 |
| 1597 | 2046 |
| 1598 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) { | 2047 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) { |
| 1599 LOperand* input = instr->InputAt(0); | 2048 LOperand* input = instr->InputAt(0); |
| (...skipping 50 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1650 RecordSafepointWithRegisters( | 2099 RecordSafepointWithRegisters( |
| 1651 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex); | 2100 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex); |
| 1652 // Ensure that value in rax survives popping registers. | 2101 // Ensure that value in rax survives popping registers. |
| 1653 __ movq(kScratchRegister, rax); | 2102 __ movq(kScratchRegister, rax); |
| 1654 __ PopSafepointRegisters(); | 2103 __ PopSafepointRegisters(); |
| 1655 __ movq(reg, kScratchRegister); | 2104 __ movq(reg, kScratchRegister); |
| 1656 } | 2105 } |
| 1657 | 2106 |
| 1658 | 2107 |
| 1659 void LCodeGen::DoSmiTag(LSmiTag* instr) { | 2108 void LCodeGen::DoSmiTag(LSmiTag* instr) { |
| 1660 Abort("Unimplemented: %s", "DoSmiTag"); | 2109 ASSERT(instr->InputAt(0)->Equals(instr->result())); |
| 2110 Register input = ToRegister(instr->InputAt(0)); |
| 2111 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow)); |
| 2112 __ Integer32ToSmi(input, input); |
| 1661 } | 2113 } |
| 1662 | 2114 |
| 1663 | 2115 |
| 1664 void LCodeGen::DoSmiUntag(LSmiUntag* instr) { | 2116 void LCodeGen::DoSmiUntag(LSmiUntag* instr) { |
| 1665 Abort("Unimplemented: %s", "DoSmiUntag"); | 2117 ASSERT(instr->InputAt(0)->Equals(instr->result())); |
| 2118 Register input = ToRegister(instr->InputAt(0)); |
| 2119 if (instr->needs_check()) { |
| 2120 Condition is_smi = __ CheckSmi(input); |
| 2121 DeoptimizeIf(NegateCondition(is_smi), instr->environment()); |
| 2122 } |
| 2123 __ SmiToInteger32(input, input); |
| 1666 } | 2124 } |
| 1667 | 2125 |
| 1668 | 2126 |
| 1669 void LCodeGen::EmitNumberUntagD(Register input_reg, | 2127 void LCodeGen::EmitNumberUntagD(Register input_reg, |
| 1670 XMMRegister result_reg, | 2128 XMMRegister result_reg, |
| 1671 LEnvironment* env) { | 2129 LEnvironment* env) { |
| 1672 NearLabel load_smi, heap_number, done; | 2130 NearLabel load_smi, heap_number, done; |
| 1673 | 2131 |
| 1674 // Smi check. | 2132 // Smi check. |
| 1675 __ JumpIfSmi(input_reg, &load_smi); | 2133 __ JumpIfSmi(input_reg, &load_smi); |
| (...skipping 17 matching lines...) Expand all Loading... |
| 1693 __ jmp(&done); | 2151 __ jmp(&done); |
| 1694 | 2152 |
| 1695 // Smi to XMM conversion | 2153 // Smi to XMM conversion |
| 1696 __ bind(&load_smi); | 2154 __ bind(&load_smi); |
| 1697 __ SmiToInteger32(kScratchRegister, input_reg); // Untag smi first. | 2155 __ SmiToInteger32(kScratchRegister, input_reg); // Untag smi first. |
| 1698 __ cvtlsi2sd(result_reg, kScratchRegister); | 2156 __ cvtlsi2sd(result_reg, kScratchRegister); |
| 1699 __ bind(&done); | 2157 __ bind(&done); |
| 1700 } | 2158 } |
| 1701 | 2159 |
| 1702 | 2160 |
| 2161 class DeferredTaggedToI: public LDeferredCode { |
| 2162 public: |
| 2163 DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr) |
| 2164 : LDeferredCode(codegen), instr_(instr) { } |
| 2165 virtual void Generate() { codegen()->DoDeferredTaggedToI(instr_); } |
| 2166 private: |
| 2167 LTaggedToI* instr_; |
| 2168 }; |
| 2169 |
| 2170 |
| 1703 void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) { | 2171 void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) { |
| 1704 Abort("Unimplemented: %s", "DoDeferredTaggedToI"); | 2172 NearLabel done, heap_number; |
| 2173 Register input_reg = ToRegister(instr->InputAt(0)); |
| 2174 |
| 2175 // Heap number map check. |
| 2176 __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset), |
| 2177 Heap::kHeapNumberMapRootIndex); |
| 2178 |
| 2179 if (instr->truncating()) { |
| 2180 __ j(equal, &heap_number); |
| 2181 // Check for undefined. Undefined is converted to zero for truncating |
| 2182 // conversions. |
| 2183 __ CompareRoot(input_reg, Heap::kUndefinedValueRootIndex); |
| 2184 DeoptimizeIf(not_equal, instr->environment()); |
| 2185 __ movl(input_reg, Immediate(0)); |
| 2186 __ jmp(&done); |
| 2187 |
| 2188 __ bind(&heap_number); |
| 2189 |
| 2190 __ movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset)); |
| 2191 __ cvttsd2siq(input_reg, xmm0); |
| 2192 __ Set(kScratchRegister, V8_UINT64_C(0x8000000000000000)); |
| 2193 __ cmpl(input_reg, kScratchRegister); |
| 2194 DeoptimizeIf(equal, instr->environment()); |
| 2195 } else { |
| 2196 // Deoptimize if we don't have a heap number. |
| 2197 DeoptimizeIf(not_equal, instr->environment()); |
| 2198 |
| 2199 XMMRegister xmm_temp = ToDoubleRegister(instr->TempAt(0)); |
| 2200 __ movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset)); |
| 2201 __ cvttsd2si(input_reg, xmm0); |
| 2202 __ cvtlsi2sd(xmm_temp, input_reg); |
| 2203 __ ucomisd(xmm0, xmm_temp); |
| 2204 DeoptimizeIf(not_equal, instr->environment()); |
| 2205 DeoptimizeIf(parity_even, instr->environment()); // NaN. |
| 2206 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 2207 __ testl(input_reg, input_reg); |
| 2208 __ j(not_zero, &done); |
| 2209 __ movmskpd(input_reg, xmm0); |
| 2210 __ andl(input_reg, Immediate(1)); |
| 2211 DeoptimizeIf(not_zero, instr->environment()); |
| 2212 } |
| 2213 } |
| 2214 __ bind(&done); |
| 1705 } | 2215 } |
| 1706 | 2216 |
| 1707 | 2217 |
| 1708 void LCodeGen::DoTaggedToI(LTaggedToI* instr) { | 2218 void LCodeGen::DoTaggedToI(LTaggedToI* instr) { |
| 1709 Abort("Unimplemented: %s", "DoTaggedToI"); | 2219 LOperand* input = instr->InputAt(0); |
| 2220 ASSERT(input->IsRegister()); |
| 2221 ASSERT(input->Equals(instr->result())); |
| 2222 |
| 2223 Register input_reg = ToRegister(input); |
| 2224 DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr); |
| 2225 __ JumpIfNotSmi(input_reg, deferred->entry()); |
| 2226 __ SmiToInteger32(input_reg, input_reg); |
| 2227 __ bind(deferred->exit()); |
| 1710 } | 2228 } |
| 1711 | 2229 |
| 1712 | 2230 |
| 1713 void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) { | 2231 void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) { |
| 1714 Abort("Unimplemented: %s", "DoNumberUntagD"); | 2232 Abort("Unimplemented: %s", "DoNumberUntagD"); |
| 1715 } | 2233 } |
| 1716 | 2234 |
| 1717 | 2235 |
| 1718 void LCodeGen::DoDoubleToI(LDoubleToI* instr) { | 2236 void LCodeGen::DoDoubleToI(LDoubleToI* instr) { |
| 1719 Abort("Unimplemented: %s", "DoDoubleToI"); | 2237 Abort("Unimplemented: %s", "DoDoubleToI"); |
| 1720 } | 2238 } |
| 1721 | 2239 |
| 1722 | 2240 |
| 1723 void LCodeGen::DoCheckSmi(LCheckSmi* instr) { | 2241 void LCodeGen::DoCheckSmi(LCheckSmi* instr) { |
| 1724 Abort("Unimplemented: %s", "DoCheckSmi"); | 2242 LOperand* input = instr->InputAt(0); |
| 2243 ASSERT(input->IsRegister()); |
| 2244 Condition cc = masm()->CheckSmi(ToRegister(input)); |
| 2245 if (instr->condition() != equal) { |
| 2246 cc = NegateCondition(cc); |
| 2247 } |
| 2248 DeoptimizeIf(cc, instr->environment()); |
| 1725 } | 2249 } |
| 1726 | 2250 |
| 1727 | 2251 |
| 1728 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) { | 2252 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) { |
| 1729 Abort("Unimplemented: %s", "DoCheckInstanceType"); | 2253 Register input = ToRegister(instr->InputAt(0)); |
| 2254 InstanceType first = instr->hydrogen()->first(); |
| 2255 InstanceType last = instr->hydrogen()->last(); |
| 2256 |
| 2257 __ movq(kScratchRegister, FieldOperand(input, HeapObject::kMapOffset)); |
| 2258 |
| 2259 // If there is only one type in the interval check for equality. |
| 2260 if (first == last) { |
| 2261 __ cmpb(FieldOperand(kScratchRegister, Map::kInstanceTypeOffset), |
| 2262 Immediate(static_cast<int8_t>(first))); |
| 2263 DeoptimizeIf(not_equal, instr->environment()); |
| 2264 } else if (first == FIRST_STRING_TYPE && last == LAST_STRING_TYPE) { |
| 2265 // String has a dedicated bit in instance type. |
| 2266 __ testb(FieldOperand(kScratchRegister, Map::kInstanceTypeOffset), |
| 2267 Immediate(kIsNotStringMask)); |
| 2268 DeoptimizeIf(not_zero, instr->environment()); |
| 2269 } else { |
| 2270 __ cmpb(FieldOperand(kScratchRegister, Map::kInstanceTypeOffset), |
| 2271 Immediate(static_cast<int8_t>(first))); |
| 2272 DeoptimizeIf(below, instr->environment()); |
| 2273 // Omit check for the last type. |
| 2274 if (last != LAST_TYPE) { |
| 2275 __ cmpb(FieldOperand(kScratchRegister, Map::kInstanceTypeOffset), |
| 2276 Immediate(static_cast<int8_t>(last))); |
| 2277 DeoptimizeIf(above, instr->environment()); |
| 2278 } |
| 2279 } |
| 1730 } | 2280 } |
| 1731 | 2281 |
| 1732 | 2282 |
| 1733 void LCodeGen::DoCheckFunction(LCheckFunction* instr) { | 2283 void LCodeGen::DoCheckFunction(LCheckFunction* instr) { |
| 1734 Abort("Unimplemented: %s", "DoCheckFunction"); | 2284 ASSERT(instr->InputAt(0)->IsRegister()); |
| 2285 Register reg = ToRegister(instr->InputAt(0)); |
| 2286 __ Cmp(reg, instr->hydrogen()->target()); |
| 2287 DeoptimizeIf(not_equal, instr->environment()); |
| 1735 } | 2288 } |
| 1736 | 2289 |
| 1737 | 2290 |
| 1738 void LCodeGen::DoCheckMap(LCheckMap* instr) { | 2291 void LCodeGen::DoCheckMap(LCheckMap* instr) { |
| 1739 Abort("Unimplemented: %s", "DoCheckMap"); | 2292 LOperand* input = instr->InputAt(0); |
| 2293 ASSERT(input->IsRegister()); |
| 2294 Register reg = ToRegister(input); |
| 2295 __ Cmp(FieldOperand(reg, HeapObject::kMapOffset), |
| 2296 instr->hydrogen()->map()); |
| 2297 DeoptimizeIf(not_equal, instr->environment()); |
| 1740 } | 2298 } |
| 1741 | 2299 |
| 1742 | 2300 |
| 1743 void LCodeGen::LoadHeapObject(Register result, Handle<HeapObject> object) { | 2301 void LCodeGen::LoadHeapObject(Register result, Handle<HeapObject> object) { |
| 1744 Abort("Unimplemented: %s", "LoadHeapObject"); | 2302 if (HEAP->InNewSpace(*object)) { |
| 2303 Handle<JSGlobalPropertyCell> cell = |
| 2304 FACTORY->NewJSGlobalPropertyCell(object); |
| 2305 __ movq(result, cell, RelocInfo::GLOBAL_PROPERTY_CELL); |
| 2306 __ movq(result, Operand(result, 0)); |
| 2307 } else { |
| 2308 __ Move(result, object); |
| 2309 } |
| 1745 } | 2310 } |
| 1746 | 2311 |
| 1747 | 2312 |
| 1748 void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) { | 2313 void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) { |
| 1749 Abort("Unimplemented: %s", "DoCheckPrototypeMaps"); | 2314 Register reg = ToRegister(instr->TempAt(0)); |
| 2315 |
| 2316 Handle<JSObject> holder = instr->holder(); |
| 2317 Handle<JSObject> current_prototype = instr->prototype(); |
| 2318 |
| 2319 // Load prototype object. |
| 2320 LoadHeapObject(reg, current_prototype); |
| 2321 |
| 2322 // Check prototype maps up to the holder. |
| 2323 while (!current_prototype.is_identical_to(holder)) { |
| 2324 __ Cmp(FieldOperand(reg, HeapObject::kMapOffset), |
| 2325 Handle<Map>(current_prototype->map())); |
| 2326 DeoptimizeIf(not_equal, instr->environment()); |
| 2327 current_prototype = |
| 2328 Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype())); |
| 2329 // Load next prototype object. |
| 2330 LoadHeapObject(reg, current_prototype); |
| 2331 } |
| 2332 |
| 2333 // Check the holder map. |
| 2334 __ Cmp(FieldOperand(reg, HeapObject::kMapOffset), |
| 2335 Handle<Map>(current_prototype->map())); |
| 2336 DeoptimizeIf(not_equal, instr->environment()); |
| 1750 } | 2337 } |
| 1751 | 2338 |
| 1752 | 2339 |
| 1753 void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) { | 2340 void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) { |
| 1754 Abort("Unimplemented: %s", "DoArrayLiteral"); | 2341 // Setup the parameters to the stub/runtime call. |
| 2342 __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); |
| 2343 __ push(FieldOperand(rax, JSFunction::kLiteralsOffset)); |
| 2344 __ Push(Smi::FromInt(instr->hydrogen()->literal_index())); |
| 2345 __ Push(instr->hydrogen()->constant_elements()); |
| 2346 |
| 2347 // Pick the right runtime function or stub to call. |
| 2348 int length = instr->hydrogen()->length(); |
| 2349 if (instr->hydrogen()->IsCopyOnWrite()) { |
| 2350 ASSERT(instr->hydrogen()->depth() == 1); |
| 2351 FastCloneShallowArrayStub::Mode mode = |
| 2352 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS; |
| 2353 FastCloneShallowArrayStub stub(mode, length); |
| 2354 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
| 2355 } else if (instr->hydrogen()->depth() > 1) { |
| 2356 CallRuntime(Runtime::kCreateArrayLiteral, 3, instr); |
| 2357 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) { |
| 2358 CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr); |
| 2359 } else { |
| 2360 FastCloneShallowArrayStub::Mode mode = |
| 2361 FastCloneShallowArrayStub::CLONE_ELEMENTS; |
| 2362 FastCloneShallowArrayStub stub(mode, length); |
| 2363 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
| 2364 } |
| 1755 } | 2365 } |
| 1756 | 2366 |
| 1757 | 2367 |
| 1758 void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) { | 2368 void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) { |
| 1759 Abort("Unimplemented: %s", "DoObjectLiteral"); | 2369 // Setup the parameters to the stub/runtime call. |
| 2370 __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); |
| 2371 __ push(FieldOperand(rax, JSFunction::kLiteralsOffset)); |
| 2372 __ Push(Smi::FromInt(instr->hydrogen()->literal_index())); |
| 2373 __ Push(instr->hydrogen()->constant_properties()); |
| 2374 __ Push(Smi::FromInt(instr->hydrogen()->fast_elements() ? 1 : 0)); |
| 2375 |
| 2376 // Pick the right runtime function to call. |
| 2377 if (instr->hydrogen()->depth() > 1) { |
| 2378 CallRuntime(Runtime::kCreateObjectLiteral, 4, instr); |
| 2379 } else { |
| 2380 CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr); |
| 2381 } |
| 1760 } | 2382 } |
| 1761 | 2383 |
| 1762 | 2384 |
| 1763 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) { | 2385 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) { |
| 1764 Abort("Unimplemented: %s", "DoRegExpLiteral"); | 2386 Abort("Unimplemented: %s", "DoRegExpLiteral"); |
| 1765 } | 2387 } |
| 1766 | 2388 |
| 1767 | 2389 |
void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning.
  Handle<SharedFunctionInfo> shared_info = instr->shared_info();
  bool pretenure = instr->hydrogen()->pretenure();
  if (shared_info->num_literals() == 0 && !pretenure) {
    // Fast path: the stub takes the shared function info on the stack.
    FastNewClosureStub stub;
    __ Push(shared_info);
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  } else {
    // Slow path: Runtime::kNewClosure takes three arguments —
    // context, shared function info, and a pretenure flag.
    __ push(rsi);  // presumably the context register on x64 — TODO confirm
    __ Push(shared_info);
    __ Push(pretenure ? FACTORY->true_value() : FACTORY->false_value());
    CallRuntime(Runtime::kNewClosure, 3, instr);
  }
}
| 1771 | 2406 |
| 1772 | 2407 |
void LCodeGen::DoTypeof(LTypeof* instr) {
  // Not yet ported to the x64 Lithium backend; Abort bails out of
  // optimized compilation for functions using a general typeof.
  Abort("Unimplemented: %s", "DoTypeof");
}
| 1776 | 2411 |
| 1777 | 2412 |
void LCodeGen::DoTypeofIs(LTypeofIs* instr) {
  // Not yet ported to the x64 Lithium backend (value-producing variant of
  // the typeof-is comparison); aborts optimized compilation.
  Abort("Unimplemented: %s", "DoTypeofIs");
}
| 1781 | 2416 |
| 1782 | 2417 |
| 2418 void LCodeGen::DoIsConstructCall(LIsConstructCall* instr) { |
| 2419 Register result = ToRegister(instr->result()); |
| 2420 NearLabel true_label; |
| 2421 NearLabel false_label; |
| 2422 NearLabel done; |
| 2423 |
| 2424 EmitIsConstructCall(result); |
| 2425 __ j(equal, &true_label); |
| 2426 |
| 2427 __ LoadRoot(result, Heap::kFalseValueRootIndex); |
| 2428 __ jmp(&done); |
| 2429 |
| 2430 __ bind(&true_label); |
| 2431 __ LoadRoot(result, Heap::kTrueValueRootIndex); |
| 2432 |
| 2433 |
| 2434 __ bind(&done); |
| 2435 } |
| 2436 |
| 2437 |
void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
  // Branch variant: jump straight to the true/false blocks instead of
  // materializing a boolean value.
  Register temp = ToRegister(instr->TempAt(0));  // scratch for the frame walk
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  // EmitIsConstructCall leaves the flags set so that equal means the
  // calling frame is a construct frame.
  EmitIsConstructCall(temp);
  EmitBranch(true_block, false_block, equal);
}
| 2446 |
| 2447 |
| 2448 void LCodeGen::EmitIsConstructCall(Register temp) { |
| 2449 // Get the frame pointer for the calling frame. |
| 2450 __ movq(temp, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); |
| 2451 |
| 2452 // Skip the arguments adaptor frame if it exists. |
| 2453 NearLabel check_frame_marker; |
| 2454 __ SmiCompare(Operand(temp, StandardFrameConstants::kContextOffset), |
| 2455 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); |
| 2456 __ j(not_equal, &check_frame_marker); |
| 2457 __ movq(temp, Operand(rax, StandardFrameConstants::kCallerFPOffset)); |
| 2458 |
| 2459 // Check the marker in the calling frame. |
| 2460 __ bind(&check_frame_marker); |
| 2461 __ SmiCompare(Operand(temp, StandardFrameConstants::kMarkerOffset), |
| 2462 Smi::FromInt(StackFrame::CONSTRUCT)); |
| 2463 } |
| 2464 |
| 2465 |
| 1783 void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) { | 2466 void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) { |
| 1784 Register input = ToRegister(instr->InputAt(0)); | 2467 Register input = ToRegister(instr->InputAt(0)); |
| 1785 int true_block = chunk_->LookupDestination(instr->true_block_id()); | 2468 int true_block = chunk_->LookupDestination(instr->true_block_id()); |
| 1786 int false_block = chunk_->LookupDestination(instr->false_block_id()); | 2469 int false_block = chunk_->LookupDestination(instr->false_block_id()); |
| 1787 Label* true_label = chunk_->GetAssemblyLabel(true_block); | 2470 Label* true_label = chunk_->GetAssemblyLabel(true_block); |
| 1788 Label* false_label = chunk_->GetAssemblyLabel(false_block); | 2471 Label* false_label = chunk_->GetAssemblyLabel(false_block); |
| 1789 | 2472 |
| 1790 Condition final_branch_condition = EmitTypeofIs(true_label, | 2473 Condition final_branch_condition = EmitTypeofIs(true_label, |
| 1791 false_label, | 2474 false_label, |
| 1792 input, | 2475 input, |
| (...skipping 79 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1872 | 2555 |
| 1873 | 2556 |
void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
  // Not yet ported to the x64 Lithium backend; Abort bails out of
  // optimized compilation for functions using the delete operator.
  Abort("Unimplemented: %s", "DoDeleteProperty");
}
| 1877 | 2560 |
| 1878 | 2561 |
void LCodeGen::DoStackCheck(LStackCheck* instr) {
  // Perform stack overflow check.  Compares rsp against the stack limit
  // stored in the root array; if there is still headroom, skip the stub.
  NearLabel done;
  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
  __ j(above_equal, &done);

  // Slow path: call the StackCheckStub, which handles interrupts and
  // real stack overflows.
  StackCheckStub stub;
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  __ bind(&done);
}
| 1890 | 2572 |
| 1891 | 2573 |
void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
  // On-stack replacement entry is not yet ported to the x64 Lithium
  // backend; aborts optimized compilation.
  Abort("Unimplemented: %s", "DoOsrEntry");
}
| 1895 | 2577 |
| 1896 #undef __ | 2578 #undef __ |
| 1897 | 2579 |
| 1898 } } // namespace v8::internal | 2580 } } // namespace v8::internal |
| 1899 | 2581 |
| 1900 #endif // V8_TARGET_ARCH_X64 | 2582 #endif // V8_TARGET_ARCH_X64 |
| OLD | NEW |