Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(2)

Side by Side Diff: src/ia32/codegen-ia32.cc

Issue 6759031: Cleanup of isolate(), FACTORY and HEAP usage in ia32/codegen-ia32.cc (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 9 years, 8 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « no previous file | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2010 the V8 project authors. All rights reserved. 1 // Copyright 2010 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 531 matching lines...) Expand 10 before | Expand all | Expand 10 after
542 ASSERT(value->handle()->IsSmi()); 542 ASSERT(value->handle()->IsSmi());
543 } 543 }
544 value->set_untagged_int32(false); 544 value->set_untagged_int32(false);
545 value->set_type_info(TypeInfo::Smi()); 545 value->set_type_info(TypeInfo::Smi());
546 } 546 }
547 547
548 548
549 void CodeGenerator::ConvertInt32ResultToNumber(Result* value) { 549 void CodeGenerator::ConvertInt32ResultToNumber(Result* value) {
550 ASSERT(value->is_untagged_int32()); 550 ASSERT(value->is_untagged_int32());
551 if (value->is_register()) { 551 if (value->is_register()) {
552 Isolate* isolate = masm()->isolate();
552 Register val = value->reg(); 553 Register val = value->reg();
553 JumpTarget done; 554 JumpTarget done;
554 __ add(val, Operand(val)); 555 __ add(val, Operand(val));
555 done.Branch(no_overflow, value); 556 done.Branch(no_overflow, value);
556 __ sar(val, 1); 557 __ sar(val, 1);
557 // If there was an overflow, bits 30 and 31 of the original number disagree. 558 // If there was an overflow, bits 30 and 31 of the original number disagree.
558 __ xor_(val, 0x80000000u); 559 __ xor_(val, 0x80000000u);
559 if (masm()->isolate()->cpu_features()->IsSupported(SSE2)) { 560 if (isolate->cpu_features()->IsSupported(SSE2)) {
560 CpuFeatures::Scope fscope(SSE2); 561 CpuFeatures::Scope fscope(SSE2);
561 __ cvtsi2sd(xmm0, Operand(val)); 562 __ cvtsi2sd(xmm0, Operand(val));
562 } else { 563 } else {
563 // Move val to ST(0) in the FPU. 564 // Move val to ST(0) in the FPU.
564 // Push and pop are safe with respect to the virtual frame because 565 // Push and pop are safe with respect to the virtual frame because
565 // all synced elements are below the actual stack pointer. 566 // all synced elements are below the actual stack pointer.
566 __ push(val); 567 __ push(val);
567 __ fild_s(Operand(esp, 0)); 568 __ fild_s(Operand(esp, 0));
568 __ pop(val); 569 __ pop(val);
569 } 570 }
570 Result scratch = allocator_->Allocate(); 571 Result scratch = allocator_->Allocate();
571 ASSERT(scratch.is_register()); 572 ASSERT(scratch.is_register());
572 Label allocation_failed; 573 Label allocation_failed;
573 __ AllocateHeapNumber(val, scratch.reg(), 574 __ AllocateHeapNumber(val, scratch.reg(),
574 no_reg, &allocation_failed); 575 no_reg, &allocation_failed);
575 VirtualFrame* clone = new VirtualFrame(frame_); 576 VirtualFrame* clone = new VirtualFrame(frame_);
576 scratch.Unuse(); 577 scratch.Unuse();
577 if (masm()->isolate()->cpu_features()->IsSupported(SSE2)) { 578 if (isolate->cpu_features()->IsSupported(SSE2)) {
578 CpuFeatures::Scope fscope(SSE2); 579 CpuFeatures::Scope fscope(SSE2);
579 __ movdbl(FieldOperand(val, HeapNumber::kValueOffset), xmm0); 580 __ movdbl(FieldOperand(val, HeapNumber::kValueOffset), xmm0);
580 } else { 581 } else {
581 __ fstp_d(FieldOperand(val, HeapNumber::kValueOffset)); 582 __ fstp_d(FieldOperand(val, HeapNumber::kValueOffset));
582 } 583 }
583 done.Jump(value); 584 done.Jump(value);
584 585
585 // Establish the virtual frame, cloned from where AllocateHeapNumber 586 // Establish the virtual frame, cloned from where AllocateHeapNumber
586 // jumped to allocation_failed. 587 // jumped to allocation_failed.
587 RegisterFile empty_regs; 588 RegisterFile empty_regs;
588 SetFrame(clone, &empty_regs); 589 SetFrame(clone, &empty_regs);
589 __ bind(&allocation_failed); 590 __ bind(&allocation_failed);
590 if (!masm()->isolate()->cpu_features()->IsSupported(SSE2)) { 591 if (!isolate->cpu_features()->IsSupported(SSE2)) {
591 // Pop the value from the floating point stack. 592 // Pop the value from the floating point stack.
592 __ fstp(0); 593 __ fstp(0);
593 } 594 }
594 unsafe_bailout_->Jump(); 595 unsafe_bailout_->Jump();
595 596
596 done.Bind(value); 597 done.Bind(value);
597 } else { 598 } else {
598 ASSERT(value->is_constant()); 599 ASSERT(value->is_constant());
599 } 600 }
600 value->set_untagged_int32(false); 601 value->set_untagged_int32(false);
601 value->set_type_info(TypeInfo::Integer32()); 602 value->set_type_info(TypeInfo::Integer32());
602 } 603 }
603 604
604 605
605 void CodeGenerator::Load(Expression* expr) { 606 void CodeGenerator::Load(Expression* expr) {
606 #ifdef DEBUG 607 #ifdef DEBUG
607 int original_height = frame_->height(); 608 int original_height = frame_->height();
608 #endif 609 #endif
609 ASSERT(!in_spilled_code()); 610 ASSERT(!in_spilled_code());
611 Isolate* isolate = masm()->isolate();
612 Factory* factory = isolate->factory();
610 613
611 // If the expression should be a side-effect-free 32-bit int computation, 614 // If the expression should be a side-effect-free 32-bit int computation,
612 // compile that SafeInt32 path, and a bailout path. 615 // compile that SafeInt32 path, and a bailout path.
613 if (!in_safe_int32_mode() && 616 if (!in_safe_int32_mode() &&
614 safe_int32_mode_enabled() && 617 safe_int32_mode_enabled() &&
615 expr->side_effect_free() && 618 expr->side_effect_free() &&
616 expr->num_bit_ops() > 2 && 619 expr->num_bit_ops() > 2 &&
617 masm()->isolate()->cpu_features()->IsSupported(SSE2)) { 620 isolate->cpu_features()->IsSupported(SSE2)) {
618 BreakTarget unsafe_bailout; 621 BreakTarget unsafe_bailout;
619 JumpTarget done; 622 JumpTarget done;
620 unsafe_bailout.set_expected_height(frame_->height()); 623 unsafe_bailout.set_expected_height(frame_->height());
621 LoadInSafeInt32Mode(expr, &unsafe_bailout); 624 LoadInSafeInt32Mode(expr, &unsafe_bailout);
622 done.Jump(); 625 done.Jump();
623 626
624 if (unsafe_bailout.is_linked()) { 627 if (unsafe_bailout.is_linked()) {
625 unsafe_bailout.Bind(); 628 unsafe_bailout.Bind();
626 LoadWithSafeInt32ModeDisabled(expr); 629 LoadWithSafeInt32ModeDisabled(expr);
627 } 630 }
628 done.Bind(); 631 done.Bind();
629 } else { 632 } else {
630 JumpTarget true_target; 633 JumpTarget true_target;
631 JumpTarget false_target; 634 JumpTarget false_target;
632 ControlDestination dest(&true_target, &false_target, true); 635 ControlDestination dest(&true_target, &false_target, true);
633 LoadCondition(expr, &dest, false); 636 LoadCondition(expr, &dest, false);
634 637
635 if (dest.false_was_fall_through()) { 638 if (dest.false_was_fall_through()) {
636 // The false target was just bound. 639 // The false target was just bound.
637 JumpTarget loaded; 640 JumpTarget loaded;
638 frame_->Push(FACTORY->false_value()); 641 frame_->Push(factory->false_value());
639 // There may be dangling jumps to the true target. 642 // There may be dangling jumps to the true target.
640 if (true_target.is_linked()) { 643 if (true_target.is_linked()) {
641 loaded.Jump(); 644 loaded.Jump();
642 true_target.Bind(); 645 true_target.Bind();
643 frame_->Push(FACTORY->true_value()); 646 frame_->Push(factory->true_value());
644 loaded.Bind(); 647 loaded.Bind();
645 } 648 }
646 649
647 } else if (dest.is_used()) { 650 } else if (dest.is_used()) {
648 // There is true, and possibly false, control flow (with true as 651 // There is true, and possibly false, control flow (with true as
649 // the fall through). 652 // the fall through).
650 JumpTarget loaded; 653 JumpTarget loaded;
651 frame_->Push(FACTORY->true_value()); 654 frame_->Push(factory->true_value());
652 if (false_target.is_linked()) { 655 if (false_target.is_linked()) {
653 loaded.Jump(); 656 loaded.Jump();
654 false_target.Bind(); 657 false_target.Bind();
655 frame_->Push(FACTORY->false_value()); 658 frame_->Push(factory->false_value());
656 loaded.Bind(); 659 loaded.Bind();
657 } 660 }
658 661
659 } else { 662 } else {
660 // We have a valid value on top of the frame, but we still may 663 // We have a valid value on top of the frame, but we still may
661 // have dangling jumps to the true and false targets from nested 664 // have dangling jumps to the true and false targets from nested
662 // subexpressions (e.g., the left subexpressions of the 665 // short-circuited boolean operators).
663 // short-circuited boolean operators). 666 // short-circuited boolean operators).
664 ASSERT(has_valid_frame()); 667 ASSERT(has_valid_frame());
665 if (true_target.is_linked() || false_target.is_linked()) { 668 if (true_target.is_linked() || false_target.is_linked()) {
666 JumpTarget loaded; 669 JumpTarget loaded;
667 loaded.Jump(); // Don't lose the current TOS. 670 loaded.Jump(); // Don't lose the current TOS.
668 if (true_target.is_linked()) { 671 if (true_target.is_linked()) {
669 true_target.Bind(); 672 true_target.Bind();
670 frame_->Push(FACTORY->true_value()); 673 frame_->Push(factory->true_value());
671 if (false_target.is_linked()) { 674 if (false_target.is_linked()) {
672 loaded.Jump(); 675 loaded.Jump();
673 } 676 }
674 } 677 }
675 if (false_target.is_linked()) { 678 if (false_target.is_linked()) {
676 false_target.Bind(); 679 false_target.Bind();
677 frame_->Push(FACTORY->false_value()); 680 frame_->Push(factory->false_value());
678 } 681 }
679 loaded.Bind(); 682 loaded.Bind();
680 } 683 }
681 } 684 }
682 } 685 }
683 ASSERT(has_valid_frame()); 686 ASSERT(has_valid_frame());
684 ASSERT(frame_->height() == original_height + 1); 687 ASSERT(frame_->height() == original_height + 1);
685 } 688 }
686 689
687 690
(...skipping 53 matching lines...) Expand 10 before | Expand all | Expand 10 after
741 ? EAGER_ARGUMENTS_ALLOCATION 744 ? EAGER_ARGUMENTS_ALLOCATION
742 : LAZY_ARGUMENTS_ALLOCATION; 745 : LAZY_ARGUMENTS_ALLOCATION;
743 } 746 }
744 747
745 748
746 Result CodeGenerator::StoreArgumentsObject(bool initial) { 749 Result CodeGenerator::StoreArgumentsObject(bool initial) {
747 ArgumentsAllocationMode mode = ArgumentsMode(); 750 ArgumentsAllocationMode mode = ArgumentsMode();
748 ASSERT(mode != NO_ARGUMENTS_ALLOCATION); 751 ASSERT(mode != NO_ARGUMENTS_ALLOCATION);
749 752
750 Comment cmnt(masm_, "[ store arguments object"); 753 Comment cmnt(masm_, "[ store arguments object");
754
755 Factory* factory = masm()->isolate()->factory();
751 if (mode == LAZY_ARGUMENTS_ALLOCATION && initial) { 756 if (mode == LAZY_ARGUMENTS_ALLOCATION && initial) {
752 // When using lazy arguments allocation, we store the arguments marker value 757 // When using lazy arguments allocation, we store the arguments marker value
753 // as a sentinel indicating that the arguments object hasn't been 758 // as a sentinel indicating that the arguments object hasn't been
754 // allocated yet. 759 // allocated yet.
755 frame_->Push(FACTORY->arguments_marker()); 760 frame_->Push(factory->arguments_marker());
756 } else { 761 } else {
757 ArgumentsAccessStub stub(is_strict_mode() 762 ArgumentsAccessStub stub(is_strict_mode()
758 ? ArgumentsAccessStub::NEW_STRICT 763 ? ArgumentsAccessStub::NEW_STRICT
759 : ArgumentsAccessStub::NEW_NON_STRICT); 764 : ArgumentsAccessStub::NEW_NON_STRICT);
760 frame_->PushFunction(); 765 frame_->PushFunction();
761 frame_->PushReceiverSlotAddress(); 766 frame_->PushReceiverSlotAddress();
762 frame_->Push(Smi::FromInt(scope()->num_parameters())); 767 frame_->Push(Smi::FromInt(scope()->num_parameters()));
763 Result result = frame_->CallStub(&stub, 3); 768 Result result = frame_->CallStub(&stub, 3);
764 frame_->Push(&result); 769 frame_->Push(&result);
765 } 770 }
(...skipping 11 matching lines...) Expand all
777 // We have to skip storing into the arguments slot if it has 782 // We have to skip storing into the arguments slot if it has
778 // already been written to. This can happen if a function 783 // has a local variable named 'arguments'.
779 // has a local variable named 'arguments'. 784 // has a local variable named 'arguments'.
780 LoadFromSlot(arguments->AsSlot(), NOT_INSIDE_TYPEOF); 785 LoadFromSlot(arguments->AsSlot(), NOT_INSIDE_TYPEOF);
781 Result probe = frame_->Pop(); 786 Result probe = frame_->Pop();
782 if (probe.is_constant()) { 787 if (probe.is_constant()) {
783 // We have to skip updating the arguments object if it has 788 // We have to skip updating the arguments object if it has
784 // been assigned a proper value. 789 // been assigned a proper value.
785 skip_arguments = !probe.handle()->IsArgumentsMarker(); 790 skip_arguments = !probe.handle()->IsArgumentsMarker();
786 } else { 791 } else {
787 __ cmp(Operand(probe.reg()), Immediate(FACTORY->arguments_marker())); 792 __ cmp(Operand(probe.reg()), Immediate(factory->arguments_marker()));
788 probe.Unuse(); 793 probe.Unuse();
789 done.Branch(not_equal); 794 done.Branch(not_equal);
790 } 795 }
791 } 796 }
792 if (!skip_arguments) { 797 if (!skip_arguments) {
793 StoreToSlot(arguments->AsSlot(), NOT_CONST_INIT); 798 StoreToSlot(arguments->AsSlot(), NOT_CONST_INIT);
794 if (mode == LAZY_ARGUMENTS_ALLOCATION) done.Bind(); 799 if (mode == LAZY_ARGUMENTS_ALLOCATION) done.Bind();
795 } 800 }
796 if (shadow != NULL) { 801 if (shadow != NULL) {
797 StoreToSlot(shadow->AsSlot(), NOT_CONST_INIT); 802 StoreToSlot(shadow->AsSlot(), NOT_CONST_INIT);
(...skipping 108 matching lines...) Expand 10 before | Expand all | Expand 10 after
906 __ test(value.reg(), Operand(value.reg())); 911 __ test(value.reg(), Operand(value.reg()));
907 dest->false_target()->Branch(zero); 912 dest->false_target()->Branch(zero);
908 __ test(value.reg(), Immediate(kSmiTagMask)); 913 __ test(value.reg(), Immediate(kSmiTagMask));
909 dest->true_target()->Branch(zero); 914 dest->true_target()->Branch(zero);
910 __ fldz(); 915 __ fldz();
911 __ fld_d(FieldOperand(value.reg(), HeapNumber::kValueOffset)); 916 __ fld_d(FieldOperand(value.reg(), HeapNumber::kValueOffset));
912 __ FCmp(); 917 __ FCmp();
913 value.Unuse(); 918 value.Unuse();
914 dest->Split(not_zero); 919 dest->Split(not_zero);
915 } else { 920 } else {
921 Factory* factory = masm()->isolate()->factory();
916 // Fast case checks. 922 // Fast case checks.
917 // 'false' => false. 923 // 'false' => false.
918 __ cmp(value.reg(), FACTORY->false_value()); 924 __ cmp(value.reg(), factory->false_value());
919 dest->false_target()->Branch(equal); 925 dest->false_target()->Branch(equal);
920 926
921 // 'true' => true. 927 // 'true' => true.
922 __ cmp(value.reg(), FACTORY->true_value()); 928 __ cmp(value.reg(), factory->true_value());
923 dest->true_target()->Branch(equal); 929 dest->true_target()->Branch(equal);
924 930
925 // 'undefined' => false. 931 // 'undefined' => false.
926 __ cmp(value.reg(), FACTORY->undefined_value()); 932 __ cmp(value.reg(), factory->undefined_value());
927 dest->false_target()->Branch(equal); 933 dest->false_target()->Branch(equal);
928 934
929 // Smi => false iff zero. 935 // Smi => false iff zero.
930 STATIC_ASSERT(kSmiTag == 0); 936 STATIC_ASSERT(kSmiTag == 0);
931 __ test(value.reg(), Operand(value.reg())); 937 __ test(value.reg(), Operand(value.reg()));
932 dest->false_target()->Branch(zero); 938 dest->false_target()->Branch(zero);
933 __ test(value.reg(), Immediate(kSmiTagMask)); 939 __ test(value.reg(), Immediate(kSmiTagMask));
934 dest->true_target()->Branch(zero); 940 dest->true_target()->Branch(zero);
935 941
936 // Call the stub for all other cases. 942 // Call the stub for all other cases.
(...skipping 72 matching lines...) Expand 10 before | Expand all | Expand 10 after
1009 1015
1010 1016
1011 void DeferredInlineBinaryOperation::JumpToConstantRhs(Condition cond, 1017 void DeferredInlineBinaryOperation::JumpToConstantRhs(Condition cond,
1012 Smi* smi_value) { 1018 Smi* smi_value) {
1013 smi_value_ = smi_value; 1019 smi_value_ = smi_value;
1014 __ j(cond, &constant_rhs_); 1020 __ j(cond, &constant_rhs_);
1015 } 1021 }
1016 1022
1017 1023
1018 void DeferredInlineBinaryOperation::Generate() { 1024 void DeferredInlineBinaryOperation::Generate() {
1025 Isolate* isolate = masm()->isolate();
1026 Factory* factory = isolate->factory();
1019 // Registers are not saved implicitly for this stub, so we should not 1027 // Registers are not saved implicitly for this stub, so we should not
1020 // tread on the registers that were not passed to us. 1028 // tread on the registers that were not passed to us.
1021 if (masm()->isolate()->cpu_features()->IsSupported(SSE2) && 1029 if (isolate->cpu_features()->IsSupported(SSE2) &&
1022 ((op_ == Token::ADD) || 1030 ((op_ == Token::ADD) ||
1023 (op_ == Token::SUB) || 1031 (op_ == Token::SUB) ||
1024 (op_ == Token::MUL) || 1032 (op_ == Token::MUL) ||
1025 (op_ == Token::DIV))) { 1033 (op_ == Token::DIV))) {
1026 CpuFeatures::Scope use_sse2(SSE2); 1034 CpuFeatures::Scope use_sse2(SSE2);
1027 Label call_runtime, after_alloc_failure; 1035 Label call_runtime, after_alloc_failure;
1028 Label left_smi, right_smi, load_right, do_op; 1036 Label left_smi, right_smi, load_right, do_op;
1029 if (!left_info_.IsSmi()) { 1037 if (!left_info_.IsSmi()) {
1030 __ test(left_, Immediate(kSmiTagMask)); 1038 __ test(left_, Immediate(kSmiTagMask));
1031 __ j(zero, &left_smi); 1039 __ j(zero, &left_smi);
1032 if (!left_info_.IsNumber()) { 1040 if (!left_info_.IsNumber()) {
1033 __ cmp(FieldOperand(left_, HeapObject::kMapOffset), 1041 __ cmp(FieldOperand(left_, HeapObject::kMapOffset),
1034 FACTORY->heap_number_map()); 1042 factory->heap_number_map());
1035 __ j(not_equal, &call_runtime); 1043 __ j(not_equal, &call_runtime);
1036 } 1044 }
1037 __ movdbl(xmm0, FieldOperand(left_, HeapNumber::kValueOffset)); 1045 __ movdbl(xmm0, FieldOperand(left_, HeapNumber::kValueOffset));
1038 if (mode_ == OVERWRITE_LEFT) { 1046 if (mode_ == OVERWRITE_LEFT) {
1039 __ mov(dst_, left_); 1047 __ mov(dst_, left_);
1040 } 1048 }
1041 __ jmp(&load_right); 1049 __ jmp(&load_right);
1042 1050
1043 __ bind(&left_smi); 1051 __ bind(&left_smi);
1044 } else { 1052 } else {
1045 if (FLAG_debug_code) __ AbortIfNotSmi(left_); 1053 if (FLAG_debug_code) __ AbortIfNotSmi(left_);
1046 } 1054 }
1047 __ SmiUntag(left_); 1055 __ SmiUntag(left_);
1048 __ cvtsi2sd(xmm0, Operand(left_)); 1056 __ cvtsi2sd(xmm0, Operand(left_));
1049 __ SmiTag(left_); 1057 __ SmiTag(left_);
1050 if (mode_ == OVERWRITE_LEFT) { 1058 if (mode_ == OVERWRITE_LEFT) {
1051 Label alloc_failure; 1059 Label alloc_failure;
1052 __ push(left_); 1060 __ push(left_);
1053 __ AllocateHeapNumber(dst_, left_, no_reg, &after_alloc_failure); 1061 __ AllocateHeapNumber(dst_, left_, no_reg, &after_alloc_failure);
1054 __ pop(left_); 1062 __ pop(left_);
1055 } 1063 }
1056 1064
1057 __ bind(&load_right); 1065 __ bind(&load_right);
1058 if (!right_info_.IsSmi()) { 1066 if (!right_info_.IsSmi()) {
1059 __ test(right_, Immediate(kSmiTagMask)); 1067 __ test(right_, Immediate(kSmiTagMask));
1060 __ j(zero, &right_smi); 1068 __ j(zero, &right_smi);
1061 if (!right_info_.IsNumber()) { 1069 if (!right_info_.IsNumber()) {
1062 __ cmp(FieldOperand(right_, HeapObject::kMapOffset), 1070 __ cmp(FieldOperand(right_, HeapObject::kMapOffset),
1063 FACTORY->heap_number_map()); 1071 factory->heap_number_map());
1064 __ j(not_equal, &call_runtime); 1072 __ j(not_equal, &call_runtime);
1065 } 1073 }
1066 __ movdbl(xmm1, FieldOperand(right_, HeapNumber::kValueOffset)); 1074 __ movdbl(xmm1, FieldOperand(right_, HeapNumber::kValueOffset));
1067 if (mode_ == OVERWRITE_RIGHT) { 1075 if (mode_ == OVERWRITE_RIGHT) {
1068 __ mov(dst_, right_); 1076 __ mov(dst_, right_);
1069 } else if (mode_ == NO_OVERWRITE) { 1077 } else if (mode_ == NO_OVERWRITE) {
1070 Label alloc_failure; 1078 Label alloc_failure;
1071 __ push(left_); 1079 __ push(left_);
1072 __ AllocateHeapNumber(dst_, left_, no_reg, &after_alloc_failure); 1080 __ AllocateHeapNumber(dst_, left_, no_reg, &after_alloc_failure);
1073 __ pop(left_); 1081 __ pop(left_);
(...skipping 1933 matching lines...) Expand 10 before | Expand all | Expand 10 after
3007 if (IsUnsafeSmi(right_side->handle())) { 3015 if (IsUnsafeSmi(right_side->handle())) {
3008 right_side->ToRegister(); 3016 right_side->ToRegister();
3009 __ cmp(left_reg, Operand(right_side->reg())); 3017 __ cmp(left_reg, Operand(right_side->reg()));
3010 } else { 3018 } else {
3011 __ cmp(Operand(left_reg), Immediate(right_side->handle())); 3019 __ cmp(Operand(left_reg), Immediate(right_side->handle()));
3012 } 3020 }
3013 left_side->Unuse(); 3021 left_side->Unuse();
3014 right_side->Unuse(); 3022 right_side->Unuse();
3015 dest->Split(cc); 3023 dest->Split(cc);
3016 } else { 3024 } else {
3025 Isolate* isolate = masm()->isolate();
3017 // Only the case where the left side could possibly be a non-smi is left. 3026 // Only the case where the left side could possibly be a non-smi is left.
3018 JumpTarget is_smi; 3027 JumpTarget is_smi;
3019 if (cc == equal) { 3028 if (cc == equal) {
3020 // We can do the equality comparison before the smi check. 3029 // We can do the equality comparison before the smi check.
3021 __ cmp(Operand(left_reg), Immediate(right_side->handle())); 3030 __ cmp(Operand(left_reg), Immediate(right_side->handle()));
3022 dest->true_target()->Branch(equal); 3031 dest->true_target()->Branch(equal);
3023 __ test(left_reg, Immediate(kSmiTagMask)); 3032 __ test(left_reg, Immediate(kSmiTagMask));
3024 dest->false_target()->Branch(zero); 3033 dest->false_target()->Branch(zero);
3025 } else { 3034 } else {
3026 // Do the smi check, then the comparison. 3035 // Do the smi check, then the comparison.
3027 __ test(left_reg, Immediate(kSmiTagMask)); 3036 __ test(left_reg, Immediate(kSmiTagMask));
3028 is_smi.Branch(zero, left_side, right_side); 3037 is_smi.Branch(zero, left_side, right_side);
3029 } 3038 }
3030 3039
3031 // Jump or fall through to here if we are comparing a non-smi to a 3040 // Jump or fall through to here if we are comparing a non-smi to a
3032 // constant smi. If the non-smi is a heap number and this is not 3041 // constant smi. If the non-smi is a heap number and this is not
3033 // a loop condition, inline the floating point code. 3042 // a loop condition, inline the floating point code.
3034 if (!is_loop_condition && 3043 if (!is_loop_condition &&
3035 masm()->isolate()->cpu_features()->IsSupported(SSE2)) { 3044 isolate->cpu_features()->IsSupported(SSE2)) {
3036 // Right side is a constant smi and left side has been checked 3045 // Right side is a constant smi and left side has been checked
3037 // not to be a smi. 3046 // not to be a smi.
3038 CpuFeatures::Scope use_sse2(SSE2); 3047 CpuFeatures::Scope use_sse2(SSE2);
3039 JumpTarget not_number; 3048 JumpTarget not_number;
3040 __ cmp(FieldOperand(left_reg, HeapObject::kMapOffset), 3049 __ cmp(FieldOperand(left_reg, HeapObject::kMapOffset),
3041 Immediate(FACTORY->heap_number_map())); 3050 Immediate(isolate->factory()->heap_number_map()));
3042 not_number.Branch(not_equal, left_side); 3051 not_number.Branch(not_equal, left_side);
3043 __ movdbl(xmm1, 3052 __ movdbl(xmm1,
3044 FieldOperand(left_reg, HeapNumber::kValueOffset)); 3053 FieldOperand(left_reg, HeapNumber::kValueOffset));
3045 int value = Smi::cast(*right_val)->value(); 3054 int value = Smi::cast(*right_val)->value();
3046 if (value == 0) { 3055 if (value == 0) {
3047 __ xorpd(xmm0, xmm0); 3056 __ xorpd(xmm0, xmm0);
3048 } else { 3057 } else {
3049 Result temp = allocator()->Allocate(); 3058 Result temp = allocator()->Allocate();
3050 __ mov(temp.reg(), Immediate(value)); 3059 __ mov(temp.reg(), Immediate(value));
3051 __ cvtsi2sd(xmm0, Operand(temp.reg())); 3060 __ cvtsi2sd(xmm0, Operand(temp.reg()));
(...skipping 215 matching lines...) Expand 10 before | Expand all | Expand 10 after
3267 // x.apply(y, arguments). 3276 // x.apply(y, arguments).
3268 // If the arguments object of the scope has not been allocated, 3277 // If the arguments object of the scope has not been allocated,
3269 // and x.apply is Function.prototype.apply, this optimization 3278 // and x.apply is Function.prototype.apply, this optimization
3270 // just copies y and the arguments of the current function on the 3279 // just copies y and the arguments of the current function on the
3271 // stack, as receiver and arguments, and calls x. 3280 // stack, as receiver and arguments, and calls x.
3272 // In the implementation comments, we call x the applicand 3281 // In the implementation comments, we call x the applicand
3273 // and y the receiver. 3282 // and y the receiver.
3274 ASSERT(ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION); 3283 ASSERT(ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION);
3275 ASSERT(arguments->IsArguments()); 3284 ASSERT(arguments->IsArguments());
3276 3285
3286 Isolate* isolate = masm()->isolate();
3287 Factory* factory = isolate->factory();
3277 // Load applicand.apply onto the stack. This will usually 3288 // Load applicand.apply onto the stack. This will usually
3278 // give us a megamorphic load site. Not super, but it works. 3289 // give us a megamorphic load site. Not super, but it works.
3279 Load(applicand); 3290 Load(applicand);
3280 frame()->Dup(); 3291 frame()->Dup();
3281 Handle<String> name = FACTORY->LookupAsciiSymbol("apply"); 3292 Handle<String> name = factory->LookupAsciiSymbol("apply");
3282 frame()->Push(name); 3293 frame()->Push(name);
3283 Result answer = frame()->CallLoadIC(RelocInfo::CODE_TARGET); 3294 Result answer = frame()->CallLoadIC(RelocInfo::CODE_TARGET);
3284 __ nop(); 3295 __ nop();
3285 frame()->Push(&answer); 3296 frame()->Push(&answer);
3286 3297
3287 // Load the receiver and the existing arguments object onto the 3298 // Load the receiver and the existing arguments object onto the
3288 // expression stack. Avoid allocating the arguments object here. 3299 // expression stack. Avoid allocating the arguments object here.
3289 Load(receiver); 3300 Load(receiver);
3290 LoadFromSlot(scope()->arguments()->AsSlot(), NOT_INSIDE_TYPEOF); 3301 LoadFromSlot(scope()->arguments()->AsSlot(), NOT_INSIDE_TYPEOF);
3291 3302
(...skipping 11 matching lines...) Expand all
3303 // from the stack. This also deals with cases where a local variable 3314 // from the stack. This also deals with cases where a local variable
3304 // named 'arguments' has been introduced. 3315 // named 'arguments' has been introduced.
3305 frame_->Dup(); 3316 frame_->Dup();
3306 Result probe = frame_->Pop(); 3317 Result probe = frame_->Pop();
3307 { VirtualFrame::SpilledScope spilled_scope; 3318 { VirtualFrame::SpilledScope spilled_scope;
3308 Label slow, done; 3319 Label slow, done;
3309 bool try_lazy = true; 3320 bool try_lazy = true;
3310 if (probe.is_constant()) { 3321 if (probe.is_constant()) {
3311 try_lazy = probe.handle()->IsArgumentsMarker(); 3322 try_lazy = probe.handle()->IsArgumentsMarker();
3312 } else { 3323 } else {
3313 __ cmp(Operand(probe.reg()), Immediate(FACTORY->arguments_marker())); 3324 __ cmp(Operand(probe.reg()), Immediate(factory->arguments_marker()));
3314 probe.Unuse(); 3325 probe.Unuse();
3315 __ j(not_equal, &slow); 3326 __ j(not_equal, &slow);
3316 } 3327 }
3317 3328
3318 if (try_lazy) { 3329 if (try_lazy) {
3319 Label build_args; 3330 Label build_args;
3320 // Get rid of the arguments object probe. 3331 // Get rid of the arguments object probe.
3321 frame_->Drop(); // Can be called on a spilled frame. 3332 frame_->Drop(); // Can be called on a spilled frame.
3322 // Stack now has 3 elements on it. 3333 // Stack now has 3 elements on it.
3323 // Contents of stack at this point: 3334 // Contents of stack at this point:
(...skipping 15 matching lines...) Expand all
3339 __ j(below, &build_args); 3350 __ j(below, &build_args);
3340 3351
3341 // Check that applicand.apply is Function.prototype.apply. 3352 // Check that applicand.apply is Function.prototype.apply.
3342 __ mov(eax, Operand(esp, kPointerSize)); 3353 __ mov(eax, Operand(esp, kPointerSize));
3343 __ test(eax, Immediate(kSmiTagMask)); 3354 __ test(eax, Immediate(kSmiTagMask));
3344 __ j(zero, &build_args); 3355 __ j(zero, &build_args);
3345 __ CmpObjectType(eax, JS_FUNCTION_TYPE, ecx); 3356 __ CmpObjectType(eax, JS_FUNCTION_TYPE, ecx);
3346 __ j(not_equal, &build_args); 3357 __ j(not_equal, &build_args);
3347 __ mov(ecx, FieldOperand(eax, JSFunction::kCodeEntryOffset)); 3358 __ mov(ecx, FieldOperand(eax, JSFunction::kCodeEntryOffset));
3348 __ sub(Operand(ecx), Immediate(Code::kHeaderSize - kHeapObjectTag)); 3359 __ sub(Operand(ecx), Immediate(Code::kHeaderSize - kHeapObjectTag));
3349 Handle<Code> apply_code(masm()->isolate()->builtins()->builtin( 3360 Handle<Code> apply_code(isolate->builtins()->builtin(
3350 Builtins::kFunctionApply)); 3361 Builtins::kFunctionApply));
3351 __ cmp(Operand(ecx), Immediate(apply_code)); 3362 __ cmp(Operand(ecx), Immediate(apply_code));
3352 __ j(not_equal, &build_args); 3363 __ j(not_equal, &build_args);
3353 3364
3354 // Check that applicand is a function. 3365 // Check that applicand is a function.
3355 __ mov(edi, Operand(esp, 2 * kPointerSize)); 3366 __ mov(edi, Operand(esp, 2 * kPointerSize));
3356 __ test(edi, Immediate(kSmiTagMask)); 3367 __ test(edi, Immediate(kSmiTagMask));
3357 __ j(zero, &build_args); 3368 __ j(zero, &build_args);
3358 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx); 3369 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
3359 __ j(not_equal, &build_args); 3370 __ j(not_equal, &build_args);
(...skipping 989 matching lines...) Expand 10 before | Expand all | Expand 10 after
4349 DecrementLoopNesting(); 4360 DecrementLoopNesting();
4350 } 4361 }
4351 4362
4352 4363
4353 void CodeGenerator::VisitForInStatement(ForInStatement* node) { 4364 void CodeGenerator::VisitForInStatement(ForInStatement* node) {
4354 ASSERT(!in_spilled_code()); 4365 ASSERT(!in_spilled_code());
4355 VirtualFrame::SpilledScope spilled_scope; 4366 VirtualFrame::SpilledScope spilled_scope;
4356 Comment cmnt(masm_, "[ ForInStatement"); 4367 Comment cmnt(masm_, "[ ForInStatement");
4357 CodeForStatementPosition(node); 4368 CodeForStatementPosition(node);
4358 4369
4370 Factory* factory = masm()->isolate()->factory();
4371
4359 JumpTarget primitive; 4372 JumpTarget primitive;
4360 JumpTarget jsobject; 4373 JumpTarget jsobject;
4361 JumpTarget fixed_array; 4374 JumpTarget fixed_array;
4362 JumpTarget entry(JumpTarget::BIDIRECTIONAL); 4375 JumpTarget entry(JumpTarget::BIDIRECTIONAL);
4363 JumpTarget end_del_check; 4376 JumpTarget end_del_check;
4364 JumpTarget exit; 4377 JumpTarget exit;
4365 4378
4366 // Get the object to enumerate over (converted to JSObject). 4379 // Get the object to enumerate over (converted to JSObject).
4367 LoadAndSpill(node->enumerable()); 4380 LoadAndSpill(node->enumerable());
4368 4381
4369 // Both SpiderMonkey and kjs ignore null and undefined in contrast 4382 // Both SpiderMonkey and kjs ignore null and undefined in contrast
4370 // to the specification. 12.6.4 mandates a call to ToObject. 4383 // to the specification. 12.6.4 mandates a call to ToObject.
4371 frame_->EmitPop(eax); 4384 frame_->EmitPop(eax);
4372 4385
4373 // eax: value to be iterated over 4386 // eax: value to be iterated over
4374 __ cmp(eax, FACTORY->undefined_value()); 4387 __ cmp(eax, factory->undefined_value());
4375 exit.Branch(equal); 4388 exit.Branch(equal);
4376 __ cmp(eax, FACTORY->null_value()); 4389 __ cmp(eax, factory->null_value());
4377 exit.Branch(equal); 4390 exit.Branch(equal);
4378 4391
4379 // Stack layout in body: 4392 // Stack layout in body:
4380 // [iteration counter (smi)] <- slot 0 4393 // [iteration counter (smi)] <- slot 0
4381 // [length of array] <- slot 1 4394 // [length of array] <- slot 1
4382 // [FixedArray] <- slot 2 4395 // [FixedArray] <- slot 2
4383 // [Map or 0] <- slot 3 4396 // [Map or 0] <- slot 3
4384 // [Object] <- slot 4 4397 // [Object] <- slot 4
4385 4398
4386 // Check if enumerable is already a JSObject 4399 // Check if enumerable is already a JSObject
(...skipping 18 matching lines...) Expand all
4405 // guarantee cache validity, call the runtime system to check cache 4418 // guarantee cache validity, call the runtime system to check cache
4406 // validity or get the property names in a fixed array. 4419 // validity or get the property names in a fixed array.
4407 JumpTarget call_runtime; 4420 JumpTarget call_runtime;
4408 JumpTarget loop(JumpTarget::BIDIRECTIONAL); 4421 JumpTarget loop(JumpTarget::BIDIRECTIONAL);
4409 JumpTarget check_prototype; 4422 JumpTarget check_prototype;
4410 JumpTarget use_cache; 4423 JumpTarget use_cache;
4411 __ mov(ecx, eax); 4424 __ mov(ecx, eax);
4412 loop.Bind(); 4425 loop.Bind();
4413 // Check that there are no elements. 4426 // Check that there are no elements.
4414 __ mov(edx, FieldOperand(ecx, JSObject::kElementsOffset)); 4427 __ mov(edx, FieldOperand(ecx, JSObject::kElementsOffset));
4415 __ cmp(Operand(edx), Immediate(FACTORY->empty_fixed_array())); 4428 __ cmp(Operand(edx), Immediate(factory->empty_fixed_array()));
4416 call_runtime.Branch(not_equal); 4429 call_runtime.Branch(not_equal);
4417 // Check that instance descriptors are not empty so that we can 4430 // Check that instance descriptors are not empty so that we can
4418 // check for an enum cache. Leave the map in ebx for the subsequent 4431 // check for an enum cache. Leave the map in ebx for the subsequent
4419 // prototype load. 4432 // prototype load.
4420 __ mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset)); 4433 __ mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));
4421 __ mov(edx, FieldOperand(ebx, Map::kInstanceDescriptorsOffset)); 4434 __ mov(edx, FieldOperand(ebx, Map::kInstanceDescriptorsOffset));
4422 __ cmp(Operand(edx), Immediate(FACTORY->empty_descriptor_array())); 4435 __ cmp(Operand(edx), Immediate(factory->empty_descriptor_array()));
4423 call_runtime.Branch(equal); 4436 call_runtime.Branch(equal);
4424 // Check that there in an enum cache in the non-empty instance 4437 // Check that there in an enum cache in the non-empty instance
4425 // descriptors. This is the case if the next enumeration index 4438 // descriptors. This is the case if the next enumeration index
4426 // field does not contain a smi. 4439 // field does not contain a smi.
4427 __ mov(edx, FieldOperand(edx, DescriptorArray::kEnumerationIndexOffset)); 4440 __ mov(edx, FieldOperand(edx, DescriptorArray::kEnumerationIndexOffset));
4428 __ test(edx, Immediate(kSmiTagMask)); 4441 __ test(edx, Immediate(kSmiTagMask));
4429 call_runtime.Branch(zero); 4442 call_runtime.Branch(zero);
4430 // For all objects but the receiver, check that the cache is empty. 4443 // For all objects but the receiver, check that the cache is empty.
4431 __ cmp(ecx, Operand(eax)); 4444 __ cmp(ecx, Operand(eax));
4432 check_prototype.Branch(equal); 4445 check_prototype.Branch(equal);
4433 __ mov(edx, FieldOperand(edx, DescriptorArray::kEnumCacheBridgeCacheOffset)); 4446 __ mov(edx, FieldOperand(edx, DescriptorArray::kEnumCacheBridgeCacheOffset));
4434 __ cmp(Operand(edx), Immediate(FACTORY->empty_fixed_array())); 4447 __ cmp(Operand(edx), Immediate(factory->empty_fixed_array()));
4435 call_runtime.Branch(not_equal); 4448 call_runtime.Branch(not_equal);
4436 check_prototype.Bind(); 4449 check_prototype.Bind();
4437 // Load the prototype from the map and loop if non-null. 4450 // Load the prototype from the map and loop if non-null.
4438 __ mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset)); 4451 __ mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
4439 __ cmp(Operand(ecx), Immediate(FACTORY->null_value())); 4452 __ cmp(Operand(ecx), Immediate(factory->null_value()));
4440 loop.Branch(not_equal); 4453 loop.Branch(not_equal);
4441 // The enum cache is valid. Load the map of the object being 4454 // The enum cache is valid. Load the map of the object being
4442 // iterated over and use the cache for the iteration. 4455 // iterated over and use the cache for the iteration.
4443 __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset)); 4456 __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
4444 use_cache.Jump(); 4457 use_cache.Jump();
4445 4458
4446 call_runtime.Bind(); 4459 call_runtime.Bind();
4447 // Call the runtime to get the property names for the object. 4460 // Call the runtime to get the property names for the object.
4448 frame_->EmitPush(eax); // push the Object (slot 4) for the runtime call 4461 frame_->EmitPush(eax); // push the Object (slot 4) for the runtime call
4449 frame_->CallRuntime(Runtime::kGetPropertyNamesFast, 1); 4462 frame_->CallRuntime(Runtime::kGetPropertyNamesFast, 1);
4450 4463
4451 // If we got a map from the runtime call, we can do a fast 4464 // If we got a map from the runtime call, we can do a fast
4452 // modification check. Otherwise, we got a fixed array, and we have 4465 // modification check. Otherwise, we got a fixed array, and we have
4453 // to do a slow check. 4466 // to do a slow check.
4454 // eax: map or fixed array (result from call to 4467 // eax: map or fixed array (result from call to
4455 // Runtime::kGetPropertyNamesFast) 4468 // Runtime::kGetPropertyNamesFast)
4456 __ mov(edx, Operand(eax)); 4469 __ mov(edx, Operand(eax));
4457 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset)); 4470 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
4458 __ cmp(ecx, FACTORY->meta_map()); 4471 __ cmp(ecx, factory->meta_map());
4459 fixed_array.Branch(not_equal); 4472 fixed_array.Branch(not_equal);
4460 4473
4461 use_cache.Bind(); 4474 use_cache.Bind();
4462 // Get enum cache 4475 // Get enum cache
4463 // eax: map (either the result from a call to 4476 // eax: map (either the result from a call to
4464 // Runtime::kGetPropertyNamesFast or has been fetched directly from 4477 // Runtime::kGetPropertyNamesFast or has been fetched directly from
4465 // the object) 4478 // the object)
4466 __ mov(ecx, Operand(eax)); 4479 __ mov(ecx, Operand(eax));
4467 4480
4468 __ mov(ecx, FieldOperand(ecx, Map::kInstanceDescriptorsOffset)); 4481 __ mov(ecx, FieldOperand(ecx, Map::kInstanceDescriptorsOffset));
(...skipping 241 matching lines...) Expand 10 before | Expand all | Expand 10 after
4710 exit.Bind(); 4723 exit.Bind();
4711 } 4724 }
4712 4725
4713 4726
4714 void CodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* node) { 4727 void CodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* node) {
4715 ASSERT(!in_spilled_code()); 4728 ASSERT(!in_spilled_code());
4716 VirtualFrame::SpilledScope spilled_scope; 4729 VirtualFrame::SpilledScope spilled_scope;
4717 Comment cmnt(masm_, "[ TryFinallyStatement"); 4730 Comment cmnt(masm_, "[ TryFinallyStatement");
4718 CodeForStatementPosition(node); 4731 CodeForStatementPosition(node);
4719 4732
4733 Isolate* isolate = masm()->isolate();
4734 Factory* factory = isolate->factory();
4735
4720 // State: Used to keep track of reason for entering the finally 4736 // State: Used to keep track of reason for entering the finally
4721 // block. Should probably be extended to hold information for 4737 // block. Should probably be extended to hold information for
4722 // break/continue from within the try block. 4738 // break/continue from within the try block.
4723 enum { FALLING, THROWING, JUMPING }; 4739 enum { FALLING, THROWING, JUMPING };
4724 4740
4725 JumpTarget try_block; 4741 JumpTarget try_block;
4726 JumpTarget finally_block; 4742 JumpTarget finally_block;
4727 4743
4728 try_block.Call(); 4744 try_block.Call();
4729 4745
(...skipping 38 matching lines...) Expand 10 before | Expand all | Expand 10 after
4768 // ShadowTargets represent the formerly shadowing targets. 4784 // ShadowTargets represent the formerly shadowing targets.
4769 int nof_unlinks = 0; 4785 int nof_unlinks = 0;
4770 for (int i = 0; i < shadows.length(); i++) { 4786 for (int i = 0; i < shadows.length(); i++) {
4771 shadows[i]->StopShadowing(); 4787 shadows[i]->StopShadowing();
4772 if (shadows[i]->is_linked()) nof_unlinks++; 4788 if (shadows[i]->is_linked()) nof_unlinks++;
4773 } 4789 }
4774 function_return_is_shadowed_ = function_return_was_shadowed; 4790 function_return_is_shadowed_ = function_return_was_shadowed;
4775 4791
4776 // Get an external reference to the handler address. 4792 // Get an external reference to the handler address.
4777 ExternalReference handler_address(Isolate::k_handler_address, 4793 ExternalReference handler_address(Isolate::k_handler_address,
4778 masm()->isolate()); 4794 isolate);
4779 4795
4780 // If we can fall off the end of the try block, unlink from the try 4796 // If we can fall off the end of the try block, unlink from the try
4781 // chain and set the state on the frame to FALLING. 4797 // chain and set the state on the frame to FALLING.
4782 if (has_valid_frame()) { 4798 if (has_valid_frame()) {
4783 // The next handler address is on top of the frame. 4799 // The next handler address is on top of the frame.
4784 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); 4800 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
4785 frame_->EmitPop(Operand::StaticVariable(handler_address)); 4801 frame_->EmitPop(Operand::StaticVariable(handler_address));
4786 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1); 4802 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
4787 4803
4788 // Fake a top of stack value (unneeded when FALLING) and set the 4804 // Fake a top of stack value (unneeded when FALLING) and set the
4789 // state in ecx, then jump around the unlink blocks if any. 4805 // state in ecx, then jump around the unlink blocks if any.
4790 frame_->EmitPush(Immediate(FACTORY->undefined_value())); 4806 frame_->EmitPush(Immediate(factory->undefined_value()));
4791 __ Set(ecx, Immediate(Smi::FromInt(FALLING))); 4807 __ Set(ecx, Immediate(Smi::FromInt(FALLING)));
4792 if (nof_unlinks > 0) { 4808 if (nof_unlinks > 0) {
4793 finally_block.Jump(); 4809 finally_block.Jump();
4794 } 4810 }
4795 } 4811 }
4796 4812
4797 // Generate code to unlink and set the state for the (formerly) 4813 // Generate code to unlink and set the state for the (formerly)
4798 // shadowing targets that have been jumped to. 4814 // shadowing targets that have been jumped to.
4799 for (int i = 0; i < shadows.length(); i++) { 4815 for (int i = 0; i < shadows.length(); i++) {
4800 if (shadows[i]->is_linked()) { 4816 if (shadows[i]->is_linked()) {
(...skipping 22 matching lines...) Expand all
4823 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); 4839 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
4824 frame_->EmitPop(Operand::StaticVariable(handler_address)); 4840 frame_->EmitPop(Operand::StaticVariable(handler_address));
4825 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1); 4841 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
4826 4842
4827 if (i == kReturnShadowIndex) { 4843 if (i == kReturnShadowIndex) {
4828 // If this target shadowed the function return, materialize 4844 // If this target shadowed the function return, materialize
4829 // the return value on the stack. 4845 // the return value on the stack.
4830 frame_->EmitPush(eax); 4846 frame_->EmitPush(eax);
4831 } else { 4847 } else {
4832 // Fake TOS for targets that shadowed breaks and continues. 4848 // Fake TOS for targets that shadowed breaks and continues.
4833 frame_->EmitPush(Immediate(FACTORY->undefined_value())); 4849 frame_->EmitPush(Immediate(factory->undefined_value()));
4834 } 4850 }
4835 __ Set(ecx, Immediate(Smi::FromInt(JUMPING + i))); 4851 __ Set(ecx, Immediate(Smi::FromInt(JUMPING + i)));
4836 if (--nof_unlinks > 0) { 4852 if (--nof_unlinks > 0) {
4837 // If this is not the last unlink block, jump around the next. 4853 // If this is not the last unlink block, jump around the next.
4838 finally_block.Jump(); 4854 finally_block.Jump();
4839 } 4855 }
4840 } 4856 }
4841 } 4857 }
4842 4858
4843 // --- Finally block --- 4859 // --- Finally block ---
(...skipping 194 matching lines...) Expand 10 before | Expand all | Expand 10 after
5038 5054
5039 } else if (slot->var()->mode() == Variable::CONST) { 5055 } else if (slot->var()->mode() == Variable::CONST) {
5040 // Const slots may contain 'the hole' value (the constant hasn't been 5056 // Const slots may contain 'the hole' value (the constant hasn't been
5041 // initialized yet) which needs to be converted into the 'undefined' 5057 // initialized yet) which needs to be converted into the 'undefined'
5042 // value. 5058 // value.
5043 // 5059 //
5044 // We currently spill the virtual frame because constants use the 5060 // We currently spill the virtual frame because constants use the
5045 // potentially unsafe direct-frame access of SlotOperand. 5061 // potentially unsafe direct-frame access of SlotOperand.
5046 VirtualFrame::SpilledScope spilled_scope; 5062 VirtualFrame::SpilledScope spilled_scope;
5047 Comment cmnt(masm_, "[ Load const"); 5063 Comment cmnt(masm_, "[ Load const");
5064 Factory* factory = masm()->isolate()->factory();
5048 Label exit; 5065 Label exit;
5049 __ mov(ecx, SlotOperand(slot, ecx)); 5066 __ mov(ecx, SlotOperand(slot, ecx));
5050 __ cmp(ecx, FACTORY->the_hole_value()); 5067 __ cmp(ecx, factory->the_hole_value());
5051 __ j(not_equal, &exit); 5068 __ j(not_equal, &exit);
5052 __ mov(ecx, FACTORY->undefined_value()); 5069 __ mov(ecx, factory->undefined_value());
5053 __ bind(&exit); 5070 __ bind(&exit);
5054 frame()->EmitPush(ecx); 5071 frame()->EmitPush(ecx);
5055 5072
5056 } else if (slot->type() == Slot::PARAMETER) { 5073 } else if (slot->type() == Slot::PARAMETER) {
5057 frame()->PushParameterAt(slot->index()); 5074 frame()->PushParameterAt(slot->index());
5058 5075
5059 } else if (slot->type() == Slot::LOCAL) { 5076 } else if (slot->type() == Slot::LOCAL) {
5060 frame()->PushLocalAt(slot->index()); 5077 frame()->PushLocalAt(slot->index());
5061 5078
5062 } else { 5079 } else {
(...skipping 143 matching lines...) Expand 10 before | Expand all | Expand 10 after
5206 if (potential_slot != NULL) { 5223 if (potential_slot != NULL) {
5207 // Generate fast case for locals that rewrite to slots. 5224 // Generate fast case for locals that rewrite to slots.
5208 // Allocate a fresh register to use as a temp in 5225 // Allocate a fresh register to use as a temp in
5209 // ContextSlotOperandCheckExtensions and to hold the result 5226 // ContextSlotOperandCheckExtensions and to hold the result
5210 // value. 5227 // value.
5211 *result = allocator()->Allocate(); 5228 *result = allocator()->Allocate();
5212 ASSERT(result->is_valid()); 5229 ASSERT(result->is_valid());
5213 __ mov(result->reg(), 5230 __ mov(result->reg(),
5214 ContextSlotOperandCheckExtensions(potential_slot, *result, slow)); 5231 ContextSlotOperandCheckExtensions(potential_slot, *result, slow));
5215 if (potential_slot->var()->mode() == Variable::CONST) { 5232 if (potential_slot->var()->mode() == Variable::CONST) {
5216 __ cmp(result->reg(), FACTORY->the_hole_value()); 5233 Factory* factory = masm()->isolate()->factory();
5234 __ cmp(result->reg(), factory->the_hole_value());
5217 done->Branch(not_equal, result); 5235 done->Branch(not_equal, result);
5218 __ mov(result->reg(), FACTORY->undefined_value()); 5236 __ mov(result->reg(), factory->undefined_value());
5219 } 5237 }
5220 done->Jump(result); 5238 done->Jump(result);
5221 } else if (rewrite != NULL) { 5239 } else if (rewrite != NULL) {
5222 // Generate fast case for calls of an argument function. 5240 // Generate fast case for calls of an argument function.
5223 Property* property = rewrite->AsProperty(); 5241 Property* property = rewrite->AsProperty();
5224 if (property != NULL) { 5242 if (property != NULL) {
5225 VariableProxy* obj_proxy = property->obj()->AsVariableProxy(); 5243 VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
5226 Literal* key_literal = property->key()->AsLiteral(); 5244 Literal* key_literal = property->key()->AsLiteral();
5227 if (obj_proxy != NULL && 5245 if (obj_proxy != NULL &&
5228 key_literal != NULL && 5246 key_literal != NULL &&
(...skipping 456 matching lines...) Expand 10 before | Expand all | Expand 10 after
5685 5703
5686 // Load the literals array of the function. 5704 // Load the literals array of the function.
5687 __ mov(literals.reg(), 5705 __ mov(literals.reg(),
5688 FieldOperand(literals.reg(), JSFunction::kLiteralsOffset)); 5706 FieldOperand(literals.reg(), JSFunction::kLiteralsOffset));
5689 5707
5690 frame_->Push(&literals); 5708 frame_->Push(&literals);
5691 frame_->Push(Smi::FromInt(node->literal_index())); 5709 frame_->Push(Smi::FromInt(node->literal_index()));
5692 frame_->Push(node->constant_elements()); 5710 frame_->Push(node->constant_elements());
5693 int length = node->values()->length(); 5711 int length = node->values()->length();
5694 Result clone; 5712 Result clone;
5695 if (node->constant_elements()->map() == HEAP->fixed_cow_array_map()) { 5713 Isolate* isolate = masm()->isolate();
5714 if (node->constant_elements()->map() ==
5715 isolate->heap()->fixed_cow_array_map()) {
5696 FastCloneShallowArrayStub stub( 5716 FastCloneShallowArrayStub stub(
5697 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length); 5717 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
5698 clone = frame_->CallStub(&stub, 3); 5718 clone = frame_->CallStub(&stub, 3);
5699 Counters* counters = masm()->isolate()->counters(); 5719 Counters* counters = isolate->counters();
5700 __ IncrementCounter(counters->cow_arrays_created_stub(), 1); 5720 __ IncrementCounter(counters->cow_arrays_created_stub(), 1);
5701 } else if (node->depth() > 1) { 5721 } else if (node->depth() > 1) {
5702 clone = frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3); 5722 clone = frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3);
5703 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) { 5723 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
5704 clone = frame_->CallRuntime(Runtime::kCreateArrayLiteralShallow, 3); 5724 clone = frame_->CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
5705 } else { 5725 } else {
5706 FastCloneShallowArrayStub stub( 5726 FastCloneShallowArrayStub stub(
5707 FastCloneShallowArrayStub::CLONE_ELEMENTS, length); 5727 FastCloneShallowArrayStub::CLONE_ELEMENTS, length);
5708 clone = frame_->CallStub(&stub, 3); 5728 clone = frame_->CallStub(&stub, 3);
5709 } 5729 }
(...skipping 355 matching lines...) Expand 10 before | Expand all | Expand 10 after
6065 property.GetValue(); 6085 property.GetValue();
6066 } 6086 }
6067 6087
6068 6088
6069 void CodeGenerator::VisitCall(Call* node) { 6089 void CodeGenerator::VisitCall(Call* node) {
6070 ASSERT(!in_safe_int32_mode()); 6090 ASSERT(!in_safe_int32_mode());
6071 Comment cmnt(masm_, "[ Call"); 6091 Comment cmnt(masm_, "[ Call");
6072 6092
6073 Expression* function = node->expression(); 6093 Expression* function = node->expression();
6074 ZoneList<Expression*>* args = node->arguments(); 6094 ZoneList<Expression*>* args = node->arguments();
6095 Factory* factory = masm()->isolate()->factory();
6075 6096
6076 // Check if the function is a variable or a property. 6097 // Check if the function is a variable or a property.
6077 Variable* var = function->AsVariableProxy()->AsVariable(); 6098 Variable* var = function->AsVariableProxy()->AsVariable();
6078 Property* property = function->AsProperty(); 6099 Property* property = function->AsProperty();
6079 6100
6080 // ------------------------------------------------------------------------ 6101 // ------------------------------------------------------------------------
6081 // Fast-case: Use inline caching. 6102 // Fast-case: Use inline caching.
6082 // --- 6103 // ---
6083 // According to ECMA-262, section 11.2.3, page 44, the function to call 6104 // According to ECMA-262, section 11.2.3, page 44, the function to call
6084 // must be resolved after the arguments have been evaluated. The IC code 6105 // must be resolved after the arguments have been evaluated. The IC code
6085 // automatically handles this by loading the arguments before the function 6106 // automatically handles this by loading the arguments before the function
6086 // is resolved in cache misses (this also holds for megamorphic calls). 6107 // is resolved in cache misses (this also holds for megamorphic calls).
6087 // ------------------------------------------------------------------------ 6108 // ------------------------------------------------------------------------
6088 6109
6089 if (var != NULL && var->is_possibly_eval()) { 6110 if (var != NULL && var->is_possibly_eval()) {
6090 // ---------------------------------- 6111 // ----------------------------------
6091 // JavaScript example: 'eval(arg)' // eval is not known to be shadowed 6112 // JavaScript example: 'eval(arg)' // eval is not known to be shadowed
6092 // ---------------------------------- 6113 // ----------------------------------
6093 6114
6094 // In a call to eval, we first call %ResolvePossiblyDirectEval to 6115 // In a call to eval, we first call %ResolvePossiblyDirectEval to
6095 // resolve the function we need to call and the receiver of the 6116 // resolve the function we need to call and the receiver of the
6096 // call. Then we call the resolved function using the given 6117 // call. Then we call the resolved function using the given
6097 // arguments. 6118 // arguments.
6098 6119
6099 // Prepare the stack for the call to the resolved function. 6120 // Prepare the stack for the call to the resolved function.
6100 Load(function); 6121 Load(function);
6101 6122
6102 // Allocate a frame slot for the receiver. 6123 // Allocate a frame slot for the receiver.
6103 frame_->Push(FACTORY->undefined_value()); 6124 frame_->Push(factory->undefined_value());
6104 6125
6105 // Load the arguments. 6126 // Load the arguments.
6106 int arg_count = args->length(); 6127 int arg_count = args->length();
6107 for (int i = 0; i < arg_count; i++) { 6128 for (int i = 0; i < arg_count; i++) {
6108 Load(args->at(i)); 6129 Load(args->at(i));
6109 frame_->SpillTop(); 6130 frame_->SpillTop();
6110 } 6131 }
6111 6132
6112 // Result to hold the result of the function resolution and the 6133 // Result to hold the result of the function resolution and the
6113 // final result of the eval call. 6134 // final result of the eval call.
(...skipping 11 matching lines...) Expand all
6125 // ResolvePossiblyDirectEvalNoLookup by pushing the loaded 6146 // ResolvePossiblyDirectEvalNoLookup by pushing the loaded
6126 // function, the first argument to the eval call and the 6147 // function, the first argument to the eval call and the
6127 // receiver. 6148 // receiver.
6128 Result fun = LoadFromGlobalSlotCheckExtensions(var->AsSlot(), 6149 Result fun = LoadFromGlobalSlotCheckExtensions(var->AsSlot(),
6129 NOT_INSIDE_TYPEOF, 6150 NOT_INSIDE_TYPEOF,
6130 &slow); 6151 &slow);
6131 frame_->Push(&fun); 6152 frame_->Push(&fun);
6132 if (arg_count > 0) { 6153 if (arg_count > 0) {
6133 frame_->PushElementAt(arg_count); 6154 frame_->PushElementAt(arg_count);
6134 } else { 6155 } else {
6135 frame_->Push(FACTORY->undefined_value()); 6156 frame_->Push(factory->undefined_value());
6136 } 6157 }
6137 frame_->PushParameterAt(-1); 6158 frame_->PushParameterAt(-1);
6138 6159
6139 // Push the strict mode flag. 6160 // Push the strict mode flag.
6140 frame_->Push(Smi::FromInt(strict_mode_flag())); 6161 frame_->Push(Smi::FromInt(strict_mode_flag()));
6141 6162
6142 // Resolve the call. 6163 // Resolve the call.
6143 result = 6164 result =
6144 frame_->CallRuntime(Runtime::kResolvePossiblyDirectEvalNoLookup, 4); 6165 frame_->CallRuntime(Runtime::kResolvePossiblyDirectEvalNoLookup, 4);
6145 6166
6146 done.Jump(&result); 6167 done.Jump(&result);
6147 slow.Bind(); 6168 slow.Bind();
6148 } 6169 }
6149 6170
6150 // Prepare the stack for the call to ResolvePossiblyDirectEval by 6171 // Prepare the stack for the call to ResolvePossiblyDirectEval by
6151 // pushing the loaded function, the first argument to the eval 6172 // pushing the loaded function, the first argument to the eval
6152 // call and the receiver. 6173 // call and the receiver.
6153 frame_->PushElementAt(arg_count + 1); 6174 frame_->PushElementAt(arg_count + 1);
6154 if (arg_count > 0) { 6175 if (arg_count > 0) {
6155 frame_->PushElementAt(arg_count); 6176 frame_->PushElementAt(arg_count);
6156 } else { 6177 } else {
6157 frame_->Push(FACTORY->undefined_value()); 6178 frame_->Push(factory->undefined_value());
6158 } 6179 }
6159 frame_->PushParameterAt(-1); 6180 frame_->PushParameterAt(-1);
6160 6181
6161 // Push the strict mode flag. 6182 // Push the strict mode flag.
6162 frame_->Push(Smi::FromInt(strict_mode_flag())); 6183 frame_->Push(Smi::FromInt(strict_mode_flag()));
6163 6184
6164 // Resolve the call. 6185 // Resolve the call.
6165 result = frame_->CallRuntime(Runtime::kResolvePossiblyDirectEval, 4); 6186 result = frame_->CallRuntime(Runtime::kResolvePossiblyDirectEval, 4);
6166 6187
6167 // If we generated fast-case code bind the jump-target where fast 6188 // If we generated fast-case code bind the jump-target where fast
(...skipping 310 matching lines...) Expand 10 before | Expand all | Expand 10 after
6478 &need_conversion_, 6499 &need_conversion_,
6479 &need_conversion_, 6500 &need_conversion_,
6480 &index_out_of_range_, 6501 &index_out_of_range_,
6481 STRING_INDEX_IS_NUMBER) {} 6502 STRING_INDEX_IS_NUMBER) {}
6482 6503
6483 StringCharCodeAtGenerator* fast_case_generator() { 6504 StringCharCodeAtGenerator* fast_case_generator() {
6484 return &char_code_at_generator_; 6505 return &char_code_at_generator_;
6485 } 6506 }
6486 6507
6487 virtual void Generate() { 6508 virtual void Generate() {
6509 Factory* factory = masm()->isolate()->factory();
6488 VirtualFrameRuntimeCallHelper call_helper(frame_state()); 6510 VirtualFrameRuntimeCallHelper call_helper(frame_state());
6489 char_code_at_generator_.GenerateSlow(masm(), call_helper); 6511 char_code_at_generator_.GenerateSlow(masm(), call_helper);
6490 6512
6491 __ bind(&need_conversion_); 6513 __ bind(&need_conversion_);
6492 // Move the undefined value into the result register, which will 6514 // Move the undefined value into the result register, which will
6493 // trigger conversion. 6515 // trigger conversion.
6494 __ Set(result_, Immediate(FACTORY->undefined_value())); 6516 __ Set(result_, Immediate(factory->undefined_value()));
6495 __ jmp(exit_label()); 6517 __ jmp(exit_label());
6496 6518
6497 __ bind(&index_out_of_range_); 6519 __ bind(&index_out_of_range_);
6498 // When the index is out of range, the spec requires us to return 6520 // When the index is out of range, the spec requires us to return
6499 // NaN. 6521 // NaN.
6500 __ Set(result_, Immediate(FACTORY->nan_value())); 6522 __ Set(result_, Immediate(factory->nan_value()));
6501 __ jmp(exit_label()); 6523 __ jmp(exit_label());
6502 } 6524 }
6503 6525
6504 private: 6526 private:
6505 Register result_; 6527 Register result_;
6506 6528
6507 Label need_conversion_; 6529 Label need_conversion_;
6508 Label index_out_of_range_; 6530 Label index_out_of_range_;
6509 6531
6510 StringCharCodeAtGenerator char_code_at_generator_; 6532 StringCharCodeAtGenerator char_code_at_generator_;
(...skipping 176 matching lines...) Expand 10 before | Expand all | Expand 10 after
6687 destination()->Split(equal); 6709 destination()->Split(equal);
6688 } 6710 }
6689 6711
6690 6712
6691 void CodeGenerator::GenerateFastAsciiArrayJoin(ZoneList<Expression*>* args) { 6713 void CodeGenerator::GenerateFastAsciiArrayJoin(ZoneList<Expression*>* args) {
6692 Label bailout, done, one_char_separator, long_separator, 6714 Label bailout, done, one_char_separator, long_separator,
6693 non_trivial_array, not_size_one_array, loop, loop_condition, 6715 non_trivial_array, not_size_one_array, loop, loop_condition,
6694 loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry; 6716 loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
6695 6717
6696 ASSERT(args->length() == 2); 6718 ASSERT(args->length() == 2);
6719 Factory* factory = masm()->isolate()->factory();
6697 // We will leave the separator on the stack until the end of the function. 6720 // We will leave the separator on the stack until the end of the function.
6698 Load(args->at(1)); 6721 Load(args->at(1));
6699 // Load this to eax (= array) 6722 // Load this to eax (= array)
6700 Load(args->at(0)); 6723 Load(args->at(0));
6701 Result array_result = frame_->Pop(); 6724 Result array_result = frame_->Pop();
6702 array_result.ToRegister(eax); 6725 array_result.ToRegister(eax);
6703 frame_->SpillAll(); 6726 frame_->SpillAll();
6704 6727
6705 // All aliases of the same register have disjoint lifetimes. 6728 // All aliases of the same register have disjoint lifetimes.
6706 Register array = eax; 6729 Register array = eax;
(...skipping 24 matching lines...) Expand all
6731 6754
6732 // Check that the array has fast elements. 6755 // Check that the array has fast elements.
6733 __ test_b(FieldOperand(scratch, Map::kBitField2Offset), 6756 __ test_b(FieldOperand(scratch, Map::kBitField2Offset),
6734 1 << Map::kHasFastElements); 6757 1 << Map::kHasFastElements);
6735 __ j(zero, &bailout); 6758 __ j(zero, &bailout);
6736 6759
6737 // If the array has length zero, return the empty string. 6760 // If the array has length zero, return the empty string.
6738 __ mov(array_length, FieldOperand(array, JSArray::kLengthOffset)); 6761 __ mov(array_length, FieldOperand(array, JSArray::kLengthOffset));
6739 __ sar(array_length, 1); 6762 __ sar(array_length, 1);
6740 __ j(not_zero, &non_trivial_array); 6763 __ j(not_zero, &non_trivial_array);
6741 __ mov(result_operand, FACTORY->empty_string()); 6764 __ mov(result_operand, factory->empty_string());
6742 __ jmp(&done); 6765 __ jmp(&done);
6743 6766
6744 // Save the array length. 6767 // Save the array length.
6745 __ bind(&non_trivial_array); 6768 __ bind(&non_trivial_array);
6746 __ mov(array_length_operand, array_length); 6769 __ mov(array_length_operand, array_length);
6747 6770
6748 // Save the FixedArray containing array's elements. 6771 // Save the FixedArray containing array's elements.
6749 // End of array's live range. 6772 // End of array's live range.
6750 elements = array; 6773 elements = array;
6751 __ mov(elements, FieldOperand(array, JSArray::kElementsOffset)); 6774 __ mov(elements, FieldOperand(array, JSArray::kElementsOffset));
(...skipping 190 matching lines...) Expand 10 before | Expand all | Expand 10 after
6942 FieldOperand(string, SeqAsciiString::kHeaderSize)); 6965 FieldOperand(string, SeqAsciiString::kHeaderSize));
6943 __ CopyBytes(string, result_pos, string_length, scratch); 6966 __ CopyBytes(string, result_pos, string_length, scratch);
6944 __ add(Operand(index), Immediate(1)); 6967 __ add(Operand(index), Immediate(1));
6945 6968
6946 __ cmp(index, array_length_operand); 6969 __ cmp(index, array_length_operand);
6947 __ j(less, &loop_3); // End while (index < length). 6970 __ j(less, &loop_3); // End while (index < length).
6948 __ jmp(&done); 6971 __ jmp(&done);
6949 6972
6950 6973
6951 __ bind(&bailout); 6974 __ bind(&bailout);
6952 __ mov(result_operand, FACTORY->undefined_value()); 6975 __ mov(result_operand, factory->undefined_value());
6953 __ bind(&done); 6976 __ bind(&done);
6954 __ mov(eax, result_operand); 6977 __ mov(eax, result_operand);
6955 // Drop temp values from the stack, and restore context register. 6978 // Drop temp values from the stack, and restore context register.
6956 __ add(Operand(esp), Immediate(2 * kPointerSize)); 6979 __ add(Operand(esp), Immediate(2 * kPointerSize));
6957 6980
6958 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); 6981 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
6959 frame_->Drop(1); 6982 frame_->Drop(1);
6960 frame_->Push(&array_result); 6983 frame_->Push(&array_result);
6961 } 6984 }
6962 6985
(...skipping 80 matching lines...) Expand 10 before | Expand all | Expand 10 after
7043 DeferredIsStringWrapperSafeForDefaultValueOf(Register object, 7066 DeferredIsStringWrapperSafeForDefaultValueOf(Register object,
7044 Register map_result, 7067 Register map_result,
7045 Register scratch1, 7068 Register scratch1,
7046 Register scratch2) 7069 Register scratch2)
7047 : object_(object), 7070 : object_(object),
7048 map_result_(map_result), 7071 map_result_(map_result),
7049 scratch1_(scratch1), 7072 scratch1_(scratch1),
7050 scratch2_(scratch2) { } 7073 scratch2_(scratch2) { }
7051 7074
7052 virtual void Generate() { 7075 virtual void Generate() {
7076 Factory* factory = masm()->isolate()->factory();
7053 Label false_result; 7077 Label false_result;
7054 7078
7055 // Check that map is loaded as expected. 7079 // Check that map is loaded as expected.
7056 if (FLAG_debug_code) { 7080 if (FLAG_debug_code) {
7057 __ cmp(map_result_, FieldOperand(object_, HeapObject::kMapOffset)); 7081 __ cmp(map_result_, FieldOperand(object_, HeapObject::kMapOffset));
7058 __ Assert(equal, "Map not in expected register"); 7082 __ Assert(equal, "Map not in expected register");
7059 } 7083 }
7060 7084
7061 // Check for fast case object. Generate false result for slow case object. 7085 // Check for fast case object. Generate false result for slow case object.
7062 __ mov(scratch1_, FieldOperand(object_, JSObject::kPropertiesOffset)); 7086 __ mov(scratch1_, FieldOperand(object_, JSObject::kPropertiesOffset));
7063 __ mov(scratch1_, FieldOperand(scratch1_, HeapObject::kMapOffset)); 7087 __ mov(scratch1_, FieldOperand(scratch1_, HeapObject::kMapOffset));
7064 __ cmp(scratch1_, FACTORY->hash_table_map()); 7088 __ cmp(scratch1_, factory->hash_table_map());
7065 __ j(equal, &false_result); 7089 __ j(equal, &false_result);
7066 7090
7067 // Look for valueOf symbol in the descriptor array, and indicate false if 7091 // Look for valueOf symbol in the descriptor array, and indicate false if
7068 // found. The type is not checked, so if it is a transition it is a false 7092 // found. The type is not checked, so if it is a transition it is a false
7069 // negative. 7093 // negative.
7070 __ mov(map_result_, 7094 __ mov(map_result_,
7071 FieldOperand(map_result_, Map::kInstanceDescriptorsOffset)); 7095 FieldOperand(map_result_, Map::kInstanceDescriptorsOffset));
7072 __ mov(scratch1_, FieldOperand(map_result_, FixedArray::kLengthOffset)); 7096 __ mov(scratch1_, FieldOperand(map_result_, FixedArray::kLengthOffset));
7073 // map_result_: descriptor array 7097 // map_result_: descriptor array
7074 // scratch1_: length of descriptor array 7098 // scratch1_: length of descriptor array
7075 // Calculate the end of the descriptor array. 7099 // Calculate the end of the descriptor array.
7076 STATIC_ASSERT(kSmiTag == 0); 7100 STATIC_ASSERT(kSmiTag == 0);
7077 STATIC_ASSERT(kSmiTagSize == 1); 7101 STATIC_ASSERT(kSmiTagSize == 1);
7078 STATIC_ASSERT(kPointerSize == 4); 7102 STATIC_ASSERT(kPointerSize == 4);
7079 __ lea(scratch1_, 7103 __ lea(scratch1_,
7080 Operand(map_result_, scratch1_, times_2, FixedArray::kHeaderSize)); 7104 Operand(map_result_, scratch1_, times_2, FixedArray::kHeaderSize));
7081 // Calculate location of the first key name. 7105 // Calculate location of the first key name.
7082 __ add(Operand(map_result_), 7106 __ add(Operand(map_result_),
7083 Immediate(FixedArray::kHeaderSize + 7107 Immediate(FixedArray::kHeaderSize +
7084 DescriptorArray::kFirstIndex * kPointerSize)); 7108 DescriptorArray::kFirstIndex * kPointerSize));
7085 // Loop through all the keys in the descriptor array. If one of these is the 7109 // Loop through all the keys in the descriptor array. If one of these is the
7086 // symbol valueOf the result is false. 7110 // symbol valueOf the result is false.
7087 Label entry, loop; 7111 Label entry, loop;
7088 __ jmp(&entry); 7112 __ jmp(&entry);
7089 __ bind(&loop); 7113 __ bind(&loop);
7090 __ mov(scratch2_, FieldOperand(map_result_, 0)); 7114 __ mov(scratch2_, FieldOperand(map_result_, 0));
7091 __ cmp(scratch2_, FACTORY->value_of_symbol()); 7115 __ cmp(scratch2_, factory->value_of_symbol());
7092 __ j(equal, &false_result); 7116 __ j(equal, &false_result);
7093 __ add(Operand(map_result_), Immediate(kPointerSize)); 7117 __ add(Operand(map_result_), Immediate(kPointerSize));
7094 __ bind(&entry); 7118 __ bind(&entry);
7095 __ cmp(map_result_, Operand(scratch1_)); 7119 __ cmp(map_result_, Operand(scratch1_));
7096 __ j(not_equal, &loop); 7120 __ j(not_equal, &loop);
7097 7121
7098 // Reload map as register map_result_ was used as temporary above. 7122 // Reload map as register map_result_ was used as temporary above.
7099 __ mov(map_result_, FieldOperand(object_, HeapObject::kMapOffset)); 7123 __ mov(map_result_, FieldOperand(object_, HeapObject::kMapOffset));
7100 7124
7101 // If a valueOf property is not found on the object check that it's 7125 // If a valueOf property is not found on the object check that it's
(...skipping 153 matching lines...) Expand 10 before | Expand all | Expand 10 after
7255 7279
7256 __ bind(&exit); 7280 __ bind(&exit);
7257 result.set_type_info(TypeInfo::Smi()); 7281 result.set_type_info(TypeInfo::Smi());
7258 if (FLAG_debug_code) __ AbortIfNotSmi(result.reg()); 7282 if (FLAG_debug_code) __ AbortIfNotSmi(result.reg());
7259 frame_->Push(&result); 7283 frame_->Push(&result);
7260 } 7284 }
7261 7285
7262 7286
7263 void CodeGenerator::GenerateClassOf(ZoneList<Expression*>* args) { 7287 void CodeGenerator::GenerateClassOf(ZoneList<Expression*>* args) {
7264 ASSERT(args->length() == 1); 7288 ASSERT(args->length() == 1);
7289 Factory* factory = masm()->isolate()->factory();
7265 JumpTarget leave, null, function, non_function_constructor; 7290 JumpTarget leave, null, function, non_function_constructor;
7266 Load(args->at(0)); // Load the object. 7291 Load(args->at(0)); // Load the object.
7267 Result obj = frame_->Pop(); 7292 Result obj = frame_->Pop();
7268 obj.ToRegister(); 7293 obj.ToRegister();
7269 frame_->Spill(obj.reg()); 7294 frame_->Spill(obj.reg());
7270 7295
7271 // If the object is a smi, we return null. 7296 // If the object is a smi, we return null.
7272 __ test(obj.reg(), Immediate(kSmiTagMask)); 7297 __ test(obj.reg(), Immediate(kSmiTagMask));
7273 null.Branch(zero); 7298 null.Branch(zero);
7274 7299
(...skipping 21 matching lines...) Expand all
7296 // instance class name from there. 7321 // instance class name from there.
7297 __ mov(obj.reg(), 7322 __ mov(obj.reg(),
7298 FieldOperand(obj.reg(), JSFunction::kSharedFunctionInfoOffset)); 7323 FieldOperand(obj.reg(), JSFunction::kSharedFunctionInfoOffset));
7299 __ mov(obj.reg(), 7324 __ mov(obj.reg(),
7300 FieldOperand(obj.reg(), SharedFunctionInfo::kInstanceClassNameOffset)); 7325 FieldOperand(obj.reg(), SharedFunctionInfo::kInstanceClassNameOffset));
7301 frame_->Push(&obj); 7326 frame_->Push(&obj);
7302 leave.Jump(); 7327 leave.Jump();
7303 7328
7304 // Functions have class 'Function'. 7329 // Functions have class 'Function'.
7305 function.Bind(); 7330 function.Bind();
7306 frame_->Push(FACTORY->function_class_symbol()); 7331 frame_->Push(factory->function_class_symbol());
7307 leave.Jump(); 7332 leave.Jump();
7308 7333
7309 // Objects with a non-function constructor have class 'Object'. 7334 // Objects with a non-function constructor have class 'Object'.
7310 non_function_constructor.Bind(); 7335 non_function_constructor.Bind();
7311 frame_->Push(FACTORY->Object_symbol()); 7336 frame_->Push(factory->Object_symbol());
7312 leave.Jump(); 7337 leave.Jump();
7313 7338
7314 // Non-JS objects have class null. 7339 // Non-JS objects have class null.
7315 null.Bind(); 7340 null.Bind();
7316 frame_->Push(FACTORY->null_value()); 7341 frame_->Push(factory->null_value());
7317 7342
7318 // All done. 7343 // All done.
7319 leave.Bind(); 7344 leave.Bind();
7320 } 7345 }
7321 7346
7322 7347
7323 void CodeGenerator::GenerateValueOf(ZoneList<Expression*>* args) { 7348 void CodeGenerator::GenerateValueOf(ZoneList<Expression*>* args) {
7324 ASSERT(args->length() == 1); 7349 ASSERT(args->length() == 1);
7325 JumpTarget leave; 7350 JumpTarget leave;
7326 Load(args->at(0)); // Load the object. 7351 Load(args->at(0)); // Load the object.
(...skipping 101 matching lines...) Expand 10 before | Expand all | Expand 10 after
7428 __ mov(ebp_as_smi.reg(), Operand(ebp)); 7453 __ mov(ebp_as_smi.reg(), Operand(ebp));
7429 frame_->Push(&ebp_as_smi); 7454 frame_->Push(&ebp_as_smi);
7430 } 7455 }
7431 7456
7432 7457
7433 void CodeGenerator::GenerateRandomHeapNumber( 7458 void CodeGenerator::GenerateRandomHeapNumber(
7434 ZoneList<Expression*>* args) { 7459 ZoneList<Expression*>* args) {
7435 ASSERT(args->length() == 0); 7460 ASSERT(args->length() == 0);
7436 frame_->SpillAll(); 7461 frame_->SpillAll();
7437 7462
7463 Isolate* isolate = masm()->isolate();
7464
7438 Label slow_allocate_heapnumber; 7465 Label slow_allocate_heapnumber;
7439 Label heapnumber_allocated; 7466 Label heapnumber_allocated;
7440 7467
7441 __ AllocateHeapNumber(edi, ebx, ecx, &slow_allocate_heapnumber); 7468 __ AllocateHeapNumber(edi, ebx, ecx, &slow_allocate_heapnumber);
7442 __ jmp(&heapnumber_allocated); 7469 __ jmp(&heapnumber_allocated);
7443 7470
7444 __ bind(&slow_allocate_heapnumber); 7471 __ bind(&slow_allocate_heapnumber);
7445 // Allocate a heap number. 7472 // Allocate a heap number.
7446 __ CallRuntime(Runtime::kNumberAlloc, 0); 7473 __ CallRuntime(Runtime::kNumberAlloc, 0);
7447 __ mov(edi, eax); 7474 __ mov(edi, eax);
7448 7475
7449 __ bind(&heapnumber_allocated); 7476 __ bind(&heapnumber_allocated);
7450 7477
7451 __ PrepareCallCFunction(0, ebx); 7478 __ PrepareCallCFunction(0, ebx);
7452 __ CallCFunction(ExternalReference::random_uint32_function(masm()->isolate()), 7479 __ CallCFunction(ExternalReference::random_uint32_function(isolate),
7453 0); 7480 0);
7454 7481
7455 // Convert 32 random bits in eax to 0.(32 random bits) in a double 7482 // Convert 32 random bits in eax to 0.(32 random bits) in a double
7456 // by computing: 7483 // by computing:
7457 // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)). 7484 // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)).
7458 // This is implemented on both SSE2 and FPU. 7485 // This is implemented on both SSE2 and FPU.
7459 if (masm()->isolate()->cpu_features()->IsSupported(SSE2)) { 7486 if (isolate->cpu_features()->IsSupported(SSE2)) {
7460 CpuFeatures::Scope fscope(SSE2); 7487 CpuFeatures::Scope fscope(SSE2);
7461 __ mov(ebx, Immediate(0x49800000)); // 1.0 x 2^20 as single. 7488 __ mov(ebx, Immediate(0x49800000)); // 1.0 x 2^20 as single.
7462 __ movd(xmm1, Operand(ebx)); 7489 __ movd(xmm1, Operand(ebx));
7463 __ movd(xmm0, Operand(eax)); 7490 __ movd(xmm0, Operand(eax));
7464 __ cvtss2sd(xmm1, xmm1); 7491 __ cvtss2sd(xmm1, xmm1);
7465 __ pxor(xmm0, xmm1); 7492 __ pxor(xmm0, xmm1);
7466 __ subsd(xmm0, xmm1); 7493 __ subsd(xmm0, xmm1);
7467 __ movdbl(FieldOperand(edi, HeapNumber::kValueOffset), xmm0); 7494 __ movdbl(FieldOperand(edi, HeapNumber::kValueOffset), xmm0);
7468 } else { 7495 } else {
7469 // 0x4130000000000000 is 1.0 x 2^20 as a double. 7496 // 0x4130000000000000 is 1.0 x 2^20 as a double.
(...skipping 277 matching lines...) Expand 10 before | Expand all | Expand 10 after
7747 __ CallRuntime(Runtime::kSwapElements, 3); 7774 __ CallRuntime(Runtime::kSwapElements, 3);
7748 } 7775 }
7749 7776
7750 7777
7751 void CodeGenerator::GenerateSwapElements(ZoneList<Expression*>* args) { 7778 void CodeGenerator::GenerateSwapElements(ZoneList<Expression*>* args) {
7752 // Note: this code assumes that indices are passed are within 7779 // Note: this code assumes that indices are passed are within
7753 // elements' bounds and refer to valid (not holes) values. 7780 // elements' bounds and refer to valid (not holes) values.
7754 Comment cmnt(masm_, "[ GenerateSwapElements"); 7781 Comment cmnt(masm_, "[ GenerateSwapElements");
7755 7782
7756 ASSERT_EQ(3, args->length()); 7783 ASSERT_EQ(3, args->length());
7784 Factory* factory = masm()->isolate()->factory();
7757 7785
7758 Load(args->at(0)); 7786 Load(args->at(0));
7759 Load(args->at(1)); 7787 Load(args->at(1));
7760 Load(args->at(2)); 7788 Load(args->at(2));
7761 7789
7762 Result index2 = frame_->Pop(); 7790 Result index2 = frame_->Pop();
7763 index2.ToRegister(); 7791 index2.ToRegister();
7764 7792
7765 Result index1 = frame_->Pop(); 7793 Result index1 = frame_->Pop();
7766 index1.ToRegister(); 7794 index1.ToRegister();
(...skipping 19 matching lines...) Expand all
7786 // has no indexed interceptor. 7814 // has no indexed interceptor.
7787 __ CmpObjectType(object.reg(), FIRST_JS_OBJECT_TYPE, tmp1.reg()); 7815 __ CmpObjectType(object.reg(), FIRST_JS_OBJECT_TYPE, tmp1.reg());
7788 deferred->Branch(below); 7816 deferred->Branch(below);
7789 __ test_b(FieldOperand(tmp1.reg(), Map::kBitFieldOffset), 7817 __ test_b(FieldOperand(tmp1.reg(), Map::kBitFieldOffset),
7790 KeyedLoadIC::kSlowCaseBitFieldMask); 7818 KeyedLoadIC::kSlowCaseBitFieldMask);
7791 deferred->Branch(not_zero); 7819 deferred->Branch(not_zero);
7792 7820
7793 // Check the object's elements are in fast case and writable. 7821 // Check the object's elements are in fast case and writable.
7794 __ mov(tmp1.reg(), FieldOperand(object.reg(), JSObject::kElementsOffset)); 7822 __ mov(tmp1.reg(), FieldOperand(object.reg(), JSObject::kElementsOffset));
7795 __ cmp(FieldOperand(tmp1.reg(), HeapObject::kMapOffset), 7823 __ cmp(FieldOperand(tmp1.reg(), HeapObject::kMapOffset),
7796 Immediate(FACTORY->fixed_array_map())); 7824 Immediate(factory->fixed_array_map()));
7797 deferred->Branch(not_equal); 7825 deferred->Branch(not_equal);
7798 7826
7799 // Smi-tagging is equivalent to multiplying by 2. 7827 // Smi-tagging is equivalent to multiplying by 2.
7800 STATIC_ASSERT(kSmiTag == 0); 7828 STATIC_ASSERT(kSmiTag == 0);
7801 STATIC_ASSERT(kSmiTagSize == 1); 7829 STATIC_ASSERT(kSmiTagSize == 1);
7802 7830
7803 // Check that both indices are smis. 7831 // Check that both indices are smis.
7804 __ mov(tmp2.reg(), index1.reg()); 7832 __ mov(tmp2.reg(), index1.reg());
7805 __ or_(tmp2.reg(), Operand(index2.reg())); 7833 __ or_(tmp2.reg(), Operand(index2.reg()));
7806 __ test(tmp2.reg(), Immediate(kSmiTagMask)); 7834 __ test(tmp2.reg(), Immediate(kSmiTagMask));
(...skipping 20 matching lines...) Expand all
7827 __ InNewSpace(tmp1.reg(), tmp2.reg(), equal, &done); 7855 __ InNewSpace(tmp1.reg(), tmp2.reg(), equal, &done);
7828 // Possible optimization: do a check that both values are Smis 7856 // Possible optimization: do a check that both values are Smis
7829 // (or them and test against Smi mask.) 7857 // (or them and test against Smi mask.)
7830 7858
7831 __ mov(tmp2.reg(), tmp1.reg()); 7859 __ mov(tmp2.reg(), tmp1.reg());
7832 __ RecordWriteHelper(tmp2.reg(), index1.reg(), object.reg()); 7860 __ RecordWriteHelper(tmp2.reg(), index1.reg(), object.reg());
7833 __ RecordWriteHelper(tmp1.reg(), index2.reg(), object.reg()); 7861 __ RecordWriteHelper(tmp1.reg(), index2.reg(), object.reg());
7834 __ bind(&done); 7862 __ bind(&done);
7835 7863
7836 deferred->BindExit(); 7864 deferred->BindExit();
7837 frame_->Push(FACTORY->undefined_value()); 7865 frame_->Push(factory->undefined_value());
7838 } 7866 }
7839 7867
7840 7868
7841 void CodeGenerator::GenerateCallFunction(ZoneList<Expression*>* args) { 7869 void CodeGenerator::GenerateCallFunction(ZoneList<Expression*>* args) {
7842 Comment cmnt(masm_, "[ GenerateCallFunction"); 7870 Comment cmnt(masm_, "[ GenerateCallFunction");
7843 7871
7844 ASSERT(args->length() >= 2); 7872 ASSERT(args->length() >= 2);
7845 7873
7846 int n_args = args->length() - 2; // for receiver and function. 7874 int n_args = args->length() - 2; // for receiver and function.
7847 Load(args->at(0)); // receiver 7875 Load(args->at(0)); // receiver
7848 for (int i = 0; i < n_args; i++) { 7876 for (int i = 0; i < n_args; i++) {
7849 Load(args->at(i + 1)); 7877 Load(args->at(i + 1));
7850 } 7878 }
7851 Load(args->at(n_args + 1)); // function 7879 Load(args->at(n_args + 1)); // function
7852 Result result = frame_->CallJSFunction(n_args); 7880 Result result = frame_->CallJSFunction(n_args);
7853 frame_->Push(&result); 7881 frame_->Push(&result);
7854 } 7882 }
7855 7883
7856 7884
7857 // Generates the Math.pow method. Only handles special cases and 7885 // Generates the Math.pow method. Only handles special cases and
7858 // branches to the runtime system for everything else. Please note 7886 // branches to the runtime system for everything else. Please note
7859 // that this function assumes that the callsite has executed ToNumber 7887 // that this function assumes that the callsite has executed ToNumber
7860 // on both arguments. 7888 // on both arguments.
7861 void CodeGenerator::GenerateMathPow(ZoneList<Expression*>* args) { 7889 void CodeGenerator::GenerateMathPow(ZoneList<Expression*>* args) {
7862 ASSERT(args->length() == 2); 7890 ASSERT(args->length() == 2);
7891 Isolate* isolate = masm()->isolate();
7863 Load(args->at(0)); 7892 Load(args->at(0));
7864 Load(args->at(1)); 7893 Load(args->at(1));
7865 if (!masm()->isolate()->cpu_features()->IsSupported(SSE2)) { 7894 if (!isolate->cpu_features()->IsSupported(SSE2)) {
7866 Result res = frame_->CallRuntime(Runtime::kMath_pow, 2); 7895 Result res = frame_->CallRuntime(Runtime::kMath_pow, 2);
7867 frame_->Push(&res); 7896 frame_->Push(&res);
7868 } else { 7897 } else {
7898 Factory* factory = isolate->factory();
7869 CpuFeatures::Scope use_sse2(SSE2); 7899 CpuFeatures::Scope use_sse2(SSE2);
7870 Label allocate_return; 7900 Label allocate_return;
7871 // Load the two operands while leaving the values on the frame. 7901 // Load the two operands while leaving the values on the frame.
7872 frame()->Dup(); 7902 frame()->Dup();
7873 Result exponent = frame()->Pop(); 7903 Result exponent = frame()->Pop();
7874 exponent.ToRegister(); 7904 exponent.ToRegister();
7875 frame()->Spill(exponent.reg()); 7905 frame()->Spill(exponent.reg());
7876 frame()->PushElementAt(1); 7906 frame()->PushElementAt(1);
7877 Result base = frame()->Pop(); 7907 Result base = frame()->Pop();
7878 base.ToRegister(); 7908 base.ToRegister();
(...skipping 17 matching lines...) Expand all
7896 __ j(not_zero, &base_nonsmi); 7926 __ j(not_zero, &base_nonsmi);
7897 7927
7898 // Optimized version when y is an integer. 7928 // Optimized version when y is an integer.
7899 Label powi; 7929 Label powi;
7900 __ SmiUntag(base.reg()); 7930 __ SmiUntag(base.reg());
7901 __ cvtsi2sd(xmm0, Operand(base.reg())); 7931 __ cvtsi2sd(xmm0, Operand(base.reg()));
7902 __ jmp(&powi); 7932 __ jmp(&powi);
7903 // exponent is smi and base is a heapnumber. 7933 // exponent is smi and base is a heapnumber.
7904 __ bind(&base_nonsmi); 7934 __ bind(&base_nonsmi);
7905 __ cmp(FieldOperand(base.reg(), HeapObject::kMapOffset), 7935 __ cmp(FieldOperand(base.reg(), HeapObject::kMapOffset),
7906 FACTORY->heap_number_map()); 7936 factory->heap_number_map());
7907 call_runtime.Branch(not_equal); 7937 call_runtime.Branch(not_equal);
7908 7938
7909 __ movdbl(xmm0, FieldOperand(base.reg(), HeapNumber::kValueOffset)); 7939 __ movdbl(xmm0, FieldOperand(base.reg(), HeapNumber::kValueOffset));
7910 7940
7911 // Optimized version of pow if y is an integer. 7941 // Optimized version of pow if y is an integer.
7912 __ bind(&powi); 7942 __ bind(&powi);
7913 __ SmiUntag(exponent.reg()); 7943 __ SmiUntag(exponent.reg());
7914 7944
7915 // Save exponent in base as we need to check if exponent is negative later. 7945 // Save exponent in base as we need to check if exponent is negative later.
7916 // We know that base and exponent are in different registers. 7946 // We know that base and exponent are in different registers.
(...skipping 30 matching lines...) Expand all
7947 __ ucomisd(xmm0, xmm1); 7977 __ ucomisd(xmm0, xmm1);
7948 call_runtime.Branch(equal); 7978 call_runtime.Branch(equal);
7949 __ divsd(xmm3, xmm1); 7979 __ divsd(xmm3, xmm1);
7950 __ movsd(xmm1, xmm3); 7980 __ movsd(xmm1, xmm3);
7951 __ jmp(&allocate_return); 7981 __ jmp(&allocate_return);
7952 7982
7953 // exponent (or both) is a heapnumber - no matter what we should now work 7983 // exponent (or both) is a heapnumber - no matter what we should now work
7954 // on doubles. 7984 // on doubles.
7955 __ bind(&exponent_nonsmi); 7985 __ bind(&exponent_nonsmi);
7956 __ cmp(FieldOperand(exponent.reg(), HeapObject::kMapOffset), 7986 __ cmp(FieldOperand(exponent.reg(), HeapObject::kMapOffset),
7957 FACTORY->heap_number_map()); 7987 factory->heap_number_map());
7958 call_runtime.Branch(not_equal); 7988 call_runtime.Branch(not_equal);
7959 __ movdbl(xmm1, FieldOperand(exponent.reg(), HeapNumber::kValueOffset)); 7989 __ movdbl(xmm1, FieldOperand(exponent.reg(), HeapNumber::kValueOffset));
7960 // Test if exponent is nan. 7990 // Test if exponent is nan.
7961 __ ucomisd(xmm1, xmm1); 7991 __ ucomisd(xmm1, xmm1);
7962 call_runtime.Branch(parity_even); 7992 call_runtime.Branch(parity_even);
7963 7993
7964 Label base_not_smi; 7994 Label base_not_smi;
7965 Label handle_special_cases; 7995 Label handle_special_cases;
7966 __ test(base.reg(), Immediate(kSmiTagMask)); 7996 __ test(base.reg(), Immediate(kSmiTagMask));
7967 __ j(not_zero, &base_not_smi); 7997 __ j(not_zero, &base_not_smi);
7968 __ SmiUntag(base.reg()); 7998 __ SmiUntag(base.reg());
7969 __ cvtsi2sd(xmm0, Operand(base.reg())); 7999 __ cvtsi2sd(xmm0, Operand(base.reg()));
7970 __ jmp(&handle_special_cases); 8000 __ jmp(&handle_special_cases);
7971 __ bind(&base_not_smi); 8001 __ bind(&base_not_smi);
7972 __ cmp(FieldOperand(base.reg(), HeapObject::kMapOffset), 8002 __ cmp(FieldOperand(base.reg(), HeapObject::kMapOffset),
7973 FACTORY->heap_number_map()); 8003 factory->heap_number_map());
7974 call_runtime.Branch(not_equal); 8004 call_runtime.Branch(not_equal);
7975 __ mov(answer.reg(), FieldOperand(base.reg(), HeapNumber::kExponentOffset)); 8005 __ mov(answer.reg(), FieldOperand(base.reg(), HeapNumber::kExponentOffset));
7976 __ and_(answer.reg(), HeapNumber::kExponentMask); 8006 __ and_(answer.reg(), HeapNumber::kExponentMask);
7977 __ cmp(Operand(answer.reg()), Immediate(HeapNumber::kExponentMask)); 8007 __ cmp(Operand(answer.reg()), Immediate(HeapNumber::kExponentMask));
7978 // base is NaN or +/-Infinity 8008 // base is NaN or +/-Infinity
7979 call_runtime.Branch(greater_equal); 8009 call_runtime.Branch(greater_equal);
7980 __ movdbl(xmm0, FieldOperand(base.reg(), HeapNumber::kValueOffset)); 8010 __ movdbl(xmm0, FieldOperand(base.reg(), HeapNumber::kValueOffset));
7981 8011
7982 // base is in xmm0 and exponent is in xmm1. 8012 // base is in xmm0 and exponent is in xmm1.
7983 __ bind(&handle_special_cases); 8013 __ bind(&handle_special_cases);
(...skipping 87 matching lines...) Expand 10 before | Expand all | Expand 10 after
8071 Result result = frame_->CallStub(&stub, 1); 8101 Result result = frame_->CallStub(&stub, 1);
8072 frame_->Push(&result); 8102 frame_->Push(&result);
8073 } 8103 }
8074 8104
8075 8105
8076 // Generates the Math.sqrt method. Please note - this function assumes that 8106 // Generates the Math.sqrt method. Please note - this function assumes that
8077 // the callsite has executed ToNumber on the argument. 8107 // the callsite has executed ToNumber on the argument.
8078 void CodeGenerator::GenerateMathSqrt(ZoneList<Expression*>* args) { 8108 void CodeGenerator::GenerateMathSqrt(ZoneList<Expression*>* args) {
8079 ASSERT_EQ(args->length(), 1); 8109 ASSERT_EQ(args->length(), 1);
8080 Load(args->at(0)); 8110 Load(args->at(0));
8111 Isolate* isolate = masm()->isolate();
8081 8112
8082 if (!masm()->isolate()->cpu_features()->IsSupported(SSE2)) { 8113 if (!isolate->cpu_features()->IsSupported(SSE2)) {
8083 Result result = frame()->CallRuntime(Runtime::kMath_sqrt, 1); 8114 Result result = frame()->CallRuntime(Runtime::kMath_sqrt, 1);
8084 frame()->Push(&result); 8115 frame()->Push(&result);
8085 } else { 8116 } else {
8086 CpuFeatures::Scope use_sse2(SSE2); 8117 CpuFeatures::Scope use_sse2(SSE2);
8087 // Leave original value on the frame if we need to call runtime. 8118 // Leave original value on the frame if we need to call runtime.
8088 frame()->Dup(); 8119 frame()->Dup();
8089 Result result = frame()->Pop(); 8120 Result result = frame()->Pop();
8090 result.ToRegister(); 8121 result.ToRegister();
8091 frame()->Spill(result.reg()); 8122 frame()->Spill(result.reg());
8092 Label runtime; 8123 Label runtime;
8093 Label non_smi; 8124 Label non_smi;
8094 Label load_done; 8125 Label load_done;
8095 JumpTarget end; 8126 JumpTarget end;
8096 8127
8097 __ test(result.reg(), Immediate(kSmiTagMask)); 8128 __ test(result.reg(), Immediate(kSmiTagMask));
8098 __ j(not_zero, &non_smi); 8129 __ j(not_zero, &non_smi);
8099 __ SmiUntag(result.reg()); 8130 __ SmiUntag(result.reg());
8100 __ cvtsi2sd(xmm0, Operand(result.reg())); 8131 __ cvtsi2sd(xmm0, Operand(result.reg()));
8101 __ jmp(&load_done); 8132 __ jmp(&load_done);
8102 __ bind(&non_smi); 8133 __ bind(&non_smi);
8103 __ cmp(FieldOperand(result.reg(), HeapObject::kMapOffset), 8134 __ cmp(FieldOperand(result.reg(), HeapObject::kMapOffset),
8104 FACTORY->heap_number_map()); 8135 isolate->factory()->heap_number_map());
8105 __ j(not_equal, &runtime); 8136 __ j(not_equal, &runtime);
8106 __ movdbl(xmm0, FieldOperand(result.reg(), HeapNumber::kValueOffset)); 8137 __ movdbl(xmm0, FieldOperand(result.reg(), HeapNumber::kValueOffset));
8107 8138
8108 __ bind(&load_done); 8139 __ bind(&load_done);
8109 __ sqrtsd(xmm0, xmm0); 8140 __ sqrtsd(xmm0, xmm0);
8110 // A copy of the virtual frame to allow us to go to runtime after the 8141 // A copy of the virtual frame to allow us to go to runtime after the
8111 // JumpTarget jump. 8142 // JumpTarget jump.
8112 Result scratch = allocator()->Allocate(); 8143 Result scratch = allocator()->Allocate();
8113 VirtualFrame* clone = new VirtualFrame(frame()); 8144 VirtualFrame* clone = new VirtualFrame(frame());
8114 __ AllocateHeapNumber(result.reg(), scratch.reg(), no_reg, &runtime); 8145 __ AllocateHeapNumber(result.reg(), scratch.reg(), no_reg, &runtime);
(...skipping 121 matching lines...) Expand 10 before | Expand all | Expand 10 after
8236 // Call the C runtime function. 8267 // Call the C runtime function.
8237 Result answer = frame_->CallRuntime(function, arg_count); 8268 Result answer = frame_->CallRuntime(function, arg_count);
8238 frame_->Push(&answer); 8269 frame_->Push(&answer);
8239 } 8270 }
8240 } 8271 }
8241 8272
8242 8273
8243 void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) { 8274 void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
8244 Comment cmnt(masm_, "[ UnaryOperation"); 8275 Comment cmnt(masm_, "[ UnaryOperation");
8245 8276
8277 Factory* factory = masm()->isolate()->factory();
8246 Token::Value op = node->op(); 8278 Token::Value op = node->op();
8247 8279
8248 if (op == Token::NOT) { 8280 if (op == Token::NOT) {
8249 // Swap the true and false targets but keep the same actual label 8281 // Swap the true and false targets but keep the same actual label
8250 // as the fall through. 8282 // as the fall through.
8251 destination()->Invert(); 8283 destination()->Invert();
8252 LoadCondition(node->expression(), destination(), true); 8284 LoadCondition(node->expression(), destination(), true);
8253 // Swap the labels back. 8285 // Swap the labels back.
8254 destination()->Invert(); 8286 destination()->Invert();
8255 8287
(...skipping 27 matching lines...) Expand all
8283 // variable. Sync the virtual frame eagerly so we can push the 8315 // variable. Sync the virtual frame eagerly so we can push the
8284 // arguments directly into place. 8316 // arguments directly into place.
8285 frame_->SyncRange(0, frame_->element_count() - 1); 8317 frame_->SyncRange(0, frame_->element_count() - 1);
8286 frame_->EmitPush(esi); 8318 frame_->EmitPush(esi);
8287 frame_->EmitPush(Immediate(variable->name())); 8319 frame_->EmitPush(Immediate(variable->name()));
8288 Result answer = frame_->CallRuntime(Runtime::kDeleteContextSlot, 2); 8320 Result answer = frame_->CallRuntime(Runtime::kDeleteContextSlot, 2);
8289 frame_->Push(&answer); 8321 frame_->Push(&answer);
8290 } else { 8322 } else {
8291 // Default: Result of deleting non-global, not dynamically 8323 // Default: Result of deleting non-global, not dynamically
8292 // introduced variables is false. 8324 // introduced variables is false.
8293 frame_->Push(FACTORY->false_value()); 8325 frame_->Push(factory->false_value());
8294 } 8326 }
8295 } else { 8327 } else {
8296 // Default: Result of deleting expressions is true. 8328 // Default: Result of deleting expressions is true.
8297 Load(node->expression()); // may have side-effects 8329 Load(node->expression()); // may have side-effects
8298 frame_->SetElementAt(0, FACTORY->true_value()); 8330 frame_->SetElementAt(0, factory->true_value());
8299 } 8331 }
8300 8332
8301 } else if (op == Token::TYPEOF) { 8333 } else if (op == Token::TYPEOF) {
8302 // Special case for loading the typeof expression; see comment on 8334 // Special case for loading the typeof expression; see comment on
8303 // LoadTypeofExpression(). 8335 // LoadTypeofExpression().
8304 LoadTypeofExpression(node->expression()); 8336 LoadTypeofExpression(node->expression());
8305 Result answer = frame_->CallRuntime(Runtime::kTypeof, 1); 8337 Result answer = frame_->CallRuntime(Runtime::kTypeof, 1);
8306 frame_->Push(&answer); 8338 frame_->Push(&answer);
8307 8339
8308 } else if (op == Token::VOID) { 8340 } else if (op == Token::VOID) {
8309 Expression* expression = node->expression(); 8341 Expression* expression = node->expression();
8310 if (expression && expression->AsLiteral() && ( 8342 if (expression && expression->AsLiteral() && (
8311 expression->AsLiteral()->IsTrue() || 8343 expression->AsLiteral()->IsTrue() ||
8312 expression->AsLiteral()->IsFalse() || 8344 expression->AsLiteral()->IsFalse() ||
8313 expression->AsLiteral()->handle()->IsNumber() || 8345 expression->AsLiteral()->handle()->IsNumber() ||
8314 expression->AsLiteral()->handle()->IsString() || 8346 expression->AsLiteral()->handle()->IsString() ||
8315 expression->AsLiteral()->handle()->IsJSRegExp() || 8347 expression->AsLiteral()->handle()->IsJSRegExp() ||
8316 expression->AsLiteral()->IsNull())) { 8348 expression->AsLiteral()->IsNull())) {
8317 // Omit evaluating the value of the primitive literal. 8349 // Omit evaluating the value of the primitive literal.
8318 // It will be discarded anyway, and can have no side effect. 8350 // It will be discarded anyway, and can have no side effect.
8319 frame_->Push(FACTORY->undefined_value()); 8351 frame_->Push(factory->undefined_value());
8320 } else { 8352 } else {
8321 Load(node->expression()); 8353 Load(node->expression());
8322 frame_->SetElementAt(0, FACTORY->undefined_value()); 8354 frame_->SetElementAt(0, factory->undefined_value());
8323 } 8355 }
8324 8356
8325 } else { 8357 } else {
8326 if (in_safe_int32_mode()) { 8358 if (in_safe_int32_mode()) {
8327 Visit(node->expression()); 8359 Visit(node->expression());
8328 Result value = frame_->Pop(); 8360 Result value = frame_->Pop();
8329 ASSERT(value.is_untagged_int32()); 8361 ASSERT(value.is_untagged_int32());
8330 // Registers containing an int32 value are not multiply used. 8362 // Registers containing an int32 value are not multiply used.
8331 ASSERT(!value.is_register() || !frame_->is_used(value.reg())); 8363 ASSERT(!value.is_register() || !frame_->is_used(value.reg()));
8332 value.ToRegister(); 8364 value.ToRegister();
(...skipping 770 matching lines...) Expand 10 before | Expand all | Expand 10 after
9103 bool left_already_loaded = false; 9135 bool left_already_loaded = false;
9104 9136
9105 // Get the expressions from the node. 9137 // Get the expressions from the node.
9106 Expression* left = node->left(); 9138 Expression* left = node->left();
9107 Expression* right = node->right(); 9139 Expression* right = node->right();
9108 Token::Value op = node->op(); 9140 Token::Value op = node->op();
9109 // To make typeof testing for natives implemented in JavaScript really 9141 // To make typeof testing for natives implemented in JavaScript really
9110 // efficient, we generate special code for expressions of the form: 9142 // efficient, we generate special code for expressions of the form:
9111 // 'typeof <expression> == <string>'. 9143 // 'typeof <expression> == <string>'.
9112 UnaryOperation* operation = left->AsUnaryOperation(); 9144 UnaryOperation* operation = left->AsUnaryOperation();
9145 Isolate* isolate = masm()->isolate();
9146 Factory* factory = isolate->factory();
9147 Heap* heap = isolate->heap();
9113 if ((op == Token::EQ || op == Token::EQ_STRICT) && 9148 if ((op == Token::EQ || op == Token::EQ_STRICT) &&
9114 (operation != NULL && operation->op() == Token::TYPEOF) && 9149 (operation != NULL && operation->op() == Token::TYPEOF) &&
9115 (right->AsLiteral() != NULL && 9150 (right->AsLiteral() != NULL &&
9116 right->AsLiteral()->handle()->IsString())) { 9151 right->AsLiteral()->handle()->IsString())) {
9117 Handle<String> check(String::cast(*right->AsLiteral()->handle())); 9152 Handle<String> check(String::cast(*right->AsLiteral()->handle()));
9118 9153
9119 // Load the operand and move it to a register. 9154 // Load the operand and move it to a register.
9120 LoadTypeofExpression(operation->expression()); 9155 LoadTypeofExpression(operation->expression());
9121 Result answer = frame_->Pop(); 9156 Result answer = frame_->Pop();
9122 answer.ToRegister(); 9157 answer.ToRegister();
9123 9158
9124 if (check->Equals(HEAP->number_symbol())) { 9159 if (check->Equals(heap->number_symbol())) {
9125 __ test(answer.reg(), Immediate(kSmiTagMask)); 9160 __ test(answer.reg(), Immediate(kSmiTagMask));
9126 destination()->true_target()->Branch(zero); 9161 destination()->true_target()->Branch(zero);
9127 frame_->Spill(answer.reg()); 9162 frame_->Spill(answer.reg());
9128 __ mov(answer.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset)); 9163 __ mov(answer.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
9129 __ cmp(answer.reg(), FACTORY->heap_number_map()); 9164 __ cmp(answer.reg(), factory->heap_number_map());
9130 answer.Unuse(); 9165 answer.Unuse();
9131 destination()->Split(equal); 9166 destination()->Split(equal);
9132 9167
9133 } else if (check->Equals(HEAP->string_symbol())) { 9168 } else if (check->Equals(heap->string_symbol())) {
9134 __ test(answer.reg(), Immediate(kSmiTagMask)); 9169 __ test(answer.reg(), Immediate(kSmiTagMask));
9135 destination()->false_target()->Branch(zero); 9170 destination()->false_target()->Branch(zero);
9136 9171
9137 // It can be an undetectable string object. 9172 // It can be an undetectable string object.
9138 Result temp = allocator()->Allocate(); 9173 Result temp = allocator()->Allocate();
9139 ASSERT(temp.is_valid()); 9174 ASSERT(temp.is_valid());
9140 __ mov(temp.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset)); 9175 __ mov(temp.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
9141 __ test_b(FieldOperand(temp.reg(), Map::kBitFieldOffset), 9176 __ test_b(FieldOperand(temp.reg(), Map::kBitFieldOffset),
9142 1 << Map::kIsUndetectable); 9177 1 << Map::kIsUndetectable);
9143 destination()->false_target()->Branch(not_zero); 9178 destination()->false_target()->Branch(not_zero);
9144 __ CmpInstanceType(temp.reg(), FIRST_NONSTRING_TYPE); 9179 __ CmpInstanceType(temp.reg(), FIRST_NONSTRING_TYPE);
9145 temp.Unuse(); 9180 temp.Unuse();
9146 answer.Unuse(); 9181 answer.Unuse();
9147 destination()->Split(below); 9182 destination()->Split(below);
9148 9183
9149 } else if (check->Equals(HEAP->boolean_symbol())) { 9184 } else if (check->Equals(heap->boolean_symbol())) {
9150 __ cmp(answer.reg(), FACTORY->true_value()); 9185 __ cmp(answer.reg(), factory->true_value());
9151 destination()->true_target()->Branch(equal); 9186 destination()->true_target()->Branch(equal);
9152 __ cmp(answer.reg(), FACTORY->false_value()); 9187 __ cmp(answer.reg(), factory->false_value());
9153 answer.Unuse(); 9188 answer.Unuse();
9154 destination()->Split(equal); 9189 destination()->Split(equal);
9155 9190
9156 } else if (check->Equals(HEAP->undefined_symbol())) { 9191 } else if (check->Equals(heap->undefined_symbol())) {
9157 __ cmp(answer.reg(), FACTORY->undefined_value()); 9192 __ cmp(answer.reg(), factory->undefined_value());
9158 destination()->true_target()->Branch(equal); 9193 destination()->true_target()->Branch(equal);
9159 9194
9160 __ test(answer.reg(), Immediate(kSmiTagMask)); 9195 __ test(answer.reg(), Immediate(kSmiTagMask));
9161 destination()->false_target()->Branch(zero); 9196 destination()->false_target()->Branch(zero);
9162 9197
9163 // It can be an undetectable object. 9198 // It can be an undetectable object.
9164 frame_->Spill(answer.reg()); 9199 frame_->Spill(answer.reg());
9165 __ mov(answer.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset)); 9200 __ mov(answer.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
9166 __ test_b(FieldOperand(answer.reg(), Map::kBitFieldOffset), 9201 __ test_b(FieldOperand(answer.reg(), Map::kBitFieldOffset),
9167 1 << Map::kIsUndetectable); 9202 1 << Map::kIsUndetectable);
9168 answer.Unuse(); 9203 answer.Unuse();
9169 destination()->Split(not_zero); 9204 destination()->Split(not_zero);
9170 9205
9171 } else if (check->Equals(HEAP->function_symbol())) { 9206 } else if (check->Equals(heap->function_symbol())) {
9172 __ test(answer.reg(), Immediate(kSmiTagMask)); 9207 __ test(answer.reg(), Immediate(kSmiTagMask));
9173 destination()->false_target()->Branch(zero); 9208 destination()->false_target()->Branch(zero);
9174 frame_->Spill(answer.reg()); 9209 frame_->Spill(answer.reg());
9175 __ CmpObjectType(answer.reg(), JS_FUNCTION_TYPE, answer.reg()); 9210 __ CmpObjectType(answer.reg(), JS_FUNCTION_TYPE, answer.reg());
9176 destination()->true_target()->Branch(equal); 9211 destination()->true_target()->Branch(equal);
9177 // Regular expressions are callable so typeof == 'function'. 9212 // Regular expressions are callable so typeof == 'function'.
9178 __ CmpInstanceType(answer.reg(), JS_REGEXP_TYPE); 9213 __ CmpInstanceType(answer.reg(), JS_REGEXP_TYPE);
9179 answer.Unuse(); 9214 answer.Unuse();
9180 destination()->Split(equal); 9215 destination()->Split(equal);
9181 } else if (check->Equals(HEAP->object_symbol())) { 9216 } else if (check->Equals(heap->object_symbol())) {
9182 __ test(answer.reg(), Immediate(kSmiTagMask)); 9217 __ test(answer.reg(), Immediate(kSmiTagMask));
9183 destination()->false_target()->Branch(zero); 9218 destination()->false_target()->Branch(zero);
9184 __ cmp(answer.reg(), FACTORY->null_value()); 9219 __ cmp(answer.reg(), factory->null_value());
9185 destination()->true_target()->Branch(equal); 9220 destination()->true_target()->Branch(equal);
9186 9221
9187 Result map = allocator()->Allocate(); 9222 Result map = allocator()->Allocate();
9188 ASSERT(map.is_valid()); 9223 ASSERT(map.is_valid());
9189 // Regular expressions are typeof == 'function', not 'object'. 9224 // Regular expressions are typeof == 'function', not 'object'.
9190 __ CmpObjectType(answer.reg(), JS_REGEXP_TYPE, map.reg()); 9225 __ CmpObjectType(answer.reg(), JS_REGEXP_TYPE, map.reg());
9191 destination()->false_target()->Branch(equal); 9226 destination()->false_target()->Branch(equal);
9192 9227
9193 // It can be an undetectable object. 9228 // It can be an undetectable object.
9194 __ test_b(FieldOperand(map.reg(), Map::kBitFieldOffset), 9229 __ test_b(FieldOperand(map.reg(), Map::kBitFieldOffset),
(...skipping 22 matching lines...) Expand all
9217 if (check->value() == 2147483648.0) { // 0x80000000. 9252 if (check->value() == 2147483648.0) { // 0x80000000.
9218 Load(left); 9253 Load(left);
9219 left_already_loaded = true; 9254 left_already_loaded = true;
9220 Result lhs = frame_->Pop(); 9255 Result lhs = frame_->Pop();
9221 lhs.ToRegister(); 9256 lhs.ToRegister();
9222 __ test(lhs.reg(), Immediate(kSmiTagMask)); 9257 __ test(lhs.reg(), Immediate(kSmiTagMask));
9223 destination()->true_target()->Branch(zero); // All Smis are less. 9258 destination()->true_target()->Branch(zero); // All Smis are less.
9224 Result scratch = allocator()->Allocate(); 9259 Result scratch = allocator()->Allocate();
9225 ASSERT(scratch.is_valid()); 9260 ASSERT(scratch.is_valid());
9226 __ mov(scratch.reg(), FieldOperand(lhs.reg(), HeapObject::kMapOffset)); 9261 __ mov(scratch.reg(), FieldOperand(lhs.reg(), HeapObject::kMapOffset));
9227 __ cmp(scratch.reg(), FACTORY->heap_number_map()); 9262 __ cmp(scratch.reg(), factory->heap_number_map());
9228 JumpTarget not_a_number; 9263 JumpTarget not_a_number;
9229 not_a_number.Branch(not_equal, &lhs); 9264 not_a_number.Branch(not_equal, &lhs);
9230 __ mov(scratch.reg(), 9265 __ mov(scratch.reg(),
9231 FieldOperand(lhs.reg(), HeapNumber::kExponentOffset)); 9266 FieldOperand(lhs.reg(), HeapNumber::kExponentOffset));
9232 __ cmp(Operand(scratch.reg()), Immediate(0xfff00000)); 9267 __ cmp(Operand(scratch.reg()), Immediate(0xfff00000));
9233 not_a_number.Branch(above_equal, &lhs); // It's a negative NaN or -Inf. 9268 not_a_number.Branch(above_equal, &lhs); // It's a negative NaN or -Inf.
9234 const uint32_t borderline_exponent = 9269 const uint32_t borderline_exponent =
9235 (HeapNumber::kExponentBias + 31) << HeapNumber::kExponentShift; 9270 (HeapNumber::kExponentBias + 31) << HeapNumber::kExponentShift;
9236 __ cmp(Operand(scratch.reg()), Immediate(borderline_exponent)); 9271 __ cmp(Operand(scratch.reg()), Immediate(borderline_exponent));
9237 scratch.Unuse(); 9272 scratch.Unuse();
(...skipping 63 matching lines...) Expand 10 before | Expand all | Expand 10 after
9301 Load(right); 9336 Load(right);
9302 } 9337 }
9303 Comparison(node, cc, strict, destination()); 9338 Comparison(node, cc, strict, destination());
9304 } 9339 }
9305 9340
9306 9341
9307 void CodeGenerator::VisitCompareToNull(CompareToNull* node) { 9342 void CodeGenerator::VisitCompareToNull(CompareToNull* node) {
9308 ASSERT(!in_safe_int32_mode()); 9343 ASSERT(!in_safe_int32_mode());
9309 Comment cmnt(masm_, "[ CompareToNull"); 9344 Comment cmnt(masm_, "[ CompareToNull");
9310 9345
9346 Factory* factory = masm()->isolate()->factory();
9311 Load(node->expression()); 9347 Load(node->expression());
9312 Result operand = frame_->Pop(); 9348 Result operand = frame_->Pop();
9313 operand.ToRegister(); 9349 operand.ToRegister();
9314 __ cmp(operand.reg(), FACTORY->null_value()); 9350 __ cmp(operand.reg(), factory->null_value());
9315 if (node->is_strict()) { 9351 if (node->is_strict()) {
9316 operand.Unuse(); 9352 operand.Unuse();
9317 destination()->Split(equal); 9353 destination()->Split(equal);
9318 } else { 9354 } else {
9319 // The 'null' value is only equal to 'undefined' if using non-strict 9355 // The 'null' value is only equal to 'undefined' if using non-strict
9320 // comparisons. 9356 // comparisons.
9321 destination()->true_target()->Branch(equal); 9357 destination()->true_target()->Branch(equal);
9322 __ cmp(operand.reg(), FACTORY->undefined_value()); 9358 __ cmp(operand.reg(), factory->undefined_value());
9323 destination()->true_target()->Branch(equal); 9359 destination()->true_target()->Branch(equal);
9324 __ test(operand.reg(), Immediate(kSmiTagMask)); 9360 __ test(operand.reg(), Immediate(kSmiTagMask));
9325 destination()->false_target()->Branch(equal); 9361 destination()->false_target()->Branch(equal);
9326 9362
9327 // It can be an undetectable object. 9363 // It can be an undetectable object.
9328 // Use a scratch register in preference to spilling operand.reg(). 9364 // Use a scratch register in preference to spilling operand.reg().
9329 Result temp = allocator()->Allocate(); 9365 Result temp = allocator()->Allocate();
9330 ASSERT(temp.is_valid()); 9366 ASSERT(temp.is_valid());
9331 __ mov(temp.reg(), 9367 __ mov(temp.reg(),
9332 FieldOperand(operand.reg(), HeapObject::kMapOffset)); 9368 FieldOperand(operand.reg(), HeapObject::kMapOffset));
(...skipping 52 matching lines...) Expand 10 before | Expand all | Expand 10 after
9385 bool is_contextual_; 9421 bool is_contextual_;
9386 bool is_dont_delete_; 9422 bool is_dont_delete_;
9387 }; 9423 };
9388 9424
9389 9425
9390 void DeferredReferenceGetNamedValue::Generate() { 9426 void DeferredReferenceGetNamedValue::Generate() {
9391 if (!receiver_.is(eax)) { 9427 if (!receiver_.is(eax)) {
9392 __ mov(eax, receiver_); 9428 __ mov(eax, receiver_);
9393 } 9429 }
9394 __ Set(ecx, Immediate(name_)); 9430 __ Set(ecx, Immediate(name_));
9395 Handle<Code> ic(masm()->isolate()->builtins()->builtin( 9431 Isolate* isolate = masm()->isolate();
9432 Handle<Code> ic(isolate->builtins()->builtin(
9396 Builtins::kLoadIC_Initialize)); 9433 Builtins::kLoadIC_Initialize));
9397 RelocInfo::Mode mode = is_contextual_ 9434 RelocInfo::Mode mode = is_contextual_
9398 ? RelocInfo::CODE_TARGET_CONTEXT 9435 ? RelocInfo::CODE_TARGET_CONTEXT
9399 : RelocInfo::CODE_TARGET; 9436 : RelocInfo::CODE_TARGET;
9400 __ call(ic, mode); 9437 __ call(ic, mode);
9401 // The call must be followed by: 9438 // The call must be followed by:
9402 // - a test eax instruction to indicate that the inobject property 9439 // - a test eax instruction to indicate that the inobject property
9403 // case was inlined. 9440 // case was inlined.
9404 // - a mov ecx or mov edx instruction to indicate that the 9441 // - a mov ecx or mov edx instruction to indicate that the
9405 // contextual property load was inlined. 9442 // contextual property load was inlined.
9406 // 9443 //
9407 // Store the delta to the map check instruction here in the test 9444 // Store the delta to the map check instruction here in the test
9408 // instruction. Use masm_-> instead of the __ macro since the 9445 // instruction. Use masm_-> instead of the __ macro since the
9409 // latter can't return a value. 9446 // latter can't return a value.
9410 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site()); 9447 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
9411 // Here we use masm_-> instead of the __ macro because this is the 9448 // Here we use masm_-> instead of the __ macro because this is the
9412 // instruction that gets patched and coverage code gets in the way. 9449 // instruction that gets patched and coverage code gets in the way.
9413 Counters* counters = masm()->isolate()->counters(); 9450 Counters* counters = isolate->counters();
9414 if (is_contextual_) { 9451 if (is_contextual_) {
9415 masm_->mov(is_dont_delete_ ? edx : ecx, -delta_to_patch_site); 9452 masm_->mov(is_dont_delete_ ? edx : ecx, -delta_to_patch_site);
9416 __ IncrementCounter(counters->named_load_global_inline_miss(), 1); 9453 __ IncrementCounter(counters->named_load_global_inline_miss(), 1);
9417 if (is_dont_delete_) { 9454 if (is_dont_delete_) {
9418 __ IncrementCounter(counters->dont_delete_hint_miss(), 1); 9455 __ IncrementCounter(counters->dont_delete_hint_miss(), 1);
9419 } 9456 }
9420 } else { 9457 } else {
9421 masm_->test(eax, Immediate(-delta_to_patch_site)); 9458 masm_->test(eax, Immediate(-delta_to_patch_site));
9422 __ IncrementCounter(counters->named_load_inline_miss(), 1); 9459 __ IncrementCounter(counters->named_load_inline_miss(), 1);
9423 } 9460 }
(...skipping 17 matching lines...) Expand all
9441 9478
9442 private: 9479 private:
9443 Label patch_site_; 9480 Label patch_site_;
9444 Register dst_; 9481 Register dst_;
9445 Register receiver_; 9482 Register receiver_;
9446 Register key_; 9483 Register key_;
9447 }; 9484 };
9448 9485
9449 9486
9450 void DeferredReferenceGetKeyedValue::Generate() { 9487 void DeferredReferenceGetKeyedValue::Generate() {
9488 Isolate* isolate = masm()->isolate();
9451 if (!receiver_.is(eax)) { 9489 if (!receiver_.is(eax)) {
9452 // Register eax is available for key. 9490 // Register eax is available for key.
9453 if (!key_.is(eax)) { 9491 if (!key_.is(eax)) {
9454 __ mov(eax, key_); 9492 __ mov(eax, key_);
9455 } 9493 }
9456 if (!receiver_.is(edx)) { 9494 if (!receiver_.is(edx)) {
9457 __ mov(edx, receiver_); 9495 __ mov(edx, receiver_);
9458 } 9496 }
9459 } else if (!key_.is(edx)) { 9497 } else if (!key_.is(edx)) {
9460 // Register edx is available for receiver. 9498 // Register edx is available for receiver.
9461 if (!receiver_.is(edx)) { 9499 if (!receiver_.is(edx)) {
9462 __ mov(edx, receiver_); 9500 __ mov(edx, receiver_);
9463 } 9501 }
9464 if (!key_.is(eax)) { 9502 if (!key_.is(eax)) {
9465 __ mov(eax, key_); 9503 __ mov(eax, key_);
9466 } 9504 }
9467 } else { 9505 } else {
9468 __ xchg(edx, eax); 9506 __ xchg(edx, eax);
9469 } 9507 }
9470 // Calculate the delta from the IC call instruction to the map check 9508 // Calculate the delta from the IC call instruction to the map check
9471 // cmp instruction in the inlined version. This delta is stored in 9509 // cmp instruction in the inlined version. This delta is stored in
9472 // a test(eax, delta) instruction after the call so that we can find 9510 // a test(eax, delta) instruction after the call so that we can find
9473 // it in the IC initialization code and patch the cmp instruction. 9511 // it in the IC initialization code and patch the cmp instruction.
9474 // This means that we cannot allow test instructions after calls to 9512 // This means that we cannot allow test instructions after calls to
9475 // KeyedLoadIC stubs in other places. 9513 // KeyedLoadIC stubs in other places.
9476 Handle<Code> ic(masm()->isolate()->builtins()->builtin( 9514 Handle<Code> ic(isolate->builtins()->builtin(
9477 Builtins::kKeyedLoadIC_Initialize)); 9515 Builtins::kKeyedLoadIC_Initialize));
9478 __ call(ic, RelocInfo::CODE_TARGET); 9516 __ call(ic, RelocInfo::CODE_TARGET);
9479 // The delta from the start of the map-compare instruction to the 9517 // The delta from the start of the map-compare instruction to the
9480 // test instruction. We use masm_-> directly here instead of the __ 9518 // test instruction. We use masm_-> directly here instead of the __
9481 // macro because the macro sometimes uses macro expansion to turn 9519 // macro because the macro sometimes uses macro expansion to turn
9482 // into something that can't return a value. This is encountered 9520 // into something that can't return a value. This is encountered
9483 // when doing generated code coverage tests. 9521 // when doing generated code coverage tests.
9484 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site()); 9522 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
9485 // Here we use masm_-> instead of the __ macro because this is the 9523 // Here we use masm_-> instead of the __ macro because this is the
9486 // instruction that gets patched and coverage code gets in the way. 9524 // instruction that gets patched and coverage code gets in the way.
9487 masm_->test(eax, Immediate(-delta_to_patch_site)); 9525 masm_->test(eax, Immediate(-delta_to_patch_site));
9488 Counters* counters = masm()->isolate()->counters(); 9526 Counters* counters = isolate->counters();
9489 __ IncrementCounter(counters->keyed_load_inline_miss(), 1); 9527 __ IncrementCounter(counters->keyed_load_inline_miss(), 1);
9490 9528
9491 if (!dst_.is(eax)) __ mov(dst_, eax); 9529 if (!dst_.is(eax)) __ mov(dst_, eax);
9492 } 9530 }
9493 9531
9494 9532
9495 class DeferredReferenceSetKeyedValue: public DeferredCode { 9533 class DeferredReferenceSetKeyedValue: public DeferredCode {
9496 public: 9534 public:
9497 DeferredReferenceSetKeyedValue(Register value, 9535 DeferredReferenceSetKeyedValue(Register value,
9498 Register key, 9536 Register key,
(...skipping 16 matching lines...) Expand all
9515 Register value_; 9553 Register value_;
9516 Register key_; 9554 Register key_;
9517 Register receiver_; 9555 Register receiver_;
9518 Register scratch_; 9556 Register scratch_;
9519 Label patch_site_; 9557 Label patch_site_;
9520 StrictModeFlag strict_mode_; 9558 StrictModeFlag strict_mode_;
9521 }; 9559 };
9522 9560
9523 9561
9524 void DeferredReferenceSetKeyedValue::Generate() { 9562 void DeferredReferenceSetKeyedValue::Generate() {
9525 Counters* counters = masm()->isolate()->counters(); 9563 Isolate* isolate = masm()->isolate();
9564 Counters* counters = isolate->counters();
9526 __ IncrementCounter(counters->keyed_store_inline_miss(), 1); 9565 __ IncrementCounter(counters->keyed_store_inline_miss(), 1);
9527 // Move value_ to eax, key_ to ecx, and receiver_ to edx. 9566 // Move value_ to eax, key_ to ecx, and receiver_ to edx.
9528 Register old_value = value_; 9567 Register old_value = value_;
9529 9568
9530 // First, move value to eax. 9569 // First, move value to eax.
9531 if (!value_.is(eax)) { 9570 if (!value_.is(eax)) {
9532 if (key_.is(eax)) { 9571 if (key_.is(eax)) {
9533 // Move key_ out of eax, preferably to ecx. 9572 // Move key_ out of eax, preferably to ecx.
9534 if (!value_.is(ecx) && !receiver_.is(ecx)) { 9573 if (!value_.is(ecx) && !receiver_.is(ecx)) {
9535 __ mov(ecx, key_); 9574 __ mov(ecx, key_);
(...skipping 34 matching lines...) Expand 10 before | Expand all | Expand 10 after
9570 } 9609 }
9571 } 9610 }
9572 } else { // Key is not in edx or ecx. 9611 } else { // Key is not in edx or ecx.
9573 if (!receiver_.is(edx)) { 9612 if (!receiver_.is(edx)) {
9574 __ mov(edx, receiver_); 9613 __ mov(edx, receiver_);
9575 } 9614 }
9576 __ mov(ecx, key_); 9615 __ mov(ecx, key_);
9577 } 9616 }
9578 9617
9579 // Call the IC stub. 9618 // Call the IC stub.
9580 Handle<Code> ic(masm()->isolate()->builtins()->builtin( 9619 Handle<Code> ic(isolate->builtins()->builtin(
9581 (strict_mode_ == kStrictMode) ? Builtins::kKeyedStoreIC_Initialize_Strict 9620 (strict_mode_ == kStrictMode) ? Builtins::kKeyedStoreIC_Initialize_Strict
9582 : Builtins::kKeyedStoreIC_Initialize)); 9621 : Builtins::kKeyedStoreIC_Initialize));
9583 __ call(ic, RelocInfo::CODE_TARGET); 9622 __ call(ic, RelocInfo::CODE_TARGET);
9584 // The delta from the start of the map-compare instruction to the 9623 // The delta from the start of the map-compare instruction to the
9585 // test instruction. We use masm_-> directly here instead of the 9624 // test instruction. We use masm_-> directly here instead of the
9586 // __ macro because the macro sometimes uses macro expansion to turn 9625 // __ macro because the macro sometimes uses macro expansion to turn
9587 // into something that can't return a value. This is encountered 9626 // into something that can't return a value. This is encountered
9588 // when doing generated code coverage tests. 9627 // when doing generated code coverage tests.
9589 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site()); 9628 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
9590 // Here we use masm_-> instead of the __ macro because this is the 9629 // Here we use masm_-> instead of the __ macro because this is the
(...skipping 243 matching lines...) Expand 10 before | Expand all | Expand 10 after
9834 // against an invalid map. In the inline cache code, we patch the map 9873 // against an invalid map. In the inline cache code, we patch the map
9835 // check if appropriate. 9874 // check if appropriate.
9836 if (loop_nesting() > 0) { 9875 if (loop_nesting() > 0) {
9837 Comment cmnt(masm_, "[ Inlined load from keyed Property"); 9876 Comment cmnt(masm_, "[ Inlined load from keyed Property");
9838 9877
9839 // Use a fresh temporary to load the elements without destroying 9878 // Use a fresh temporary to load the elements without destroying
9840 // the receiver which is needed for the deferred slow case. 9879 // the receiver which is needed for the deferred slow case.
9841 Result elements = allocator()->Allocate(); 9880 Result elements = allocator()->Allocate();
9842 ASSERT(elements.is_valid()); 9881 ASSERT(elements.is_valid());
9843 9882
9883 Isolate* isolate = masm()->isolate();
9884 Factory* factory = isolate->factory();
9885
9844 Result key = frame_->Pop(); 9886 Result key = frame_->Pop();
9845 Result receiver = frame_->Pop(); 9887 Result receiver = frame_->Pop();
9846 key.ToRegister(); 9888 key.ToRegister();
9847 receiver.ToRegister(); 9889 receiver.ToRegister();
9848 9890
9849 // If key and receiver are shared registers on the frame, their values will 9891 // If key and receiver are shared registers on the frame, their values will
9850 // be automatically saved and restored when going to deferred code. 9892 // be automatically saved and restored when going to deferred code.
9851 // The result is in elements, which is guaranteed non-shared. 9893 // The result is in elements, which is guaranteed non-shared.
9852 DeferredReferenceGetKeyedValue* deferred = 9894 DeferredReferenceGetKeyedValue* deferred =
9853 new DeferredReferenceGetKeyedValue(elements.reg(), 9895 new DeferredReferenceGetKeyedValue(elements.reg(),
9854 receiver.reg(), 9896 receiver.reg(),
9855 key.reg()); 9897 key.reg());
9856 9898
9857 __ test(receiver.reg(), Immediate(kSmiTagMask)); 9899 __ test(receiver.reg(), Immediate(kSmiTagMask));
9858 deferred->Branch(zero); 9900 deferred->Branch(zero);
9859 9901
9860 // Check that the receiver has the expected map. 9902 // Check that the receiver has the expected map.
9861 // Initially, use an invalid map. The map is patched in the IC 9903 // Initially, use an invalid map. The map is patched in the IC
9862 // initialization code. 9904 // initialization code.
9863 __ bind(deferred->patch_site()); 9905 __ bind(deferred->patch_site());
9864 // Use masm-> here instead of the double underscore macro since extra 9906 // Use masm-> here instead of the double underscore macro since extra
9865 // coverage code can interfere with the patching. 9907 // coverage code can interfere with the patching.
9866 masm_->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset), 9908 masm_->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
9867 Immediate(FACTORY->null_value())); 9909 Immediate(factory->null_value()));
9868 deferred->Branch(not_equal); 9910 deferred->Branch(not_equal);
9869 9911
9870 // Check that the key is a smi. 9912 // Check that the key is a smi.
9871 if (!key.is_smi()) { 9913 if (!key.is_smi()) {
9872 __ test(key.reg(), Immediate(kSmiTagMask)); 9914 __ test(key.reg(), Immediate(kSmiTagMask));
9873 deferred->Branch(not_zero); 9915 deferred->Branch(not_zero);
9874 } else { 9916 } else {
9875 if (FLAG_debug_code) __ AbortIfNotSmi(key.reg()); 9917 if (FLAG_debug_code) __ AbortIfNotSmi(key.reg());
9876 } 9918 }
9877 9919
9878 // Get the elements array from the receiver. 9920 // Get the elements array from the receiver.
9879 __ mov(elements.reg(), 9921 __ mov(elements.reg(),
9880 FieldOperand(receiver.reg(), JSObject::kElementsOffset)); 9922 FieldOperand(receiver.reg(), JSObject::kElementsOffset));
9881 __ AssertFastElements(elements.reg()); 9923 __ AssertFastElements(elements.reg());
9882 9924
9883 // Check that the key is within bounds. 9925 // Check that the key is within bounds.
9884 __ cmp(key.reg(), 9926 __ cmp(key.reg(),
9885 FieldOperand(elements.reg(), FixedArray::kLengthOffset)); 9927 FieldOperand(elements.reg(), FixedArray::kLengthOffset));
9886 deferred->Branch(above_equal); 9928 deferred->Branch(above_equal);
9887 9929
9888 // Load and check that the result is not the hole. 9930 // Load and check that the result is not the hole.
9889 // Key holds a smi. 9931 // Key holds a smi.
9890 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1); 9932 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
9891 __ mov(elements.reg(), 9933 __ mov(elements.reg(),
9892 FieldOperand(elements.reg(), 9934 FieldOperand(elements.reg(),
9893 key.reg(), 9935 key.reg(),
9894 times_2, 9936 times_2,
9895 FixedArray::kHeaderSize)); 9937 FixedArray::kHeaderSize));
9896 result = elements; 9938 result = elements;
9897 __ cmp(Operand(result.reg()), Immediate(FACTORY->the_hole_value())); 9939 __ cmp(Operand(result.reg()), Immediate(factory->the_hole_value()));
9898 deferred->Branch(equal); 9940 deferred->Branch(equal);
9899 __ IncrementCounter(masm_->isolate()->counters()->keyed_load_inline(), 1); 9941 __ IncrementCounter(isolate->counters()->keyed_load_inline(), 1);
9900 9942
9901 deferred->BindExit(); 9943 deferred->BindExit();
9902 } else { 9944 } else {
9903 Comment cmnt(masm_, "[ Load from keyed Property"); 9945 Comment cmnt(masm_, "[ Load from keyed Property");
9904 result = frame_->CallKeyedLoadIC(RelocInfo::CODE_TARGET); 9946 result = frame_->CallKeyedLoadIC(RelocInfo::CODE_TARGET);
9905 // Make sure that we do not have a test instruction after the 9947 // Make sure that we do not have a test instruction after the
9906 // call. A test instruction after the call is used to 9948 // call. A test instruction after the call is used to
9907 // indicate that we have generated an inline version of the 9949 // indicate that we have generated an inline version of the
9908 // keyed load. The explicit nop instruction is here because 9950 // keyed load. The explicit nop instruction is here because
9909 // the push that follows might be peep-hole optimized away. 9951 // the push that follows might be peep-hole optimized away.
(...skipping 279 matching lines...) Expand 10 before | Expand all | Expand 10 after
10189 // esp[8]: Second argument, source pointer. 10231 // esp[8]: Second argument, source pointer.
10190 // esp[4]: First argument, destination pointer. 10232 // esp[4]: First argument, destination pointer.
10191 // esp[0]: return address 10233 // esp[0]: return address
10192 10234
10193 const int kDestinationOffset = 1 * kPointerSize; 10235 const int kDestinationOffset = 1 * kPointerSize;
10194 const int kSourceOffset = 2 * kPointerSize; 10236 const int kSourceOffset = 2 * kPointerSize;
10195 const int kSizeOffset = 3 * kPointerSize; 10237 const int kSizeOffset = 3 * kPointerSize;
10196 10238
10197 int stack_offset = 0; // Update if we change the stack height. 10239 int stack_offset = 0; // Update if we change the stack height.
10198 10240
10241 Isolate* isolate = masm.isolate();
10242 Counters* counters = isolate->counters();
10243
10199 if (FLAG_debug_code) { 10244 if (FLAG_debug_code) {
10200 __ cmp(Operand(esp, kSizeOffset + stack_offset), 10245 __ cmp(Operand(esp, kSizeOffset + stack_offset),
10201 Immediate(kMinComplexMemCopy)); 10246 Immediate(kMinComplexMemCopy));
10202 Label ok; 10247 Label ok;
10203 __ j(greater_equal, &ok); 10248 __ j(greater_equal, &ok);
10204 __ int3(); 10249 __ int3();
10205 __ bind(&ok); 10250 __ bind(&ok);
10206 } 10251 }
10207 if (masm.isolate()->cpu_features()->IsSupported(SSE2)) { 10252 if (isolate->cpu_features()->IsSupported(SSE2)) {
10208 CpuFeatures::Scope enable(SSE2); 10253 CpuFeatures::Scope enable(SSE2);
10209 __ push(edi); 10254 __ push(edi);
10210 __ push(esi); 10255 __ push(esi);
10211 stack_offset += 2 * kPointerSize; 10256 stack_offset += 2 * kPointerSize;
10212 Register dst = edi; 10257 Register dst = edi;
10213 Register src = esi; 10258 Register src = esi;
10214 Register count = ecx; 10259 Register count = ecx;
10215 __ mov(dst, Operand(esp, stack_offset + kDestinationOffset)); 10260 __ mov(dst, Operand(esp, stack_offset + kDestinationOffset));
10216 __ mov(src, Operand(esp, stack_offset + kSourceOffset)); 10261 __ mov(src, Operand(esp, stack_offset + kSourceOffset));
10217 __ mov(count, Operand(esp, stack_offset + kSizeOffset)); 10262 __ mov(count, Operand(esp, stack_offset + kSizeOffset));
10218 10263
10219 10264
10220 __ movdqu(xmm0, Operand(src, 0)); 10265 __ movdqu(xmm0, Operand(src, 0));
10221 __ movdqu(Operand(dst, 0), xmm0); 10266 __ movdqu(Operand(dst, 0), xmm0);
10222 __ mov(edx, dst); 10267 __ mov(edx, dst);
10223 __ and_(edx, 0xF); 10268 __ and_(edx, 0xF);
10224 __ neg(edx); 10269 __ neg(edx);
10225 __ add(Operand(edx), Immediate(16)); 10270 __ add(Operand(edx), Immediate(16));
10226 __ add(dst, Operand(edx)); 10271 __ add(dst, Operand(edx));
10227 __ add(src, Operand(edx)); 10272 __ add(src, Operand(edx));
10228 __ sub(Operand(count), edx); 10273 __ sub(Operand(count), edx);
10229 10274
10230 // edi is now aligned. Check if esi is also aligned. 10275 // edi is now aligned. Check if esi is also aligned.
10231 Label unaligned_source; 10276 Label unaligned_source;
10232 __ test(Operand(src), Immediate(0x0F)); 10277 __ test(Operand(src), Immediate(0x0F));
10233 __ j(not_zero, &unaligned_source); 10278 __ j(not_zero, &unaligned_source);
10234 { 10279 {
10235 __ IncrementCounter(masm.isolate()->counters()->memcopy_aligned(), 1); 10280 __ IncrementCounter(counters->memcopy_aligned(), 1);
10236 // Copy loop for aligned source and destination. 10281 // Copy loop for aligned source and destination.
10237 __ mov(edx, count); 10282 __ mov(edx, count);
10238 Register loop_count = ecx; 10283 Register loop_count = ecx;
10239 Register count = edx; 10284 Register count = edx;
10240 __ shr(loop_count, 5); 10285 __ shr(loop_count, 5);
10241 { 10286 {
10242 // Main copy loop. 10287 // Main copy loop.
10243 Label loop; 10288 Label loop;
10244 __ bind(&loop); 10289 __ bind(&loop);
10245 __ prefetch(Operand(src, 0x20), 1); 10290 __ prefetch(Operand(src, 0x20), 1);
(...skipping 27 matching lines...) Expand all
10273 __ mov(eax, Operand(esp, stack_offset + kDestinationOffset)); 10318 __ mov(eax, Operand(esp, stack_offset + kDestinationOffset));
10274 __ pop(esi); 10319 __ pop(esi);
10275 __ pop(edi); 10320 __ pop(edi);
10276 __ ret(0); 10321 __ ret(0);
10277 } 10322 }
10278 __ Align(16); 10323 __ Align(16);
10279 { 10324 {
10280 // Copy loop for unaligned source and aligned destination. 10325 // Copy loop for unaligned source and aligned destination.
10281 // If source is not aligned, we can't read it as efficiently. 10326 // If source is not aligned, we can't read it as efficiently.
10282 __ bind(&unaligned_source); 10327 __ bind(&unaligned_source);
10283 __ IncrementCounter(masm.isolate()->counters()->memcopy_unaligned(), 1); 10328 __ IncrementCounter(counters->memcopy_unaligned(), 1);
10284 __ mov(edx, ecx); 10329 __ mov(edx, ecx);
10285 Register loop_count = ecx; 10330 Register loop_count = ecx;
10286 Register count = edx; 10331 Register count = edx;
10287 __ shr(loop_count, 5); 10332 __ shr(loop_count, 5);
10288 { 10333 {
10289 // Main copy loop 10334 // Main copy loop
10290 Label loop; 10335 Label loop;
10291 __ bind(&loop); 10336 __ bind(&loop);
10292 __ prefetch(Operand(src, 0x20), 1); 10337 __ prefetch(Operand(src, 0x20), 1);
10293 __ movdqu(xmm0, Operand(src, 0x00)); 10338 __ movdqu(xmm0, Operand(src, 0x00));
(...skipping 23 matching lines...) Expand all
10317 __ movdqu(xmm0, Operand(src, count, times_1, -0x10)); 10362 __ movdqu(xmm0, Operand(src, count, times_1, -0x10));
10318 __ movdqu(Operand(dst, count, times_1, -0x10), xmm0); 10363 __ movdqu(Operand(dst, count, times_1, -0x10), xmm0);
10319 10364
10320 __ mov(eax, Operand(esp, stack_offset + kDestinationOffset)); 10365 __ mov(eax, Operand(esp, stack_offset + kDestinationOffset));
10321 __ pop(esi); 10366 __ pop(esi);
10322 __ pop(edi); 10367 __ pop(edi);
10323 __ ret(0); 10368 __ ret(0);
10324 } 10369 }
10325 10370
10326 } else { 10371 } else {
10327 __ IncrementCounter(masm.isolate()->counters()->memcopy_noxmm(), 1); 10372 __ IncrementCounter(counters->memcopy_noxmm(), 1);
10328 // SSE2 not supported. Unlikely to happen in practice. 10373 // SSE2 not supported. Unlikely to happen in practice.
10329 __ push(edi); 10374 __ push(edi);
10330 __ push(esi); 10375 __ push(esi);
10331 stack_offset += 2 * kPointerSize; 10376 stack_offset += 2 * kPointerSize;
10332 __ cld(); 10377 __ cld();
10333 Register dst = edi; 10378 Register dst = edi;
10334 Register src = esi; 10379 Register src = esi;
10335 Register count = ecx; 10380 Register count = ecx;
10336 __ mov(dst, Operand(esp, stack_offset + kDestinationOffset)); 10381 __ mov(dst, Operand(esp, stack_offset + kDestinationOffset));
10337 __ mov(src, Operand(esp, stack_offset + kSourceOffset)); 10382 __ mov(src, Operand(esp, stack_offset + kSourceOffset));
(...skipping 40 matching lines...) Expand 10 before | Expand all | Expand 10 after
10378 memcpy(chunk->GetStartAddress(), desc.buffer, desc.instr_size); 10423 memcpy(chunk->GetStartAddress(), desc.buffer, desc.instr_size);
10379 CPU::FlushICache(chunk->GetStartAddress(), desc.instr_size); 10424 CPU::FlushICache(chunk->GetStartAddress(), desc.instr_size);
10380 return FUNCTION_CAST<MemCopyFunction>(chunk->GetStartAddress()); 10425 return FUNCTION_CAST<MemCopyFunction>(chunk->GetStartAddress());
10381 } 10426 }
10382 10427
10383 #undef __ 10428 #undef __
10384 10429
10385 } } // namespace v8::internal 10430 } } // namespace v8::internal
10386 10431
10387 #endif // V8_TARGET_ARCH_IA32 10432 #endif // V8_TARGET_ARCH_IA32
OLDNEW
« no previous file with comments | « no previous file | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698