Chromium Code Reviews

Side by Side Diff: src/ia32/fast-codegen-ia32.cc

Issue 650026: Reorder code in fast-codegen-ia32.cc. (Closed)
Patch Set: Created 10 years, 10 months ago
1 1 // Copyright 2010 the V8 project authors. All rights reserved.
2 2 // Redistribution and use in source and binary forms, with or without
3 3 // modification, are permitted provided that the following conditions are
4 4 // met:
5 5 //
6 6 // * Redistributions of source code must retain the above copyright
7 7 // notice, this list of conditions and the following disclaimer.
8 8 // * Redistributions in binary form must reproduce the above
9 9 // copyright notice, this list of conditions and the following
10 10 // disclaimer in the documentation and/or other materials provided
(...skipping 449 matching lines...)
460 460 if (cgen.HasStackOverflow()) {
461 461 ASSERT(!Top::has_pending_exception());
462 462 return Handle<Code>::null();
463 463 }
464 464
465 465 Code::Flags flags = Code::ComputeFlags(Code::FUNCTION, NOT_IN_LOOP);
466 466 return CodeGenerator::MakeCodeEpilogue(&masm, flags, info);
467 467 }
468 468
469 469
470 Register FastCodeGenerator::accumulator0() { return eax; }
471 Register FastCodeGenerator::accumulator1() { return edx; }
472 Register FastCodeGenerator::scratch0() { return ecx; }
473 Register FastCodeGenerator::scratch1() { return edi; }
474 Register FastCodeGenerator::receiver_reg() { return ebx; }
475 Register FastCodeGenerator::context_reg() { return esi; }
476
477
478 void FastCodeGenerator::EmitLoadReceiver() {
479 // Offset 2 is due to return address and saved frame pointer.
480 int index = 2 + function()->scope()->num_parameters();
481 __ mov(receiver_reg(), Operand(ebp, index * kPointerSize));
482 }
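
A minimal sketch of the offset arithmetic above (editor's illustration, not part of the patch), assuming V8's ia32 JS calling convention in which the caller pushes the receiver before the arguments; ReceiverOffset is a hypothetical helper:

    // Frame layout seen by EmitLoadReceiver (kPointerSize == 4 on ia32):
    //   ebp                               : caller's saved frame pointer
    //   ebp + 1 * kPointerSize            : return address
    //   ebp + 2 .. (1 + n) * kPointerSize : the n parameters
    //   ebp + (2 + n) * kPointerSize      : receiver ("this")
    static const int kPointerSize = 4;
    int ReceiverOffset(int num_parameters) {
      return (2 + num_parameters) * kPointerSize;  // index = 2 + n, as above
    }
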
483
484
485 void FastCodeGenerator::EmitGlobalVariableLoad(Handle<Object> cell) {
486 ASSERT(!destination().is(no_reg));
487 ASSERT(cell->IsJSGlobalPropertyCell());
488
489 __ mov(destination(), Immediate(cell));
490 __ mov(destination(),
491 FieldOperand(destination(), JSGlobalPropertyCell::kValueOffset));
492 if (FLAG_debug_code) {
493 __ cmp(destination(), Factory::the_hole_value());
494 __ Check(not_equal, "DontDelete cells can't contain the hole");
495 }
496
497 // The loaded value is not known to be a smi.
498 clear_as_smi(destination());
499 }
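
The two movs above materialize the property cell embedded at compile time and then read its value slot. A rough C++ analogue (editor's sketch; Cell and LoadGlobalCell are stand-ins, not the real JSGlobalPropertyCell layout):

    #include <cassert>

    struct Cell { void* value; };  // stand-in for the cell object the code reads
    void* LoadGlobalCell(Cell* cell, void* the_hole, bool debug_code) {
      void* result = cell->value;                  // FieldOperand(cell, kValueOffset)
      if (debug_code) assert(result != the_hole);  // mirrors the FLAG_debug_code check
      return result;
    }
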
500
501
502 void FastCodeGenerator::EmitThisPropertyStore(Handle<String> name) {
503 LookupResult lookup;
504 info()->receiver()->Lookup(*name, &lookup);
505
506 ASSERT(lookup.holder() == *info()->receiver());
507 ASSERT(lookup.type() == FIELD);
508 Handle<Map> map(Handle<HeapObject>::cast(info()->receiver())->map());
509 int index = lookup.GetFieldIndex() - map->inobject_properties();
510 int offset = index * kPointerSize;
511
512 // We will emit the write barrier unless the stored value is statically
513 // known to be a smi.
514 bool needs_write_barrier = !is_smi(accumulator0());
515
516 // Perform the store. Negative offsets are inobject properties.
517 if (offset < 0) {
518 offset += map->instance_size();
519 __ mov(FieldOperand(receiver_reg(), offset), accumulator0());
520 if (needs_write_barrier) {
521 // Preserve receiver from write barrier.
522 __ mov(scratch0(), receiver_reg());
523 }
524 } else {
525 offset += FixedArray::kHeaderSize;
526 __ mov(scratch0(),
527 FieldOperand(receiver_reg(), JSObject::kPropertiesOffset));
528 __ mov(FieldOperand(scratch0(), offset), accumulator0());
529 }
530
531 if (needs_write_barrier) {
532 if (destination().is(no_reg)) {
533 // After RecordWrite accumulator0 is only accidentally a smi, but it is
534 // already marked as not known to be one.
535 __ RecordWrite(scratch0(), offset, accumulator0(), scratch1());
536 } else {
537 // Copy the value to the other accumulator to preserve a copy from the
538 // write barrier. One of the accumulators is available as a scratch
539 // register. Neither is a smi.
540 __ mov(accumulator1(), accumulator0());
541 clear_as_smi(accumulator1());
542 Register value_scratch = other_accumulator(destination());
543 __ RecordWrite(scratch0(), offset, value_scratch, scratch1());
544 }
545 } else if (destination().is(accumulator1())) {
546 __ mov(accumulator1(), accumulator0());
547 // Is a smi because we do not need the write barrier.
548 set_as_smi(accumulator1());
549 }
550 }
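
A worked example of the offset computation shared by this store and the load below (editor's sketch; FieldOffset and the map numbers are hypothetical, pointers assumed 32-bit):

    const int kPointerSize = 4;           // ia32
    const int kFixedArrayHeaderSize = 8;  // illustrative properties-array header

    // Mirrors the logic above: negative indices address in-object slots,
    // non-negative ones address the out-of-object properties array.
    int FieldOffset(int field_index, int inobject_properties, int instance_size) {
      int offset = (field_index - inobject_properties) * kPointerSize;
      return offset < 0 ? offset + instance_size          // in-object slot
                        : offset + kFixedArrayHeaderSize; // properties array slot
    }

    // With 4 in-object slots and instance_size 28 (3 header words + 4 slots):
    //   FieldOffset(1, 4, 28) == 16  -> second in-object slot, via receiver_reg()
    //   FieldOffset(5, 4, 28) == 12  -> element 1 of the properties array, via scratch0()
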
551
552
553 void FastCodeGenerator::EmitThisPropertyLoad(Handle<String> name) {
554 ASSERT(!destination().is(no_reg));
555 LookupResult lookup;
556 info()->receiver()->Lookup(*name, &lookup);
557
558 ASSERT(lookup.holder() == *info()->receiver());
559 ASSERT(lookup.type() == FIELD);
560 Handle<Map> map(Handle<HeapObject>::cast(info()->receiver())->map());
561 int index = lookup.GetFieldIndex() - map->inobject_properties();
562 int offset = index * kPointerSize;
563
564 // Perform the load. Negative offsets are inobject properties.
565 if (offset < 0) {
566 offset += map->instance_size();
567 __ mov(destination(), FieldOperand(receiver_reg(), offset));
568 } else {
569 offset += FixedArray::kHeaderSize;
570 __ mov(scratch0(),
571 FieldOperand(receiver_reg(), JSObject::kPropertiesOffset));
572 __ mov(destination(), FieldOperand(scratch0(), offset));
573 }
574
575 // The loaded value is not known to be a smi.
576 clear_as_smi(destination());
577 }
578
579
580 void FastCodeGenerator::EmitBitOr() {
581 if (is_smi(accumulator0()) && is_smi(accumulator1())) {
582 // If both operands are known to be a smi then there is no need to check
583 // the operands or result. There is no need to perform the operation in
584 // an effect context.
585 if (!destination().is(no_reg)) {
586 // Leave the result in the destination register. Bitwise or is
587 // commutative.
588 __ or_(destination(), Operand(other_accumulator(destination())));
589 }
590 } else if (destination().is(no_reg)) {
591 // Result is not needed but do not clobber the operands in case of
592 // bailout.
593 __ mov(scratch0(), accumulator1());
594 __ or_(scratch0(), Operand(accumulator0()));
595 __ test(scratch0(), Immediate(kSmiTagMask));
596 __ j(not_zero, bailout(), not_taken);
597 } else {
598 // Preserve the destination operand in a scratch register in case of
599 // bailout.
600 __ mov(scratch0(), destination());
601 __ or_(destination(), Operand(other_accumulator(destination())));
602 __ test(destination(), Immediate(kSmiTagMask));
603 __ j(not_zero, bailout(), not_taken);
604 }
605
606 // If we didn't bailout, the result (in fact, both inputs too) is known to
607 // be a smi.
608 set_as_smi(accumulator0());
609 set_as_smi(accumulator1());
610 }
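
Why a single kSmiTagMask test after the or_ covers both operands and the result (editor's note, assuming V8's ia32 smi encoding where a smi has a clear low bit, i.e. kSmiTagMask == 1; IsSmi and CheckBitOr are illustrative helpers):

    #include <cassert>
    #include <cstdint>

    const uint32_t kSmiTagMask = 1;  // low bit: 0 = smi, 1 = tagged heap pointer
    bool IsSmi(uint32_t v) { return (v & kSmiTagMask) == 0; }

    void CheckBitOr(uint32_t a, uint32_t b) {
      // The low bit of (a | b) is clear exactly when both low bits are clear,
      // so testing the result checks both inputs and the result at once.
      assert(IsSmi(a | b) == (IsSmi(a) && IsSmi(b)));
    }
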
611
612
613 void FastCodeGenerator::Generate(CompilationInfo* compilation_info) {
614 ASSERT(info_ == NULL);
615 info_ = compilation_info;
616
617 // Save the caller's frame pointer and set up our own.
618 Comment prologue_cmnt(masm(), ";; Prologue");
619 __ push(ebp);
620 __ mov(ebp, esp);
621 __ push(esi); // Context.
622 __ push(edi); // Closure.
623 // Note that we keep a live register reference to esi (context) at this
624 // point.
625
626 // Receiver (this) is allocated to a fixed register.
627 if (info()->has_this_properties()) {
628 Comment cmnt(masm(), ";; MapCheck(this)");
629 if (FLAG_print_ir) {
630 PrintF("#: MapCheck(this)\n");
631 }
632 ASSERT(info()->has_receiver() && info()->receiver()->IsHeapObject());
633 Handle<HeapObject> object = Handle<HeapObject>::cast(info()->receiver());
634 Handle<Map> map(object->map());
635 EmitLoadReceiver();
636 __ CheckMap(receiver_reg(), map, bailout(), false);
637 }
638
639 // If there is a global variable access check if the global object is the
640 // same as at lazy-compilation time.
641 if (info()->has_globals()) {
642 Comment cmnt(masm(), ";; MapCheck(GLOBAL)");
643 if (FLAG_print_ir) {
644 PrintF("#: MapCheck(GLOBAL)\n");
645 }
646 ASSERT(info()->has_global_object());
647 Handle<Map> map(info()->global_object()->map());
648 __ mov(scratch0(), CodeGenerator::GlobalObject());
649 __ CheckMap(scratch0(), map, bailout(), true);
650 }
651
652 VisitStatements(function()->body());
653
654 Comment return_cmnt(masm(), ";; Return(<undefined>)");
655 if (FLAG_print_ir) {
656 PrintF("#: Return(<undefined>)\n");
657 }
658 __ mov(eax, Factory::undefined_value());
659 __ mov(esp, ebp);
660 __ pop(ebp);
661 __ ret((scope()->num_parameters() + 1) * kPointerSize);
662
663 __ bind(&bailout_);
664 }
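
For orientation, a sketch of the frame this prologue builds and what the epilogue tears down (editor's illustration, not part of the patch; RetBytes is a hypothetical helper and the argument layout is assumed to match EmitLoadReceiver above):

    // After "push ebp; mov ebp, esp; push esi; push edi":
    //   ebp - 2 * kPointerSize : edi (closure)
    //   ebp - 1 * kPointerSize : esi (context)
    //   ebp                    : caller's saved ebp
    //   ebp + 1 * kPointerSize : return address
    //   ebp + 2 ...            : the num_parameters arguments, then the receiver
    //
    // "mov esp, ebp; pop ebp" drops the context/closure slots and restores the
    // caller's frame; ret then pops the return address plus this many bytes,
    // removing the arguments and the receiver:
    int RetBytes(int num_parameters) {
      const int kPointerSize = 4;  // ia32
      return (num_parameters + 1) * kPointerSize;
    }
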
665
666
470 667 void FastCodeGenerator::VisitDeclaration(Declaration* decl) {
471 668 UNREACHABLE();
472 669 }
473 670
474 671
475 672 void FastCodeGenerator::VisitBlock(Block* stmt) {
476 673 VisitStatements(stmt->statements());
477 674 }
478 675
479 676
(...skipping 257 matching lines...)
737 934
738 935 void FastCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
739 936 UNREACHABLE();
740 937 }
741 938
742 939
743 940 void FastCodeGenerator::VisitThisFunction(ThisFunction* expr) {
744 941 UNREACHABLE();
745 942 }
746 943
747 Register FastCodeGenerator::accumulator0() { return eax; }
748 Register FastCodeGenerator::accumulator1() { return edx; }
749 Register FastCodeGenerator::scratch0() { return ecx; }
750 Register FastCodeGenerator::scratch1() { return edi; }
751 Register FastCodeGenerator::receiver_reg() { return ebx; }
752 Register FastCodeGenerator::context_reg() { return esi; }
753
754
755 void FastCodeGenerator::EmitLoadReceiver() {
756 // Offset 2 is due to return address and saved frame pointer.
757 int index = 2 + function()->scope()->num_parameters();
758 __ mov(receiver_reg(), Operand(ebp, index * kPointerSize));
759 }
760
761
762 void FastCodeGenerator::EmitGlobalVariableLoad(Handle<Object> cell) {
763 ASSERT(!destination().is(no_reg));
764 ASSERT(cell->IsJSGlobalPropertyCell());
765
766 __ mov(destination(), Immediate(cell));
767 __ mov(destination(),
768 FieldOperand(destination(), JSGlobalPropertyCell::kValueOffset));
769 if (FLAG_debug_code) {
770 __ cmp(destination(), Factory::the_hole_value());
771 __ Check(not_equal, "DontDelete cells can't contain the hole");
772 }
773
774 // The loaded value is not known to be a smi.
775 clear_as_smi(destination());
776 }
777
778
779 void FastCodeGenerator::EmitThisPropertyStore(Handle<String> name) {
780 LookupResult lookup;
781 info()->receiver()->Lookup(*name, &lookup);
782
783 ASSERT(lookup.holder() == *info()->receiver());
784 ASSERT(lookup.type() == FIELD);
785 Handle<Map> map(Handle<HeapObject>::cast(info()->receiver())->map());
786 int index = lookup.GetFieldIndex() - map->inobject_properties();
787 int offset = index * kPointerSize;
788
789 // We will emit the write barrier unless the stored value is statically
790 // known to be a smi.
791 bool needs_write_barrier = !is_smi(accumulator0());
792
793 // Perform the store. Negative offsets are inobject properties.
794 if (offset < 0) {
795 offset += map->instance_size();
796 __ mov(FieldOperand(receiver_reg(), offset), accumulator0());
797 if (needs_write_barrier) {
798 // Preserve receiver from write barrier.
799 __ mov(scratch0(), receiver_reg());
800 }
801 } else {
802 offset += FixedArray::kHeaderSize;
803 __ mov(scratch0(),
804 FieldOperand(receiver_reg(), JSObject::kPropertiesOffset));
805 __ mov(FieldOperand(scratch0(), offset), accumulator0());
806 }
807
808 if (needs_write_barrier) {
809 if (destination().is(no_reg)) {
810 // After RecordWrite accumulator0 is only accidentally a smi, but it is
811 // already marked as not known to be one.
812 __ RecordWrite(scratch0(), offset, accumulator0(), scratch1());
813 } else {
814 // Copy the value to the other accumulator to preserve a copy from the
815 // write barrier. One of the accumulators is available as a scratch
816 // register. Neither is a smi.
817 __ mov(accumulator1(), accumulator0());
818 clear_as_smi(accumulator1());
819 Register value_scratch = other_accumulator(destination());
820 __ RecordWrite(scratch0(), offset, value_scratch, scratch1());
821 }
822 } else if (destination().is(accumulator1())) {
823 __ mov(accumulator1(), accumulator0());
824 // Is a smi because we do not need the write barrier.
825 set_as_smi(accumulator1());
826 }
827 }
828
829
830 void FastCodeGenerator::EmitThisPropertyLoad(Handle<String> name) {
831 ASSERT(!destination().is(no_reg));
832 LookupResult lookup;
833 info()->receiver()->Lookup(*name, &lookup);
834
835 ASSERT(lookup.holder() == *info()->receiver());
836 ASSERT(lookup.type() == FIELD);
837 Handle<Map> map(Handle<HeapObject>::cast(info()->receiver())->map());
838 int index = lookup.GetFieldIndex() - map->inobject_properties();
839 int offset = index * kPointerSize;
840
841 // Perform the load. Negative offsets are inobject properties.
842 if (offset < 0) {
843 offset += map->instance_size();
844 __ mov(destination(), FieldOperand(receiver_reg(), offset));
845 } else {
846 offset += FixedArray::kHeaderSize;
847 __ mov(scratch0(),
848 FieldOperand(receiver_reg(), JSObject::kPropertiesOffset));
849 __ mov(destination(), FieldOperand(scratch0(), offset));
850 }
851
852 // The loaded value is not known to be a smi.
853 clear_as_smi(destination());
854 }
855
856
857 void FastCodeGenerator::EmitBitOr() {
858 if (is_smi(accumulator0()) && is_smi(accumulator1())) {
859 // If both operands are known to be a smi then there is no need to check
860 // the operands or result. There is no need to perform the operation in
861 // an effect context.
862 if (!destination().is(no_reg)) {
863 // Leave the result in the destination register. Bitwise or is
864 // commutative.
865 __ or_(destination(), Operand(other_accumulator(destination())));
866 }
867 } else if (destination().is(no_reg)) {
868 // Result is not needed but do not clobber the operands in case of
869 // bailout.
870 __ mov(scratch0(), accumulator1());
871 __ or_(scratch0(), Operand(accumulator0()));
872 __ test(scratch0(), Immediate(kSmiTagMask));
873 __ j(not_zero, bailout(), not_taken);
874 } else {
875 // Preserve the destination operand in a scratch register in case of
876 // bailout.
877 __ mov(scratch0(), destination());
878 __ or_(destination(), Operand(other_accumulator(destination())));
879 __ test(destination(), Immediate(kSmiTagMask));
880 __ j(not_zero, bailout(), not_taken);
881 }
882
883 // If we didn't bailout, the result (in fact, both inputs too) is known to
884 // be a smi.
885 set_as_smi(accumulator0());
886 set_as_smi(accumulator1());
887 }
888
889
890 void FastCodeGenerator::Generate(CompilationInfo* compilation_info) {
891 ASSERT(info_ == NULL);
892 info_ = compilation_info;
893
894 // Save the caller's frame pointer and set up our own.
895 Comment prologue_cmnt(masm(), ";; Prologue");
896 __ push(ebp);
897 __ mov(ebp, esp);
898 __ push(esi); // Context.
899 __ push(edi); // Closure.
900 // Note that we keep a live register reference to esi (context) at this
901 // point.
902
903 // Receiver (this) is allocated to a fixed register.
904 if (info()->has_this_properties()) {
905 Comment cmnt(masm(), ";; MapCheck(this)");
906 if (FLAG_print_ir) {
907 PrintF("#: MapCheck(this)\n");
908 }
909 ASSERT(info()->has_receiver() && info()->receiver()->IsHeapObject());
910 Handle<HeapObject> object = Handle<HeapObject>::cast(info()->receiver());
911 Handle<Map> map(object->map());
912 EmitLoadReceiver();
913 __ CheckMap(receiver_reg(), map, bailout(), false);
914 }
915
916 // If there is a global variable access check if the global object is the
917 // same as at lazy-compilation time.
918 if (info()->has_globals()) {
919 Comment cmnt(masm(), ";; MapCheck(GLOBAL)");
920 if (FLAG_print_ir) {
921 PrintF("#: MapCheck(GLOBAL)\n");
922 }
923 ASSERT(info()->has_global_object());
924 Handle<Map> map(info()->global_object()->map());
925 __ mov(scratch0(), CodeGenerator::GlobalObject());
926 __ CheckMap(scratch0(), map, bailout(), true);
927 }
928
929 VisitStatements(function()->body());
930
931 Comment return_cmnt(masm(), ";; Return(<undefined>)");
932 if (FLAG_print_ir) {
933 PrintF("#: Return(<undefined>)\n");
934 }
935 __ mov(eax, Factory::undefined_value());
936 __ mov(esp, ebp);
937 __ pop(ebp);
938 __ ret((scope()->num_parameters() + 1) * kPointerSize);
939
940 __ bind(&bailout_);
941 }
942
943
944 944 #undef __
945 945
946 946
947 947 } } // namespace v8::internal