Chromium Code Reviews

Side by Side Diff: src/arm/codegen-arm.cc

Issue 3017018: Change asserts to STATIC_ASSERT if they can be checked at compilation time. ... (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: '' Created 10 years, 5 months ago
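The change in one line: ASSERT compiles to a runtime check that debug builds evaluate every time control passes over it, while STATIC_ASSERT makes the compiler itself reject the build when the condition is false and emits no code at all, so it only applies to conditions that are compile-time constant expressions. A minimal sketch of the idiom, using the classic negative-array-size trick rather than V8's exact definitions in checks.h:

    // If (test) is false the typedef declares a char array of size -1,
    // which no compiler accepts; if true, the typedef costs nothing at
    // runtime. __LINE__ keeps several uses in one scope from colliding.
    #define SEMI_JOIN(a, b) a##b
    #define JOIN(a, b) SEMI_JOIN(a, b)
    #define STATIC_ASSERT(test) \
      typedef char JOIN(StaticAssertTypedef_, __LINE__)[(test) ? 1 : -1]

    static const int kSmiTag = 0;  // stand-in for V8's constant
    STATIC_ASSERT(kSmiTag == 0);   // constant expression: compiles away

This is also the patch's criterion for what stays behind: a value such as Smi::FromInt(0) is not a compile-time constant, which is why its ASSERT_EQ below remains a runtime check.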
1 // Copyright 2010 the V8 project authors. All rights reserved. 1 // Copyright 2010 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 797 matching lines...)
808 case Token::SUB: 808 case Token::SUB:
809 if (inline_smi) { 809 if (inline_smi) {
810 JumpTarget done; 810 JumpTarget done;
811 Register rhs = frame_->PopToRegister(); 811 Register rhs = frame_->PopToRegister();
812 Register lhs = frame_->PopToRegister(rhs); 812 Register lhs = frame_->PopToRegister(rhs);
813 Register scratch = VirtualFrame::scratch0(); 813 Register scratch = VirtualFrame::scratch0();
814 __ orr(scratch, rhs, Operand(lhs)); 814 __ orr(scratch, rhs, Operand(lhs));
815 // Check they are both small and positive. 815 // Check they are both small and positive.
816 __ tst(scratch, Operand(kSmiTagMask | 0xc0000000)); 816 __ tst(scratch, Operand(kSmiTagMask | 0xc0000000));
817 ASSERT(rhs.is(r0) || lhs.is(r0)); // r0 is free now. 817 ASSERT(rhs.is(r0) || lhs.is(r0)); // r0 is free now.
818 ASSERT_EQ(0, kSmiTag); 818 STATIC_ASSERT(kSmiTag == 0);
819 if (op == Token::ADD) { 819 if (op == Token::ADD) {
820 __ add(r0, lhs, Operand(rhs), LeaveCC, eq); 820 __ add(r0, lhs, Operand(rhs), LeaveCC, eq);
821 } else { 821 } else {
822 __ sub(r0, lhs, Operand(rhs), LeaveCC, eq); 822 __ sub(r0, lhs, Operand(rhs), LeaveCC, eq);
823 } 823 }
824 done.Branch(eq); 824 done.Branch(eq);
825 GenericBinaryOpStub stub(op, overwrite_mode, lhs, rhs, constant_rhs); 825 GenericBinaryOpStub stub(op, overwrite_mode, lhs, rhs, constant_rhs);
826 frame_->SpillAll(); 826 frame_->SpillAll();
827 frame_->CallStub(&stub, 0); 827 frame_->CallStub(&stub, 0);
828 done.Bind(); 828 done.Bind();
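Why the single tst above is enough for the fast path: with the encoding the STATIC_ASSERT pins down (kSmiTag == 0 and a one-bit tag), a smi is its value shifted left by one. A sketch of what the orr/tst pair computes, under those assumptions (kSmiTagMask == 1):

    // Bit 0 set on either word: at least one operand is not a smi.
    // Bit 30 or 31 set: an operand is negative, or its tagged value is
    // >= 2^30, so the tagged sum might not fit in a word.
    bool BothSmallPositiveSmis(uint32_t lhs, uint32_t rhs) {
      return ((lhs | rhs) & (1u | 0xc0000000u)) == 0;
    }

When the test passes, both tagged operands are below 2^30, so their sum stays below 2^31: the conditionally executed add/sub can neither overflow nor need its own check, and the stub call is skipped.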
(...skipping 27 matching lines...)
856 } else { 856 } else {
857 cond = al; 857 cond = al;
858 } 858 }
859 ASSERT(rhs.is(r0) || lhs.is(r0)); // r0 is free now. 859 ASSERT(rhs.is(r0) || lhs.is(r0)); // r0 is free now.
860 if (op == Token::BIT_OR) { 860 if (op == Token::BIT_OR) {
861 __ orr(r0, lhs, Operand(rhs), LeaveCC, cond); 861 __ orr(r0, lhs, Operand(rhs), LeaveCC, cond);
862 } else if (op == Token::BIT_AND) { 862 } else if (op == Token::BIT_AND) {
863 __ and_(r0, lhs, Operand(rhs), LeaveCC, cond); 863 __ and_(r0, lhs, Operand(rhs), LeaveCC, cond);
864 } else { 864 } else {
865 ASSERT(op == Token::BIT_XOR); 865 ASSERT(op == Token::BIT_XOR);
866 ASSERT_EQ(0, kSmiTag); 866 STATIC_ASSERT(kSmiTag == 0);
867 __ eor(r0, lhs, Operand(rhs), LeaveCC, cond); 867 __ eor(r0, lhs, Operand(rhs), LeaveCC, cond);
868 } 868 }
869 if (cond != al) { 869 if (cond != al) {
870 JumpTarget done; 870 JumpTarget done;
871 done.Branch(cond); 871 done.Branch(cond);
872 GenericBinaryOpStub stub(op, overwrite_mode, lhs, rhs, constant_rhs); 872 GenericBinaryOpStub stub(op, overwrite_mode, lhs, rhs, constant_rhs);
873 frame_->SpillAll(); 873 frame_->SpillAll();
874 frame_->CallStub(&stub, 0); 874 frame_->CallStub(&stub, 0);
875 done.Bind(); 875 done.Bind();
876 } 876 }
(...skipping 636 matching lines...)
1513 // sp[0]: receiver - in the receiver_reg register. 1513 // sp[0]: receiver - in the receiver_reg register.
1514 // sp[1]: applicand.apply 1514 // sp[1]: applicand.apply
1515 // sp[2]: applicand. 1515 // sp[2]: applicand.
1516 1516
1517 // Check that the receiver really is a JavaScript object. 1517 // Check that the receiver really is a JavaScript object.
1518 __ BranchOnSmi(receiver_reg, &build_args); 1518 __ BranchOnSmi(receiver_reg, &build_args);
1519 // We allow all JSObjects including JSFunctions. As long as 1519 // We allow all JSObjects including JSFunctions. As long as
1520 // JS_FUNCTION_TYPE is the last instance type and it is right 1520 // JS_FUNCTION_TYPE is the last instance type and it is right
1521 // after LAST_JS_OBJECT_TYPE, we do not have to check the upper 1521 // after LAST_JS_OBJECT_TYPE, we do not have to check the upper
1522 // bound. 1522 // bound.
1523 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE); 1523 STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
1524 ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1); 1524 STATIC_ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
1525 __ CompareObjectType(receiver_reg, r2, r3, FIRST_JS_OBJECT_TYPE); 1525 __ CompareObjectType(receiver_reg, r2, r3, FIRST_JS_OBJECT_TYPE);
1526 __ b(lt, &build_args); 1526 __ b(lt, &build_args);
1527 1527
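The two STATIC_ASSERTs just above are what make the single lower-bound branch sound; a sketch of the collapsed range check, with the enum layout taken from those asserts:

    // JS object types sit at the tail of the instance-type enum, and
    // JS_FUNCTION_TYPE is both LAST_JS_OBJECT_TYPE + 1 and LAST_TYPE,
    // so there is nothing above it left to exclude:
    bool IsJSObjectIncludingFunctions(int instance_type) {
      return instance_type >= FIRST_JS_OBJECT_TYPE;  // no upper bound
    }

Should either enum invariant ever break, the build now fails instead of a debug-mode run.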
1528 // Check that applicand.apply is Function.prototype.apply. 1528 // Check that applicand.apply is Function.prototype.apply.
1529 __ ldr(r0, MemOperand(sp, kPointerSize)); 1529 __ ldr(r0, MemOperand(sp, kPointerSize));
1530 __ BranchOnSmi(r0, &build_args); 1530 __ BranchOnSmi(r0, &build_args);
1531 __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE); 1531 __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE);
1532 __ b(ne, &build_args); 1532 __ b(ne, &build_args);
1533 __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset)); 1533 __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
1534 Handle<Code> apply_code(Builtins::builtin(Builtins::FunctionApply)); 1534 Handle<Code> apply_code(Builtins::builtin(Builtins::FunctionApply));
(...skipping 1068 matching lines...)
2603 function_return_is_shadowed_ = function_return_was_shadowed; 2603 function_return_is_shadowed_ = function_return_was_shadowed;
2604 2604
2605 // Get an external reference to the handler address. 2605 // Get an external reference to the handler address.
2606 ExternalReference handler_address(Top::k_handler_address); 2606 ExternalReference handler_address(Top::k_handler_address);
2607 2607
2608 // If we can fall off the end of the try block, unlink from try chain. 2608 // If we can fall off the end of the try block, unlink from try chain.
2609 if (has_valid_frame()) { 2609 if (has_valid_frame()) {
2610 // The next handler address is on top of the frame. Unlink from 2610 // The next handler address is on top of the frame. Unlink from
2611 // the handler list and drop the rest of this handler from the 2611 // the handler list and drop the rest of this handler from the
2612 // frame. 2612 // frame.
2613 ASSERT(StackHandlerConstants::kNextOffset == 0); 2613 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
2614 frame_->EmitPop(r1); 2614 frame_->EmitPop(r1);
2615 __ mov(r3, Operand(handler_address)); 2615 __ mov(r3, Operand(handler_address));
2616 __ str(r1, MemOperand(r3)); 2616 __ str(r1, MemOperand(r3));
2617 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1); 2617 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
2618 if (has_unlinks) { 2618 if (has_unlinks) {
2619 exit.Jump(); 2619 exit.Jump();
2620 } 2620 }
2621 } 2621 }
2622 2622
2623 // Generate unlink code for the (formerly) shadowing labels that have been 2623 // Generate unlink code for the (formerly) shadowing labels that have been
2624 // jumped to. Deallocate each shadow target. 2624 // jumped to. Deallocate each shadow target.
2625 for (int i = 0; i < shadows.length(); i++) { 2625 for (int i = 0; i < shadows.length(); i++) {
2626 if (shadows[i]->is_linked()) { 2626 if (shadows[i]->is_linked()) {
2627 // Unlink from try chain. 2627 // Unlink from try chain.
2628 shadows[i]->Bind(); 2628 shadows[i]->Bind();
2629 // Because we can be jumping here (to spilled code) from unspilled 2629 // Because we can be jumping here (to spilled code) from unspilled
2630 // code, we need to reestablish a spilled frame at this block. 2630 // code, we need to reestablish a spilled frame at this block.
2631 frame_->SpillAll(); 2631 frame_->SpillAll();
2632 2632
2633 // Reload sp from the top handler, because some statements that we 2633 // Reload sp from the top handler, because some statements that we
2634 // break from (eg, for...in) may have left stuff on the stack. 2634 // break from (eg, for...in) may have left stuff on the stack.
2635 __ mov(r3, Operand(handler_address)); 2635 __ mov(r3, Operand(handler_address));
2636 __ ldr(sp, MemOperand(r3)); 2636 __ ldr(sp, MemOperand(r3));
2637 frame_->Forget(frame_->height() - handler_height); 2637 frame_->Forget(frame_->height() - handler_height);
2638 2638
2639 ASSERT(StackHandlerConstants::kNextOffset == 0); 2639 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
2640 frame_->EmitPop(r1); 2640 frame_->EmitPop(r1);
2641 __ str(r1, MemOperand(r3)); 2641 __ str(r1, MemOperand(r3));
2642 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1); 2642 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
2643 2643
2644 if (!function_return_is_shadowed_ && i == kReturnShadowIndex) { 2644 if (!function_return_is_shadowed_ && i == kReturnShadowIndex) {
2645 frame_->PrepareForReturn(); 2645 frame_->PrepareForReturn();
2646 } 2646 }
2647 shadows[i]->other_target()->Jump(); 2647 shadows[i]->other_target()->Jump();
2648 } 2648 }
2649 } 2649 }
(...skipping 66 matching lines...)
2716 } 2716 }
2717 function_return_is_shadowed_ = function_return_was_shadowed; 2717 function_return_is_shadowed_ = function_return_was_shadowed;
2718 2718
2719 // Get an external reference to the handler address. 2719 // Get an external reference to the handler address.
2720 ExternalReference handler_address(Top::k_handler_address); 2720 ExternalReference handler_address(Top::k_handler_address);
2721 2721
2722 // If we can fall off the end of the try block, unlink from the try 2722 // If we can fall off the end of the try block, unlink from the try
2723 // chain and set the state on the frame to FALLING. 2723 // chain and set the state on the frame to FALLING.
2724 if (has_valid_frame()) { 2724 if (has_valid_frame()) {
2725 // The next handler address is on top of the frame. 2725 // The next handler address is on top of the frame.
2726 ASSERT(StackHandlerConstants::kNextOffset == 0); 2726 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
2727 frame_->EmitPop(r1); 2727 frame_->EmitPop(r1);
2728 __ mov(r3, Operand(handler_address)); 2728 __ mov(r3, Operand(handler_address));
2729 __ str(r1, MemOperand(r3)); 2729 __ str(r1, MemOperand(r3));
2730 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1); 2730 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
2731 2731
2732 // Fake a top of stack value (unneeded when FALLING) and set the 2732 // Fake a top of stack value (unneeded when FALLING) and set the
2733 // state in r2, then jump around the unlink blocks if any. 2733 // state in r2, then jump around the unlink blocks if any.
2734 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); 2734 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
2735 frame_->EmitPush(r0); 2735 frame_->EmitPush(r0);
2736 __ mov(r2, Operand(Smi::FromInt(FALLING))); 2736 __ mov(r2, Operand(Smi::FromInt(FALLING)));
(...skipping 18 matching lines...)
2755 2755
2756 // Reload sp from the top handler, because some statements that 2756 // Reload sp from the top handler, because some statements that
2757 // we break from (eg, for...in) may have left stuff on the 2757 // we break from (eg, for...in) may have left stuff on the
2758 // stack. 2758 // stack.
2759 __ mov(r3, Operand(handler_address)); 2759 __ mov(r3, Operand(handler_address));
2760 __ ldr(sp, MemOperand(r3)); 2760 __ ldr(sp, MemOperand(r3));
2761 frame_->Forget(frame_->height() - handler_height); 2761 frame_->Forget(frame_->height() - handler_height);
2762 2762
2763 // Unlink this handler and drop it from the frame. The next 2763 // Unlink this handler and drop it from the frame. The next
2764 // handler address is currently on top of the frame. 2764 // handler address is currently on top of the frame.
2765 ASSERT(StackHandlerConstants::kNextOffset == 0); 2765 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
2766 frame_->EmitPop(r1); 2766 frame_->EmitPop(r1);
2767 __ str(r1, MemOperand(r3)); 2767 __ str(r1, MemOperand(r3));
2768 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1); 2768 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
2769 2769
2770 if (i == kReturnShadowIndex) { 2770 if (i == kReturnShadowIndex) {
2771 // If this label shadowed the function return, materialize the 2771 // If this label shadowed the function return, materialize the
2772 // return value on the stack. 2772 // return value on the stack.
2773 frame_->EmitPush(r0); 2773 frame_->EmitPush(r0);
2774 } else { 2774 } else {
2775 // Fake TOS for targets that shadowed breaks and continues. 2775 // Fake TOS for targets that shadowed breaks and continues.
(...skipping 1398 matching lines...)
4174 null.Branch(eq); 4174 null.Branch(eq);
4175 4175
4176 // Check that the object is a JS object but take special care of JS 4176 // Check that the object is a JS object but take special care of JS
4177 // functions to make sure they have 'Function' as their class. 4177 // functions to make sure they have 'Function' as their class.
4178 __ CompareObjectType(r0, r0, r1, FIRST_JS_OBJECT_TYPE); 4178 __ CompareObjectType(r0, r0, r1, FIRST_JS_OBJECT_TYPE);
4179 null.Branch(lt); 4179 null.Branch(lt);
4180 4180
4181 // As long as JS_FUNCTION_TYPE is the last instance type and it is 4181 // As long as JS_FUNCTION_TYPE is the last instance type and it is
4182 // right after LAST_JS_OBJECT_TYPE, we can avoid checking for 4182 // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
4183 // LAST_JS_OBJECT_TYPE. 4183 // LAST_JS_OBJECT_TYPE.
4184 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE); 4184 STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
4185 ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1); 4185 STATIC_ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
4186 __ cmp(r1, Operand(JS_FUNCTION_TYPE)); 4186 __ cmp(r1, Operand(JS_FUNCTION_TYPE));
4187 function.Branch(eq); 4187 function.Branch(eq);
4188 4188
4189 // Check if the constructor in the map is a function. 4189 // Check if the constructor in the map is a function.
4190 __ ldr(r0, FieldMemOperand(r0, Map::kConstructorOffset)); 4190 __ ldr(r0, FieldMemOperand(r0, Map::kConstructorOffset));
4191 __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE); 4191 __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
4192 non_function_constructor.Branch(ne); 4192 non_function_constructor.Branch(ne);
4193 4193
4194 // The r0 register now contains the constructor function. Grab the 4194 // The r0 register now contains the constructor function. Grab the
4195 // instance class name from there. 4195 // instance class name from there.
(...skipping 925 matching lines...)
5121 5121
5122 __ ldr(r1, ContextOperand(cp, Context::GLOBAL_INDEX)); 5122 __ ldr(r1, ContextOperand(cp, Context::GLOBAL_INDEX));
5123 __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalContextOffset)); 5123 __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalContextOffset));
5124 __ ldr(r1, ContextOperand(r1, Context::JSFUNCTION_RESULT_CACHES_INDEX)); 5124 __ ldr(r1, ContextOperand(r1, Context::JSFUNCTION_RESULT_CACHES_INDEX));
5125 __ ldr(r1, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(cache_id))); 5125 __ ldr(r1, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(cache_id)));
5126 5126
5127 DeferredSearchCache* deferred = new DeferredSearchCache(r0, r1, r2); 5127 DeferredSearchCache* deferred = new DeferredSearchCache(r0, r1, r2);
5128 5128
5129 const int kFingerOffset = 5129 const int kFingerOffset =
5130 FixedArray::OffsetOfElementAt(JSFunctionResultCache::kFingerIndex); 5130 FixedArray::OffsetOfElementAt(JSFunctionResultCache::kFingerIndex);
5131 ASSERT(kSmiTag == 0 && kSmiTagSize == 1); 5131 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
5132 __ ldr(r0, FieldMemOperand(r1, kFingerOffset)); 5132 __ ldr(r0, FieldMemOperand(r1, kFingerOffset));
5133 // r0 now holds finger offset as a smi. 5133 // r0 now holds finger offset as a smi.
5134 __ add(r3, r1, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); 5134 __ add(r3, r1, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
5135 // r3 now points to the start of fixed array elements. 5135 // r3 now points to the start of fixed array elements.
5136 __ ldr(r0, MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize, PreIndex)); 5136 __ ldr(r0, MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize, PreIndex));
5137 // Note side effect of PreIndex: r3 now points to the key of the pair. 5137 // Note side effect of PreIndex: r3 now points to the key of the pair.
5138 __ cmp(r2, r0); 5138 __ cmp(r2, r0);
5139 deferred->Branch(ne); 5139 deferred->Branch(ne);
5140 5140
5141 __ ldr(r0, MemOperand(r3, kPointerSize)); 5141 __ ldr(r0, MemOperand(r3, kPointerSize));
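What the PreIndex load above relies on, as far as this chunk shows (the pair layout is inferred from the code, not quoted from the JSFunctionResultCache definition): the cache array stores (key, value) pairs, and a finger slot holds, as a smi, the element index of the most recently hit key. A hypothetical C++ rendering of the fast path, names and types invented for illustration:

    void* CacheFingerLookup(void** elements, int finger_smi, void* key) {
      // Untag the smi index; the PreIndex ldr does this scaling and
      // leaves r3 pointing at the key as a side effect.
      void** entry = elements + (finger_smi >> 1);
      return (entry[0] == key) ? entry[1]  // hit: paired value
                               : 0;        // miss: DeferredSearchCache
    }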
(...skipping 1791 matching lines...)
6933 #else 6933 #else
6934 Register exponent = result2_; 6934 Register exponent = result2_;
6935 Register mantissa = result1_; 6935 Register mantissa = result1_;
6936 #endif 6936 #endif
6937 Label not_special; 6937 Label not_special;
6938 // Convert from Smi to integer. 6938 // Convert from Smi to integer.
6939 __ mov(source_, Operand(source_, ASR, kSmiTagSize)); 6939 __ mov(source_, Operand(source_, ASR, kSmiTagSize));
6940 // Move sign bit from source to destination. This works because the sign bit 6940 // Move sign bit from source to destination. This works because the sign bit
6941 // in the exponent word of the double has the same position and polarity as 6941 // in the exponent word of the double has the same position and polarity as
6942 // the 2's complement sign bit in a Smi. 6942 // the 2's complement sign bit in a Smi.
6943 ASSERT(HeapNumber::kSignMask == 0x80000000u); 6943 STATIC_ASSERT(HeapNumber::kSignMask == 0x80000000u);
6944 __ and_(exponent, source_, Operand(HeapNumber::kSignMask), SetCC); 6944 __ and_(exponent, source_, Operand(HeapNumber::kSignMask), SetCC);
6945 // Subtract from 0 if source was negative. 6945 // Subtract from 0 if source was negative.
6946 __ rsb(source_, source_, Operand(0), LeaveCC, ne); 6946 __ rsb(source_, source_, Operand(0), LeaveCC, ne);
6947 6947
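Spelled out, the invariant the comment above leans on, assuming 32-bit two's-complement integers and IEEE-754 doubles (both keep their sign in bit 31):

    // What the and_/rsb pair computes: split the untagged int into the
    // double's sign word and a non-negative magnitude.
    void SplitSignAndMagnitude(int32_t value,
                               uint32_t* sign_word,
                               uint32_t* magnitude) {
      uint32_t bits = static_cast<uint32_t>(value);
      *sign_word = bits & 0x80000000u;                      // kSignMask
      *magnitude = (*sign_word != 0) ? (0u - bits) : bits;  // the rsb
    }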
6948 // We have -1, 0 or 1, which we treat specially. Register source_ contains 6948 // We have -1, 0 or 1, which we treat specially. Register source_ contains
6949 // absolute value: it is either equal to 1 (special case of -1 and 1), 6949 // absolute value: it is either equal to 1 (special case of -1 and 1),
6950 // greater than 1 (not a special case) or less than 1 (special case of 0). 6950 // greater than 1 (not a special case) or less than 1 (special case of 0).
6951 __ cmp(source_, Operand(1)); 6951 __ cmp(source_, Operand(1));
6952 __ b(gt, &not_special); 6952 __ b(gt, &not_special);
6953 6953
(...skipping 32 matching lines...)
6986 __ Ret(); 6986 __ Ret();
6987 } 6987 }
6988 6988
6989 6989
6990 // See comment for class. 6990 // See comment for class.
6991 void WriteInt32ToHeapNumberStub::Generate(MacroAssembler* masm) { 6991 void WriteInt32ToHeapNumberStub::Generate(MacroAssembler* masm) {
6992 Label max_negative_int; 6992 Label max_negative_int;
6993 // the_int_ has the answer which is a signed int32 but not a Smi. 6993 // the_int_ has the answer which is a signed int32 but not a Smi.
6994 // We test for the special value that has a different exponent. This test 6994 // We test for the special value that has a different exponent. This test
6995 // has the neat side effect of setting the flags according to the sign. 6995 // has the neat side effect of setting the flags according to the sign.
6996 ASSERT(HeapNumber::kSignMask == 0x80000000u); 6996 STATIC_ASSERT(HeapNumber::kSignMask == 0x80000000u);
6997 __ cmp(the_int_, Operand(0x80000000u)); 6997 __ cmp(the_int_, Operand(0x80000000u));
6998 __ b(eq, &max_negative_int); 6998 __ b(eq, &max_negative_int);
6999 // Set up the correct exponent in scratch_. All non-Smi int32s have the same. 6999 // Set up the correct exponent in scratch_. All non-Smi int32s have the same.
7000 // A non-Smi integer is 1.xxx * 2^30 so the exponent is 30 (biased). 7000 // A non-Smi integer is 1.xxx * 2^30 so the exponent is 30 (biased).
7001 uint32_t non_smi_exponent = 7001 uint32_t non_smi_exponent =
7002 (HeapNumber::kExponentBias + 30) << HeapNumber::kExponentShift; 7002 (HeapNumber::kExponentBias + 30) << HeapNumber::kExponentShift;
7003 __ mov(scratch_, Operand(non_smi_exponent)); 7003 __ mov(scratch_, Operand(non_smi_exponent));
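Worked numbers for non_smi_exponent, assuming IEEE-754 doubles (bias 1023) and the usual high-word layout (exponent field starting at bit 20): an int32 that is not a smi has absolute value in [2^30, 2^31], i.e. it normalizes as 1.xxx * 2^30, so every such value shares one exponent field:

    biased exponent  = 1023 + 30 = 1053
    non_smi_exponent = 1053 << 20

The one int32 that breaks the pattern is -2^31, whose exponent would be 31; that is exactly the max_negative_int case branched to above.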
7004 // Set the sign bit in scratch_ if the value was negative. 7004 // Set the sign bit in scratch_ if the value was negative.
7005 __ orr(scratch_, scratch_, Operand(HeapNumber::kSignMask), LeaveCC, cs); 7005 __ orr(scratch_, scratch_, Operand(HeapNumber::kSignMask), LeaveCC, cs);
7006 // Subtract from 0 if the value was negative. 7006 // Subtract from 0 if the value was negative.
(...skipping 324 matching lines...)
7331 // See comment at call site. 7331 // See comment at call site.
7332 static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm, 7332 static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
7333 Register lhs, 7333 Register lhs,
7334 Register rhs) { 7334 Register rhs) {
7335 ASSERT((lhs.is(r0) && rhs.is(r1)) || 7335 ASSERT((lhs.is(r0) && rhs.is(r1)) ||
7336 (lhs.is(r1) && rhs.is(r0))); 7336 (lhs.is(r1) && rhs.is(r0)));
7337 7337
7338 // If either operand is a JSObject or an oddball value, then they are 7338 // If either operand is a JSObject or an oddball value, then they are
7339 // not equal since their pointers are different. 7339 // not equal since their pointers are different.
7340 // There is no test for undetectability in strict equality. 7340 // There is no test for undetectability in strict equality.
7341 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE); 7341 STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
7342 Label first_non_object; 7342 Label first_non_object;
7343 // Get the type of the first operand into r2 and compare it with 7343 // Get the type of the first operand into r2 and compare it with
7344 // FIRST_JS_OBJECT_TYPE. 7344 // FIRST_JS_OBJECT_TYPE.
7345 __ CompareObjectType(rhs, r2, r2, FIRST_JS_OBJECT_TYPE); 7345 __ CompareObjectType(rhs, r2, r2, FIRST_JS_OBJECT_TYPE);
7346 __ b(lt, &first_non_object); 7346 __ b(lt, &first_non_object);
7347 7347
7348 // Return non-zero (r0 is not zero) 7348 // Return non-zero (r0 is not zero)
7349 Label return_not_equal; 7349 Label return_not_equal;
7350 __ bind(&return_not_equal); 7350 __ bind(&return_not_equal);
7351 __ Ret(); 7351 __ Ret();
7352 7352
7353 __ bind(&first_non_object); 7353 __ bind(&first_non_object);
7354 // Check for oddballs: true, false, null, undefined. 7354 // Check for oddballs: true, false, null, undefined.
7355 __ cmp(r2, Operand(ODDBALL_TYPE)); 7355 __ cmp(r2, Operand(ODDBALL_TYPE));
7356 __ b(eq, &return_not_equal); 7356 __ b(eq, &return_not_equal);
7357 7357
7358 __ CompareObjectType(lhs, r3, r3, FIRST_JS_OBJECT_TYPE); 7358 __ CompareObjectType(lhs, r3, r3, FIRST_JS_OBJECT_TYPE);
7359 __ b(ge, &return_not_equal); 7359 __ b(ge, &return_not_equal);
7360 7360
7361 // Check for oddballs: true, false, null, undefined. 7361 // Check for oddballs: true, false, null, undefined.
7362 __ cmp(r3, Operand(ODDBALL_TYPE)); 7362 __ cmp(r3, Operand(ODDBALL_TYPE));
7363 __ b(eq, &return_not_equal); 7363 __ b(eq, &return_not_equal);
7364 7364
7365 // Now that we have the types we might as well check for symbol-symbol. 7365 // Now that we have the types we might as well check for symbol-symbol.
7366 // Ensure that no non-strings have the symbol bit set. 7366 // Ensure that no non-strings have the symbol bit set.
7367 ASSERT(kNotStringTag + kIsSymbolMask > LAST_TYPE); 7367 STATIC_ASSERT(LAST_TYPE < kNotStringTag + kIsSymbolMask);
7368 ASSERT(kSymbolTag != 0); 7368 STATIC_ASSERT(kSymbolTag != 0);
7369 __ and_(r2, r2, Operand(r3)); 7369 __ and_(r2, r2, Operand(r3));
7370 __ tst(r2, Operand(kIsSymbolMask)); 7370 __ tst(r2, Operand(kIsSymbolMask));
7371 __ b(ne, &return_not_equal); 7371 __ b(ne, &return_not_equal);
7372 } 7372 }
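The two STATIC_ASSERTs above carry the whole load of the and_/tst pair that follows them; as a sketch (the mask value here is a stand-in, not quoted from objects.h):

    static const int kIsSymbolMask = 0x20;  // illustrative stand-in
    // The asserts guarantee that only string instance types can carry
    // the symbol bit, so AND-ing two types keeps the bit set iff both
    // operands are symbols; distinct symbols are never equal.
    bool BothSymbols(int type_a, int type_b) {
      return ((type_a & type_b) & kIsSymbolMask) != 0;
    }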
7373 7373
7374 7374
7375 // See comment at call site. 7375 // See comment at call site.
7376 static void EmitCheckForTwoHeapNumbers(MacroAssembler* masm, 7376 static void EmitCheckForTwoHeapNumbers(MacroAssembler* masm,
7377 Register lhs, 7377 Register lhs,
7378 Register rhs, 7378 Register rhs,
(...skipping 30 matching lines...)
7409 Register lhs, 7409 Register lhs,
7410 Register rhs, 7410 Register rhs,
7411 Label* possible_strings, 7411 Label* possible_strings,
7412 Label* not_both_strings) { 7412 Label* not_both_strings) {
7413 ASSERT((lhs.is(r0) && rhs.is(r1)) || 7413 ASSERT((lhs.is(r0) && rhs.is(r1)) ||
7414 (lhs.is(r1) && rhs.is(r0))); 7414 (lhs.is(r1) && rhs.is(r0)));
7415 7415
7416 // r2 is object type of rhs. 7416 // r2 is object type of rhs.
7417 // Ensure that no non-strings have the symbol bit set. 7417 // Ensure that no non-strings have the symbol bit set.
7418 Label object_test; 7418 Label object_test;
7419 ASSERT(kSymbolTag != 0); 7419 STATIC_ASSERT(kSymbolTag != 0);
7420 __ tst(r2, Operand(kIsNotStringMask)); 7420 __ tst(r2, Operand(kIsNotStringMask));
7421 __ b(ne, &object_test); 7421 __ b(ne, &object_test);
7422 __ tst(r2, Operand(kIsSymbolMask)); 7422 __ tst(r2, Operand(kIsSymbolMask));
7423 __ b(eq, possible_strings); 7423 __ b(eq, possible_strings);
7424 __ CompareObjectType(lhs, r3, r3, FIRST_NONSTRING_TYPE); 7424 __ CompareObjectType(lhs, r3, r3, FIRST_NONSTRING_TYPE);
7425 __ b(ge, not_both_strings); 7425 __ b(ge, not_both_strings);
7426 __ tst(r3, Operand(kIsSymbolMask)); 7426 __ tst(r3, Operand(kIsSymbolMask));
7427 __ b(eq, possible_strings); 7427 __ b(eq, possible_strings);
7428 7428
7429 // Both are symbols. We already checked they weren't the same pointer 7429 // Both are symbols. We already checked they weren't the same pointer
(...skipping 50 matching lines...)
7480 if (!object_is_smi) { 7480 if (!object_is_smi) {
7481 __ BranchOnSmi(object, &is_smi); 7481 __ BranchOnSmi(object, &is_smi);
7482 if (CpuFeatures::IsSupported(VFP3)) { 7482 if (CpuFeatures::IsSupported(VFP3)) {
7483 CpuFeatures::Scope scope(VFP3); 7483 CpuFeatures::Scope scope(VFP3);
7484 __ CheckMap(object, 7484 __ CheckMap(object,
7485 scratch1, 7485 scratch1,
7486 Heap::kHeapNumberMapRootIndex, 7486 Heap::kHeapNumberMapRootIndex,
7487 not_found, 7487 not_found,
7488 true); 7488 true);
7489 7489
7490 ASSERT_EQ(8, kDoubleSize); 7490 STATIC_ASSERT(8 == kDoubleSize);
7491 __ add(scratch1, 7491 __ add(scratch1,
7492 object, 7492 object,
7493 Operand(HeapNumber::kValueOffset - kHeapObjectTag)); 7493 Operand(HeapNumber::kValueOffset - kHeapObjectTag));
7494 __ ldm(ia, scratch1, scratch1.bit() | scratch2.bit()); 7494 __ ldm(ia, scratch1, scratch1.bit() | scratch2.bit());
7495 __ eor(scratch1, scratch1, Operand(scratch2)); 7495 __ eor(scratch1, scratch1, Operand(scratch2));
7496 __ and_(scratch1, scratch1, Operand(mask)); 7496 __ and_(scratch1, scratch1, Operand(mask));
7497 7497
7498 // Calculate address of entry in string cache: each entry consists 7498 // Calculate address of entry in string cache: each entry consists
7499 // of two pointer sized fields. 7499 // of two pointer sized fields.
7500 __ add(scratch1, 7500 __ add(scratch1,
(...skipping 78 matching lines...)
7579 7579
7580 // NOTICE! This code is only reached after a smi-fast-case check, so 7580 // NOTICE! This code is only reached after a smi-fast-case check, so
7581 // it is certain that at least one operand isn't a smi. 7581 // it is certain that at least one operand isn't a smi.
7582 7582
7583 // Handle the case where the objects are identical. Either returns the answer 7583 // Handle the case where the objects are identical. Either returns the answer
7584 // or goes to slow. Only falls through if the objects were not identical. 7584 // or goes to slow. Only falls through if the objects were not identical.
7585 EmitIdenticalObjectComparison(masm, &slow, cc_, never_nan_nan_); 7585 EmitIdenticalObjectComparison(masm, &slow, cc_, never_nan_nan_);
7586 7586
7587 // If either is a Smi (we know that not both are), then they can only 7587 // If either is a Smi (we know that not both are), then they can only
7588 // be strictly equal if the other is a HeapNumber. 7588 // be strictly equal if the other is a HeapNumber.
7589 ASSERT_EQ(0, kSmiTag); 7589 STATIC_ASSERT(kSmiTag == 0);
7590 ASSERT_EQ(0, Smi::FromInt(0)); 7590 ASSERT_EQ(0, Smi::FromInt(0));
7591 __ and_(r2, lhs_, Operand(rhs_)); 7591 __ and_(r2, lhs_, Operand(rhs_));
7592 __ tst(r2, Operand(kSmiTagMask)); 7592 __ tst(r2, Operand(kSmiTagMask));
7593 __ b(ne, &not_smis); 7593 __ b(ne, &not_smis);
7594 // One operand is a smi. EmitSmiNonsmiComparison generates code that can: 7594 // One operand is a smi. EmitSmiNonsmiComparison generates code that can:
7595 // 1) Return the answer. 7595 // 1) Return the answer.
7596 // 2) Go to slow. 7596 // 2) Go to slow.
7597 // 3) Fall through to both_loaded_as_doubles. 7597 // 3) Fall through to both_loaded_as_doubles.
7598 // 4) Jump to lhs_not_nan. 7598 // 4) Jump to lhs_not_nan.
7599 // In cases 3 and 4 we have found out we were dealing with a number-number 7599 // In cases 3 and 4 we have found out we were dealing with a number-number
(...skipping 982 matching lines...)
8582 // smi_test_reg to tell us that. 8582 // smi_test_reg to tell us that.
8583 if (ShouldGenerateSmiCode()) { 8583 if (ShouldGenerateSmiCode()) {
8584 __ orr(smi_test_reg, lhs, Operand(rhs)); 8584 __ orr(smi_test_reg, lhs, Operand(rhs));
8585 } 8585 }
8586 8586
8587 switch (op_) { 8587 switch (op_) {
8588 case Token::ADD: { 8588 case Token::ADD: {
8589 Label not_smi; 8589 Label not_smi;
8590 // Fast path. 8590 // Fast path.
8591 if (ShouldGenerateSmiCode()) { 8591 if (ShouldGenerateSmiCode()) {
8592 ASSERT(kSmiTag == 0); // Adjust code below. 8592 STATIC_ASSERT(kSmiTag == 0); // Adjust code below.
8593 __ tst(smi_test_reg, Operand(kSmiTagMask)); 8593 __ tst(smi_test_reg, Operand(kSmiTagMask));
8594 __ b(ne, &not_smi); 8594 __ b(ne, &not_smi);
8595 __ add(r0, r1, Operand(r0), SetCC); // Add y optimistically. 8595 __ add(r0, r1, Operand(r0), SetCC); // Add y optimistically.
8596 // Return if no overflow. 8596 // Return if no overflow.
8597 __ Ret(vc); 8597 __ Ret(vc);
8598 __ sub(r0, r0, Operand(r1)); // Revert optimistic add. 8598 __ sub(r0, r0, Operand(r1)); // Revert optimistic add.
8599 } 8599 }
8600 HandleBinaryOpSlowCases(masm, &not_smi, lhs, rhs, Builtins::ADD); 8600 HandleBinaryOpSlowCases(masm, &not_smi, lhs, rhs, Builtins::ADD);
8601 break; 8601 break;
8602 } 8602 }
8603 8603
8604 case Token::SUB: { 8604 case Token::SUB: {
8605 Label not_smi; 8605 Label not_smi;
8606 // Fast path. 8606 // Fast path.
8607 if (ShouldGenerateSmiCode()) { 8607 if (ShouldGenerateSmiCode()) {
8608 ASSERT(kSmiTag == 0); // Adjust code below. 8608 STATIC_ASSERT(kSmiTag == 0); // Adjust code below.
8609 __ tst(smi_test_reg, Operand(kSmiTagMask)); 8609 __ tst(smi_test_reg, Operand(kSmiTagMask));
8610 __ b(ne, &not_smi); 8610 __ b(ne, &not_smi);
8611 if (lhs.is(r1)) { 8611 if (lhs.is(r1)) {
8612 __ sub(r0, r1, Operand(r0), SetCC); // Subtract y optimistically. 8612 __ sub(r0, r1, Operand(r0), SetCC); // Subtract y optimistically.
8613 // Return if no overflow. 8613 // Return if no overflow.
8614 __ Ret(vc); 8614 __ Ret(vc);
8615 __ sub(r0, r1, Operand(r0)); // Revert optimistic subtract. 8615 __ sub(r0, r1, Operand(r0)); // Revert optimistic subtract.
8616 } else { 8616 } else {
8617 __ sub(r0, r0, Operand(r1), SetCC); // Subtract y optimistically. 8617 __ sub(r0, r0, Operand(r1), SetCC); // Subtract y optimistically.
8618 // Return if no overflow. 8618 // Return if no overflow.
8619 __ Ret(vc); 8619 __ Ret(vc);
8620 __ add(r0, r0, Operand(r1)); // Revert optimistic subtract. 8620 __ add(r0, r0, Operand(r1)); // Revert optimistic subtract.
8621 } 8621 }
8622 } 8622 }
8623 HandleBinaryOpSlowCases(masm, &not_smi, lhs, rhs, Builtins::SUB); 8623 HandleBinaryOpSlowCases(masm, &not_smi, lhs, rhs, Builtins::SUB);
8624 break; 8624 break;
8625 } 8625 }
8626 8626
8627 case Token::MUL: { 8627 case Token::MUL: {
8628 Label not_smi, slow; 8628 Label not_smi, slow;
8629 if (ShouldGenerateSmiCode()) { 8629 if (ShouldGenerateSmiCode()) {
8630 ASSERT(kSmiTag == 0); // adjust code below 8630 STATIC_ASSERT(kSmiTag == 0); // adjust code below
8631 __ tst(smi_test_reg, Operand(kSmiTagMask)); 8631 __ tst(smi_test_reg, Operand(kSmiTagMask));
8632 Register scratch2 = smi_test_reg; 8632 Register scratch2 = smi_test_reg;
8633 smi_test_reg = no_reg; 8633 smi_test_reg = no_reg;
8634 __ b(ne, &not_smi); 8634 __ b(ne, &not_smi);
8635 // Remove tag from one operand (but keep sign), so that result is Smi. 8635 // Remove tag from one operand (but keep sign), so that result is Smi.
8636 __ mov(ip, Operand(rhs, ASR, kSmiTagSize)); 8636 __ mov(ip, Operand(rhs, ASR, kSmiTagSize));
8637 // Do multiplication 8637 // Do multiplication
8638 // scratch = lower 32 bits of ip * lhs. 8638 // scratch = lower 32 bits of ip * lhs.
8639 __ smull(scratch, scratch2, lhs, ip); 8639 __ smull(scratch, scratch2, lhs, ip);
8640 // Go slow on overflows (overflow bit is not set). 8640 // Go slow on overflows (overflow bit is not set).
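On the "overflow bit is not set" remark: ARM's smull produces the full 64-bit product and leaves the flags untouched, so a 32-bit overflow has to be detected by hand. The usual idiom (the concrete instructions fall in the elided lines below, so this is the general pattern, not a quote) checks the high word against the sign-extension of the low word:

    // The signed 64-bit product hi:lo fits in 32 bits exactly when the
    // high word is all copies of the low word's sign bit.
    bool ProductFits32Bits(int32_t lo, int32_t hi) {
      return hi == (lo >> 31);  // arithmetic shift: 0 or -1
    }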
(...skipping 115 matching lines...)
8756 // support for modulus checking for smis makes sense. We can handle 8756 // support for modulus checking for smis makes sense. We can handle
8757 // 1 to 25 times any power of 2. This covers over half the numbers from 8757 // 1 to 25 times any power of 2. This covers over half the numbers from
8758 // 1 to 100 including all of the first 25. (Actually the constants < 10 8758 // 1 to 100 including all of the first 25. (Actually the constants < 10
8759 // are handled above by reciprocal multiplication. We only get here for 8759 // are handled above by reciprocal multiplication. We only get here for
8760 // those cases if the right hand side is not a constant or for cases 8760 // those cases if the right hand side is not a constant or for cases
8761 // like 192 which is 3*2^6 and ends up in the 3 case in the integer mod 8761 // like 192 which is 3*2^6 and ends up in the 3 case in the integer mod
8762 // stub.) 8762 // stub.)
8763 Label slow; 8763 Label slow;
8764 Label not_power_of_2; 8764 Label not_power_of_2;
8765 ASSERT(!ShouldGenerateSmiCode()); 8765 ASSERT(!ShouldGenerateSmiCode());
8766 ASSERT(kSmiTag == 0); // Adjust code below. 8766 STATIC_ASSERT(kSmiTag == 0); // Adjust code below.
8767 // Check for two positive smis. 8767 // Check for two positive smis.
8768 __ orr(smi_test_reg, lhs, Operand(rhs)); 8768 __ orr(smi_test_reg, lhs, Operand(rhs));
8769 __ tst(smi_test_reg, Operand(0x80000000u | kSmiTagMask)); 8769 __ tst(smi_test_reg, Operand(0x80000000u | kSmiTagMask));
8770 __ b(ne, &slow); 8770 __ b(ne, &slow);
8771 // Check that rhs is a power of two and not zero. 8771 // Check that rhs is a power of two and not zero.
8772 Register mask_bits = r3; 8772 Register mask_bits = r3;
8773 __ sub(scratch, rhs, Operand(1), SetCC); 8773 __ sub(scratch, rhs, Operand(1), SetCC);
8774 __ b(mi, &slow); 8774 __ b(mi, &slow);
8775 __ and_(mask_bits, rhs, Operand(scratch), SetCC); 8775 __ and_(mask_bits, rhs, Operand(scratch), SetCC);
8776 __ b(ne, &not_power_of_2); 8776 __ b(ne, &not_power_of_2);
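A sketch of what the sub/and_ sequence above establishes, for positive untagged values:

    // rhs - 1 negative (mi taken): rhs was zero; positive was already
    // checked above. rhs & (rhs - 1) nonzero (ne taken): more than one
    // bit set, so rhs is not a power of two.
    bool IsNonZeroPowerOfTwo(uint32_t x) {
      return x != 0 && (x & (x - 1)) == 0;
    }
    // Once rhs == 2^k is known, the modulus needs no division at all:
    uint32_t ModPowerOfTwo(uint32_t lhs, uint32_t rhs) {
      return lhs & (rhs - 1);  // lhs mod 2^k
    }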
(...skipping 39 matching lines...)
8816 break; 8816 break;
8817 } 8817 }
8818 8818
8819 case Token::BIT_OR: 8819 case Token::BIT_OR:
8820 case Token::BIT_AND: 8820 case Token::BIT_AND:
8821 case Token::BIT_XOR: 8821 case Token::BIT_XOR:
8822 case Token::SAR: 8822 case Token::SAR:
8823 case Token::SHR: 8823 case Token::SHR:
8824 case Token::SHL: { 8824 case Token::SHL: {
8825 Label slow; 8825 Label slow;
8826 ASSERT(kSmiTag == 0); // adjust code below 8826 STATIC_ASSERT(kSmiTag == 0); // adjust code below
8827 __ tst(smi_test_reg, Operand(kSmiTagMask)); 8827 __ tst(smi_test_reg, Operand(kSmiTagMask));
8828 __ b(ne, &slow); 8828 __ b(ne, &slow);
8829 Register scratch2 = smi_test_reg; 8829 Register scratch2 = smi_test_reg;
8830 smi_test_reg = no_reg; 8830 smi_test_reg = no_reg;
8831 switch (op_) { 8831 switch (op_) {
8832 case Token::BIT_OR: __ orr(result, rhs, Operand(lhs)); break; 8832 case Token::BIT_OR: __ orr(result, rhs, Operand(lhs)); break;
8833 case Token::BIT_AND: __ and_(result, rhs, Operand(lhs)); break; 8833 case Token::BIT_AND: __ and_(result, rhs, Operand(lhs)); break;
8834 case Token::BIT_XOR: __ eor(result, rhs, Operand(lhs)); break; 8834 case Token::BIT_XOR: __ eor(result, rhs, Operand(lhs)); break;
8835 case Token::SAR: 8835 case Token::SAR:
8836 // Remove tags from right operand. 8836 // Remove tags from right operand.
(...skipping 293 matching lines...)
9130 default: 9130 default:
9131 UNREACHABLE(); 9131 UNREACHABLE();
9132 } 9132 }
9133 } 9133 }
9134 9134
9135 9135
9136 void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) { 9136 void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
9137 // r0 holds the exception. 9137 // r0 holds the exception.
9138 9138
9139 // Adjust this code if not the case. 9139 // Adjust this code if not the case.
9140 ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize); 9140 STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
9141 9141
9142 // Drop the sp to the top of the handler. 9142 // Drop the sp to the top of the handler.
9143 __ mov(r3, Operand(ExternalReference(Top::k_handler_address))); 9143 __ mov(r3, Operand(ExternalReference(Top::k_handler_address)));
9144 __ ldr(sp, MemOperand(r3)); 9144 __ ldr(sp, MemOperand(r3));
9145 9145
9146 // Restore the next handler and frame pointer, discard handler state. 9146 // Restore the next handler and frame pointer, discard handler state.
9147 ASSERT(StackHandlerConstants::kNextOffset == 0); 9147 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
9148 __ pop(r2); 9148 __ pop(r2);
9149 __ str(r2, MemOperand(r3)); 9149 __ str(r2, MemOperand(r3));
9150 ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize); 9150 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize);
9151 __ ldm(ia_w, sp, r3.bit() | fp.bit()); // r3: discarded state. 9151 __ ldm(ia_w, sp, r3.bit() | fp.bit()); // r3: discarded state.
9152 9152
9153 // Before returning we restore the context from the frame pointer if 9153 // Before returning we restore the context from the frame pointer if
9154 // not NULL. The frame pointer is NULL in the exception handler of a 9154 // not NULL. The frame pointer is NULL in the exception handler of a
9155 // JS entry frame. 9155 // JS entry frame.
9156 __ cmp(fp, Operand(0)); 9156 __ cmp(fp, Operand(0));
9157 // Set cp to NULL if fp is NULL. 9157 // Set cp to NULL if fp is NULL.
9158 __ mov(cp, Operand(0), LeaveCC, eq); 9158 __ mov(cp, Operand(0), LeaveCC, eq);
9159 // Restore cp otherwise. 9159 // Restore cp otherwise.
9160 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne); 9160 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne);
9161 #ifdef DEBUG 9161 #ifdef DEBUG
9162 if (FLAG_debug_code) { 9162 if (FLAG_debug_code) {
9163 __ mov(lr, Operand(pc)); 9163 __ mov(lr, Operand(pc));
9164 } 9164 }
9165 #endif 9165 #endif
9166 ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize); 9166 STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
9167 __ pop(pc); 9167 __ pop(pc);
9168 } 9168 }
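Collected in one place, the handler-frame layout that the four STATIC_ASSERTs in this function pin down (the state slot is inferred as the remaining word; the other three offsets and the size are asserted directly):

    sp + 0 * kPointerSize : next handler   (kNextOffset == 0)
    sp + 1 * kPointerSize : state          (inferred)
    sp + 2 * kPointerSize : fp             (kFPOffset)
    sp + 3 * kPointerSize : pc             (kPCOffset)
    kSize == 4 * kPointerSize

That layout is why a single pop(r2) unlinks the chain (the next pointer sits on top) and why ldm(ia_w, sp, r3.bit() | fp.bit()) can discard the state word and restore fp in one instruction.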
9169 9169
9170 9170
9171 void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm, 9171 void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm,
9172 UncatchableExceptionType type) { 9172 UncatchableExceptionType type) {
9173 // Adjust this code if not the case. 9173 // Adjust this code if not the case.
9174 ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize); 9174 STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
9175 9175
9176 // Drop sp to the top stack handler. 9176 // Drop sp to the top stack handler.
9177 __ mov(r3, Operand(ExternalReference(Top::k_handler_address))); 9177 __ mov(r3, Operand(ExternalReference(Top::k_handler_address)));
9178 __ ldr(sp, MemOperand(r3)); 9178 __ ldr(sp, MemOperand(r3));
9179 9179
9180 // Unwind the handlers until the ENTRY handler is found. 9180 // Unwind the handlers until the ENTRY handler is found.
9181 Label loop, done; 9181 Label loop, done;
9182 __ bind(&loop); 9182 __ bind(&loop);
9183 // Load the type of the current stack handler. 9183 // Load the type of the current stack handler.
9184 const int kStateOffset = StackHandlerConstants::kStateOffset; 9184 const int kStateOffset = StackHandlerConstants::kStateOffset;
9185 __ ldr(r2, MemOperand(sp, kStateOffset)); 9185 __ ldr(r2, MemOperand(sp, kStateOffset));
9186 __ cmp(r2, Operand(StackHandler::ENTRY)); 9186 __ cmp(r2, Operand(StackHandler::ENTRY));
9187 __ b(eq, &done); 9187 __ b(eq, &done);
9188 // Fetch the next handler in the list. 9188 // Fetch the next handler in the list.
9189 const int kNextOffset = StackHandlerConstants::kNextOffset; 9189 const int kNextOffset = StackHandlerConstants::kNextOffset;
9190 __ ldr(sp, MemOperand(sp, kNextOffset)); 9190 __ ldr(sp, MemOperand(sp, kNextOffset));
9191 __ jmp(&loop); 9191 __ jmp(&loop);
9192 __ bind(&done); 9192 __ bind(&done);
9193 9193
9194 // Set the top handler address to the next handler past the current ENTRY handler. 9194 // Set the top handler address to the next handler past the current ENTRY handler.
9195 ASSERT(StackHandlerConstants::kNextOffset == 0); 9195 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
9196 __ pop(r2); 9196 __ pop(r2);
9197 __ str(r2, MemOperand(r3)); 9197 __ str(r2, MemOperand(r3));
9198 9198
9199 if (type == OUT_OF_MEMORY) { 9199 if (type == OUT_OF_MEMORY) {
9200 // Set external caught exception to false. 9200 // Set external caught exception to false.
9201 ExternalReference external_caught(Top::k_external_caught_exception_address); 9201 ExternalReference external_caught(Top::k_external_caught_exception_address);
9202 __ mov(r0, Operand(false)); 9202 __ mov(r0, Operand(false));
9203 __ mov(r2, Operand(external_caught)); 9203 __ mov(r2, Operand(external_caught));
9204 __ str(r0, MemOperand(r2)); 9204 __ str(r0, MemOperand(r2));
9205 9205
9206 // Set pending exception and r0 to out of memory exception. 9206 // Set pending exception and r0 to out of memory exception.
9207 Failure* out_of_memory = Failure::OutOfMemoryException(); 9207 Failure* out_of_memory = Failure::OutOfMemoryException();
9208 __ mov(r0, Operand(reinterpret_cast<int32_t>(out_of_memory))); 9208 __ mov(r0, Operand(reinterpret_cast<int32_t>(out_of_memory)));
9209 __ mov(r2, Operand(ExternalReference(Top::k_pending_exception_address))); 9209 __ mov(r2, Operand(ExternalReference(Top::k_pending_exception_address)));
9210 __ str(r0, MemOperand(r2)); 9210 __ str(r0, MemOperand(r2));
9211 } 9211 }
9212 9212
9213 // Stack layout at this point. See also StackHandlerConstants. 9213 // Stack layout at this point. See also StackHandlerConstants.
9214 // sp -> state (ENTRY) 9214 // sp -> state (ENTRY)
9215 // fp 9215 // fp
9216 // lr 9216 // lr
9217 9217
9218 // Discard handler state (r2 is not used) and restore frame pointer. 9218 // Discard handler state (r2 is not used) and restore frame pointer.
9219 ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize); 9219 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize);
9220 __ ldm(ia_w, sp, r2.bit() | fp.bit()); // r2: discarded state. 9220 __ ldm(ia_w, sp, r2.bit() | fp.bit()); // r2: discarded state.
9221 // Before returning we restore the context from the frame pointer if 9221 // Before returning we restore the context from the frame pointer if
9222 // not NULL. The frame pointer is NULL in the exception handler of a 9222 // not NULL. The frame pointer is NULL in the exception handler of a
9223 // JS entry frame. 9223 // JS entry frame.
9224 __ cmp(fp, Operand(0)); 9224 __ cmp(fp, Operand(0));
9225 // Set cp to NULL if fp is NULL. 9225 // Set cp to NULL if fp is NULL.
9226 __ mov(cp, Operand(0), LeaveCC, eq); 9226 __ mov(cp, Operand(0), LeaveCC, eq);
9227 // Restore cp otherwise. 9227 // Restore cp otherwise.
9228 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne); 9228 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne);
9229 #ifdef DEBUG 9229 #ifdef DEBUG
9230 if (FLAG_debug_code) { 9230 if (FLAG_debug_code) {
9231 __ mov(lr, Operand(pc)); 9231 __ mov(lr, Operand(pc));
9232 } 9232 }
9233 #endif 9233 #endif
9234 ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize); 9234 STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
9235 __ pop(pc); 9235 __ pop(pc);
9236 } 9236 }
9237 9237
9238 9238
9239 void CEntryStub::GenerateCore(MacroAssembler* masm, 9239 void CEntryStub::GenerateCore(MacroAssembler* masm,
9240 Label* throw_normal_exception, 9240 Label* throw_normal_exception,
9241 Label* throw_termination_exception, 9241 Label* throw_termination_exception,
9242 Label* throw_out_of_memory_exception, 9242 Label* throw_out_of_memory_exception,
9243 bool do_gc, 9243 bool do_gc,
9244 bool always_allocate, 9244 bool always_allocate,
(...skipping 74 matching lines...)
9319 // It's okay to clobber r2 and r3 here. Don't mess with r0 and r1 9319 // It's okay to clobber r2 and r3 here. Don't mess with r0 and r1
9320 // though (contain the result). 9320 // though (contain the result).
9321 __ mov(r2, Operand(scope_depth)); 9321 __ mov(r2, Operand(scope_depth));
9322 __ ldr(r3, MemOperand(r2)); 9322 __ ldr(r3, MemOperand(r2));
9323 __ sub(r3, r3, Operand(1)); 9323 __ sub(r3, r3, Operand(1));
9324 __ str(r3, MemOperand(r2)); 9324 __ str(r3, MemOperand(r2));
9325 } 9325 }
9326 9326
9327 // check for failure result 9327 // check for failure result
9328 Label failure_returned; 9328 Label failure_returned;
9329 ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0); 9329 STATIC_ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0);
9330 // Lower 2 bits of r2 are 0 iff r0 has failure tag. 9330 // Lower 2 bits of r2 are 0 iff r0 has failure tag.
9331 __ add(r2, r0, Operand(1)); 9331 __ add(r2, r0, Operand(1));
9332 __ tst(r2, Operand(kFailureTagMask)); 9332 __ tst(r2, Operand(kFailureTagMask));
9333 __ b(eq, &failure_returned); 9333 __ b(eq, &failure_returned);
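The STATIC_ASSERT above, ((kFailureTag + 1) & kFailureTagMask) == 0, is the whole trick behind this add/tst pair. As a sketch, assuming the two-bit failure tag of this era (kFailureTag == 3, kFailureTagMask == 3):

    // Tag bits 0b11 roll over to 0b00 when 1 is added, with the carry
    // absorbed by the payload; every other tag leaves a nonzero low
    // pair, so one add and one tst classify the return value.
    bool HasFailureTag(uint32_t word) {
      return ((word + 1) & 3u) == 0;
    }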
9334 9334
9335 // Exit C frame and return. 9335 // Exit C frame and return.
9336 // r0:r1: result 9336 // r0:r1: result
9337 // sp: stack pointer 9337 // sp: stack pointer
9338 // fp: frame pointer 9338 // fp: frame pointer
9339 __ LeaveExitFrame(mode_); 9339 __ LeaveExitFrame(mode_);
9340 9340
9341 // check if we should retry or throw exception 9341 // check if we should retry or throw exception
9342 Label retry; 9342 Label retry;
9343 __ bind(&failure_returned); 9343 __ bind(&failure_returned);
9344 ASSERT(Failure::RETRY_AFTER_GC == 0); 9344 STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0);
9345 __ tst(r0, Operand(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize)); 9345 __ tst(r0, Operand(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
9346 __ b(eq, &retry); 9346 __ b(eq, &retry);
9347 9347
9348 // Special handling of out of memory exceptions. 9348 // Special handling of out of memory exceptions.
9349 Failure* out_of_memory = Failure::OutOfMemoryException(); 9349 Failure* out_of_memory = Failure::OutOfMemoryException();
9350 __ cmp(r0, Operand(reinterpret_cast<int32_t>(out_of_memory))); 9350 __ cmp(r0, Operand(reinterpret_cast<int32_t>(out_of_memory)));
9351 __ b(eq, throw_out_of_memory_exception); 9351 __ b(eq, throw_out_of_memory_exception);
9352 9352
9353 // Retrieve the pending exception and clear the variable. 9353 // Retrieve the pending exception and clear the variable.
9354 __ mov(ip, Operand(ExternalReference::the_hole_value_location())); 9354 __ mov(ip, Operand(ExternalReference::the_hole_value_location()));
(...skipping 382 matching lines...)
9737 __ ldr(r4, FieldMemOperand(r4, GlobalObject::kGlobalContextOffset)); 9737 __ ldr(r4, FieldMemOperand(r4, GlobalObject::kGlobalContextOffset));
9738 __ ldr(r4, MemOperand(r4, offset)); 9738 __ ldr(r4, MemOperand(r4, offset));
9739 9739
9740 // Copy the JS object part. 9740 // Copy the JS object part.
9741 for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) { 9741 for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
9742 __ ldr(r3, FieldMemOperand(r4, i)); 9742 __ ldr(r3, FieldMemOperand(r4, i));
9743 __ str(r3, FieldMemOperand(r0, i)); 9743 __ str(r3, FieldMemOperand(r0, i));
9744 } 9744 }
9745 9745
9746 // Set up the callee in-object property. 9746 // Set up the callee in-object property.
9747 ASSERT(Heap::arguments_callee_index == 0); 9747 STATIC_ASSERT(Heap::arguments_callee_index == 0);
9748 __ ldr(r3, MemOperand(sp, 2 * kPointerSize)); 9748 __ ldr(r3, MemOperand(sp, 2 * kPointerSize));
9749 __ str(r3, FieldMemOperand(r0, JSObject::kHeaderSize)); 9749 __ str(r3, FieldMemOperand(r0, JSObject::kHeaderSize));
9750 9750
9751 // Get the length (smi tagged) and set that as an in-object property too. 9751 // Get the length (smi tagged) and set that as an in-object property too.
9752 ASSERT(Heap::arguments_length_index == 1); 9752 STATIC_ASSERT(Heap::arguments_length_index == 1);
9753 __ ldr(r1, MemOperand(sp, 0 * kPointerSize)); 9753 __ ldr(r1, MemOperand(sp, 0 * kPointerSize));
9754 __ str(r1, FieldMemOperand(r0, JSObject::kHeaderSize + kPointerSize)); 9754 __ str(r1, FieldMemOperand(r0, JSObject::kHeaderSize + kPointerSize));
9755 9755
9756 // If there are no actual arguments, we're done. 9756 // If there are no actual arguments, we're done.
9757 Label done; 9757 Label done;
9758 __ cmp(r1, Operand(0)); 9758 __ cmp(r1, Operand(0));
9759 __ b(eq, &done); 9759 __ b(eq, &done);
9760 9760
9761 // Get the parameters pointer from the stack. 9761 // Get the parameters pointer from the stack.
9762 __ ldr(r2, MemOperand(sp, 1 * kPointerSize)); 9762 __ ldr(r2, MemOperand(sp, 1 * kPointerSize));
(...skipping 71 matching lines...)
9834 ExternalReference::address_of_regexp_stack_memory_address(); 9834 ExternalReference::address_of_regexp_stack_memory_address();
9835 ExternalReference address_of_regexp_stack_memory_size = 9835 ExternalReference address_of_regexp_stack_memory_size =
9836 ExternalReference::address_of_regexp_stack_memory_size(); 9836 ExternalReference::address_of_regexp_stack_memory_size();
9837 __ mov(r0, Operand(address_of_regexp_stack_memory_size)); 9837 __ mov(r0, Operand(address_of_regexp_stack_memory_size));
9838 __ ldr(r0, MemOperand(r0, 0)); 9838 __ ldr(r0, MemOperand(r0, 0));
9839 __ tst(r0, Operand(r0)); 9839 __ tst(r0, Operand(r0));
9840 __ b(eq, &runtime); 9840 __ b(eq, &runtime);
9841 9841
9842 // Check that the first argument is a JSRegExp object. 9842 // Check that the first argument is a JSRegExp object.
9843 __ ldr(r0, MemOperand(sp, kJSRegExpOffset)); 9843 __ ldr(r0, MemOperand(sp, kJSRegExpOffset));
9844 ASSERT_EQ(0, kSmiTag); 9844 STATIC_ASSERT(kSmiTag == 0);
9845 __ tst(r0, Operand(kSmiTagMask)); 9845 __ tst(r0, Operand(kSmiTagMask));
9846 __ b(eq, &runtime); 9846 __ b(eq, &runtime);
9847 __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE); 9847 __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
9848 __ b(ne, &runtime); 9848 __ b(ne, &runtime);
9849 9849
9850 // Check that the RegExp has been compiled (data contains a fixed array). 9850 // Check that the RegExp has been compiled (data contains a fixed array).
9851 __ ldr(regexp_data, FieldMemOperand(r0, JSRegExp::kDataOffset)); 9851 __ ldr(regexp_data, FieldMemOperand(r0, JSRegExp::kDataOffset));
9852 if (FLAG_debug_code) { 9852 if (FLAG_debug_code) {
9853 __ tst(regexp_data, Operand(kSmiTagMask)); 9853 __ tst(regexp_data, Operand(kSmiTagMask));
9854 __ Check(nz, "Unexpected type for RegExp data, FixedArray expected"); 9854 __ Check(nz, "Unexpected type for RegExp data, FixedArray expected");
9855 __ CompareObjectType(regexp_data, r0, r0, FIXED_ARRAY_TYPE); 9855 __ CompareObjectType(regexp_data, r0, r0, FIXED_ARRAY_TYPE);
9856 __ Check(eq, "Unexpected type for RegExp data, FixedArray expected"); 9856 __ Check(eq, "Unexpected type for RegExp data, FixedArray expected");
9857 } 9857 }
9858 9858
9859 // regexp_data: RegExp data (FixedArray) 9859 // regexp_data: RegExp data (FixedArray)
9860 // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP. 9860 // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
9861 __ ldr(r0, FieldMemOperand(regexp_data, JSRegExp::kDataTagOffset)); 9861 __ ldr(r0, FieldMemOperand(regexp_data, JSRegExp::kDataTagOffset));
9862 __ cmp(r0, Operand(Smi::FromInt(JSRegExp::IRREGEXP))); 9862 __ cmp(r0, Operand(Smi::FromInt(JSRegExp::IRREGEXP)));
9863 __ b(ne, &runtime); 9863 __ b(ne, &runtime);
9864 9864
9865 // regexp_data: RegExp data (FixedArray) 9865 // regexp_data: RegExp data (FixedArray)
9866 // Check that the number of captures fits in the static offsets vector buffer. 9866 // Check that the number of captures fits in the static offsets vector buffer.
9867 __ ldr(r2, 9867 __ ldr(r2,
9868 FieldMemOperand(regexp_data, JSRegExp::kIrregexpCaptureCountOffset)); 9868 FieldMemOperand(regexp_data, JSRegExp::kIrregexpCaptureCountOffset));
9869 // Calculate number of capture registers (number_of_captures + 1) * 2. This 9869 // Calculate number of capture registers (number_of_captures + 1) * 2. This
9870 // uses the assumption that smis are 2 * their untagged value. 9870 // uses the assumption that smis are 2 * their untagged value.
9871 ASSERT_EQ(0, kSmiTag); 9871 STATIC_ASSERT(kSmiTag == 0);
9872 ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize); 9872 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
9873 __ add(r2, r2, Operand(2)); // r2 was a smi. 9873 __ add(r2, r2, Operand(2)); // r2 was a smi.
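Worked arithmetic for that add, using the encoding the two STATIC_ASSERTs pin down (a smi is its value shifted left one bit, so tagged(n) == 2n):

    registers_needed = (n + 1) * 2
                     = 2n + 2
                     = tagged(n) + 2

So the tagged capture count is usable as-is: one add, no untagging, and the compare against kStaticOffsetsVectorSize that follows operates on a plain integer.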
9874 // Check that the static offsets vector buffer is large enough. 9874 // Check that the static offsets vector buffer is large enough.
9875 __ cmp(r2, Operand(OffsetsVector::kStaticOffsetsVectorSize)); 9875 __ cmp(r2, Operand(OffsetsVector::kStaticOffsetsVectorSize));
9876 __ b(hi, &runtime); 9876 __ b(hi, &runtime);
9877 9877
9878 // r2: Number of capture registers 9878 // r2: Number of capture registers
9879 // regexp_data: RegExp data (FixedArray) 9879 // regexp_data: RegExp data (FixedArray)
9880 // Check that the second argument is a string. 9880 // Check that the second argument is a string.
9881 __ ldr(subject, MemOperand(sp, kSubjectOffset)); 9881 __ ldr(subject, MemOperand(sp, kSubjectOffset));
9882 __ tst(subject, Operand(kSmiTagMask)); 9882 __ tst(subject, Operand(kSmiTagMask));
(...skipping 40 matching lines...)
9923 __ b(gt, &runtime); 9923 __ b(gt, &runtime);
9924 9924
9925 // subject: Subject string 9925 // subject: Subject string
9926 // regexp_data: RegExp data (FixedArray) 9926 // regexp_data: RegExp data (FixedArray)
9927 // Check the representation and encoding of the subject string. 9927 // Check the representation and encoding of the subject string.
9928 Label seq_string; 9928 Label seq_string;
9929 __ ldr(r0, FieldMemOperand(subject, HeapObject::kMapOffset)); 9929 __ ldr(r0, FieldMemOperand(subject, HeapObject::kMapOffset));
9930 __ ldrb(r0, FieldMemOperand(r0, Map::kInstanceTypeOffset)); 9930 __ ldrb(r0, FieldMemOperand(r0, Map::kInstanceTypeOffset));
9931 // First check for flat string. 9931 // First check for flat string.
9932 __ tst(r0, Operand(kIsNotStringMask | kStringRepresentationMask)); 9932 __ tst(r0, Operand(kIsNotStringMask | kStringRepresentationMask));
9933 ASSERT_EQ(0, kStringTag | kSeqStringTag); 9933 STATIC_ASSERT((kStringTag | kSeqStringTag) == 0);
9934 __ b(eq, &seq_string); 9934 __ b(eq, &seq_string);
9935 9935
9936 // subject: Subject string 9936 // subject: Subject string
9937 // regexp_data: RegExp data (FixedArray) 9937 // regexp_data: RegExp data (FixedArray)
9938 // Check for flat cons string. 9938 // Check for flat cons string.
9939 // A flat cons string is a cons string where the second part is the empty 9939 // A flat cons string is a cons string where the second part is the empty
9940 // string. In that case the subject string is just the first part of the cons 9940 // string. In that case the subject string is just the first part of the cons
9941 // string. Also in this case the first part of the cons string is known to be 9941 // string. Also in this case the first part of the cons string is known to be
9942 // a sequential string or an external string. 9942 // a sequential string or an external string.
9943 ASSERT(kExternalStringTag != 0); 9943 STATIC_ASSERT(kExternalStringTag != 0);
9944 ASSERT_EQ(0, kConsStringTag & kExternalStringTag); 9944 STATIC_ASSERT((kConsStringTag & kExternalStringTag) == 0);
9945 __ tst(r0, Operand(kIsNotStringMask | kExternalStringTag)); 9945 __ tst(r0, Operand(kIsNotStringMask | kExternalStringTag));
9946 __ b(ne, &runtime); 9946 __ b(ne, &runtime);
9947 __ ldr(r0, FieldMemOperand(subject, ConsString::kSecondOffset)); 9947 __ ldr(r0, FieldMemOperand(subject, ConsString::kSecondOffset));
9948 __ LoadRoot(r1, Heap::kEmptyStringRootIndex); 9948 __ LoadRoot(r1, Heap::kEmptyStringRootIndex);
9949 __ cmp(r0, r1); 9949 __ cmp(r0, r1);
9950 __ b(ne, &runtime); 9950 __ b(ne, &runtime);
9951 __ ldr(subject, FieldMemOperand(subject, ConsString::kFirstOffset)); 9951 __ ldr(subject, FieldMemOperand(subject, ConsString::kFirstOffset));
9952 __ ldr(r0, FieldMemOperand(subject, HeapObject::kMapOffset)); 9952 __ ldr(r0, FieldMemOperand(subject, HeapObject::kMapOffset));
9953 __ ldrb(r0, FieldMemOperand(r0, Map::kInstanceTypeOffset)); 9953 __ ldrb(r0, FieldMemOperand(r0, Map::kInstanceTypeOffset));
9954 // Is first part a flat string? 9954 // Is first part a flat string?
9955 ASSERT_EQ(0, kSeqStringTag); 9955 STATIC_ASSERT(kSeqStringTag == 0);
9956 __ tst(r0, Operand(kStringRepresentationMask)); 9956 __ tst(r0, Operand(kStringRepresentationMask));
9957 __ b(nz, &runtime); 9957 __ b(nz, &runtime);
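For readers unfamiliar with flat cons strings, a minimal C++ sketch of the rule applied above (hypothetical types; the real V8 accessors may differ):

// Sketch: a cons string whose second half is the empty string is "flat";
// its characters live entirely in the first half, so we can retarget
// `subject` at that first half. Anything else goes to the runtime.
struct ConsStr { const void* first; const void* second; };
const void* FirstIfFlat(const ConsStr& s, const void* empty_string) {
  return s.second == empty_string ? s.first : nullptr;  // nullptr => runtime
}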
9958 9958
9959 __ bind(&seq_string); 9959 __ bind(&seq_string);
9960 // subject: Subject string 9960 // subject: Subject string
9961 // regexp_data: RegExp data (FixedArray) 9961 // regexp_data: RegExp data (FixedArray)
9962 // r0: Instance type of subject string 9962 // r0: Instance type of subject string
9963 ASSERT_EQ(4, kAsciiStringTag); 9963 STATIC_ASSERT(4 == kAsciiStringTag);
9964 ASSERT_EQ(0, kTwoByteStringTag); 9964 STATIC_ASSERT(kTwoByteStringTag == 0);
9965 // Find the code object based on the assumptions above. 9965 // Find the code object based on the assumptions above.
9966 __ and_(r0, r0, Operand(kStringEncodingMask)); 9966 __ and_(r0, r0, Operand(kStringEncodingMask));
9967 __ mov(r3, Operand(r0, ASR, 2), SetCC); 9967 __ mov(r3, Operand(r0, ASR, 2), SetCC);
9968 __ ldr(r7, FieldMemOperand(regexp_data, JSRegExp::kDataAsciiCodeOffset), ne); 9968 __ ldr(r7, FieldMemOperand(regexp_data, JSRegExp::kDataAsciiCodeOffset), ne);
9969 __ ldr(r7, FieldMemOperand(regexp_data, JSRegExp::kDataUC16CodeOffset), eq); 9969 __ ldr(r7, FieldMemOperand(regexp_data, JSRegExp::kDataUC16CodeOffset), eq);
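The conditional loads above implement a two-way dispatch on the encoding bit; a hedged sketch (kStringEncodingMask is assumed to be the single bit that is set for ASCII and clear for two-byte, per the STATIC_ASSERTs):

// Sketch: select the compiled irregexp code object by subject encoding.
struct RegExpCode { const void* ascii_code; const void* uc16_code; };
const void* SelectCode(const RegExpCode& d, unsigned instance_type,
                       unsigned kStringEncodingMask) {
  return (instance_type & kStringEncodingMask) != 0 ? d.ascii_code
                                                    : d.uc16_code;
}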
9970 9970
9971 // Check that the irregexp code has been generated for the actual string 9971 // Check that the irregexp code has been generated for the actual string
9972 // encoding. If it has, the field contains a code object; otherwise it contains 9972 // encoding. If it has, the field contains a code object; otherwise it contains
9973 // the hole. 9973 // the hole.
9974 __ CompareObjectType(r7, r0, r0, CODE_TYPE); 9974 __ CompareObjectType(r7, r0, r0, CODE_TYPE);
(...skipping 33 matching lines...)
10008 __ str(r0, MemOperand(sp, 1 * kPointerSize)); 10008 __ str(r0, MemOperand(sp, 1 * kPointerSize));
10009 10009
10010 // Argument 5 (sp[0]): static offsets vector buffer. 10010 // Argument 5 (sp[0]): static offsets vector buffer.
10011 __ mov(r0, Operand(ExternalReference::address_of_static_offsets_vector())); 10011 __ mov(r0, Operand(ExternalReference::address_of_static_offsets_vector()));
10012 __ str(r0, MemOperand(sp, 0 * kPointerSize)); 10012 __ str(r0, MemOperand(sp, 0 * kPointerSize));
10013 10013
10014 // For arguments 4 and 3 get string length, calculate start of string data and 10014 // For arguments 4 and 3 get string length, calculate start of string data and
10015 // calculate the shift of the index (0 for ASCII and 1 for two byte). 10015 // calculate the shift of the index (0 for ASCII and 1 for two byte).
10016 __ ldr(r0, FieldMemOperand(subject, String::kLengthOffset)); 10016 __ ldr(r0, FieldMemOperand(subject, String::kLengthOffset));
10017 __ mov(r0, Operand(r0, ASR, kSmiTagSize)); 10017 __ mov(r0, Operand(r0, ASR, kSmiTagSize));
10018 ASSERT_EQ(SeqAsciiString::kHeaderSize, SeqTwoByteString::kHeaderSize); 10018 STATIC_ASSERT(SeqAsciiString::kHeaderSize == SeqTwoByteString::kHeaderSize);
10019 __ add(r9, subject, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); 10019 __ add(r9, subject, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
10020 __ eor(r3, r3, Operand(1)); 10020 __ eor(r3, r3, Operand(1));
10021 // Argument 4 (r3): End of string data 10021 // Argument 4 (r3): End of string data
10022 // Argument 3 (r2): Start of string data 10022 // Argument 3 (r2): Start of string data
10023 __ add(r2, r9, Operand(r1, LSL, r3)); 10023 __ add(r2, r9, Operand(r1, LSL, r3));
10024 __ add(r3, r9, Operand(r0, LSL, r3)); 10024 __ add(r3, r9, Operand(r0, LSL, r3));
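In C++ terms, arguments 3 and 4 are just byte addresses computed from the character index and the per-character shift (a sketch; names are illustrative):

#include <cstddef>

// Sketch: shift is 0 for ASCII and 1 for two-byte, so (index << shift) is a
// byte offset. r2 gets the start (previous index) and r3 the end (length).
struct ScanRange { const char* start; const char* end; };
ScanRange StringDataRange(const char* payload, size_t previous_index,
                          size_t length, unsigned shift) {
  return { payload + (previous_index << shift), payload + (length << shift) };
}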
10025 10025
10026 // Argument 2 (r1): Previous index. 10026 // Argument 2 (r1): Previous index.
10027 // Already there 10027 // Already there
10028 10028
(...skipping 34 matching lines...)
10063 // For failure and exception return null. 10063 // For failure and exception return null.
10064 __ mov(r0, Operand(Factory::null_value())); 10064 __ mov(r0, Operand(Factory::null_value()));
10065 __ add(sp, sp, Operand(4 * kPointerSize)); 10065 __ add(sp, sp, Operand(4 * kPointerSize));
10066 __ Ret(); 10066 __ Ret();
10067 10067
10068 // Process the result from the native regexp code. 10068 // Process the result from the native regexp code.
10069 __ bind(&success); 10069 __ bind(&success);
10070 __ ldr(r1, 10070 __ ldr(r1,
10071 FieldMemOperand(regexp_data, JSRegExp::kIrregexpCaptureCountOffset)); 10071 FieldMemOperand(regexp_data, JSRegExp::kIrregexpCaptureCountOffset));
10072 // Calculate number of capture registers (number_of_captures + 1) * 2. 10072 // Calculate number of capture registers (number_of_captures + 1) * 2.
10073 ASSERT_EQ(0, kSmiTag); 10073 STATIC_ASSERT(kSmiTag == 0);
10074 ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize); 10074 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
10075 __ add(r1, r1, Operand(2)); // r1 was a smi. 10075 __ add(r1, r1, Operand(2)); // r1 was a smi.
10076 10076
10077 // r1: number of capture registers 10077 // r1: number of capture registers
10078 // r4: subject string 10078 // r4: subject string
10079 // Store the capture count. 10079 // Store the capture count.
10080 __ mov(r2, Operand(r1, LSL, kSmiTagSize + kSmiShiftSize)); // To smi. 10080 __ mov(r2, Operand(r1, LSL, kSmiTagSize + kSmiShiftSize)); // To smi.
10081 __ str(r2, FieldMemOperand(last_match_info_elements, 10081 __ str(r2, FieldMemOperand(last_match_info_elements,
10082 RegExpImpl::kLastCaptureCountOffset)); 10082 RegExpImpl::kLastCaptureCountOffset));
10083 // Store last subject and last input. 10083 // Store last subject and last input.
10084 __ mov(r3, last_match_info_elements); // Moved up to reduce latency. 10084 __ mov(r3, last_match_info_elements); // Moved up to reduce latency.
(...skipping 191 matching lines...)
10276 // Put smi-tagged index into scratch register. 10276 // Put smi-tagged index into scratch register.
10277 __ mov(scratch_, index_); 10277 __ mov(scratch_, index_);
10278 __ bind(&got_smi_index_); 10278 __ bind(&got_smi_index_);
10279 10279
10280 // Check for index out of range. 10280 // Check for index out of range.
10281 __ ldr(ip, FieldMemOperand(object_, String::kLengthOffset)); 10281 __ ldr(ip, FieldMemOperand(object_, String::kLengthOffset));
10282 __ cmp(ip, Operand(scratch_)); 10282 __ cmp(ip, Operand(scratch_));
10283 __ b(ls, index_out_of_range_); 10283 __ b(ls, index_out_of_range_);
10284 10284
10285 // We need special handling for non-flat strings. 10285 // We need special handling for non-flat strings.
10286 ASSERT(kSeqStringTag == 0); 10286 STATIC_ASSERT(kSeqStringTag == 0);
10287 __ tst(result_, Operand(kStringRepresentationMask)); 10287 __ tst(result_, Operand(kStringRepresentationMask));
10288 __ b(eq, &flat_string); 10288 __ b(eq, &flat_string);
10289 10289
10290 // Handle non-flat strings. 10290 // Handle non-flat strings.
10291 __ tst(result_, Operand(kIsConsStringMask)); 10291 __ tst(result_, Operand(kIsConsStringMask));
10292 __ b(eq, &call_runtime_); 10292 __ b(eq, &call_runtime_);
10293 10293
10294 // ConsString. 10294 // ConsString.
10295 // Check whether the right hand side is the empty string (i.e. if 10295 // Check whether the right hand side is the empty string (i.e. if
10296 // this is really a flat string in a cons string). If that is not 10296 // this is really a flat string in a cons string). If that is not
10297 // the case we would rather go to the runtime system now to flatten 10297 // the case we would rather go to the runtime system now to flatten
10298 // the string. 10298 // the string.
10299 __ ldr(result_, FieldMemOperand(object_, ConsString::kSecondOffset)); 10299 __ ldr(result_, FieldMemOperand(object_, ConsString::kSecondOffset));
10300 __ LoadRoot(ip, Heap::kEmptyStringRootIndex); 10300 __ LoadRoot(ip, Heap::kEmptyStringRootIndex);
10301 __ cmp(result_, Operand(ip)); 10301 __ cmp(result_, Operand(ip));
10302 __ b(ne, &call_runtime_); 10302 __ b(ne, &call_runtime_);
10303 // Get the first of the two strings and load its instance type. 10303 // Get the first of the two strings and load its instance type.
10304 __ ldr(object_, FieldMemOperand(object_, ConsString::kFirstOffset)); 10304 __ ldr(object_, FieldMemOperand(object_, ConsString::kFirstOffset));
10305 __ ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset)); 10305 __ ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset));
10306 __ ldrb(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset)); 10306 __ ldrb(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset));
10307 // If the first cons component is also non-flat, then go to runtime. 10307 // If the first cons component is also non-flat, then go to runtime.
10308 ASSERT(kSeqStringTag == 0); 10308 STATIC_ASSERT(kSeqStringTag == 0);
10309 __ tst(result_, Operand(kStringRepresentationMask)); 10309 __ tst(result_, Operand(kStringRepresentationMask));
10310 __ b(nz, &call_runtime_); 10310 __ b(nz, &call_runtime_);
10311 10311
10312 // Check for 1-byte or 2-byte string. 10312 // Check for 1-byte or 2-byte string.
10313 __ bind(&flat_string); 10313 __ bind(&flat_string);
10314 ASSERT(kAsciiStringTag != 0); 10314 STATIC_ASSERT(kAsciiStringTag != 0);
10315 __ tst(result_, Operand(kStringEncodingMask)); 10315 __ tst(result_, Operand(kStringEncodingMask));
10316 __ b(nz, &ascii_string); 10316 __ b(nz, &ascii_string);
10317 10317
10318 // 2-byte string. 10318 // 2-byte string.
10319 // Load the 2-byte character code into the result register. We can 10319 // Load the 2-byte character code into the result register. We can
10320 // add without shifting since the smi tag size is the log2 of the 10320 // add without shifting since the smi tag size is the log2 of the
10321 // number of bytes in a two-byte character. 10321 // number of bytes in a two-byte character.
10322 ASSERT(kSmiTag == 0 && kSmiTagSize == 1 && kSmiShiftSize == 0); 10322 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1 && kSmiShiftSize == 0);
10323 __ add(scratch_, object_, Operand(scratch_)); 10323 __ add(scratch_, object_, Operand(scratch_));
10324 __ ldrh(result_, FieldMemOperand(scratch_, SeqTwoByteString::kHeaderSize)); 10324 __ ldrh(result_, FieldMemOperand(scratch_, SeqTwoByteString::kHeaderSize));
10325 __ jmp(&got_char_code); 10325 __ jmp(&got_char_code);
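The "add without shifting" remark above is worth spelling out (a sketch; the casts just make the byte arithmetic explicit): with kSmiTagSize == 1 and kSmiShiftSize == 0, smi(i) == 2 * i == i * sizeof(uint16_t), so the tagged index is already the byte offset of character i.

#include <cstdint>

// Sketch: load character i of a two-byte string using its smi-tagged index
// directly as the byte offset.
uint16_t LoadTwoByteChar(const uint16_t* chars, int32_t smi_index) {
  const char* bytes = reinterpret_cast<const char*>(chars);
  return *reinterpret_cast<const uint16_t*>(bytes + smi_index);
}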
10326 10326
10327 // ASCII string. 10327 // ASCII string.
10328 // Load the byte into the result register. 10328 // Load the byte into the result register.
10329 __ bind(&ascii_string); 10329 __ bind(&ascii_string);
10330 __ add(scratch_, object_, Operand(scratch_, LSR, kSmiTagSize)); 10330 __ add(scratch_, object_, Operand(scratch_, LSR, kSmiTagSize));
10331 __ ldrb(result_, FieldMemOperand(scratch_, SeqAsciiString::kHeaderSize)); 10331 __ ldrb(result_, FieldMemOperand(scratch_, SeqAsciiString::kHeaderSize));
10332 10332
(...skipping 56 matching lines...)
10389 10389
10390 __ Abort("Unexpected fallthrough from CharCodeAt slow case"); 10390 __ Abort("Unexpected fallthrough from CharCodeAt slow case");
10391 } 10391 }
10392 10392
10393 10393
10394 // ------------------------------------------------------------------------- 10394 // -------------------------------------------------------------------------
10395 // StringCharFromCodeGenerator 10395 // StringCharFromCodeGenerator
10396 10396
10397 void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) { 10397 void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
10398 // Fast case of Heap::LookupSingleCharacterStringFromCode. 10398 // Fast case of Heap::LookupSingleCharacterStringFromCode.
10399 ASSERT(kSmiTag == 0); 10399 STATIC_ASSERT(kSmiTag == 0);
10400 ASSERT(kSmiShiftSize == 0); 10400 STATIC_ASSERT(kSmiShiftSize == 0);
10401 ASSERT(IsPowerOf2(String::kMaxAsciiCharCode + 1)); 10401 ASSERT(IsPowerOf2(String::kMaxAsciiCharCode + 1));
10402 __ tst(code_, 10402 __ tst(code_,
10403 Operand(kSmiTagMask | 10403 Operand(kSmiTagMask |
10404 ((~String::kMaxAsciiCharCode) << kSmiTagSize))); 10404 ((~String::kMaxAsciiCharCode) << kSmiTagSize)));
10405 __ b(nz, &slow_case_); 10405 __ b(nz, &slow_case_);
10406 10406
10407 __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex); 10407 __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
10408 // At this point the code register contains a smi-tagged ascii char code. 10408 // At this point the code register contains a smi-tagged ascii char code.
10409 ASSERT(kSmiTag == 0); 10409 STATIC_ASSERT(kSmiTag == 0);
10410 __ add(result_, result_, Operand(code_, LSL, kPointerSizeLog2 - kSmiTagSize)); 10410 __ add(result_, result_, Operand(code_, LSL, kPointerSizeLog2 - kSmiTagSize));
10411 __ ldr(result_, FieldMemOperand(result_, FixedArray::kHeaderSize)); 10411 __ ldr(result_, FieldMemOperand(result_, FixedArray::kHeaderSize));
10412 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); 10412 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
10413 __ cmp(result_, Operand(ip)); 10413 __ cmp(result_, Operand(ip));
10414 __ b(eq, &slow_case_); 10414 __ b(eq, &slow_case_);
10415 __ bind(&exit_); 10415 __ bind(&exit_);
10416 } 10416 }
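The tst at the top of GenerateFast folds two checks into one mask test: code_ must be a smi and its untagged value must not exceed String::kMaxAsciiCharCode. A hedged C++ sketch of the predicate (the constant values are assumptions matching the asserts above):

#include <cstdint>

// Sketch: true iff `code` is a smi in [0, kMaxAsciiCharCode]. Any non-smi
// (tag bit set) or out-of-range code leaves a bit set under the mask.
bool FitsSingleCharCache(uint32_t code) {
  const uint32_t kSmiTagMask = 1;           // assumed: kSmiTag == 0, tag size 1
  const uint32_t kMaxAsciiCharCode = 0x7f;  // assumed value
  return (code & (kSmiTagMask | (~kMaxAsciiCharCode << 1))) == 0;
}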
10417 10417
10418 10418
10419 void StringCharFromCodeGenerator::GenerateSlow( 10419 void StringCharFromCodeGenerator::GenerateSlow(
(...skipping 84 matching lines...)
10504 // that it is. 10504 // that it is.
10505 __ tst(dest, Operand(kPointerAlignmentMask)); 10505 __ tst(dest, Operand(kPointerAlignmentMask));
10506 __ Check(eq, "Destination of copy not aligned."); 10506 __ Check(eq, "Destination of copy not aligned.");
10507 } 10507 }
10508 10508
10509 const int kReadAlignment = 4; 10509 const int kReadAlignment = 4;
10510 const int kReadAlignmentMask = kReadAlignment - 1; 10510 const int kReadAlignmentMask = kReadAlignment - 1;
10511 // Ensure that reading an entire aligned word containing the last character 10511 // Ensure that reading an entire aligned word containing the last character
10512 // of a string will not read outside the allocated area (because we pad up 10512 // of a string will not read outside the allocated area (because we pad up
10513 // to kObjectAlignment). 10513 // to kObjectAlignment).
10514 ASSERT(kObjectAlignment >= kReadAlignment); 10514 STATIC_ASSERT(kObjectAlignment >= kReadAlignment);
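The padding argument above in one line of arithmetic (a sketch): allocations are rounded up to kObjectAlignment, so the aligned word that contains a string's last character always lies inside the allocation.

#include <cstddef>

// Sketch: heap sizes are rounded up like this, which is why a 4-byte
// aligned read covering the final character cannot run past the object.
constexpr size_t RoundUpToAlignment(size_t size, size_t alignment) {
  return (size + alignment - 1) & ~(alignment - 1);
}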
10515 // Assumes word reads and writes are little endian. 10515 // Assumes word reads and writes are little endian.
10516 // Nothing to do for zero characters. 10516 // Nothing to do for zero characters.
10517 Label done; 10517 Label done;
10518 if (!ascii) { 10518 if (!ascii) {
10519 __ add(count, count, Operand(count), SetCC); 10519 __ add(count, count, Operand(count), SetCC);
10520 } else { 10520 } else {
10521 __ cmp(count, Operand(0)); 10521 __ cmp(count, Operand(0));
10522 } 10522 }
10523 __ b(eq, &done); 10523 __ b(eq, &done);
10524 10524
(...skipping 183 matching lines...)
10708 // Calculate entry in symbol table. 10708 // Calculate entry in symbol table.
10709 if (i > 0) { 10709 if (i > 0) {
10710 __ add(candidate, hash, Operand(SymbolTable::GetProbeOffset(i))); 10710 __ add(candidate, hash, Operand(SymbolTable::GetProbeOffset(i)));
10711 } else { 10711 } else {
10712 __ mov(candidate, hash); 10712 __ mov(candidate, hash);
10713 } 10713 }
10714 10714
10715 __ and_(candidate, candidate, Operand(mask)); 10715 __ and_(candidate, candidate, Operand(mask));
10716 10716
10717 // Load the entry from the symbol table. 10717 // Load the entry from the symbol table.
10718 ASSERT_EQ(1, SymbolTable::kEntrySize); 10718 STATIC_ASSERT(SymbolTable::kEntrySize == 1);
10719 __ ldr(candidate, 10719 __ ldr(candidate,
10720 MemOperand(first_symbol_table_element, 10720 MemOperand(first_symbol_table_element,
10721 candidate, 10721 candidate,
10722 LSL, 10722 LSL,
10723 kPointerSizeLog2)); 10723 kPointerSizeLog2));
10724 10724
10725 // If entry is undefined no string with this hash can be found. 10725 // If entry is undefined no string with this hash can be found.
10726 __ cmp(candidate, undefined); 10726 __ cmp(candidate, undefined);
10727 __ b(eq, not_found); 10727 __ b(eq, not_found);
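One probe iteration of the loop above, as a hedged C++ sketch (entry layout per the kEntrySize == 1 assert; names are illustrative):

#include <cstdint>

// Sketch: derive a candidate slot from the hash plus the probe offset,
// mask it into the table, and stop on undefined (hash not present).
const void* ProbeOnce(const void* const* first_element, uint32_t hash,
                      uint32_t probe_offset, uint32_t mask,
                      const void* undefined) {
  uint32_t candidate = (hash + probe_offset) & mask;
  const void* entry = first_element[candidate];  // kEntrySize == 1
  return entry == undefined ? nullptr : entry;   // nullptr => not_found
}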
10728 10728
(...skipping 79 matching lines...)
10808 // If any of these assumptions fail, we call the runtime system. 10808 // If any of these assumptions fail, we call the runtime system.
10809 10809
10810 static const int kToOffset = 0 * kPointerSize; 10810 static const int kToOffset = 0 * kPointerSize;
10811 static const int kFromOffset = 1 * kPointerSize; 10811 static const int kFromOffset = 1 * kPointerSize;
10812 static const int kStringOffset = 2 * kPointerSize; 10812 static const int kStringOffset = 2 * kPointerSize;
10813 10813
10814 10814
10815 // Check bounds and smi-ness. 10815 // Check bounds and smi-ness.
10816 __ ldr(r7, MemOperand(sp, kToOffset)); 10816 __ ldr(r7, MemOperand(sp, kToOffset));
10817 __ ldr(r6, MemOperand(sp, kFromOffset)); 10817 __ ldr(r6, MemOperand(sp, kFromOffset));
10818 ASSERT_EQ(0, kSmiTag); 10818 STATIC_ASSERT(kSmiTag == 0);
10819 ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize); 10819 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
10820 // I.e., arithmetic shift right by one un-smi-tags. 10820 // I.e., arithmetic shift right by one un-smi-tags.
10821 __ mov(r2, Operand(r7, ASR, 1), SetCC); 10821 __ mov(r2, Operand(r7, ASR, 1), SetCC);
10822 __ mov(r3, Operand(r6, ASR, 1), SetCC, cc); 10822 __ mov(r3, Operand(r6, ASR, 1), SetCC, cc);
10823 // If either r2 or r6 had the smi tag bit set, then carry is set now. 10823 // If either r2 or r6 had the smi tag bit set, then carry is set now.
10824 __ b(cs, &runtime); // Either "from" or "to" is not a smi. 10824 __ b(cs, &runtime); // Either "from" or "to" is not a smi.
10825 __ b(mi, &runtime); // From is negative. 10825 __ b(mi, &runtime); // From is negative.
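The carry trick above deserves a note: "mov rd, Operand(rs, ASR, 1), SetCC" shifts bit 0 of rs (the smi tag bit) into the carry flag, and predicating the second mov on cc preserves a set carry from the first. A small emulation sketch:

#include <cstdint>

// Sketch: emulate ASR-#1-with-SetCC. For a smi the tag bit is 0, so carry
// stays clear and `value` is the untagged integer; a non-smi sets carry.
struct Shifted { int32_t value; bool carry; };
Shifted AsrOneSetCC(int32_t x) {
  return { x >> 1, (x & 1) != 0 };  // carry = the bit shifted out
}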
10826 10826
10827 __ sub(r2, r2, Operand(r3), SetCC); 10827 __ sub(r2, r2, Operand(r3), SetCC);
10828 __ b(mi, &runtime); // Fail if from > to. 10828 __ b(mi, &runtime); // Fail if from > to.
10829 // Special handling of sub-strings of length 1 and 2. One character strings 10829 // Special handling of sub-strings of length 1 and 2. One character strings
10830 // are handled in the runtime system (looked up in the single character 10830 // are handled in the runtime system (looked up in the single character
10831 // cache). Two character strings are looked for in the symbol cache. 10831 // cache). Two character strings are looked for in the symbol cache.
10832 __ cmp(r2, Operand(2)); 10832 __ cmp(r2, Operand(2));
10833 __ b(lt, &runtime); 10833 __ b(lt, &runtime);
10834 10834
10835 // r2: length 10835 // r2: length
10836 // r3: from index (untagged smi) 10836 // r3: from index (untagged smi)
10837 // r6: from (smi) 10837 // r6: from (smi)
10838 // r7: to (smi) 10838 // r7: to (smi)
10839 10839
10840 // Make sure first argument is a sequential (or flat) string. 10840 // Make sure first argument is a sequential (or flat) string.
10841 __ ldr(r5, MemOperand(sp, kStringOffset)); 10841 __ ldr(r5, MemOperand(sp, kStringOffset));
10842 ASSERT_EQ(0, kSmiTag); 10842 STATIC_ASSERT(kSmiTag == 0);
10843 __ tst(r5, Operand(kSmiTagMask)); 10843 __ tst(r5, Operand(kSmiTagMask));
10844 __ b(eq, &runtime); 10844 __ b(eq, &runtime);
10845 Condition is_string = masm->IsObjectStringType(r5, r1); 10845 Condition is_string = masm->IsObjectStringType(r5, r1);
10846 __ b(NegateCondition(is_string), &runtime); 10846 __ b(NegateCondition(is_string), &runtime);
10847 10847
10848 // r1: instance type 10848 // r1: instance type
10849 // r2: length 10849 // r2: length
10850 // r3: from index (untagged smi) 10850 // r3: from index (untagged smi)
10851 // r5: string 10851 // r5: string
10852 // r6: from (smi) 10852 // r6: from (smi)
10853 // r7: to (smi) 10853 // r7: to (smi)
10854 Label seq_string; 10854 Label seq_string;
10855 __ and_(r4, r1, Operand(kStringRepresentationMask)); 10855 __ and_(r4, r1, Operand(kStringRepresentationMask));
10856 ASSERT(kSeqStringTag < kConsStringTag); 10856 STATIC_ASSERT(kSeqStringTag < kConsStringTag);
10857 ASSERT(kExternalStringTag > kConsStringTag); 10857 STATIC_ASSERT(kConsStringTag < kExternalStringTag);
10858 __ cmp(r4, Operand(kConsStringTag)); 10858 __ cmp(r4, Operand(kConsStringTag));
10859 __ b(gt, &runtime); // External strings go to runtime. 10859 __ b(gt, &runtime); // External strings go to runtime.
10860 __ b(lt, &seq_string); // Sequential strings are handled directly. 10860 __ b(lt, &seq_string); // Sequential strings are handled directly.
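The ordering STATIC_ASSERTs above are what make a single cmp sufficient; in C++ the same three-way classification looks like this (sketch):

// Sketch: with kSeqStringTag < kConsStringTag < kExternalStringTag, one
// comparison against kConsStringTag separates all three representations.
enum class Rep { kSequential, kCons, kExternal };
Rep Classify(unsigned representation_tag, unsigned cons_tag) {
  if (representation_tag < cons_tag) return Rep::kSequential;  // b(lt, ...)
  if (representation_tag > cons_tag) return Rep::kExternal;    // b(gt, ...)
  return Rep::kCons;                                           // fall through
}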
10861 10861
10862 // Cons string. Try to recurse (once) on the first substring. 10862 // Cons string. Try to recurse (once) on the first substring.
10863 // (This adds a little more generality than necessary to handle flattened 10863 // (This adds a little more generality than necessary to handle flattened
10864 // cons strings, but not much). 10864 // cons strings, but not much).
10865 __ ldr(r5, FieldMemOperand(r5, ConsString::kFirstOffset)); 10865 __ ldr(r5, FieldMemOperand(r5, ConsString::kFirstOffset));
10866 __ ldr(r4, FieldMemOperand(r5, HeapObject::kMapOffset)); 10866 __ ldr(r4, FieldMemOperand(r5, HeapObject::kMapOffset));
10867 __ ldrb(r1, FieldMemOperand(r4, Map::kInstanceTypeOffset)); 10867 __ ldrb(r1, FieldMemOperand(r4, Map::kInstanceTypeOffset));
10868 __ tst(r1, Operand(kStringRepresentationMask)); 10868 __ tst(r1, Operand(kStringRepresentationMask));
10869 ASSERT_EQ(0, kSeqStringTag); 10869 STATIC_ASSERT(kSeqStringTag == 0);
10870 __ b(ne, &runtime); // Cons and External strings go to runtime. 10870 __ b(ne, &runtime); // Cons and External strings go to runtime.
10871 10871
10872 // Definitely a sequential string. 10872 // Definitely a sequential string.
10873 __ bind(&seq_string); 10873 __ bind(&seq_string);
10874 10874
10875 // r1: instance type. 10875 // r1: instance type.
10876 // r2: length 10876 // r2: length
10877 // r3: from index (untagged smi) 10877 // r3: from index (untagged smi)
10878 // r5: string 10878 // r5: string
10879 // r6: from (smi) 10879 // r6: from (smi)
10880 // r7: to (smi) 10880 // r7: to (smi)
10881 __ ldr(r4, FieldMemOperand(r5, String::kLengthOffset)); 10881 __ ldr(r4, FieldMemOperand(r5, String::kLengthOffset));
10882 __ cmp(r4, Operand(r7)); 10882 __ cmp(r4, Operand(r7));
10883 __ b(lt, &runtime); // Fail if to > length. 10883 __ b(lt, &runtime); // Fail if to > length.
10884 10884
10885 // r1: instance type. 10885 // r1: instance type.
10886 // r2: result string length. 10886 // r2: result string length.
10887 // r3: from index (untagged smi) 10887 // r3: from index (untagged smi)
10888 // r5: string. 10888 // r5: string.
10889 // r6: from offset (smi) 10889 // r6: from offset (smi)
10890 // Check for flat ascii string. 10890 // Check for flat ascii string.
10891 Label non_ascii_flat; 10891 Label non_ascii_flat;
10892 __ tst(r1, Operand(kStringEncodingMask)); 10892 __ tst(r1, Operand(kStringEncodingMask));
10893 ASSERT_EQ(0, kTwoByteStringTag); 10893 STATIC_ASSERT(kTwoByteStringTag == 0);
10894 __ b(eq, &non_ascii_flat); 10894 __ b(eq, &non_ascii_flat);
10895 10895
10896 Label result_longer_than_two; 10896 Label result_longer_than_two;
10897 __ cmp(r2, Operand(2)); 10897 __ cmp(r2, Operand(2));
10898 __ b(gt, &result_longer_than_two); 10898 __ b(gt, &result_longer_than_two);
10899 10899
10900 // Sub string of length 2 requested. 10900 // Sub string of length 2 requested.
10901 // Get the two characters forming the sub string. 10901 // Get the two characters forming the sub string.
10902 __ add(r5, r5, Operand(r3)); 10902 __ add(r5, r5, Operand(r3));
10903 __ ldrb(r3, FieldMemOperand(r5, SeqAsciiString::kHeaderSize)); 10903 __ ldrb(r3, FieldMemOperand(r5, SeqAsciiString::kHeaderSize));
(...skipping 28 matching lines...)
10932 // Locate first character of result. 10932 // Locate first character of result.
10933 __ add(r1, r0, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); 10933 __ add(r1, r0, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
10934 // Locate 'from' character of string. 10934 // Locate 'from' character of string.
10935 __ add(r5, r5, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); 10935 __ add(r5, r5, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
10936 __ add(r5, r5, Operand(r6, ASR, 1)); 10936 __ add(r5, r5, Operand(r6, ASR, 1));
10937 10937
10938 // r0: result string. 10938 // r0: result string.
10939 // r1: first character of result string. 10939 // r1: first character of result string.
10940 // r2: result string length. 10940 // r2: result string length.
10941 // r5: first character of sub string to copy. 10941 // r5: first character of sub string to copy.
10942 ASSERT_EQ(0, SeqAsciiString::kHeaderSize & kObjectAlignmentMask); 10942 STATIC_ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
10943 StringHelper::GenerateCopyCharactersLong(masm, r1, r5, r2, r3, r4, r6, r7, r9, 10943 StringHelper::GenerateCopyCharactersLong(masm, r1, r5, r2, r3, r4, r6, r7, r9,
10944 COPY_ASCII | DEST_ALWAYS_ALIGNED); 10944 COPY_ASCII | DEST_ALWAYS_ALIGNED);
10945 __ IncrementCounter(&Counters::sub_string_native, 1, r3, r4); 10945 __ IncrementCounter(&Counters::sub_string_native, 1, r3, r4);
10946 __ add(sp, sp, Operand(3 * kPointerSize)); 10946 __ add(sp, sp, Operand(3 * kPointerSize));
10947 __ Ret(); 10947 __ Ret();
10948 10948
10949 __ bind(&non_ascii_flat); 10949 __ bind(&non_ascii_flat);
10950 // r2: result string length. 10950 // r2: result string length.
10951 // r5: string. 10951 // r5: string.
10952 // r6: from offset (smi) 10952 // r6: from offset (smi)
(...skipping 10 matching lines...)
10963 // Locate 'from' character of string. 10963 // Locate 'from' character of string.
10964 __ add(r5, r5, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); 10964 __ add(r5, r5, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
10965 // As "from" is a smi it is 2 times the value which matches the size of a two 10965 // As "from" is a smi it is 2 times the value which matches the size of a two
10966 // byte character. 10966 // byte character.
10967 __ add(r5, r5, Operand(r6)); 10967 __ add(r5, r5, Operand(r6));
10968 10968
10969 // r0: result string. 10969 // r0: result string.
10970 // r1: first character of result. 10970 // r1: first character of result.
10971 // r2: result length. 10971 // r2: result length.
10972 // r5: first character of string to copy. 10972 // r5: first character of string to copy.
10973 ASSERT_EQ(0, SeqTwoByteString::kHeaderSize & kObjectAlignmentMask); 10973 STATIC_ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
10974 StringHelper::GenerateCopyCharactersLong(masm, r1, r5, r2, r3, r4, r6, r7, r9, 10974 StringHelper::GenerateCopyCharactersLong(masm, r1, r5, r2, r3, r4, r6, r7, r9,
10975 DEST_ALWAYS_ALIGNED); 10975 DEST_ALWAYS_ALIGNED);
10976 __ IncrementCounter(&Counters::sub_string_native, 1, r3, r4); 10976 __ IncrementCounter(&Counters::sub_string_native, 1, r3, r4);
10977 __ add(sp, sp, Operand(3 * kPointerSize)); 10977 __ add(sp, sp, Operand(3 * kPointerSize));
10978 __ Ret(); 10978 __ Ret();
10979 10979
10980 // Just jump to runtime to create the sub string. 10980 // Just jump to runtime to create the sub string.
10981 __ bind(&runtime); 10981 __ bind(&runtime);
10982 __ TailCallRuntime(Runtime::kSubString, 3, 1); 10982 __ TailCallRuntime(Runtime::kSubString, 3, 1);
10983 } 10983 }
10984 10984
10985 10985
10986 void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm, 10986 void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
10987 Register left, 10987 Register left,
10988 Register right, 10988 Register right,
10989 Register scratch1, 10989 Register scratch1,
10990 Register scratch2, 10990 Register scratch2,
10991 Register scratch3, 10991 Register scratch3,
10992 Register scratch4) { 10992 Register scratch4) {
10993 Label compare_lengths; 10993 Label compare_lengths;
10994 // Find minimum length and length difference. 10994 // Find minimum length and length difference.
10995 __ ldr(scratch1, FieldMemOperand(left, String::kLengthOffset)); 10995 __ ldr(scratch1, FieldMemOperand(left, String::kLengthOffset));
10996 __ ldr(scratch2, FieldMemOperand(right, String::kLengthOffset)); 10996 __ ldr(scratch2, FieldMemOperand(right, String::kLengthOffset));
10997 __ sub(scratch3, scratch1, Operand(scratch2), SetCC); 10997 __ sub(scratch3, scratch1, Operand(scratch2), SetCC);
10998 Register length_delta = scratch3; 10998 Register length_delta = scratch3;
10999 __ mov(scratch1, scratch2, LeaveCC, gt); 10999 __ mov(scratch1, scratch2, LeaveCC, gt);
11000 Register min_length = scratch1; 11000 Register min_length = scratch1;
11001 ASSERT(kSmiTag == 0); 11001 STATIC_ASSERT(kSmiTag == 0);
11002 __ tst(min_length, Operand(min_length)); 11002 __ tst(min_length, Operand(min_length));
11003 __ b(eq, &compare_lengths); 11003 __ b(eq, &compare_lengths);
11004 11004
11005 // Untag smi. 11005 // Untag smi.
11006 __ mov(min_length, Operand(min_length, ASR, kSmiTagSize)); 11006 __ mov(min_length, Operand(min_length, ASR, kSmiTagSize));
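The sub/mov pair above computes both the length difference and the minimum in two instructions; an equivalent C++ sketch:

#include <cstdint>

// Sketch: sub sets the flags, and the gt-predicated mov overwrites the
// left length with the right one exactly when left > right.
struct MinAndDelta { int32_t min_length; int32_t length_delta; };
MinAndDelta MinAndLengthDelta(int32_t left_len, int32_t right_len) {
  int32_t delta = left_len - right_len;
  int32_t min_len = delta > 0 ? right_len : left_len;
  return { min_len, delta };
}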
11007 11007
11008 // Set up registers so that we only need to increment one register 11008 // Set up registers so that we only need to increment one register
11009 // in the loop. 11009 // in the loop.
11010 __ add(scratch2, min_length, 11010 __ add(scratch2, min_length,
11011 Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); 11011 Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
(...skipping 35 matching lines...) Expand 10 before | Expand all | Expand 10 after
11047 11047
11048 // Stack frame on entry. 11048 // Stack frame on entry.
11049 // sp[0]: right string 11049 // sp[0]: right string
11050 // sp[4]: left string 11050 // sp[4]: left string
11051 __ ldr(r0, MemOperand(sp, 1 * kPointerSize)); // left 11051 __ ldr(r0, MemOperand(sp, 1 * kPointerSize)); // left
11052 __ ldr(r1, MemOperand(sp, 0 * kPointerSize)); // right 11052 __ ldr(r1, MemOperand(sp, 0 * kPointerSize)); // right
11053 11053
11054 Label not_same; 11054 Label not_same;
11055 __ cmp(r0, r1); 11055 __ cmp(r0, r1);
11056 __ b(ne, &not_same); 11056 __ b(ne, &not_same);
11057 ASSERT_EQ(0, EQUAL); 11057 STATIC_ASSERT(EQUAL == 0);
11058 ASSERT_EQ(0, kSmiTag); 11058 STATIC_ASSERT(kSmiTag == 0);
11059 __ mov(r0, Operand(Smi::FromInt(EQUAL))); 11059 __ mov(r0, Operand(Smi::FromInt(EQUAL)));
11060 __ IncrementCounter(&Counters::string_compare_native, 1, r1, r2); 11060 __ IncrementCounter(&Counters::string_compare_native, 1, r1, r2);
11061 __ add(sp, sp, Operand(2 * kPointerSize)); 11061 __ add(sp, sp, Operand(2 * kPointerSize));
11062 __ Ret(); 11062 __ Ret();
11063 11063
11064 __ bind(&not_same); 11064 __ bind(&not_same);
11065 11065
11066 // Check that both objects are sequential ascii strings. 11066 // Check that both objects are sequential ascii strings.
11067 __ JumpIfNotBothSequentialAsciiStrings(r0, r1, r2, r3, &runtime); 11067 __ JumpIfNotBothSequentialAsciiStrings(r0, r1, r2, r3, &runtime);
11068 11068
(...skipping 14 matching lines...)
11083 // Stack on entry: 11083 // Stack on entry:
11084 // sp[0]: second argument. 11084 // sp[0]: second argument.
11085 // sp[4]: first argument. 11085 // sp[4]: first argument.
11086 11086
11087 // Load the two arguments. 11087 // Load the two arguments.
11088 __ ldr(r0, MemOperand(sp, 1 * kPointerSize)); // First argument. 11088 __ ldr(r0, MemOperand(sp, 1 * kPointerSize)); // First argument.
11089 __ ldr(r1, MemOperand(sp, 0 * kPointerSize)); // Second argument. 11089 __ ldr(r1, MemOperand(sp, 0 * kPointerSize)); // Second argument.
11090 11090
11091 // Make sure that both arguments are strings if not known in advance. 11091 // Make sure that both arguments are strings if not known in advance.
11092 if (string_check_) { 11092 if (string_check_) {
11093 ASSERT_EQ(0, kSmiTag); 11093 STATIC_ASSERT(kSmiTag == 0);
11094 __ JumpIfEitherSmi(r0, r1, &string_add_runtime); 11094 __ JumpIfEitherSmi(r0, r1, &string_add_runtime);
11095 // Load instance types. 11095 // Load instance types.
11096 __ ldr(r4, FieldMemOperand(r0, HeapObject::kMapOffset)); 11096 __ ldr(r4, FieldMemOperand(r0, HeapObject::kMapOffset));
11097 __ ldr(r5, FieldMemOperand(r1, HeapObject::kMapOffset)); 11097 __ ldr(r5, FieldMemOperand(r1, HeapObject::kMapOffset));
11098 __ ldrb(r4, FieldMemOperand(r4, Map::kInstanceTypeOffset)); 11098 __ ldrb(r4, FieldMemOperand(r4, Map::kInstanceTypeOffset));
11099 __ ldrb(r5, FieldMemOperand(r5, Map::kInstanceTypeOffset)); 11099 __ ldrb(r5, FieldMemOperand(r5, Map::kInstanceTypeOffset));
11100 ASSERT_EQ(0, kStringTag); 11100 STATIC_ASSERT(kStringTag == 0);
11101 // If either is not a string, go to runtime. 11101 // If either is not a string, go to runtime.
11102 __ tst(r4, Operand(kIsNotStringMask)); 11102 __ tst(r4, Operand(kIsNotStringMask));
11103 __ tst(r5, Operand(kIsNotStringMask), eq); 11103 __ tst(r5, Operand(kIsNotStringMask), eq);
11104 __ b(ne, &string_add_runtime); 11104 __ b(ne, &string_add_runtime);
11105 } 11105 }
11106 11106
11107 // Both arguments are strings. 11107 // Both arguments are strings.
11108 // r0: first string 11108 // r0: first string
11109 // r1: second string 11109 // r1: second string
11110 // r4: first string instance type (if string_check_) 11110 // r4: first string instance type (if string_check_)
11111 // r5: second string instance type (if string_check_) 11111 // r5: second string instance type (if string_check_)
11112 { 11112 {
11113 Label strings_not_empty; 11113 Label strings_not_empty;
11114 // Check if either of the strings are empty. In that case return the other. 11114 // Check if either of the strings are empty. In that case return the other.
11115 __ ldr(r2, FieldMemOperand(r0, String::kLengthOffset)); 11115 __ ldr(r2, FieldMemOperand(r0, String::kLengthOffset));
11116 __ ldr(r3, FieldMemOperand(r1, String::kLengthOffset)); 11116 __ ldr(r3, FieldMemOperand(r1, String::kLengthOffset));
11117 ASSERT(kSmiTag == 0); 11117 STATIC_ASSERT(kSmiTag == 0);
11118 __ cmp(r2, Operand(Smi::FromInt(0))); // Test if first string is empty. 11118 __ cmp(r2, Operand(Smi::FromInt(0))); // Test if first string is empty.
11119 __ mov(r0, Operand(r1), LeaveCC, eq); // If first is empty, return second. 11119 __ mov(r0, Operand(r1), LeaveCC, eq); // If first is empty, return second.
11120 ASSERT(kSmiTag == 0); 11120 STATIC_ASSERT(kSmiTag == 0);
11121 // Else test if second string is empty. 11121 // Else test if second string is empty.
11122 __ cmp(r3, Operand(Smi::FromInt(0)), ne); 11122 __ cmp(r3, Operand(Smi::FromInt(0)), ne);
11123 __ b(ne, &strings_not_empty); // If either string was empty, return r0. 11123 __ b(ne, &strings_not_empty); // If either string was empty, return r0.
11124 11124
11125 __ IncrementCounter(&Counters::string_add_native, 1, r2, r3); 11125 __ IncrementCounter(&Counters::string_add_native, 1, r2, r3);
11126 __ add(sp, sp, Operand(2 * kPointerSize)); 11126 __ add(sp, sp, Operand(2 * kPointerSize));
11127 __ Ret(); 11127 __ Ret();
11128 11128
11129 __ bind(&strings_not_empty); 11129 __ bind(&strings_not_empty);
11130 } 11130 }
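The block above is the identity fast path for concatenation; a sketch with std::string standing in for V8 string handles:

#include <string>

// Sketch: concatenation with an empty operand returns the other operand.
// Returns false when neither is empty, i.e. the stub must do real work.
bool EmptyOperandFastPath(const std::string& a, const std::string& b,
                          std::string* result) {
  if (a.empty()) { *result = b; return true; }
  if (b.empty()) { *result = a; return true; }
  return false;
}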
11131 11131
11132 __ mov(r2, Operand(r2, ASR, kSmiTagSize)); 11132 __ mov(r2, Operand(r2, ASR, kSmiTagSize));
11133 __ mov(r3, Operand(r3, ASR, kSmiTagSize)); 11133 __ mov(r3, Operand(r3, ASR, kSmiTagSize));
11134 // Both strings are non-empty. 11134 // Both strings are non-empty.
11135 // r0: first string 11135 // r0: first string
11136 // r1: second string 11136 // r1: second string
11137 // r2: length of first string 11137 // r2: length of first string
11138 // r3: length of second string 11138 // r3: length of second string
11139 // r4: first string instance type (if string_check_) 11139 // r4: first string instance type (if string_check_)
11140 // r5: second string instance type (if string_check_) 11140 // r5: second string instance type (if string_check_)
11141 // Look at the length of the result of adding the two strings. 11141 // Look at the length of the result of adding the two strings.
11142 Label string_add_flat_result, longer_than_two; 11142 Label string_add_flat_result, longer_than_two;
11143 // Adding two lengths can't overflow. 11143 // Adding two lengths can't overflow.
11144 ASSERT(String::kMaxLength * 2 > String::kMaxLength); 11144 STATIC_ASSERT(String::kMaxLength < String::kMaxLength * 2);
11145 __ add(r6, r2, Operand(r3)); 11145 __ add(r6, r2, Operand(r3));
11146 // Use the runtime system when adding two one-character strings, as it 11146 // Use the runtime system when adding two one-character strings, as it
11147 // contains optimizations for this specific case using the symbol table. 11147 // contains optimizations for this specific case using the symbol table.
11148 __ cmp(r6, Operand(2)); 11148 __ cmp(r6, Operand(2));
11149 __ b(ne, &longer_than_two); 11149 __ b(ne, &longer_than_two);
11150 11150
11151 // Check that both strings are non-external ascii strings. 11151 // Check that both strings are non-external ascii strings.
11152 if (!string_check_) { 11152 if (!string_check_) {
11153 __ ldr(r4, FieldMemOperand(r0, HeapObject::kMapOffset)); 11153 __ ldr(r4, FieldMemOperand(r0, HeapObject::kMapOffset));
11154 __ ldr(r5, FieldMemOperand(r1, HeapObject::kMapOffset)); 11154 __ ldr(r5, FieldMemOperand(r1, HeapObject::kMapOffset));
(...skipping 27 matching lines...)
11182 __ strh(r2, FieldMemOperand(r0, SeqAsciiString::kHeaderSize)); 11182 __ strh(r2, FieldMemOperand(r0, SeqAsciiString::kHeaderSize));
11183 __ IncrementCounter(&Counters::string_add_native, 1, r2, r3); 11183 __ IncrementCounter(&Counters::string_add_native, 1, r2, r3);
11184 __ add(sp, sp, Operand(2 * kPointerSize)); 11184 __ add(sp, sp, Operand(2 * kPointerSize));
11185 __ Ret(); 11185 __ Ret();
11186 11186
11187 __ bind(&longer_than_two); 11187 __ bind(&longer_than_two);
11188 // Check if resulting string will be flat. 11188 // Check if resulting string will be flat.
11189 __ cmp(r6, Operand(String::kMinNonFlatLength)); 11189 __ cmp(r6, Operand(String::kMinNonFlatLength));
11190 __ b(lt, &string_add_flat_result); 11190 __ b(lt, &string_add_flat_result);
11191 // Handle exceptionally long strings in the runtime system. 11191 // Handle exceptionally long strings in the runtime system.
11192 ASSERT((String::kMaxLength & 0x80000000) == 0); 11192 STATIC_ASSERT((String::kMaxLength & 0x80000000) == 0);
11193 ASSERT(IsPowerOf2(String::kMaxLength + 1)); 11193 ASSERT(IsPowerOf2(String::kMaxLength + 1));
11194 // kMaxLength + 1 is representable as a shifted literal, kMaxLength is not. 11194 // kMaxLength + 1 is representable as a shifted literal, kMaxLength is not.
11195 __ cmp(r6, Operand(String::kMaxLength + 1)); 11195 __ cmp(r6, Operand(String::kMaxLength + 1));
11196 __ b(hs, &string_add_runtime); 11196 __ b(hs, &string_add_runtime);
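Background for the power-of-two game above: ARM data-processing immediates are an 8-bit value rotated right by an even amount, so a single set bit (kMaxLength + 1) always encodes, while the all-ones pattern kMaxLength generally does not. The hs branch is then the obvious unsigned comparison (sketch):

#include <cstdint>

// Sketch: reject results longer than kMaxLength using the encodable
// constant kMaxLength + 1; `hs` is unsigned greater-or-equal.
bool ResultTooLong(uint32_t sum_of_lengths, uint32_t k_max_length) {
  return sum_of_lengths >= k_max_length + 1;  // i.e. sum > kMaxLength
}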
11197 11197
11198 // If result is not supposed to be flat, allocate a cons string object. 11198 // If result is not supposed to be flat, allocate a cons string object.
11199 // If both strings are ascii the result is an ascii cons string. 11199 // If both strings are ascii the result is an ascii cons string.
11200 if (!string_check_) { 11200 if (!string_check_) {
11201 __ ldr(r4, FieldMemOperand(r0, HeapObject::kMapOffset)); 11201 __ ldr(r4, FieldMemOperand(r0, HeapObject::kMapOffset));
11202 __ ldr(r5, FieldMemOperand(r1, HeapObject::kMapOffset)); 11202 __ ldr(r5, FieldMemOperand(r1, HeapObject::kMapOffset));
11203 __ ldrb(r4, FieldMemOperand(r4, Map::kInstanceTypeOffset)); 11203 __ ldrb(r4, FieldMemOperand(r4, Map::kInstanceTypeOffset));
11204 __ ldrb(r5, FieldMemOperand(r5, Map::kInstanceTypeOffset)); 11204 __ ldrb(r5, FieldMemOperand(r5, Map::kInstanceTypeOffset));
11205 } 11205 }
11206 Label non_ascii, allocated, ascii_data; 11206 Label non_ascii, allocated, ascii_data;
11207 ASSERT_EQ(0, kTwoByteStringTag); 11207 STATIC_ASSERT(kTwoByteStringTag == 0);
11208 __ tst(r4, Operand(kStringEncodingMask)); 11208 __ tst(r4, Operand(kStringEncodingMask));
11209 __ tst(r5, Operand(kStringEncodingMask), ne); 11209 __ tst(r5, Operand(kStringEncodingMask), ne);
11210 __ b(eq, &non_ascii); 11210 __ b(eq, &non_ascii);
11211 11211
11212 // Allocate an ASCII cons string. 11212 // Allocate an ASCII cons string.
11213 __ bind(&ascii_data); 11213 __ bind(&ascii_data);
11214 __ AllocateAsciiConsString(r7, r6, r4, r5, &string_add_runtime); 11214 __ AllocateAsciiConsString(r7, r6, r4, r5, &string_add_runtime);
11215 __ bind(&allocated); 11215 __ bind(&allocated);
11216 // Fill the fields of the cons string. 11216 // Fill the fields of the cons string.
11217 __ str(r0, FieldMemOperand(r7, ConsString::kFirstOffset)); 11217 __ str(r0, FieldMemOperand(r7, ConsString::kFirstOffset));
11218 __ str(r1, FieldMemOperand(r7, ConsString::kSecondOffset)); 11218 __ str(r1, FieldMemOperand(r7, ConsString::kSecondOffset));
11219 __ mov(r0, Operand(r7)); 11219 __ mov(r0, Operand(r7));
11220 __ IncrementCounter(&Counters::string_add_native, 1, r2, r3); 11220 __ IncrementCounter(&Counters::string_add_native, 1, r2, r3);
11221 __ add(sp, sp, Operand(2 * kPointerSize)); 11221 __ add(sp, sp, Operand(2 * kPointerSize));
11222 __ Ret(); 11222 __ Ret();
11223 11223
11224 __ bind(&non_ascii); 11224 __ bind(&non_ascii);
11225 // At least one of the strings is two-byte. Check whether it happens 11225 // At least one of the strings is two-byte. Check whether it happens
11226 // to contain only ascii characters. 11226 // to contain only ascii characters.
11227 // r4: first instance type. 11227 // r4: first instance type.
11228 // r5: second instance type. 11228 // r5: second instance type.
11229 __ tst(r4, Operand(kAsciiDataHintMask)); 11229 __ tst(r4, Operand(kAsciiDataHintMask));
11230 __ tst(r5, Operand(kAsciiDataHintMask), ne); 11230 __ tst(r5, Operand(kAsciiDataHintMask), ne);
11231 __ b(ne, &ascii_data); 11231 __ b(ne, &ascii_data);
11232 __ eor(r4, r4, Operand(r5)); 11232 __ eor(r4, r4, Operand(r5));
11233 ASSERT(kAsciiStringTag != 0 && kAsciiDataHintTag != 0); 11233 STATIC_ASSERT(kAsciiStringTag != 0 && kAsciiDataHintTag != 0);
11234 __ and_(r4, r4, Operand(kAsciiStringTag | kAsciiDataHintTag)); 11234 __ and_(r4, r4, Operand(kAsciiStringTag | kAsciiDataHintTag));
11235 __ cmp(r4, Operand(kAsciiStringTag | kAsciiDataHintTag)); 11235 __ cmp(r4, Operand(kAsciiStringTag | kAsciiDataHintTag));
11236 __ b(eq, &ascii_data); 11236 __ b(eq, &ascii_data);
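The eor/and/cmp sequence above catches the remaining ASCII-capable case: the two instance types differ in exactly the encoding bit and the hint bit, meaning one side is an ASCII string and the other a two-byte string hinted to hold only ASCII data. The full decision as a hedged C++ sketch (flag constants assumed to be nonzero single bits, per the STATIC_ASSERT):

#include <cstdint>

// Sketch: may the cons result use an ASCII map?
bool CanMakeAsciiCons(uint32_t t1, uint32_t t2, uint32_t hint_mask,
                      uint32_t ascii_tag, uint32_t hint_tag) {
  if ((t1 & hint_mask) != 0 && (t2 & hint_mask) != 0)
    return true;                        // both hinted ASCII (tst/tst path)
  uint32_t mask = ascii_tag | hint_tag;
  return ((t1 ^ t2) & mask) == mask;    // eor/and/cmp path
}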
11237 11237
11238 // Allocate a two byte cons string. 11238 // Allocate a two byte cons string.
11239 __ AllocateTwoByteConsString(r7, r6, r4, r5, &string_add_runtime); 11239 __ AllocateTwoByteConsString(r7, r6, r4, r5, &string_add_runtime);
11240 __ jmp(&allocated); 11240 __ jmp(&allocated);
11241 11241
11242 // Handle creating a flat result. First check that both strings are 11242 // Handle creating a flat result. First check that both strings are
11243 // sequential and that they have the same encoding. 11243 // sequential and that they have the same encoding.
11244 // r0: first string 11244 // r0: first string
11245 // r1: second string 11245 // r1: second string
11246 // r2: length of first string 11246 // r2: length of first string
11247 // r3: length of second string 11247 // r3: length of second string
11248 // r4: first string instance type (if string_check_) 11248 // r4: first string instance type (if string_check_)
11249 // r5: second string instance type (if string_check_) 11249 // r5: second string instance type (if string_check_)
11250 // r6: sum of lengths. 11250 // r6: sum of lengths.
11251 __ bind(&string_add_flat_result); 11251 __ bind(&string_add_flat_result);
11252 if (!string_check_) { 11252 if (!string_check_) {
11253 __ ldr(r4, FieldMemOperand(r0, HeapObject::kMapOffset)); 11253 __ ldr(r4, FieldMemOperand(r0, HeapObject::kMapOffset));
11254 __ ldr(r5, FieldMemOperand(r1, HeapObject::kMapOffset)); 11254 __ ldr(r5, FieldMemOperand(r1, HeapObject::kMapOffset));
11255 __ ldrb(r4, FieldMemOperand(r4, Map::kInstanceTypeOffset)); 11255 __ ldrb(r4, FieldMemOperand(r4, Map::kInstanceTypeOffset));
11256 __ ldrb(r5, FieldMemOperand(r5, Map::kInstanceTypeOffset)); 11256 __ ldrb(r5, FieldMemOperand(r5, Map::kInstanceTypeOffset));
11257 } 11257 }
11258 // Check that both strings are sequential. 11258 // Check that both strings are sequential.
11259 ASSERT_EQ(0, kSeqStringTag); 11259 STATIC_ASSERT(kSeqStringTag == 0);
11260 __ tst(r4, Operand(kStringRepresentationMask)); 11260 __ tst(r4, Operand(kStringRepresentationMask));
11261 __ tst(r5, Operand(kStringRepresentationMask), eq); 11261 __ tst(r5, Operand(kStringRepresentationMask), eq);
11262 __ b(ne, &string_add_runtime); 11262 __ b(ne, &string_add_runtime);
11263 // Now check if both strings have the same encoding (ASCII/Two-byte). 11263 // Now check if both strings have the same encoding (ASCII/Two-byte).
11264 // r0: first string. 11264 // r0: first string.
11265 // r1: second string. 11265 // r1: second string.
11266 // r2: length of first string. 11266 // r2: length of first string.
11267 // r3: length of second string. 11267 // r3: length of second string.
11268 // r6: sum of lengths. 11268 // r6: sum of lengths.
11269 Label non_ascii_string_add_flat_result; 11269 Label non_ascii_string_add_flat_result;
(...skipping 78 matching lines...)
11348 __ bind(&string_add_runtime); 11348 __ bind(&string_add_runtime);
11349 __ TailCallRuntime(Runtime::kStringAdd, 2, 1); 11349 __ TailCallRuntime(Runtime::kStringAdd, 2, 1);
11350 } 11350 }
11351 11351
11352 11352
11353 #undef __ 11353 #undef __
11354 11354
11355 } } // namespace v8::internal 11355 } } // namespace v8::internal
11356 11356
11357 #endif // V8_TARGET_ARCH_ARM 11357 #endif // V8_TARGET_ARCH_ARM