OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 3212 matching lines...)
3223 | 3223 |
3224 | 3224 |
3225 static void BranchIfNotInternalizedString(MacroAssembler* masm, | 3225 static void BranchIfNotInternalizedString(MacroAssembler* masm, |
3226 Label* label, | 3226 Label* label, |
3227 Register object, | 3227 Register object, |
3228 Register scratch) { | 3228 Register scratch) { |
3229 __ JumpIfSmi(object, label); | 3229 __ JumpIfSmi(object, label); |
3230 __ movq(scratch, FieldOperand(object, HeapObject::kMapOffset)); | 3230 __ movq(scratch, FieldOperand(object, HeapObject::kMapOffset)); |
3231 __ movzxbq(scratch, | 3231 __ movzxbq(scratch, |
3232 FieldOperand(scratch, Map::kInstanceTypeOffset)); | 3232 FieldOperand(scratch, Map::kInstanceTypeOffset)); |
3233 STATIC_ASSERT(kInternalizedTag != 0); | 3233 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0); |
3234 __ and_(scratch, Immediate(kIsNotStringMask | kIsInternalizedMask)); | 3234 __ testb(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask)); |
3235 __ cmpb(scratch, Immediate(kInternalizedTag | kStringTag)); | 3235 __ j(not_zero, label); |
3236 __ j(not_equal, label); | |
3237 } | 3236 } |
3238 | 3237 |
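The rewritten BranchIfNotInternalizedString above relies on both kStringTag and kInternalizedTag being zero, so a single testb against the combined "not" masks decides the check. A minimal standalone C++ sketch of the same predicate follows; the concrete bit values are assumptions for illustration, the diff only guarantees the two zero tags via the STATIC_ASSERT.

    #include <cassert>
    #include <cstdint>

    // Assumed bit layout for illustration; the diff guarantees only that
    // kStringTag == 0 and kInternalizedTag == 0.
    constexpr uint8_t kIsNotStringMask       = 0x80;  // set => not a string
    constexpr uint8_t kStringTag             = 0x00;
    constexpr uint8_t kIsNotInternalizedMask = 0x40;  // set => not internalized
    constexpr uint8_t kInternalizedTag       = 0x00;

    // Mirrors the testb/j(not_zero) sequence: the AND is zero exactly when the
    // instance type has both "string" and "internalized" encoded as zero bits.
    bool IsInternalizedString(uint8_t instance_type) {
      return (instance_type & (kIsNotStringMask | kIsNotInternalizedMask)) == 0;
    }

    int main() {
      assert(IsInternalizedString(kStringTag | kInternalizedTag));         // internalized string
      assert(!IsInternalizedString(kStringTag | kIsNotInternalizedMask));  // ordinary string
      assert(!IsInternalizedString(kIsNotStringMask));                     // non-string heap object
      return 0;
    }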
3239 | 3238 |
3240 void ICCompareStub::GenerateGeneric(MacroAssembler* masm) { | 3239 void ICCompareStub::GenerateGeneric(MacroAssembler* masm) { |
3241 Label check_unequal_objects, done; | 3240 Label check_unequal_objects, done; |
3242 Condition cc = GetCondition(); | 3241 Condition cc = GetCondition(); |
3243 Factory* factory = masm->isolate()->factory(); | 3242 Factory* factory = masm->isolate()->factory(); |
3244 | 3243 |
3245 Label miss; | 3244 Label miss; |
3246 CheckInputType(masm, rdx, left_, &miss); | 3245 CheckInputType(masm, rdx, left_, &miss); |
(...skipping 2398 matching lines...)
5645 // Check that both operands are heap objects. | 5644 // Check that both operands are heap objects. |
5646 Label miss; | 5645 Label miss; |
5647 Condition cond = masm->CheckEitherSmi(left, right, tmp1); | 5646 Condition cond = masm->CheckEitherSmi(left, right, tmp1); |
5648 __ j(cond, &miss, Label::kNear); | 5647 __ j(cond, &miss, Label::kNear); |
5649 | 5648 |
5650 // Check that both operands are internalized strings. | 5649 // Check that both operands are internalized strings. |
5651 __ movq(tmp1, FieldOperand(left, HeapObject::kMapOffset)); | 5650 __ movq(tmp1, FieldOperand(left, HeapObject::kMapOffset)); |
5652 __ movq(tmp2, FieldOperand(right, HeapObject::kMapOffset)); | 5651 __ movq(tmp2, FieldOperand(right, HeapObject::kMapOffset)); |
5653 __ movzxbq(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); | 5652 __ movzxbq(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); |
5654 __ movzxbq(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); | 5653 __ movzxbq(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); |
5655 STATIC_ASSERT(kInternalizedTag != 0); | 5654 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0); |
5656 __ and_(tmp1, Immediate(kIsNotStringMask | kIsInternalizedMask)); | 5655 __ or_(tmp1, tmp2); |
5657 __ cmpb(tmp1, Immediate(kInternalizedTag | kStringTag)); | 5656 __ testb(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask)); |
5658 __ j(not_equal, &miss, Label::kNear); | 5657 __ j(not_zero, &miss, Label::kNear); |
5659 | |
5660 __ and_(tmp2, Immediate(kIsNotStringMask | kIsInternalizedMask)); | |
5661 __ cmpb(tmp2, Immediate(kInternalizedTag | kStringTag)); | |
5662 __ j(not_equal, &miss, Label::kNear); | |
5663 | 5658 |
5664 // Internalized strings are compared by identity. | 5659 // Internalized strings are compared by identity. |
5665 Label done; | 5660 Label done; |
5666 __ cmpq(left, right); | 5661 __ cmpq(left, right); |
5667 // Make sure rax is non-zero. At this point input operands are | 5662 // Make sure rax is non-zero. At this point input operands are |
5668 // guaranteed to be non-zero. | 5663 // guaranteed to be non-zero. |
5669 ASSERT(right.is(rax)); | 5664 ASSERT(right.is(rax)); |
5670 __ j(not_equal, &done, Label::kNear); | 5665 __ j(not_equal, &done, Label::kNear); |
5671 STATIC_ASSERT(EQUAL == 0); | 5666 STATIC_ASSERT(EQUAL == 0); |
5672 STATIC_ASSERT(kSmiTag == 0); | 5667 STATIC_ASSERT(kSmiTag == 0); |
(...skipping 16 matching lines...)
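The GenerateInternalizedStrings fragment above folds the old pair of per-operand and_/cmpb/j sequences into one check: because the interesting tags are zero, a bit is clear in (left | right) only if it is clear in both operands, so OR-ing the two instance types and testing once is equivalent. A small sketch under the same assumed bit values as in the previous sketch:

    #include <cassert>
    #include <cstdint>

    constexpr uint8_t kIsNotStringMask       = 0x80;  // assumed values, for illustration
    constexpr uint8_t kIsNotInternalizedMask = 0x40;

    // Equivalent to the new or_(tmp1, tmp2) + single testb: a "not" bit is set
    // in the combined byte iff it is set in at least one operand, so one test
    // covers both instance types.
    bool BothInternalizedStrings(uint8_t left_type, uint8_t right_type) {
      return ((left_type | right_type) &
              (kIsNotStringMask | kIsNotInternalizedMask)) == 0;
    }

    int main() {
      assert(BothInternalizedStrings(0x00, 0x00));                     // both internalized strings
      assert(!BothInternalizedStrings(0x00, kIsNotInternalizedMask));  // right is a plain string
      assert(!BothInternalizedStrings(kIsNotStringMask, 0x00));        // left is not a string
      return 0;
    }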
5689 Register tmp1 = rcx; | 5684 Register tmp1 = rcx; |
5690 Register tmp2 = rbx; | 5685 Register tmp2 = rbx; |
5691 | 5686 |
5692 // Check that both operands are heap objects. | 5687 // Check that both operands are heap objects. |
5693 Label miss; | 5688 Label miss; |
5694 Condition cond = masm->CheckEitherSmi(left, right, tmp1); | 5689 Condition cond = masm->CheckEitherSmi(left, right, tmp1); |
5695 __ j(cond, &miss, Label::kNear); | 5690 __ j(cond, &miss, Label::kNear); |
5696 | 5691 |
5697 // Check that both operands are unique names. This leaves the instance | 5692 // Check that both operands are unique names. This leaves the instance |
5698 // types loaded in tmp1 and tmp2. | 5693 // types loaded in tmp1 and tmp2. |
5699 STATIC_ASSERT(kInternalizedTag != 0); | |
5700 __ movq(tmp1, FieldOperand(left, HeapObject::kMapOffset)); | 5694 __ movq(tmp1, FieldOperand(left, HeapObject::kMapOffset)); |
5701 __ movq(tmp2, FieldOperand(right, HeapObject::kMapOffset)); | 5695 __ movq(tmp2, FieldOperand(right, HeapObject::kMapOffset)); |
5702 __ movzxbq(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); | 5696 __ movzxbq(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); |
5703 __ movzxbq(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); | 5697 __ movzxbq(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); |
5704 | 5698 |
5705 __ JumpIfNotUniqueName(tmp1, &miss, Label::kNear); | 5699 __ JumpIfNotUniqueName(tmp1, &miss, Label::kNear); |
5706 __ JumpIfNotUniqueName(tmp2, &miss, Label::kNear); | 5700 __ JumpIfNotUniqueName(tmp2, &miss, Label::kNear); |
5707 | 5701 |
5708 // Unique names are compared by identity. | 5702 // Unique names are compared by identity. |
5709 Label done; | 5703 Label done; |
(...skipping 52 matching lines...)
5762 __ ret(0); | 5756 __ ret(0); |
5763 | 5757 |
5764 // Handle not identical strings. | 5758 // Handle not identical strings. |
5765 __ bind(&not_same); | 5759 __ bind(&not_same); |
5766 | 5760 |
5767 // Check that both strings are internalized strings. If they are, we're done | 5761 // Check that both strings are internalized strings. If they are, we're done |
5768 // because we already know they are not identical. We also know they are both | 5762 // because we already know they are not identical. We also know they are both |
5769 // strings. | 5763 // strings. |
5770 if (equality) { | 5764 if (equality) { |
5771 Label do_compare; | 5765 Label do_compare; |
5772 STATIC_ASSERT(kInternalizedTag != 0); | 5766 STATIC_ASSERT(kInternalizedTag == 0); |
5773 __ and_(tmp1, tmp2); | 5767 __ or_(tmp1, tmp2); |
5774 __ testb(tmp1, Immediate(kIsInternalizedMask)); | 5768 __ testb(tmp1, Immediate(kIsNotInternalizedMask)); |
5775 __ j(zero, &do_compare, Label::kNear); | 5769 __ j(not_zero, &do_compare, Label::kNear); |
5776 // Make sure rax is non-zero. At this point input operands are | 5770 // Make sure rax is non-zero. At this point input operands are |
5777 // guaranteed to be non-zero. | 5771 // guaranteed to be non-zero. |
5778 ASSERT(right.is(rax)); | 5772 ASSERT(right.is(rax)); |
5779 __ ret(0); | 5773 __ ret(0); |
5780 __ bind(&do_compare); | 5774 __ bind(&do_compare); |
5781 } | 5775 } |
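The equality fast path in the if (equality) block above rests on the interning invariant stated in the comment: internalized strings are deduplicated, so two that are not the same object cannot hold equal contents, and the stub may return the already non-zero rax without comparing characters. A hypothetical sketch of that invariant, using a plain std::unordered_set as a stand-in intern table (illustrative only, not V8's implementation):

    #include <cassert>
    #include <string>
    #include <unordered_set>

    // Hypothetical intern table; Intern and the std::unordered_set are
    // illustrative stand-ins, not V8 API.
    const std::string* Intern(const std::string& s) {
      static std::unordered_set<std::string> table;
      return &*table.insert(s).first;  // one canonical object per distinct value
    }

    int main() {
      const std::string* a = Intern("foo");
      const std::string* b = Intern("foo");
      const std::string* c = Intern("bar");
      assert(a == b);  // equal contents intern to the same object
      assert(a != c);  // so "not identical" already implies "not equal"
      return 0;
    }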
5782 | 5776 |
5783 // Check that both strings are sequential ASCII. | 5777 // Check that both strings are sequential ASCII. |
5784 Label runtime; | 5778 Label runtime; |
5785 __ JumpIfNotBothSequentialAsciiStrings(left, right, tmp1, tmp2, &runtime); | 5779 __ JumpIfNotBothSequentialAsciiStrings(left, right, tmp1, tmp2, &runtime); |
(...skipping 1025 matching lines...)
6811 __ bind(&fast_elements_case); | 6805 __ bind(&fast_elements_case); |
6812 GenerateCase(masm, FAST_ELEMENTS); | 6806 GenerateCase(masm, FAST_ELEMENTS); |
6813 } | 6807 } |
6814 | 6808 |
6815 | 6809 |
6816 #undef __ | 6810 #undef __ |
6817 | 6811 |
6818 } } // namespace v8::internal | 6812 } } // namespace v8::internal |
6819 | 6813 |
6820 #endif // V8_TARGET_ARCH_X64 | 6814 #endif // V8_TARGET_ARCH_X64 |