| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 3418 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3429 | 3429 |
| 3430 | 3430 |
| 3431 static void BranchIfNotInternalizedString(MacroAssembler* masm, | 3431 static void BranchIfNotInternalizedString(MacroAssembler* masm, |
| 3432 Label* label, | 3432 Label* label, |
| 3433 Register object, | 3433 Register object, |
| 3434 Register scratch) { | 3434 Register scratch) { |
| 3435 __ JumpIfSmi(object, label); | 3435 __ JumpIfSmi(object, label); |
| 3436 __ movq(scratch, FieldOperand(object, HeapObject::kMapOffset)); | 3436 __ movq(scratch, FieldOperand(object, HeapObject::kMapOffset)); |
| 3437 __ movzxbq(scratch, | 3437 __ movzxbq(scratch, |
| 3438 FieldOperand(scratch, Map::kInstanceTypeOffset)); | 3438 FieldOperand(scratch, Map::kInstanceTypeOffset)); |
| 3439 // Ensure that no non-strings have the internalized bit set. | |
| 3440 STATIC_ASSERT(LAST_TYPE < kNotStringTag + kIsInternalizedMask); | |
| 3441 STATIC_ASSERT(kInternalizedTag != 0); | 3439 STATIC_ASSERT(kInternalizedTag != 0); |
| 3442 __ testb(scratch, Immediate(kIsInternalizedMask)); | 3440 __ and_(scratch, Immediate(kIsNotStringMask | kIsInternalizedMask)); |
| 3443 __ j(zero, label); | 3441 __ cmpb(scratch, Immediate(kInternalizedTag | kStringTag)); |
| 3442 __ j(not_equal, label); |
| 3444 } | 3443 } |
| 3445 | 3444 |
| 3446 | 3445 |
| 3447 void ICCompareStub::GenerateGeneric(MacroAssembler* masm) { | 3446 void ICCompareStub::GenerateGeneric(MacroAssembler* masm) { |
| 3448 Label check_unequal_objects, done; | 3447 Label check_unequal_objects, done; |
| 3449 Condition cc = GetCondition(); | 3448 Condition cc = GetCondition(); |
| 3450 Factory* factory = masm->isolate()->factory(); | 3449 Factory* factory = masm->isolate()->factory(); |
| 3451 | 3450 |
| 3452 Label miss; | 3451 Label miss; |
| 3453 CheckInputType(masm, rdx, left_, &miss); | 3452 CheckInputType(masm, rdx, left_, &miss); |
| (...skipping 2384 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5838 Label miss; | 5837 Label miss; |
| 5839 Condition cond = masm->CheckEitherSmi(left, right, tmp1); | 5838 Condition cond = masm->CheckEitherSmi(left, right, tmp1); |
| 5840 __ j(cond, &miss, Label::kNear); | 5839 __ j(cond, &miss, Label::kNear); |
| 5841 | 5840 |
| 5842 // Check that both operands are internalized strings. | 5841 // Check that both operands are internalized strings. |
| 5843 __ movq(tmp1, FieldOperand(left, HeapObject::kMapOffset)); | 5842 __ movq(tmp1, FieldOperand(left, HeapObject::kMapOffset)); |
| 5844 __ movq(tmp2, FieldOperand(right, HeapObject::kMapOffset)); | 5843 __ movq(tmp2, FieldOperand(right, HeapObject::kMapOffset)); |
| 5845 __ movzxbq(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); | 5844 __ movzxbq(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); |
| 5846 __ movzxbq(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); | 5845 __ movzxbq(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); |
| 5847 STATIC_ASSERT(kInternalizedTag != 0); | 5846 STATIC_ASSERT(kInternalizedTag != 0); |
| 5848 __ and_(tmp1, tmp2); | 5847 __ and_(tmp1, Immediate(kIsNotStringMask | kIsInternalizedMask)); |
| 5849 __ testb(tmp1, Immediate(kIsInternalizedMask)); | 5848 __ cmpb(tmp1, Immediate(kInternalizedTag | kStringTag)); |
| 5850 __ j(zero, &miss, Label::kNear); | 5849 __ j(not_equal, &miss, Label::kNear); |
| 5850 |
| 5851 __ and_(tmp2, Immediate(kIsNotStringMask | kIsInternalizedMask)); |
| 5852 __ cmpb(tmp2, Immediate(kInternalizedTag | kStringTag)); |
| 5853 __ j(not_equal, &miss, Label::kNear); |
| 5851 | 5854 |
| 5852 // Internalized strings are compared by identity. | 5855 // Internalized strings are compared by identity. |
| 5853 Label done; | 5856 Label done; |
| 5854 __ cmpq(left, right); | 5857 __ cmpq(left, right); |
| 5855 // Make sure rax is non-zero. At this point input operands are | 5858 // Make sure rax is non-zero. At this point input operands are |
| 5856 // guaranteed to be non-zero. | 5859 // guaranteed to be non-zero. |
| 5857 ASSERT(right.is(rax)); | 5860 ASSERT(right.is(rax)); |
| 5858 __ j(not_equal, &done, Label::kNear); | 5861 __ j(not_equal, &done, Label::kNear); |
| 5859 STATIC_ASSERT(EQUAL == 0); | 5862 STATIC_ASSERT(EQUAL == 0); |
| 5860 STATIC_ASSERT(kSmiTag == 0); | 5863 STATIC_ASSERT(kSmiTag == 0); |
| (...skipping 22 matching lines...) Expand all Loading... |
| 5883 __ j(cond, &miss, Label::kNear); | 5886 __ j(cond, &miss, Label::kNear); |
| 5884 | 5887 |
| 5885 // Check that both operands are unique names. This leaves the instance | 5888 // Check that both operands are unique names. This leaves the instance |
| 5886 // types loaded in tmp1 and tmp2. | 5889 // types loaded in tmp1 and tmp2. |
| 5887 STATIC_ASSERT(kInternalizedTag != 0); | 5890 STATIC_ASSERT(kInternalizedTag != 0); |
| 5888 __ movq(tmp1, FieldOperand(left, HeapObject::kMapOffset)); | 5891 __ movq(tmp1, FieldOperand(left, HeapObject::kMapOffset)); |
| 5889 __ movq(tmp2, FieldOperand(right, HeapObject::kMapOffset)); | 5892 __ movq(tmp2, FieldOperand(right, HeapObject::kMapOffset)); |
| 5890 __ movzxbq(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); | 5893 __ movzxbq(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); |
| 5891 __ movzxbq(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); | 5894 __ movzxbq(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); |
| 5892 | 5895 |
| 5893 Label succeed1; | 5896 __ JumpIfNotUniqueName(tmp1, &miss, Label::kNear); |
| 5894 __ testb(tmp1, Immediate(kIsInternalizedMask)); | 5897 __ JumpIfNotUniqueName(tmp2, &miss, Label::kNear); |
| 5895 __ j(not_zero, &succeed1, Label::kNear); | |
| 5896 __ cmpb(tmp1, Immediate(static_cast<uint8_t>(SYMBOL_TYPE))); | |
| 5897 __ j(not_equal, &miss, Label::kNear); | |
| 5898 __ bind(&succeed1); | |
| 5899 | |
| 5900 Label succeed2; | |
| 5901 __ testb(tmp2, Immediate(kIsInternalizedMask)); | |
| 5902 __ j(not_zero, &succeed2, Label::kNear); | |
| 5903 __ cmpb(tmp2, Immediate(static_cast<uint8_t>(SYMBOL_TYPE))); | |
| 5904 __ j(not_equal, &miss, Label::kNear); | |
| 5905 __ bind(&succeed2); | |
| 5906 | 5898 |
| 5907 // Unique names are compared by identity. | 5899 // Unique names are compared by identity. |
| 5908 Label done; | 5900 Label done; |
| 5909 __ cmpq(left, right); | 5901 __ cmpq(left, right); |
| 5910 // Make sure rax is non-zero. At this point input operands are | 5902 // Make sure rax is non-zero. At this point input operands are |
| 5911 // guaranteed to be non-zero. | 5903 // guaranteed to be non-zero. |
| 5912 ASSERT(right.is(rax)); | 5904 ASSERT(right.is(rax)); |
| 5913 __ j(not_equal, &done, Label::kNear); | 5905 __ j(not_equal, &done, Label::kNear); |
| 5914 STATIC_ASSERT(EQUAL == 0); | 5906 STATIC_ASSERT(EQUAL == 0); |
| 5915 STATIC_ASSERT(kSmiTag == 0); | 5907 STATIC_ASSERT(kSmiTag == 0); |
| (...skipping 41 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5957 __ j(not_equal, &not_same, Label::kNear); | 5949 __ j(not_equal, &not_same, Label::kNear); |
| 5958 STATIC_ASSERT(EQUAL == 0); | 5950 STATIC_ASSERT(EQUAL == 0); |
| 5959 STATIC_ASSERT(kSmiTag == 0); | 5951 STATIC_ASSERT(kSmiTag == 0); |
| 5960 __ Move(rax, Smi::FromInt(EQUAL)); | 5952 __ Move(rax, Smi::FromInt(EQUAL)); |
| 5961 __ ret(0); | 5953 __ ret(0); |
| 5962 | 5954 |
| 5963 // Handle not identical strings. | 5955 // Handle not identical strings. |
| 5964 __ bind(&not_same); | 5956 __ bind(&not_same); |
| 5965 | 5957 |
| 5966 // Check that both strings are internalized strings. If they are, we're done | 5958 // Check that both strings are internalized strings. If they are, we're done |
| 5967 // because we already know they are not identical. | 5959 // because we already know they are not identical. We also know they are both |
| 5960 // strings. |
| 5968 if (equality) { | 5961 if (equality) { |
| 5969 Label do_compare; | 5962 Label do_compare; |
| 5970 STATIC_ASSERT(kInternalizedTag != 0); | 5963 STATIC_ASSERT(kInternalizedTag != 0); |
| 5971 __ and_(tmp1, tmp2); | 5964 __ and_(tmp1, tmp2); |
| 5972 __ testb(tmp1, Immediate(kIsInternalizedMask)); | 5965 __ testb(tmp1, Immediate(kIsInternalizedMask)); |
| 5973 __ j(zero, &do_compare, Label::kNear); | 5966 __ j(zero, &do_compare, Label::kNear); |
| 5974 // Make sure rax is non-zero. At this point input operands are | 5967 // Make sure rax is non-zero. At this point input operands are |
| 5975 // guaranteed to be non-zero. | 5968 // guaranteed to be non-zero. |
| 5976 ASSERT(right.is(rax)); | 5969 ASSERT(right.is(rax)); |
| 5977 __ ret(0); | 5970 __ ret(0); |
| (...skipping 135 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 6113 __ Cmp(entity_name, Handle<Name>(name)); | 6106 __ Cmp(entity_name, Handle<Name>(name)); |
| 6114 __ j(equal, miss); | 6107 __ j(equal, miss); |
| 6115 | 6108 |
| 6116 Label good; | 6109 Label good; |
| 6117 // Check for the hole and skip. | 6110 // Check for the hole and skip. |
| 6118 __ CompareRoot(entity_name, Heap::kTheHoleValueRootIndex); | 6111 __ CompareRoot(entity_name, Heap::kTheHoleValueRootIndex); |
| 6119 __ j(equal, &good, Label::kNear); | 6112 __ j(equal, &good, Label::kNear); |
| 6120 | 6113 |
| 6121 // Check if the entry name is not a unique name. | 6114 // Check if the entry name is not a unique name. |
| 6122 __ movq(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset)); | 6115 __ movq(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset)); |
| 6123 __ testb(FieldOperand(entity_name, Map::kInstanceTypeOffset), | 6116 __ JumpIfNotUniqueName(FieldOperand(entity_name, Map::kInstanceTypeOffset), |
| 6124 Immediate(kIsInternalizedMask)); | 6117 miss); |
| 6125 __ j(not_zero, &good, Label::kNear); | |
| 6126 __ cmpb(FieldOperand(entity_name, Map::kInstanceTypeOffset), | |
| 6127 Immediate(static_cast<uint8_t>(SYMBOL_TYPE))); | |
| 6128 __ j(not_equal, miss); | |
| 6129 | |
| 6130 __ bind(&good); | 6118 __ bind(&good); |
| 6131 } | 6119 } |
| 6132 | 6120 |
| 6133 NameDictionaryLookupStub stub(properties, r0, r0, NEGATIVE_LOOKUP); | 6121 NameDictionaryLookupStub stub(properties, r0, r0, NEGATIVE_LOOKUP); |
| 6134 __ Push(Handle<Object>(name)); | 6122 __ Push(Handle<Object>(name)); |
| 6135 __ push(Immediate(name->Hash())); | 6123 __ push(Immediate(name->Hash())); |
| 6136 __ CallStub(&stub); | 6124 __ CallStub(&stub); |
| 6137 __ testq(r0, r0); | 6125 __ testq(r0, r0); |
| 6138 __ j(not_zero, miss); | 6126 __ j(not_zero, miss); |
| 6139 __ jmp(done); | 6127 __ jmp(done); |
| (...skipping 105 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 6245 // Stop if found the property. | 6233 // Stop if found the property. |
| 6246 __ cmpq(scratch, Operand(rsp, 3 * kPointerSize)); | 6234 __ cmpq(scratch, Operand(rsp, 3 * kPointerSize)); |
| 6247 __ j(equal, &in_dictionary); | 6235 __ j(equal, &in_dictionary); |
| 6248 | 6236 |
| 6249 if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) { | 6237 if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) { |
| 6250 // If we hit a key that is not a unique name during negative | 6238 // If we hit a key that is not a unique name during negative |
| 6251 // lookup we have to bailout as this key might be equal to the | 6239 // lookup we have to bailout as this key might be equal to the |
| 6252 // key we are looking for. | 6240 // key we are looking for. |
| 6253 | 6241 |
| 6254 // Check if the entry name is not a unique name. | 6242 // Check if the entry name is not a unique name. |
| 6255 Label cont; | |
| 6256 __ movq(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); | 6243 __ movq(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); |
| 6257 __ testb(FieldOperand(scratch, Map::kInstanceTypeOffset), | 6244 __ JumpIfNotUniqueName(FieldOperand(scratch, Map::kInstanceTypeOffset), |
| 6258 Immediate(kIsInternalizedMask)); | 6245 &maybe_in_dictionary); |
| 6259 __ j(not_zero, &cont); | |
| 6260 __ cmpb(FieldOperand(scratch, Map::kInstanceTypeOffset), | |
| 6261 Immediate(static_cast<uint8_t>(SYMBOL_TYPE))); | |
| 6262 __ j(not_equal, &maybe_in_dictionary); | |
| 6263 __ bind(&cont); | |
| 6264 } | 6246 } |
| 6265 } | 6247 } |
| 6266 | 6248 |
| 6267 __ bind(&maybe_in_dictionary); | 6249 __ bind(&maybe_in_dictionary); |
| 6268 // If we are doing negative lookup then probing failure should be | 6250 // If we are doing negative lookup then probing failure should be |
| 6269 // treated as a lookup success. For positive lookup probing failure | 6251 // treated as a lookup success. For positive lookup probing failure |
| 6270 // should be treated as lookup failure. | 6252 // should be treated as lookup failure. |
| 6271 if (mode_ == POSITIVE_LOOKUP) { | 6253 if (mode_ == POSITIVE_LOOKUP) { |
| 6272 __ movq(scratch, Immediate(0)); | 6254 __ movq(scratch, Immediate(0)); |
| 6273 __ Drop(1); | 6255 __ Drop(1); |
| (...skipping 750 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 7024 __ bind(&fast_elements_case); | 7006 __ bind(&fast_elements_case); |
| 7025 GenerateCase(masm, FAST_ELEMENTS); | 7007 GenerateCase(masm, FAST_ELEMENTS); |
| 7026 } | 7008 } |
| 7027 | 7009 |
| 7028 | 7010 |
| 7029 #undef __ | 7011 #undef __ |
| 7030 | 7012 |
| 7031 } } // namespace v8::internal | 7013 } } // namespace v8::internal |
| 7032 | 7014 |
| 7033 #endif // V8_TARGET_ARCH_X64 | 7015 #endif // V8_TARGET_ARCH_X64 |
| OLD | NEW |