OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 6835 matching lines...) |
6846 STATIC_ASSERT(kSmiTag == 0); | 6846 STATIC_ASSERT(kSmiTag == 0); |
6847 __ and_(tmp1, right); | 6847 __ and_(tmp1, right); |
6848 __ JumpIfSmi(tmp1, &miss, Label::kNear); | 6848 __ JumpIfSmi(tmp1, &miss, Label::kNear); |
6849 | 6849 |
6850 // Check that both operands are internalized strings. | 6850 // Check that both operands are internalized strings. |
6851 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset)); | 6851 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset)); |
6852 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset)); | 6852 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset)); |
6853 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); | 6853 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); |
6854 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); | 6854 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); |
6855 STATIC_ASSERT(kInternalizedTag != 0); | 6855 STATIC_ASSERT(kInternalizedTag != 0); |
6856 __ and_(tmp1, tmp2); | 6856 __ and_(tmp1, Immediate(kIsNotStringMask | kIsInternalizedMask)); |
6857 __ test(tmp1, Immediate(kIsInternalizedMask)); | 6857 __ cmpb(tmp1, kInternalizedTag | kStringTag); |
6858 __ j(zero, &miss, Label::kNear); | 6858 __ j(not_equal, &miss, Label::kNear); |
| 6859 |
| 6860 __ and_(tmp2, Immediate(kIsNotStringMask | kIsInternalizedMask)); |
| 6861 __ cmpb(tmp2, kInternalizedTag | kStringTag); |
| 6862 __ j(not_equal, &miss, Label::kNear); |
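Reviewer note: a minimal C++ sketch of the predicate the new and_/cmpb pair above encodes, i.e. one masked compare per operand answering "is a string AND is internalized". The constant values are placeholders for the sketch only; the real definitions live in V8's objects.h.

#include <cstdint>

// Placeholder bit layout for this sketch (not the real objects.h values).
constexpr uint8_t kStringTag          = 0x00;
constexpr uint8_t kIsNotStringMask    = 0x80;
constexpr uint8_t kInternalizedTag    = 0x40;
constexpr uint8_t kIsInternalizedMask = 0x40;

// What each and_/cmpb pair above computes for one operand's instance type.
inline bool IsInternalizedString(uint8_t instance_type) {
  return (instance_type & (kIsNotStringMask | kIsInternalizedMask)) ==
         (kInternalizedTag | kStringTag);
}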
6859 | 6863 |
6860 // Internalized strings are compared by identity. | 6864 // Internalized strings are compared by identity. |
6861 Label done; | 6865 Label done; |
6862 __ cmp(left, right); | 6866 __ cmp(left, right); |
6863 // Make sure eax is non-zero. At this point input operands are | 6867 // Make sure eax is non-zero. At this point input operands are |
6864 // guaranteed to be non-zero. | 6868 // guaranteed to be non-zero. |
6865 ASSERT(right.is(eax)); | 6869 ASSERT(right.is(eax)); |
6866 __ j(not_equal, &done, Label::kNear); | 6870 __ j(not_equal, &done, Label::kNear); |
6867 STATIC_ASSERT(EQUAL == 0); | 6871 STATIC_ASSERT(EQUAL == 0); |
6868 STATIC_ASSERT(kSmiTag == 0); | 6872 STATIC_ASSERT(kSmiTag == 0); |
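A note on the result convention this fast path relies on, written out as a sketch; this is a reading of the comments above rather than something stated elsewhere in the patch: the stub answers in eax, Smi::FromInt(EQUAL) == 0 means "equal", and any non-zero value means "not equal", so the not-equal path can return with the tagged (hence non-zero) right operand already sitting in eax instead of loading a constant.

#include <cstdint>

// Sketch only: eax holds `right`, a tagged heap pointer, on entry.
inline intptr_t EqualityResult(intptr_t left, intptr_t right_in_eax) {
  if (left != right_in_eax) return right_in_eax;  // tagged pointer, never 0
  return 0;                                       // Smi::FromInt(EQUAL)
}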
(...skipping 25 matching lines...) |
6894 | 6898 |
6895 // Check that both operands are unique names. This leaves the instance | 6899 // Check that both operands are unique names. This leaves the instance |
6896 // types loaded in tmp1 and tmp2. | 6900 // types loaded in tmp1 and tmp2. |
6897 STATIC_ASSERT(kInternalizedTag != 0); | 6901 STATIC_ASSERT(kInternalizedTag != 0); |
6898 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset)); | 6902 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset)); |
6899 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset)); | 6903 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset)); |
6900 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); | 6904 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); |
6901 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); | 6905 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); |
6902 | 6906 |
6903 Label succeed1; | 6907 Label succeed1; |
6904 __ test(tmp1, Immediate(kIsInternalizedMask)); | |
6905 __ j(not_zero, &succeed1); | |
6906 __ cmpb(tmp1, static_cast<uint8_t>(SYMBOL_TYPE)); | 6908 __ cmpb(tmp1, static_cast<uint8_t>(SYMBOL_TYPE)); |
6907 __ j(not_equal, &miss); | 6909 __ j(equal, &succeed1); |
| 6910 __ and_(tmp1, Immediate(kIsNotStringMask | kIsInternalizedMask)); |
| 6911 __ cmpb(tmp1, kIsInternalizedMask | kStringTag); |
| 6912 __ j(not_equal, &miss, Label::kNear); |
6908 __ bind(&succeed1); | 6913 __ bind(&succeed1); |
6909 | 6914 |
6910 Label succeed2; | 6915 Label succeed2; |
6911 __ test(tmp2, Immediate(kIsInternalizedMask)); | |
6912 __ j(not_zero, &succeed2); | |
6913 __ cmpb(tmp2, static_cast<uint8_t>(SYMBOL_TYPE)); | 6916 __ cmpb(tmp2, static_cast<uint8_t>(SYMBOL_TYPE)); |
6914 __ j(not_equal, &miss); | 6917 __ j(equal, &succeed2); |
| 6918 __ and_(tmp2, Immediate(kIsNotStringMask | kIsInternalizedMask)); |
| 6919 __ cmpb(tmp2, kIsInternalizedMask | kStringTag); |
| 6920 __ j(not_equal, &miss, Label::kNear); |
6915 __ bind(&succeed2); | 6921 __ bind(&succeed2); |
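The rewritten checks above spell out "unique name = Symbol OR internalized string". A hedged sketch of that predicate, reusing the placeholder constants from the first sketch; this hunk compares against kIsInternalizedMask | kStringTag, which under those placeholders is the same value as kInternalizedTag | kStringTag.

// Placeholder; the real SYMBOL_TYPE constant comes from V8's objects.h.
constexpr uint8_t SYMBOL_TYPE = 0x85;

inline bool IsUniqueName(uint8_t instance_type) {
  if (instance_type == SYMBOL_TYPE) return true;   // symbols are unique names
  return (instance_type & (kIsNotStringMask | kIsInternalizedMask)) ==
         (kInternalizedTag | kStringTag);           // internalized strings too
}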
6916 | 6922 |
6917 // Unique names are compared by identity. | 6923 // Unique names are compared by identity. |
6918 Label done; | 6924 Label done; |
6919 __ cmp(left, right); | 6925 __ cmp(left, right); |
6920 // Make sure eax is non-zero. At this point input operands are | 6926 // Make sure eax is non-zero. At this point input operands are |
6921 // guaranteed to be non-zero. | 6927 // guaranteed to be non-zero. |
6922 ASSERT(right.is(eax)); | 6928 ASSERT(right.is(eax)); |
6923 __ j(not_equal, &done, Label::kNear); | 6929 __ j(not_equal, &done, Label::kNear); |
6924 STATIC_ASSERT(EQUAL == 0); | 6930 STATIC_ASSERT(EQUAL == 0); |
(...skipping 45 matching lines...) |
6970 STATIC_ASSERT(EQUAL == 0); | 6976 STATIC_ASSERT(EQUAL == 0); |
6971 STATIC_ASSERT(kSmiTag == 0); | 6977 STATIC_ASSERT(kSmiTag == 0); |
6972 __ Set(eax, Immediate(Smi::FromInt(EQUAL))); | 6978 __ Set(eax, Immediate(Smi::FromInt(EQUAL))); |
6973 __ ret(0); | 6979 __ ret(0); |
6974 | 6980 |
6975 // Handle not identical strings. | 6981 // Handle not identical strings. |
6976 __ bind(¬_same); | 6982 __ bind(¬_same); |
6977 | 6983 |
6978 // Check that both strings are internalized. If they are, we're done | 6984 // Check that both strings are internalized. If they are, we're done |
6979 // because we already know they are not identical. But in the case of | 6985 // because we already know they are not identical. But in the case of |
6980 // non-equality compare, we still need to determine the order. | 6986 // non-equality compare, we still need to determine the order. We |
| 6987 // also know they are both strings. |
6981 if (equality) { | 6988 if (equality) { |
6982 Label do_compare; | 6989 Label do_compare; |
6983 STATIC_ASSERT(kInternalizedTag != 0); | 6990 STATIC_ASSERT(kInternalizedTag != 0); |
6984 __ and_(tmp1, tmp2); | 6991 __ and_(tmp1, tmp2); |
6985 __ test(tmp1, Immediate(kIsInternalizedMask)); | 6992 __ test(tmp1, Immediate(kIsInternalizedMask)); |
6986 __ j(zero, &do_compare, Label::kNear); | 6993 __ j(zero, &do_compare, Label::kNear); |
6987 // Make sure eax is non-zero. At this point input operands are | 6994 // Make sure eax is non-zero. At this point input operands are |
6988 // guaranteed to be non-zero. | 6995 // guaranteed to be non-zero. |
6989 ASSERT(right.is(eax)); | 6996 ASSERT(right.is(eax)); |
6990 __ ret(0); | 6997 __ ret(0); |
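A small sketch of the two-instruction "both internalized?" test in the equality fast path above. Both operands are already known to be strings here (per the updated comment), so only the internalized bit needs checking, and AND-ing the two instance types lets one test cover left and right at once. Placeholder constants as in the first sketch.

inline bool BothInternalized(uint8_t left_type, uint8_t right_type) {
  // A bit survives the AND only if it is set in both instance types.
  return ((left_type & right_type) & kIsInternalizedMask) != 0;
}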
(...skipping 138 matching lines...) |
7129 __ cmp(entity_name, Handle<Name>(name)); | 7136 __ cmp(entity_name, Handle<Name>(name)); |
7130 __ j(equal, miss); | 7137 __ j(equal, miss); |
7131 | 7138 |
7132 Label good; | 7139 Label good; |
7133 // Check for the hole and skip. | 7140 // Check for the hole and skip. |
7134 __ cmp(entity_name, masm->isolate()->factory()->the_hole_value()); | 7141 __ cmp(entity_name, masm->isolate()->factory()->the_hole_value()); |
7135 __ j(equal, &good, Label::kNear); | 7142 __ j(equal, &good, Label::kNear); |
7136 | 7143 |
7137 // Check if the entry name is not a unique name. | 7144 // Check if the entry name is not a unique name. |
7138 __ mov(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset)); | 7145 __ mov(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset)); |
| 7146 __ cmpb(FieldOperand(entity_name, Map::kInstanceTypeOffset), |
| 7147 static_cast<uint8_t>(SYMBOL_TYPE)); |
| 7148 __ j(equal, &good); |
| 7149 __ test_b(FieldOperand(entity_name, Map::kInstanceTypeOffset), |
| 7150 kIsNotStringMask); |
| 7151 __ j(not_zero, miss); |
7139 __ test_b(FieldOperand(entity_name, Map::kInstanceTypeOffset), | 7152 __ test_b(FieldOperand(entity_name, Map::kInstanceTypeOffset), |
7140 kIsInternalizedMask); | 7153 kIsInternalizedMask); |
7141 __ j(not_zero, &good); | 7154 __ j(zero, miss); |
7142 __ cmpb(FieldOperand(entity_name, Map::kInstanceTypeOffset), | |
7143 static_cast<uint8_t>(SYMBOL_TYPE)); | |
7144 __ j(not_equal, miss); | |
7145 __ bind(&good); | 7155 __ bind(&good); |
7146 } | 7156 } |
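For review context, a C++ sketch of the per-entry decision the new sequence encodes for the negative lookup; the hole and name-equality checks come from the lines just above, and the constants are the same placeholders as before.

// Returns true if probing may continue, false if we must bail out to miss.
inline bool EntryIsSafeForNegativeLookup(bool equals_key, bool is_hole,
                                         uint8_t instance_type) {
  if (equals_key) return false;                     // the name is present: miss
  if (is_hole) return true;                         // empty slot: keep probing
  if (instance_type == SYMBOL_TYPE) return true;    // symbols are unique names
  if (instance_type & kIsNotStringMask) return false;  // not a name: miss
  return (instance_type & kIsInternalizedMask) != 0;   // must be internalized
}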
7147 | 7157 |
7148 NameDictionaryLookupStub stub(properties, r0, r0, NEGATIVE_LOOKUP); | 7158 NameDictionaryLookupStub stub(properties, r0, r0, NEGATIVE_LOOKUP); |
7149 __ push(Immediate(Handle<Object>(name))); | 7159 __ push(Immediate(Handle<Object>(name))); |
7150 __ push(Immediate(name->Hash())); | 7160 __ push(Immediate(name->Hash())); |
7151 __ CallStub(&stub); | 7161 __ CallStub(&stub); |
7152 __ test(r0, r0); | 7162 __ test(r0, r0); |
7153 __ j(not_zero, miss); | 7163 __ j(not_zero, miss); |
7154 __ jmp(done); | 7164 __ jmp(done); |
(...skipping 114 matching lines...) |
7269 __ j(equal, &in_dictionary); | 7279 __ j(equal, &in_dictionary); |
7270 | 7280 |
7271 if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) { | 7281 if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) { |
7272 // If we hit a key that is not a unique name during negative | 7282 // If we hit a key that is not a unique name during negative |
7273 // lookup we have to bailout as this key might be equal to the | 7283 // lookup we have to bailout as this key might be equal to the |
7274 // key we are looking for. | 7284 // key we are looking for. |
7275 | 7285 |
7276 // Check if the entry name is not a unique name. | 7286 // Check if the entry name is not a unique name. |
7277 Label cont; | 7287 Label cont; |
7278 __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); | 7288 __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); |
| 7289 __ cmpb(FieldOperand(scratch, Map::kInstanceTypeOffset), |
| 7290 static_cast<uint8_t>(SYMBOL_TYPE)); |
| 7291 __ j(equal, &cont); |
| 7292 __ test_b(FieldOperand(scratch, Map::kInstanceTypeOffset), |
| 7293 kIsNotStringMask); |
| 7294 __ j(not_zero, &maybe_in_dictionary); |
7279 __ test_b(FieldOperand(scratch, Map::kInstanceTypeOffset), | 7295 __ test_b(FieldOperand(scratch, Map::kInstanceTypeOffset), |
7280 kIsInternalizedMask); | 7296 kIsInternalizedMask); |
7281 __ j(not_zero, &cont); | 7297 __ j(zero, &maybe_in_dictionary); |
7282 __ cmpb(FieldOperand(scratch, Map::kInstanceTypeOffset), | |
7283 static_cast<uint8_t>(SYMBOL_TYPE)); | |
7284 __ j(not_equal, &maybe_in_dictionary); | |
7285 __ bind(&cont); | 7298 __ bind(&cont); |
7286 } | 7299 } |
7287 } | 7300 } |
7288 | 7301 |
7289 __ bind(&maybe_in_dictionary); | 7302 __ bind(&maybe_in_dictionary); |
7290 // If we are doing negative lookup then probing failure should be | 7303 // If we are doing negative lookup then probing failure should be |
7291 // treated as a lookup success. For positive lookup probing failure | 7304 // treated as a lookup success. For positive lookup probing failure |
7292 // should be treated as lookup failure. | 7305 // should be treated as lookup failure. |
7293 if (mode_ == POSITIVE_LOOKUP) { | 7306 if (mode_ == POSITIVE_LOOKUP) { |
7294 __ mov(result_, Immediate(0)); | 7307 __ mov(result_, Immediate(0)); |
(...skipping 719 matching lines...) |
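A sketch of the probing-failure convention described at maybe_in_dictionary above. The negative-lookup value is elided by the diff, so the non-zero result below is an assumption inferred from the test(r0, r0); j(not_zero, miss) at the NameDictionaryLookupStub call site earlier in this file.

// Probing exhausted all attempts without a definitive answer.
inline int ProbingFailureResult(bool negative_lookup) {
  // NEGATIVE_LOOKUP: absence was not proven, so report a possible hit and
  // let the caller take the miss path. POSITIVE_LOOKUP: report "not found".
  return negative_lookup ? 1 : 0;  // assumed non-zero value for the hit case
}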
8014 __ bind(&fast_elements_case); | 8027 __ bind(&fast_elements_case); |
8015 GenerateCase(masm, FAST_ELEMENTS); | 8028 GenerateCase(masm, FAST_ELEMENTS); |
8016 } | 8029 } |
8017 | 8030 |
8018 | 8031 |
8019 #undef __ | 8032 #undef __ |
8020 | 8033 |
8021 } } // namespace v8::internal | 8034 } } // namespace v8::internal |
8022 | 8035 |
8023 #endif // V8_TARGET_ARCH_IA32 | 8036 #endif // V8_TARGET_ARCH_IA32 |