Chromium Code Reviews

Diff: src/mips/code-stubs-mips.cc

Issue 18083004: MIPS: The check for internalized strings relied on the fact that we had less than 64 distinct Insta… (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 7 years, 5 months ago
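
In short: the old fast path ANDed the two instance-type bytes together and then
tested only kIsInternalizedMask, which is sound only while no non-string type can
carry the internalized bit (the removed
STATIC_ASSERT(LAST_TYPE < kNotStringTag + kIsInternalizedMask) guarded exactly
that). The new code tests each operand separately, masking its instance type with
kIsNotStringMask | kIsInternalizedMask and comparing against
kInternalizedTag | kStringTag, i.e. "is an internalized string". A minimal C++
sketch of that per-operand predicate (constant names are taken from the diff; the
bit values below are placeholders, not V8's actual encoding):

  #include <cstdint>

  // Placeholder bit layout, for illustration only.
  const uint8_t kIsNotStringMask    = 0x80;
  const uint8_t kStringTag          = 0x00;
  const uint8_t kIsInternalizedMask = 0x40;
  const uint8_t kInternalizedTag    = 0x40;

  // True iff the instance-type byte describes an internalized string;
  // this is the test the new And/Branch pairs perform for each operand.
  bool IsInternalizedString(uint8_t instance_type) {
    return (instance_type & (kIsNotStringMask | kIsInternalizedMask)) ==
           (kInternalizedTag | kStringTag);
  }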
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 1163 matching lines...)
   __ Branch(&return_not_equal, eq, a2, Operand(ODDBALL_TYPE));

   __ GetObjectType(rhs, a3, a3);
   __ Branch(&return_not_equal, greater, a3, Operand(FIRST_SPEC_OBJECT_TYPE));

   // Check for oddballs: true, false, null, undefined.
   __ Branch(&return_not_equal, eq, a3, Operand(ODDBALL_TYPE));

   // Now that we have the types we might as well check for
   // internalized-internalized.
-  // Ensure that no non-strings have the internalized bit set.
-  STATIC_ASSERT(LAST_TYPE < kNotStringTag + kIsInternalizedMask);
+  Label not_internalized;
   STATIC_ASSERT(kInternalizedTag != 0);
-  __ And(t2, a2, Operand(a3));
-  __ And(t0, t2, Operand(kIsInternalizedMask));
-  __ Branch(&return_not_equal, ne, t0, Operand(zero_reg));
+  __ And(t2, a2, Operand(kIsNotStringMask | kIsInternalizedMask));
+  __ Branch(&not_internalized, ne, t2,
+            Operand(kInternalizedTag | kStringTag));
+
+  __ And(a3, a3, Operand(kIsNotStringMask | kIsInternalizedMask));
+  __ Branch(&return_not_equal, eq, a3,
+            Operand(kInternalizedTag | kStringTag));
+
+  __ bind(&not_internalized);
 }


 static void EmitCheckForTwoHeapNumbers(MacroAssembler* masm,
                                        Register lhs,
                                        Register rhs,
                                        Label* both_loaded_as_doubles,
                                        Label* not_heap_numbers,
                                        Label* slow) {
   __ GetObjectType(lhs, a3, a2);
(...skipping 13 matching lines...)

 // Fast negative check for internalized-to-internalized equality.
 static void EmitCheckForInternalizedStringsOrObjects(MacroAssembler* masm,
                                                      Register lhs,
                                                      Register rhs,
                                                      Label* possible_strings,
                                                      Label* not_both_strings) {
   ASSERT((lhs.is(a0) && rhs.is(a1)) ||
          (lhs.is(a1) && rhs.is(a0)));

-  // a2 is object type of lhs.
-  // Ensure that no non-strings have the internalized bit set.
+  // a2 is object type of rhs.
   Label object_test;
   STATIC_ASSERT(kInternalizedTag != 0);
   __ And(at, a2, Operand(kIsNotStringMask));
   __ Branch(&object_test, ne, at, Operand(zero_reg));
   __ And(at, a2, Operand(kIsInternalizedMask));
   __ Branch(possible_strings, eq, at, Operand(zero_reg));
   __ GetObjectType(rhs, a3, a3);
   __ Branch(not_both_strings, ge, a3, Operand(FIRST_NONSTRING_TYPE));
   __ And(at, a3, Operand(kIsInternalizedMask));
   __ Branch(possible_strings, eq, at, Operand(zero_reg));
(...skipping 5370 matching lines...)

   // Check that both operands are heap objects.
   __ JumpIfEitherSmi(left, right, &miss);

   // Check that both operands are internalized strings.
   __ lw(tmp1, FieldMemOperand(left, HeapObject::kMapOffset));
   __ lw(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
   __ lbu(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset));
   __ lbu(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset));
   STATIC_ASSERT(kInternalizedTag != 0);
-  __ And(tmp1, tmp1, Operand(tmp2));
-  __ And(tmp1, tmp1, kIsInternalizedMask);
-  __ Branch(&miss, eq, tmp1, Operand(zero_reg));
+
+  __ And(tmp1, tmp1, Operand(kIsNotStringMask | kIsInternalizedMask));
+  __ Branch(&miss, ne, tmp1, Operand(kInternalizedTag | kStringTag));
+
+  __ And(tmp2, tmp2, Operand(kIsNotStringMask | kIsInternalizedMask));
+  __ Branch(&miss, ne, tmp2, Operand(kInternalizedTag | kStringTag));
+
   // Make sure a0 is non-zero. At this point input operands are
   // guaranteed to be non-zero.
   ASSERT(right.is(a0));
   STATIC_ASSERT(EQUAL == 0);
   STATIC_ASSERT(kSmiTag == 0);
   __ mov(v0, right);
   // Internalized strings are compared by identity.
   __ Ret(ne, left, Operand(right));
   ASSERT(is_int16(EQUAL));
   __ Ret(USE_DELAY_SLOT);
(...skipping 19 matching lines...)
   __ JumpIfEitherSmi(left, right, &miss);

   // Check that both operands are unique names. This leaves the instance
   // types loaded in tmp1 and tmp2.
   STATIC_ASSERT(kInternalizedTag != 0);
   __ lw(tmp1, FieldMemOperand(left, HeapObject::kMapOffset));
   __ lw(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
   __ lbu(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset));
   __ lbu(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset));

-  Label succeed1;
-  __ And(at, tmp1, Operand(kIsInternalizedMask));
-  __ Branch(&succeed1, ne, at, Operand(zero_reg));
-  __ Branch(&miss, ne, tmp1, Operand(SYMBOL_TYPE));
-  __ bind(&succeed1);
-
-  Label succeed2;
-  __ And(at, tmp2, Operand(kIsInternalizedMask));
-  __ Branch(&succeed2, ne, at, Operand(zero_reg));
-  __ Branch(&miss, ne, tmp2, Operand(SYMBOL_TYPE));
-  __ bind(&succeed2);
+  __ JumpIfNotUniqueName(tmp1, &miss);
+  __ JumpIfNotUniqueName(tmp2, &miss);

   // Use a0 as result
   __ mov(v0, a0);

   // Unique names are compared by identity.
   Label done;
   __ Branch(&done, ne, left, Operand(right));
   // Make sure a0 is non-zero. At this point input operands are
   // guaranteed to be non-zero.
   ASSERT(right.is(a0));
(...skipping 42 matching lines...)
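The inlined "internalized bit set, or SYMBOL_TYPE" sequences above and below are
folded into a single MacroAssembler helper, JumpIfNotUniqueName. Judging from the
code it replaces, the helper jumps to the given label unless the instance type
denotes a unique name, i.e. an internalized string or a symbol; its actual
implementation lives in the MIPS macro assembler and is not part of this diff. An
illustrative C++ sketch of that check (SYMBOL_TYPE and kIsInternalizedMask are the
names used by the replaced code; the values below are placeholders):

  #include <cstdint>

  // Placeholder values, for illustration only.
  const uint8_t kIsInternalizedMask = 0x40;
  const uint8_t SYMBOL_TYPE         = 0x81;

  // A unique name is an internalized string or a symbol; everything else
  // should take the "not unique" branch (the miss label in the stubs).
  bool IsUniqueName(uint8_t instance_type) {
    return (instance_type & kIsInternalizedMask) != 0 ||
           instance_type == SYMBOL_TYPE;
  }
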
   STATIC_ASSERT(EQUAL == 0);
   STATIC_ASSERT(kSmiTag == 0);
   __ Branch(&left_ne_right, ne, left, Operand(right));
   __ Ret(USE_DELAY_SLOT);
   __ mov(v0, zero_reg);  // In the delay slot.
   __ bind(&left_ne_right);

   // Handle not identical strings.

   // Check that both strings are internalized strings. If they are, we're done
-  // because we already know they are not identical.
+  // because we already know they are not identical. We know they are both
+  // strings.
   if (equality) {
     ASSERT(GetCondition() == eq);
     STATIC_ASSERT(kInternalizedTag != 0);
     __ And(tmp3, tmp1, Operand(tmp2));
     __ And(tmp5, tmp3, Operand(kIsInternalizedMask));
     Label is_symbol;
     __ Branch(&is_symbol, eq, tmp5, Operand(zero_reg));
     // Make sure a0 is non-zero. At this point input operands are
     // guaranteed to be non-zero.
     ASSERT(right.is(a0));
(...skipping 184 matching lines...)
     // Stop if found the property.
     __ Branch(miss, eq, entity_name, Operand(Handle<Name>(name)));

     Label good;
     __ Branch(&good, eq, entity_name, Operand(tmp));

     // Check if the entry name is not a unique name.
     __ lw(entity_name, FieldMemOperand(entity_name, HeapObject::kMapOffset));
     __ lbu(entity_name,
            FieldMemOperand(entity_name, Map::kInstanceTypeOffset));
-    __ And(scratch0, entity_name, Operand(kIsInternalizedMask));
-    __ Branch(&good, ne, scratch0, Operand(zero_reg));
-    __ Branch(miss, ne, entity_name, Operand(SYMBOL_TYPE));
-
+    __ JumpIfNotUniqueName(entity_name, miss);
     __ bind(&good);

     // Restore the properties.
     __ lw(properties,
           FieldMemOperand(receiver, JSObject::kPropertiesOffset));
   }

   const int spill_mask =
       (ra.bit() | t2.bit() | t1.bit() | t0.bit() | a3.bit() |
        a2.bit() | a1.bit() | a0.bit() | v0.bit());
(...skipping 153 matching lines...)
     __ lw(entry_key, FieldMemOperand(index, kElementsStartOffset));

     // Having undefined at this place means the name is not contained.
     __ Branch(&not_in_dictionary, eq, entry_key, Operand(undefined));

     // Stop if found the property.
     __ Branch(&in_dictionary, eq, entry_key, Operand(key));

     if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) {
       // Check if the entry name is not a unique name.
-      Label cont;
       __ lw(entry_key, FieldMemOperand(entry_key, HeapObject::kMapOffset));
       __ lbu(entry_key,
              FieldMemOperand(entry_key, Map::kInstanceTypeOffset));
-      __ And(result, entry_key, Operand(kIsInternalizedMask));
-      __ Branch(&cont, ne, result, Operand(zero_reg));
-      __ Branch(&maybe_in_dictionary, ne, entry_key, Operand(SYMBOL_TYPE));
-      __ bind(&cont);
+      __ JumpIfNotUniqueName(entry_key, &maybe_in_dictionary);
     }
   }

   __ bind(&maybe_in_dictionary);
   // If we are doing negative lookup then probing failure should be
   // treated as a lookup success. For positive lookup probing failure
   // should be treated as lookup failure.
   if (mode_ == POSITIVE_LOOKUP) {
     __ Ret(USE_DELAY_SLOT);
     __ mov(result, zero_reg);
(...skipping 706 matching lines...)
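The comment in the probe loop's tail explains the convention when all kTotalProbes
probes are exhausted without a definite answer. Expressed outside of assembly (the
mode names come from the code; the helper function itself is illustrative only,
not part of the stub's interface):

  enum LookupMode { POSITIVE_LOOKUP, NEGATIVE_LOOKUP };

  // What the stub reports when probing was inconclusive: a negative lookup
  // only needs to know the name is absent, so an inconclusive probe counts
  // as success; a positive lookup must report failure, since presence was
  // not proven.
  bool ResultOnProbingFailure(LookupMode mode) {
    return mode == NEGATIVE_LOOKUP;
  }
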
   __ bind(&fast_elements_case);
   GenerateCase(masm, FAST_ELEMENTS);
 }


 #undef __

 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_MIPS