OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 2235 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
2246 // Load instance type for both strings. | 2246 // Load instance type for both strings. |
2247 movq(scratch1, FieldOperand(first_object, HeapObject::kMapOffset)); | 2247 movq(scratch1, FieldOperand(first_object, HeapObject::kMapOffset)); |
2248 movq(scratch2, FieldOperand(second_object, HeapObject::kMapOffset)); | 2248 movq(scratch2, FieldOperand(second_object, HeapObject::kMapOffset)); |
2249 movzxbl(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset)); | 2249 movzxbl(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset)); |
2250 movzxbl(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset)); | 2250 movzxbl(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset)); |
2251 | 2251 |
2252 // Check that both are flat ASCII strings. | 2252 // Check that both are flat ASCII strings. |
2253 ASSERT(kNotStringTag != 0); | 2253 ASSERT(kNotStringTag != 0); |
2254 const int kFlatAsciiStringMask = | 2254 const int kFlatAsciiStringMask = |
2255 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask; | 2255 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask; |
2256 const int kFlatAsciiStringTag = ASCII_STRING_TYPE; | 2256 // Use ASCII_INTERNALIZED_STRING_TYPE because its internalized bit is zero. |
2257 // Since we don't include that bit in the mask above, the test below will | |
2258 // succeed for ASCII_STRING_TYPE and ASCII_INTERNALIZED_STRING_TYPE. | |
2259 STATIC_ASSERT(kInternalizedTag == 0); | |
2260 const int kFlatAsciiStringTag = ASCII_INTERNALIZED_STRING_TYPE; | |
2257 | 2261 |
2258 andl(scratch1, Immediate(kFlatAsciiStringMask)); | 2262 andl(scratch1, Immediate(kFlatAsciiStringMask)); |
2259 andl(scratch2, Immediate(kFlatAsciiStringMask)); | 2263 andl(scratch2, Immediate(kFlatAsciiStringMask)); |
2260 // Interleave the bits to check both scratch1 and scratch2 in one test. | 2264 // Interleave the bits to check both scratch1 and scratch2 in one test. |
2261 ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3)); | 2265 ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3)); |
2262 lea(scratch1, Operand(scratch1, scratch2, times_8, 0)); | 2266 lea(scratch1, Operand(scratch1, scratch2, times_8, 0)); |
2263 cmpl(scratch1, | 2267 cmpl(scratch1, |
2264 Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3))); | 2268 Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3))); |
2265 j(not_equal, on_fail, near_jump); | 2269 j(not_equal, on_fail, near_jump); |
2266 } | 2270 } |
2267 | 2271 |
2268 | 2272 |
2269 void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii( | 2273 void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii( |
2270 Register instance_type, | 2274 Register instance_type, |
2271 Register scratch, | 2275 Register scratch, |
2272 Label* failure, | 2276 Label* failure, |
2273 Label::Distance near_jump) { | 2277 Label::Distance near_jump) { |
2274 if (!scratch.is(instance_type)) { | 2278 if (!scratch.is(instance_type)) { |
2275 movl(scratch, instance_type); | 2279 movl(scratch, instance_type); |
2276 } | 2280 } |
2277 | 2281 |
2278 const int kFlatAsciiStringMask = | 2282 const int kFlatAsciiStringMask = |
2279 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask; | 2283 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask; |
2280 | 2284 |
2281 andl(scratch, Immediate(kFlatAsciiStringMask)); | 2285 andl(scratch, Immediate(kFlatAsciiStringMask)); |
2282 cmpl(scratch, Immediate(kStringTag | kSeqStringTag | kOneByteStringTag)); | 2286 cmpl(scratch, Immediate(kStringTag | kSeqStringTag | kOneByteStringTag)); |
Yang
2013/07/22 09:09:46
I like it this way. Less error prone apparently.
mvstanton
2013/07/22 09:40:17
Indeed, way better. thx.
| |
2283 j(not_equal, failure, near_jump); | 2287 j(not_equal, failure, near_jump); |
2284 } | 2288 } |
2285 | 2289 |
2286 | 2290 |
2287 void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialAscii( | 2291 void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialAscii( |
2288 Register first_object_instance_type, | 2292 Register first_object_instance_type, |
2289 Register second_object_instance_type, | 2293 Register second_object_instance_type, |
2290 Register scratch1, | 2294 Register scratch1, |
2291 Register scratch2, | 2295 Register scratch2, |
2292 Label* on_fail, | 2296 Label* on_fail, |
(...skipping 2398 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
4691 j(greater, &no_memento_available); | 4695 j(greater, &no_memento_available); |
4692 CompareRoot(MemOperand(scratch_reg, -AllocationMemento::kSize), | 4696 CompareRoot(MemOperand(scratch_reg, -AllocationMemento::kSize), |
4693 Heap::kAllocationMementoMapRootIndex); | 4697 Heap::kAllocationMementoMapRootIndex); |
4694 bind(&no_memento_available); | 4698 bind(&no_memento_available); |
4695 } | 4699 } |
4696 | 4700 |
4697 | 4701 |
4698 } } // namespace v8::internal | 4702 } } // namespace v8::internal |
4699 | 4703 |
4700 #endif // V8_TARGET_ARCH_X64 | 4704 #endif // V8_TARGET_ARCH_X64 |
OLD | NEW |