| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 71 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 82 __ cmpp(kScratchRegister, FieldOperand(receiver, HeapObject::kMapOffset)); | 82 __ cmpp(kScratchRegister, FieldOperand(receiver, HeapObject::kMapOffset)); |
| 83 __ j(not_equal, &miss); | 83 __ j(not_equal, &miss); |
| 84 | 84 |
| 85 // Get the code entry from the cache. | 85 // Get the code entry from the cache. |
| 86 __ LoadAddress(kScratchRegister, value_offset); | 86 __ LoadAddress(kScratchRegister, value_offset); |
| 87 __ movp(kScratchRegister, | 87 __ movp(kScratchRegister, |
| 88 Operand(kScratchRegister, offset, scale_factor, 0)); | 88 Operand(kScratchRegister, offset, scale_factor, 0)); |
| 89 | 89 |
| 90 // Check that the flags match what we're looking for. | 90 // Check that the flags match what we're looking for. |
| 91 __ movl(offset, FieldOperand(kScratchRegister, Code::kFlagsOffset)); | 91 __ movl(offset, FieldOperand(kScratchRegister, Code::kFlagsOffset)); |
| 92 __ and_(offset, Immediate(~Code::kFlagsNotUsedInLookup)); | 92 __ andp(offset, Immediate(~Code::kFlagsNotUsedInLookup)); |
| 93 __ cmpl(offset, Immediate(flags)); | 93 __ cmpl(offset, Immediate(flags)); |
| 94 __ j(not_equal, &miss); | 94 __ j(not_equal, &miss); |
| 95 | 95 |
| 96 #ifdef DEBUG | 96 #ifdef DEBUG |
| 97 if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) { | 97 if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) { |
| 98 __ jmp(&miss); | 98 __ jmp(&miss); |
| 99 } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) { | 99 } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) { |
| 100 __ jmp(&miss); | 100 __ jmp(&miss); |
| 101 } | 101 } |
| 102 #endif | 102 #endif |
| (...skipping 85 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 188 Counters* counters = masm->isolate()->counters(); | 188 Counters* counters = masm->isolate()->counters(); |
| 189 __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1); | 189 __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1); |
| 190 | 190 |
| 191 // Check that the receiver isn't a smi. | 191 // Check that the receiver isn't a smi. |
| 192 __ JumpIfSmi(receiver, &miss); | 192 __ JumpIfSmi(receiver, &miss); |
| 193 | 193 |
| 194 // Get the map of the receiver and compute the hash. | 194 // Get the map of the receiver and compute the hash. |
| 195 __ movl(scratch, FieldOperand(name, Name::kHashFieldOffset)); | 195 __ movl(scratch, FieldOperand(name, Name::kHashFieldOffset)); |
| 196 // Use only the low 32 bits of the map pointer. | 196 // Use only the low 32 bits of the map pointer. |
| 197 __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset)); | 197 __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset)); |
| 198 __ xor_(scratch, Immediate(flags)); | 198 __ xorp(scratch, Immediate(flags)); |
| 199 // We mask out the last two bits because they are not part of the hash and | 199 // We mask out the last two bits because they are not part of the hash and |
| 200 // they are always 01 for maps. Also in the two 'and' instructions below. | 200 // they are always 01 for maps. Also in the two 'andp' instructions below. |
| 201 __ and_(scratch, Immediate((kPrimaryTableSize - 1) << kHeapObjectTagSize)); | 201 __ andp(scratch, Immediate((kPrimaryTableSize - 1) << kHeapObjectTagSize)); |
| 202 | 202 |
| 203 // Probe the primary table. | 203 // Probe the primary table. |
| 204 ProbeTable(isolate, masm, flags, kPrimary, receiver, name, scratch); | 204 ProbeTable(isolate, masm, flags, kPrimary, receiver, name, scratch); |
| 205 | 205 |
| 206 // Primary miss: Compute hash for secondary probe. | 206 // Primary miss: Compute hash for secondary probe. |
| 207 __ movl(scratch, FieldOperand(name, Name::kHashFieldOffset)); | 207 __ movl(scratch, FieldOperand(name, Name::kHashFieldOffset)); |
| 208 __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset)); | 208 __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset)); |
| 209 __ xor_(scratch, Immediate(flags)); | 209 __ xorp(scratch, Immediate(flags)); |
| 210 __ and_(scratch, Immediate((kPrimaryTableSize - 1) << kHeapObjectTagSize)); | 210 __ andp(scratch, Immediate((kPrimaryTableSize - 1) << kHeapObjectTagSize)); |
| 211 __ subl(scratch, name); | 211 __ subl(scratch, name); |
| 212 __ addl(scratch, Immediate(flags)); | 212 __ addl(scratch, Immediate(flags)); |
| 213 __ and_(scratch, Immediate((kSecondaryTableSize - 1) << kHeapObjectTagSize)); | 213 __ andp(scratch, Immediate((kSecondaryTableSize - 1) << kHeapObjectTagSize)); |
| 214 | 214 |
| 215 // Probe the secondary table. | 215 // Probe the secondary table. |
| 216 ProbeTable(isolate, masm, flags, kSecondary, receiver, name, scratch); | 216 ProbeTable(isolate, masm, flags, kSecondary, receiver, name, scratch); |
| 217 | 217 |
| 218 // Cache miss: Fall-through and let caller handle the miss by | 218 // Cache miss: Fall-through and let caller handle the miss by |
| 219 // entering the runtime system. | 219 // entering the runtime system. |
| 220 __ bind(&miss); | 220 __ bind(&miss); |
| 221 __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1); | 221 __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1); |
| 222 } | 222 } |
| 223 | 223 |
| (...skipping 1218 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1442 // ----------------------------------- | 1442 // ----------------------------------- |
| 1443 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss); | 1443 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss); |
| 1444 } | 1444 } |
| 1445 | 1445 |
| 1446 | 1446 |
| 1447 #undef __ | 1447 #undef __ |
| 1448 | 1448 |
| 1449 } } // namespace v8::internal | 1449 } } // namespace v8::internal |
| 1450 | 1450 |
| 1451 #endif // V8_TARGET_ARCH_X64 | 1451 #endif // V8_TARGET_ARCH_X64 |
| OLD | NEW |