| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #if V8_TARGET_ARCH_X64 | 7 #if V8_TARGET_ARCH_X64 |
| 8 | 8 |
| 9 #include "src/codegen.h" | 9 #include "src/codegen.h" |
| 10 #include "src/ic/stub-cache.h" | 10 #include "src/ic/stub-cache.h" |
| (...skipping 23 matching lines...) |
| 34 ExternalReference key_offset(isolate->stub_cache()->key_reference(table)); | 34 ExternalReference key_offset(isolate->stub_cache()->key_reference(table)); |
| 35 ExternalReference value_offset(isolate->stub_cache()->value_reference(table)); | 35 ExternalReference value_offset(isolate->stub_cache()->value_reference(table)); |
| 36 Label miss; | 36 Label miss; |
| 37 | 37 |
| 38 // Multiply by 3 because there are 3 fields per entry (name, code, map). | 38 // Multiply by 3 because there are 3 fields per entry (name, code, map). |
| 39 __ leap(offset, Operand(offset, offset, times_2, 0)); | 39 __ leap(offset, Operand(offset, offset, times_2, 0)); |
| 40 | 40 |
| 41 __ LoadAddress(kScratchRegister, key_offset); | 41 __ LoadAddress(kScratchRegister, key_offset); |
| 42 | 42 |
| 43 // Check that the key in the entry matches the name. | 43 // Check that the key in the entry matches the name. |
| 44 // Multiply entry offset by 16 to get the entry address. Since the | 44 __ cmpp(name, Operand(kScratchRegister, offset, scale_factor, 0)); |
| 45 // offset register already holds the entry offset times four, multiply | |
| 46 // by a further four. | |
| 47 __ cmpl(name, Operand(kScratchRegister, offset, scale_factor, 0)); | |
| 48 __ j(not_equal, &miss); | 45 __ j(not_equal, &miss); |
| 49 | 46 |
| 50 // Get the map entry from the cache. | 47 // Get the map entry from the cache. |
| 51 // Use key_offset + kPointerSize * 2, rather than loading map_offset. | 48 // Use key_offset + kPointerSize * 2, rather than loading map_offset. |
| | 49 DCHECK(isolate->stub_cache()->map_reference(table).address() - |
| | 50 isolate->stub_cache()->key_reference(table).address() == |
| | 51 kPointerSize * 2); |
| 52 __ movp(kScratchRegister, | 52 __ movp(kScratchRegister, |
| 53 Operand(kScratchRegister, offset, scale_factor, kPointerSize * 2)); | 53 Operand(kScratchRegister, offset, scale_factor, kPointerSize * 2)); |
| 54 __ cmpp(kScratchRegister, FieldOperand(receiver, HeapObject::kMapOffset)); | 54 __ cmpp(kScratchRegister, FieldOperand(receiver, HeapObject::kMapOffset)); |
| 55 __ j(not_equal, &miss); | 55 __ j(not_equal, &miss); |
| 56 | 56 |
| 57 // Get the code entry from the cache. | 57 // Get the code entry from the cache. |
| 58 __ LoadAddress(kScratchRegister, value_offset); | 58 __ LoadAddress(kScratchRegister, value_offset); |
| 59 __ movp(kScratchRegister, Operand(kScratchRegister, offset, scale_factor, 0)); | 59 __ movp(kScratchRegister, Operand(kScratchRegister, offset, scale_factor, 0)); |
| 60 | 60 |
| 61 // Check that the flags match what we're looking for. | 61 // Check that the flags match what we're looking for. |
| (...skipping 82 matching lines...) |
| 144 __ bind(&miss); | 144 __ bind(&miss); |
| 145 __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1); | 145 __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1); |
| 146 } | 146 } |
| 147 | 147 |
| 148 | 148 |
| 149 #undef __ | 149 #undef __ |
| 150 } | 150 } |
| 151 } // namespace v8::internal | 151 } // namespace v8::internal |
| 152 | 152 |
| 153 #endif // V8_TARGET_ARCH_X64 | 153 #endif // V8_TARGET_ARCH_X64 |
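The probe code above leans on two pieces of address arithmetic: `offset` arrives already scaled by the cache index shift, the `leap` at line 39 multiplies it by 3 because each table entry holds three fields, and the `times_2` scale factor in the memory operands supplies the remaining factor, so each operand evaluates to a byte offset into a table of three-pointer entries. Below is a minimal sketch (not V8 source) of that arithmetic in plain integers, assuming the incoming offset is the hash pre-scaled by 4 and the scale factor is `times_2`, as the skipped x64 setup lines configure.

```cpp
// Sketch only: mirrors the probe's address arithmetic with plain integers.
// The pre-scale of 4 and the times_2 scale factor are assumptions taken
// from the x64 setup code that the diff skips.
#include <cassert>
#include <cstdio>

int main() {
  const int kPointerSize = 8;      // x64 pointer width
  const int kFieldsPerEntry = 3;   // name, code, map
  const int kCacheIndexScale = 4;  // offset register = hash << 2 (assumed)
  const int kScaleFactor = 2;      // times_2 in the Operand

  for (int hash = 0; hash < 4; ++hash) {
    int offset = hash * kCacheIndexScale;     // value in the offset register
    offset = offset + offset * 2;             // leap: offset *= 3
    int byte_offset = offset * kScaleFactor;  // Operand(base, offset, times_2, 0)
    // The result is exactly the byte offset of entry `hash` in a table
    // whose entries are three pointers wide.
    assert(byte_offset == hash * kFieldsPerEntry * kPointerSize);
    std::printf("hash %d -> byte offset %d\n", hash, byte_offset);
  }
  return 0;
}
```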
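The new DCHECK at lines 49-51 documents why `key_offset + kPointerSize * 2` can stand in for `map_offset`: the map slot of each stub-cache entry sits exactly two pointers past the key slot. A minimal sketch of that layout assumption follows; the struct and its field names are illustrative, not the V8 declarations.

```cpp
// Sketch only: models a stub-cache entry as three consecutive pointer
// fields and checks the spacing the DCHECK relies on.
#include <cstddef>
#include <cstdio>

struct Entry {
  void* key;    // name
  void* value;  // code
  void* map;    // receiver map
};

int main() {
  // key_offset + kPointerSize * 2 reaches the map slot only if the map
  // field follows the key field by exactly two pointer-sized slots.
  static_assert(offsetof(Entry, map) - offsetof(Entry, key) ==
                    2 * sizeof(void*),
                "map must sit two pointer slots past key");
  std::printf("entry is %zu bytes (3 pointers)\n", sizeof(Entry));
  return 0;
}
```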