| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_X87 | 5 #if V8_TARGET_ARCH_X87 |
| 6 | 6 |
| 7 #include "src/codegen.h" | 7 #include "src/codegen.h" |
| 8 #include "src/ic/ic.h" | 8 #include "src/ic/ic.h" |
| 9 #include "src/ic/stub-cache.h" | 9 #include "src/ic/stub-cache.h" |
| 10 #include "src/interface-descriptors.h" | 10 #include "src/interface-descriptors.h" |
| 11 | 11 |
| 12 namespace v8 { | 12 namespace v8 { |
| 13 namespace internal { | 13 namespace internal { |
| 14 | 14 |
| 15 #define __ ACCESS_MASM(masm) | 15 #define __ ACCESS_MASM(masm) |
| 16 | 16 |
| 17 static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm, | 17 static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm, |
| 18 StubCache::Table table, Register name, Register receiver, | 18 StubCache::Table table, Register name, Register receiver, |
| 19 // The offset is scaled by 4, based on | 19 // The offset is scaled by 4, based on |
| 20 // kCacheIndexShift, which is two bits | 20 // kCacheIndexShift, which is two bits |
| 21 Register offset, Register extra) { | 21 Register offset, Register extra) { |
| 22 ExternalReference key_offset(stub_cache->key_reference(table)); | 22 ExternalReference key_offset(stub_cache->key_reference(table)); |
| 23 ExternalReference value_offset(stub_cache->value_reference(table)); | 23 ExternalReference value_offset(stub_cache->value_reference(table)); |
| 24 ExternalReference map_offset(stub_cache->map_reference(table)); | 24 ExternalReference map_offset(stub_cache->map_reference(table)); |
| 25 ExternalReference virtual_register = | |
| 26 ExternalReference::virtual_handler_register(masm->isolate()); | |
| 27 | 25 |
| 28 Label miss; | 26 Label miss; |
| 29 Code::Kind ic_kind = stub_cache->ic_kind(); | 27 Code::Kind ic_kind = stub_cache->ic_kind(); |
| 30 bool is_vector_store = | 28 bool is_vector_store = |
| 31 IC::ICUseVector(ic_kind) && | 29 IC::ICUseVector(ic_kind) && |
| 32 (ic_kind == Code::STORE_IC || ic_kind == Code::KEYED_STORE_IC); | 30 (ic_kind == Code::STORE_IC || ic_kind == Code::KEYED_STORE_IC); |
| 33 | 31 |
| 34 // Multiply by 3 because there are 3 fields per entry (name, code, map). | 32 // Multiply by 3 because there are 3 fields per entry (name, code, map). |
| 35 __ lea(offset, Operand(offset, offset, times_2, 0)); | 33 __ lea(offset, Operand(offset, offset, times_2, 0)); |
| 36 | 34 |
| (...skipping 11 matching lines...) |
| 48 __ j(not_equal, &miss); | 46 __ j(not_equal, &miss); |
| 49 | 47 |
| 50 #ifdef DEBUG | 48 #ifdef DEBUG |
| 51 if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) { | 49 if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) { |
| 52 __ jmp(&miss); | 50 __ jmp(&miss); |
| 53 } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) { | 51 } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) { |
| 54 __ jmp(&miss); | 52 __ jmp(&miss); |
| 55 } | 53 } |
| 56 #endif | 54 #endif |
| 57 | 55 |
| 58 // The vector and slot were pushed onto the stack before starting the | |
| 59 // probe, and need to be dropped before calling the handler. | |
| 60 if (is_vector_store) { | 56 if (is_vector_store) { |
| 61 // The overlap here is rather embarrassing. One does what one must. | 57 // The value, vector and slot were passed to the IC on the stack and |
| 62 Register vector = StoreWithVectorDescriptor::VectorRegister(); | 58 // they are still there. So we can just jump to the handler. |
| 63 DCHECK(extra.is(StoreWithVectorDescriptor::SlotRegister())); | 59 DCHECK(extra.is(StoreWithVectorDescriptor::SlotRegister())); |
| 64 __ add(extra, Immediate(Code::kHeaderSize - kHeapObjectTag)); | 60 __ add(extra, Immediate(Code::kHeaderSize - kHeapObjectTag)); |
| 65 __ pop(vector); | 61 __ jmp(extra); |
| 66 __ mov(Operand::StaticVariable(virtual_register), extra); | |
| 67 __ pop(extra); // Pop "slot". | |
| 68 // Jump to the first instruction in the code stub. | |
| 69 __ jmp(Operand::StaticVariable(virtual_register)); | |
| 70 } else { | 62 } else { |
| | 63 // The vector and slot were pushed onto the stack before starting the |
| | 64 // probe, and need to be dropped before calling the handler. |
| 71 __ pop(LoadWithVectorDescriptor::VectorRegister()); | 65 __ pop(LoadWithVectorDescriptor::VectorRegister()); |
| 72 __ pop(LoadDescriptor::SlotRegister()); | 66 __ pop(LoadDescriptor::SlotRegister()); |
| 73 __ add(extra, Immediate(Code::kHeaderSize - kHeapObjectTag)); | 67 __ add(extra, Immediate(Code::kHeaderSize - kHeapObjectTag)); |
| 74 __ jmp(extra); | 68 __ jmp(extra); |
| 75 } | 69 } |
| 76 | 70 |
| 77 __ bind(&miss); | 71 __ bind(&miss); |
| 78 } else { | 72 } else { |
| 79 DCHECK(ic_kind == Code::STORE_IC || ic_kind == Code::KEYED_STORE_IC); | 73 DCHECK(ic_kind == Code::STORE_IC || ic_kind == Code::KEYED_STORE_IC); |
| 80 | 74 |
| (...skipping 22 matching lines...) |
| 103 __ jmp(&miss); | 97 __ jmp(&miss); |
| 104 } | 98 } |
| 105 #endif | 99 #endif |
| 106 | 100 |
| 107 // Restore offset and re-load code entry from cache. | 101 // Restore offset and re-load code entry from cache. |
| 108 __ pop(offset); | 102 __ pop(offset); |
| 109 __ mov(offset, Operand::StaticArray(offset, times_1, value_offset)); | 103 __ mov(offset, Operand::StaticArray(offset, times_1, value_offset)); |
| 110 | 104 |
| 111 // Jump to the first instruction in the code stub. | 105 // Jump to the first instruction in the code stub. |
| 112 if (is_vector_store) { | 106 if (is_vector_store) { |
| 113 // The vector and slot were pushed onto the stack before starting the | |
| 114 // probe, and need to be dropped before calling the handler. | |
| 115 Register vector = StoreWithVectorDescriptor::VectorRegister(); | |
| 116 DCHECK(offset.is(StoreWithVectorDescriptor::SlotRegister())); | 107 DCHECK(offset.is(StoreWithVectorDescriptor::SlotRegister())); |
| 117 __ add(offset, Immediate(Code::kHeaderSize - kHeapObjectTag)); | |
| 118 __ mov(Operand::StaticVariable(virtual_register), offset); | |
| 119 __ pop(vector); | |
| 120 __ pop(offset); // Pop "slot". | |
| 121 __ jmp(Operand::StaticVariable(virtual_register)); | |
| 122 } else { | |
| 123 __ add(offset, Immediate(Code::kHeaderSize - kHeapObjectTag)); | |
| 124 __ jmp(offset); | |
| 125 } | 108 } |
| | 109 __ add(offset, Immediate(Code::kHeaderSize - kHeapObjectTag)); |
| | 110 __ jmp(offset); |
| 126 | 111 |
| 127 // Pop at miss. | 112 // Pop at miss. |
| 128 __ bind(&miss); | 113 __ bind(&miss); |
| 129 __ pop(offset); | 114 __ pop(offset); |
| 130 } | 115 } |
| 131 } | 116 } |
| 132 | 117 |
| 133 void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver, | 118 void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver, |
| 134 Register name, Register scratch, Register extra, | 119 Register name, Register scratch, Register extra, |
| 135 Register extra2, Register extra3) { | 120 Register extra2, Register extra3) { |
| (...skipping 55 matching lines...) |
| 191 __ bind(&miss); | 176 __ bind(&miss); |
| 192 __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1); | 177 __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1); |
| 193 } | 178 } |
| 194 | 179 |
| 195 | 180 |
| 196 #undef __ | 181 #undef __ |
| 197 } // namespace internal | 182 } // namespace internal |
| 198 } // namespace v8 | 183 } // namespace v8 |
| 199 | 184 |
| 200 #endif // V8_TARGET_ARCH_X87 | 185 #endif // V8_TARGET_ARCH_X87 |
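
Since the diff above shows the probe only as emitted x87 assembly, the following is a minimal C++ sketch of the same lookup, for orientation only. It is not V8's actual StubCache interface: the Entry layout, field names, and Probe signature are illustrative assumptions. Each entry holds three fields (name, code, map), which is why ProbeTable multiplies the scaled index by 3 before indexing the tables.

#include <cstddef>

// Stand-in for one stub-cache entry (hypothetical layout, three fields).
struct Entry {
  const void* name;  // interned property name, compared by pointer
  void* code;        // handler code entry point
  void* map;         // receiver map the handler was compiled for
};

// One probe of a single table. Returns the handler on a hit, or nullptr to
// fall through, which stands in for the generated code's bind(&miss) path
// before the secondary table or the runtime is tried.
void* Probe(const Entry* table, std::size_t index, const void* name,
            const void* receiver_map) {
  const Entry& entry = table[index];
  if (entry.name != name) return nullptr;         // key check failed
  if (entry.map != receiver_map) return nullptr;  // map check failed
  // The generated code additionally adds Code::kHeaderSize - kHeapObjectTag
  // before jumping, to skip past the Code object's header.
  return entry.code;
}

In the generated code the "multiply by 3" is done with a single lea (offset + offset*2), and a hit ends in a direct jmp to the handler; for vector stores the NEW side can jump immediately because the value, vector and slot are already on the stack, which is what this change relies on.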