Chromium Code Reviews

Unified Diff: src/ic/x87/stub-cache-x87.cc

Issue 2147433002: [ic] [stubs] Don't use Code::flags in megamorphic stub cache hash computations. (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@split-stub-cache
Patch Set: Improved stub cache tests and the fixes. Created 4 years, 5 months ago
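In short, this patch stops mixing the handler's Code::Flags into the megamorphic stub cache hashes: the primary hash drops the xor with flags entirely, and the secondary hash adds a fixed kSecondaryMagic constant where it previously added the flags value. A minimal standalone C++ sketch of the offset math after this change; the table sizes and the kSecondaryMagic value below are illustrative placeholders, not values taken from this CL:

#include <cstdint>

// Placeholders: kPrimaryTableSize, kSecondaryTableSize and kSecondaryMagic are
// illustrative, not V8's values; kCacheIndexShift mirrors kPointerSizeLog2 on
// a 32-bit target.
constexpr uint32_t kCacheIndexShift = 2;
constexpr uint32_t kPrimaryTableSize = 2048;      // illustrative
constexpr uint32_t kSecondaryTableSize = 512;     // illustrative
constexpr uint32_t kSecondaryMagic = 0xB16CA6E5;  // placeholder constant

// Primary offset: name hash field plus receiver map word; the old code
// additionally xor'ed the handler's Code::Flags in here.
uint32_t PrimaryOffset(uint32_t name_hash_field, uint32_t map_word) {
  return (name_hash_field + map_word) &
         ((kPrimaryTableSize - 1) << kCacheIndexShift);
}

// Secondary offset: derived from the primary offset; the old code added the
// Code::Flags value where kSecondaryMagic now appears.
uint32_t SecondaryOffset(uint32_t primary_offset, uint32_t name_word) {
  return (primary_offset - name_word + kSecondaryMagic) &
         ((kSecondaryTableSize - 1) << kCacheIndexShift);
}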
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #if V8_TARGET_ARCH_X87

 #include "src/codegen.h"
 #include "src/ic/ic.h"
 #include "src/ic/stub-cache.h"
 #include "src/interface-descriptors.h"

 namespace v8 {
 namespace internal {

 #define __ ACCESS_MASM(masm)

 static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
-                       Code::Kind ic_kind, Code::Flags flags,
                        StubCache::Table table, Register name, Register receiver,
                        // Number of the cache entry pointer-size scaled.
                        Register offset, Register extra) {
   ExternalReference key_offset(stub_cache->key_reference(table));
   ExternalReference value_offset(stub_cache->value_reference(table));
   ExternalReference map_offset(stub_cache->map_reference(table));
   ExternalReference virtual_register =
       ExternalReference::virtual_handler_register(masm->isolate());

   Label miss;
+  Code::Kind ic_kind = stub_cache->ic_kind();
   bool is_vector_store =
       IC::ICUseVector(ic_kind) &&
       (ic_kind == Code::STORE_IC || ic_kind == Code::KEYED_STORE_IC);

   // Multiply by 3 because there are 3 fields per entry (name, code, map).
   __ lea(offset, Operand(offset, offset, times_2, 0));

   if (extra.is_valid()) {
     // Get the code entry from the cache.
     __ mov(extra, Operand::StaticArray(offset, times_1, value_offset));

     // Check that the key in the entry matches the name.
     __ cmp(name, Operand::StaticArray(offset, times_1, key_offset));
     __ j(not_equal, &miss);

     // Check the map matches.
     __ mov(offset, Operand::StaticArray(offset, times_1, map_offset));
     __ cmp(offset, FieldOperand(receiver, HeapObject::kMapOffset));
     __ j(not_equal, &miss);

+#ifdef DEBUG
     // Check that the flags match what we're looking for.
+    Code::Flags flags = Code::RemoveHolderFromFlags(
+        Code::ComputeHandlerFlags(stub_cache->ic_kind()));
     __ mov(offset, FieldOperand(extra, Code::kFlagsOffset));
     __ and_(offset, ~Code::kFlagsNotUsedInLookup);
     __ cmp(offset, flags);
-    __ j(not_equal, &miss);
+    __ Check(equal, kUnexpectedValue);

-#ifdef DEBUG
     if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
       __ jmp(&miss);
     } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
       __ jmp(&miss);
     }
 #endif

     // The vector and slot were pushed onto the stack before starting the
     // probe, and need to be dropped before calling the handler.
     if (is_vector_store) {
(...skipping 28 matching lines...)
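The probe above relies on the three-field entry layout of each stub cache table: the lea scales the pointer-sized index by three so that the subsequent times_1 StaticArray operands address the name, code and map slots of one entry, 12 bytes apart on this 32-bit target. A rough sketch of that layout, with field names following the key/value/map external references used above (the real declaration lives alongside StubCache in src/ic/stub-cache.h):

// Forward declarations stand in for the real V8 heap object types.
class Name;
class Code;
class Map;

// One stub cache entry as the probe code assumes it: three pointer-size
// fields, so sizeof(Entry) == 12 on a 32-bit target (see the DCHECK in
// GenerateProbe below).
struct Entry {
  Name* key;    // property name
  Code* value;  // cached handler code object
  Map* map;     // receiver map the handler expects
};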
     __ mov(offset, Operand::StaticArray(offset, times_1, map_offset));
     __ cmp(offset, FieldOperand(receiver, HeapObject::kMapOffset));
     __ j(not_equal, &miss);

     // Restore offset register.
     __ mov(offset, Operand(esp, 0));

     // Get the code entry from the cache.
     __ mov(offset, Operand::StaticArray(offset, times_1, value_offset));

+#ifdef DEBUG
     // Check that the flags match what we're looking for.
+    Code::Flags flags = Code::RemoveHolderFromFlags(
+        Code::ComputeHandlerFlags(stub_cache->ic_kind()));
     __ mov(offset, FieldOperand(offset, Code::kFlagsOffset));
     __ and_(offset, ~Code::kFlagsNotUsedInLookup);
     __ cmp(offset, flags);
-    __ j(not_equal, &miss);
+    __ Check(equal, kUnexpectedValue);

-#ifdef DEBUG
     if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
       __ jmp(&miss);
     } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
       __ jmp(&miss);
     }
 #endif

     // Restore offset and re-load code entry from cache.
     __ pop(offset);
     __ mov(offset, Operand::StaticArray(offset, times_1, value_offset));
(...skipping 16 matching lines...)
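Note that in both branches of ProbeTable the flags comparison is now computed locally and emitted only in DEBUG builds, where a mismatch aborts via __ Check(equal, kUnexpectedValue) rather than branching to the miss label as before. A tiny standalone sketch of that behavioural change, with a placeholder mask standing in for Code::kFlagsNotUsedInLookup:

#include <cstdint>
#include <cstdlib>

// Placeholder mask; the real Code::kFlagsNotUsedInLookup is defined in V8.
constexpr uint32_t kFlagsNotUsedInLookup = 0x3;

// Old behaviour: a flags mismatch fell through to the miss path in all builds.
// New behaviour: release builds emit no check at all; debug builds abort.
inline void VerifyHandlerFlags(uint32_t handler_flags, uint32_t expected) {
#ifdef DEBUG
  if ((handler_flags & ~kFlagsNotUsedInLookup) != expected) std::abort();
#else
  (void)handler_flags;
  (void)expected;
#endif
}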
     // Pop at miss.
     __ bind(&miss);
     __ pop(offset);
   }
 }

 void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver,
                               Register name, Register scratch, Register extra,
                               Register extra2, Register extra3) {
-  Code::Flags flags =
-      Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(ic_kind_));
-
   Label miss;

   // Assert that code is valid. The multiplying code relies on the entry size
   // being 12.
   DCHECK(sizeof(Entry) == 12);

   // Assert that there are no register conflicts.
   DCHECK(!scratch.is(receiver));
   DCHECK(!scratch.is(name));
   DCHECK(!extra.is(receiver));
(...skipping 10 matching lines...)
   Counters* counters = masm->isolate()->counters();
   __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1);

   // Check that the receiver isn't a smi.
   __ JumpIfSmi(receiver, &miss);

   // Get the map of the receiver and compute the hash.
   __ mov(offset, FieldOperand(name, Name::kHashFieldOffset));
   __ add(offset, FieldOperand(receiver, HeapObject::kMapOffset));
-  __ xor_(offset, flags);
   // We mask out the last two bits because they are not part of the hash and
   // they are always 01 for maps. Also in the two 'and' instructions below.
   __ and_(offset, (kPrimaryTableSize - 1) << kCacheIndexShift);
   // ProbeTable expects the offset to be pointer scaled, which it is, because
   // the heap object tag size is 2 and the pointer size log 2 is also 2.
   DCHECK(kCacheIndexShift == kPointerSizeLog2);

   // Probe the primary table.
-  ProbeTable(this, masm, ic_kind_, flags, kPrimary, name, receiver, offset,
-             extra);
+  ProbeTable(this, masm, kPrimary, name, receiver, offset, extra);

   // Primary miss: Compute hash for secondary probe.
   __ mov(offset, FieldOperand(name, Name::kHashFieldOffset));
   __ add(offset, FieldOperand(receiver, HeapObject::kMapOffset));
-  __ xor_(offset, flags);
   __ and_(offset, (kPrimaryTableSize - 1) << kCacheIndexShift);
   __ sub(offset, name);
-  __ add(offset, Immediate(flags));
+  __ add(offset, Immediate(kSecondaryMagic));
   __ and_(offset, (kSecondaryTableSize - 1) << kCacheIndexShift);

   // Probe the secondary table.
-  ProbeTable(this, masm, ic_kind_, flags, kSecondary, name, receiver, offset,
-             extra);
+  ProbeTable(this, masm, kSecondary, name, receiver, offset, extra);

   // Cache miss: Fall-through and let caller handle the miss by
   // entering the runtime system.
   __ bind(&miss);
   __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1);
 }


 #undef __
 }  // namespace internal
 }  // namespace v8

 #endif  // V8_TARGET_ARCH_X87
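Putting the pieces together, GenerateProbe now emits: primary hash (name hash field plus receiver map, no flags), primary probe, secondary hash (kSecondaryMagic in place of flags), secondary probe, and a fall-through miss path. A condensed standalone C++ model of that lookup sequence, using word-sized stand-ins for the three-field entries and the same placeholder constants as the sketch near the top; it returns the cached handler word, or 0 on a miss, whereas the generated stub tail-calls the handler:

#include <cstdint>

struct Entry { uint32_t key, value, map; };  // word-sized stand-in fields

constexpr uint32_t kCacheIndexShift = 2;
constexpr uint32_t kPrimaryTableSize = 2048;      // illustrative
constexpr uint32_t kSecondaryTableSize = 512;     // illustrative
constexpr uint32_t kSecondaryMagic = 0xB16CA6E5;  // placeholder constant

// Model of one megamorphic lookup: probe the primary table, then the
// secondary table, then report a miss (the real stub falls through to the
// caller, which enters the runtime).
uint32_t Probe(const Entry* primary, const Entry* secondary,
               uint32_t name_word, uint32_t name_hash_field,
               uint32_t map_word) {
  uint32_t p = (name_hash_field + map_word) &
               ((kPrimaryTableSize - 1) << kCacheIndexShift);
  const Entry& pe = primary[p >> kCacheIndexShift];
  if (pe.key == name_word && pe.map == map_word) return pe.value;

  uint32_t s = (p - name_word + kSecondaryMagic) &
               ((kSecondaryTableSize - 1) << kCacheIndexShift);
  const Entry& se = secondary[s >> kCacheIndexShift];
  if (se.key == name_word && se.map == map_word) return se.value;

  return 0;  // miss
}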