Chromium Code Reviews

Unified Diff: src/ic/ia32/stub-cache-ia32.cc

Issue 2147213004: Revert of [ic] [stubs] Don't use Code::flags in megamorphic stub cache hash computations. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@split-stub-cache
Patch Set: Created 4 years, 5 months ago
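This revert restores Code::Flags as an input to the megamorphic stub cache hash. As a rough orientation before the diff, here is a minimal C++ sketch of the index computation that the ia32 code below emits after the revert; the helper names and table-size constants are assumptions made for illustration, not V8's actual declarations.

// Sketch only: mirrors the assembly in GenerateProbe below, under assumed
// constants. Not the real StubCache interface.
#include <cstdint>

constexpr uint32_t kCacheIndexShift = 2;       // == kPointerSizeLog2 on ia32
constexpr uint32_t kPrimaryTableSize = 2048;   // assumed size for the sketch
constexpr uint32_t kSecondaryTableSize = 512;  // assumed size for the sketch

// Primary index: name hash + receiver map word, with the handler's
// Code::Flags xor'ed back in (the xor_ this revert re-adds), then masked.
// The mask leaves the low kCacheIndexShift bits in place, so ProbeTable can
// treat the result as a pointer-scaled offset.
uint32_t PrimaryIndex(uint32_t name_hash, uint32_t map_word, uint32_t flags) {
  uint32_t offset = name_hash + map_word;
  offset ^= flags;
  return offset & ((kPrimaryTableSize - 1) << kCacheIndexShift);
}

// Secondary index: start from the primary computation, subtract the name
// word, add the flags back (the add(Immediate(flags)) this revert re-adds),
// and mask with the smaller secondary table size.
uint32_t SecondaryIndex(uint32_t name_word, uint32_t name_hash,
                        uint32_t map_word, uint32_t flags) {
  uint32_t offset = PrimaryIndex(name_hash, map_word, flags);
  offset -= name_word;
  offset += flags;
  return offset & ((kSecondaryTableSize - 1) << kCacheIndexShift);
}

The reverted change had dropped both flags terms from these computations; putting them back means handlers with different Code::Flags once again hash to different cache slots.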
--- a/src/ic/ia32/stub-cache-ia32.cc
+++ b/src/ic/ia32/stub-cache-ia32.cc
@@ -1,67 +1,65 @@
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #if V8_TARGET_ARCH_IA32

 #include "src/codegen.h"
 #include "src/ic/ic.h"
 #include "src/ic/stub-cache.h"
 #include "src/interface-descriptors.h"

 namespace v8 {
 namespace internal {

 #define __ ACCESS_MASM(masm)

 static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
+                       Code::Kind ic_kind, Code::Flags flags,
                        StubCache::Table table, Register name, Register receiver,
                        // Number of the cache entry pointer-size scaled.
                        Register offset, Register extra) {
   ExternalReference key_offset(stub_cache->key_reference(table));
   ExternalReference value_offset(stub_cache->value_reference(table));
   ExternalReference map_offset(stub_cache->map_reference(table));
   ExternalReference virtual_register =
       ExternalReference::virtual_handler_register(masm->isolate());

   Label miss;
-  Code::Kind ic_kind = stub_cache->ic_kind();
   bool is_vector_store =
       IC::ICUseVector(ic_kind) &&
       (ic_kind == Code::STORE_IC || ic_kind == Code::KEYED_STORE_IC);

   // Multiply by 3 because there are 3 fields per entry (name, code, map).
   __ lea(offset, Operand(offset, offset, times_2, 0));

   if (extra.is_valid()) {
     // Get the code entry from the cache.
     __ mov(extra, Operand::StaticArray(offset, times_1, value_offset));

     // Check that the key in the entry matches the name.
     __ cmp(name, Operand::StaticArray(offset, times_1, key_offset));
     __ j(not_equal, &miss);

     // Check the map matches.
     __ mov(offset, Operand::StaticArray(offset, times_1, map_offset));
     __ cmp(offset, FieldOperand(receiver, HeapObject::kMapOffset));
     __ j(not_equal, &miss);

-#ifdef DEBUG
     // Check that the flags match what we're looking for.
-    Code::Flags flags = Code::RemoveHolderFromFlags(
-        Code::ComputeHandlerFlags(stub_cache->ic_kind()));
     __ mov(offset, FieldOperand(extra, Code::kFlagsOffset));
     __ and_(offset, ~Code::kFlagsNotUsedInLookup);
     __ cmp(offset, flags);
-    __ Check(equal, kUnexpectedValue);
+    __ j(not_equal, &miss);

+#ifdef DEBUG
     if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
       __ jmp(&miss);
     } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
       __ jmp(&miss);
     }
 #endif

     // The vector and slot were pushed onto the stack before starting the
     // probe, and need to be dropped before calling the handler.
     if (is_vector_store) {
@@ -96,29 +94,27 @@
     __ mov(offset, Operand::StaticArray(offset, times_1, map_offset));
     __ cmp(offset, FieldOperand(receiver, HeapObject::kMapOffset));
     __ j(not_equal, &miss);

     // Restore offset register.
     __ mov(offset, Operand(esp, 0));

     // Get the code entry from the cache.
     __ mov(offset, Operand::StaticArray(offset, times_1, value_offset));

-#ifdef DEBUG
     // Check that the flags match what we're looking for.
-    Code::Flags flags = Code::RemoveHolderFromFlags(
-        Code::ComputeHandlerFlags(stub_cache->ic_kind()));
     __ mov(offset, FieldOperand(offset, Code::kFlagsOffset));
     __ and_(offset, ~Code::kFlagsNotUsedInLookup);
     __ cmp(offset, flags);
-    __ Check(equal, kUnexpectedValue);
+    __ j(not_equal, &miss);

+#ifdef DEBUG
     if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
       __ jmp(&miss);
     } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
       __ jmp(&miss);
     }
 #endif

     // Restore offset and re-load code entry from cache.
     __ pop(offset);
     __ mov(offset, Operand::StaticArray(offset, times_1, value_offset));
@@ -141,20 +137,23 @@

     // Pop at miss.
     __ bind(&miss);
     __ pop(offset);
   }
 }

 void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver,
                               Register name, Register scratch, Register extra,
                               Register extra2, Register extra3) {
+  Code::Flags flags =
+      Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(ic_kind_));
+
   Label miss;

   // Assert that code is valid. The multiplying code relies on the entry size
   // being 12.
   DCHECK(sizeof(Entry) == 12);

   // Assert that there are no register conflicts.
   DCHECK(!scratch.is(receiver));
   DCHECK(!scratch.is(name));
   DCHECK(!extra.is(receiver));
@@ -171,42 +170,47 @@

   Counters* counters = masm->isolate()->counters();
   __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1);

   // Check that the receiver isn't a smi.
   __ JumpIfSmi(receiver, &miss);

   // Get the map of the receiver and compute the hash.
   __ mov(offset, FieldOperand(name, Name::kHashFieldOffset));
   __ add(offset, FieldOperand(receiver, HeapObject::kMapOffset));
+  __ xor_(offset, flags);
   // We mask out the last two bits because they are not part of the hash and
   // they are always 01 for maps. Also in the two 'and' instructions below.
   __ and_(offset, (kPrimaryTableSize - 1) << kCacheIndexShift);
   // ProbeTable expects the offset to be pointer scaled, which it is, because
   // the heap object tag size is 2 and the pointer size log 2 is also 2.
   DCHECK(kCacheIndexShift == kPointerSizeLog2);

   // Probe the primary table.
-  ProbeTable(this, masm, kPrimary, name, receiver, offset, extra);
+  ProbeTable(this, masm, ic_kind_, flags, kPrimary, name, receiver, offset,
+             extra);

   // Primary miss: Compute hash for secondary probe.
   __ mov(offset, FieldOperand(name, Name::kHashFieldOffset));
   __ add(offset, FieldOperand(receiver, HeapObject::kMapOffset));
+  __ xor_(offset, flags);
   __ and_(offset, (kPrimaryTableSize - 1) << kCacheIndexShift);
   __ sub(offset, name);
+  __ add(offset, Immediate(flags));
   __ and_(offset, (kSecondaryTableSize - 1) << kCacheIndexShift);

   // Probe the secondary table.
-  ProbeTable(this, masm, kSecondary, name, receiver, offset, extra);
+  ProbeTable(this, masm, ic_kind_, flags, kSecondary, name, receiver, offset,
+             extra);

   // Cache miss: Fall-through and let caller handle the miss by
   // entering the runtime system.
   __ bind(&miss);
   __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1);
 }


 #undef __
 }  // namespace internal
 }  // namespace v8

 #endif  // V8_TARGET_ARCH_IA32
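For readers skimming the revert, ProbeTable's per-entry check is the other half of the picture: each table entry holds three words (name, code, map), which is why the generated code multiplies the index by 3 and asserts sizeof(Entry) == 12 on ia32. The sketch below is a hypothetical C++ rendering of that check under assumed types and an assumed kFlagsNotUsedInLookup mask; it is not V8's actual code, but it shows why the restored flags comparison branches to the miss label instead of being a DEBUG-only Check().

// Hypothetical sketch of one ProbeTable pass; the struct layout and the
// kFlagsNotUsedInLookup value are assumptions for illustration only.
#include <cstdint>

constexpr uint32_t kFlagsNotUsedInLookup = 0x3;  // assumed mask for the sketch

struct Code {        // stand-in for the handler Code object
  uint32_t flags;
};

struct Entry {       // 3 fields per entry -> entry size 12 on ia32,
  void* key;         // hence the "multiply by 3" lea in ProbeTable
  Code* value;
  void* map;
};

// Returns the handler on a hit, nullptr on a miss.
Code* Probe(const Entry* table, uint32_t index, const void* name,
            const void* receiver_map, uint32_t flags) {
  const Entry& e = table[index];
  if (e.key != name) return nullptr;          // key must match the name
  if (e.map != receiver_map) return nullptr;  // map must match the receiver
  // The flags comparison is a real miss condition again after this revert
  // (j(not_equal, &miss)), instead of a DEBUG-only Check(equal, ...).
  if ((e.value->flags & ~kFlagsNotUsedInLookup) != flags) return nullptr;
  return e.value;                             // hit: jump to the handler code
}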