Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(769)

Side by Side Diff: src/ic/x64/stub-cache-x64.cc

Issue 2147433002: [ic] [stubs] Don't use Code::flags in megamorphic stub cache hash computations. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@split-stub-cache
Patch Set: Improved stub cache tests and the fixes. Created 4 years, 5 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « src/ic/stub-cache.cc ('k') | src/ic/x87/stub-cache-x87.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #if V8_TARGET_ARCH_X64 5 #if V8_TARGET_ARCH_X64
6 6
7 #include "src/codegen.h" 7 #include "src/codegen.h"
8 #include "src/ic/ic.h" 8 #include "src/ic/ic.h"
9 #include "src/ic/stub-cache.h" 9 #include "src/ic/stub-cache.h"
10 #include "src/interface-descriptors.h" 10 #include "src/interface-descriptors.h"
11 11
12 namespace v8 { 12 namespace v8 {
13 namespace internal { 13 namespace internal {
14 14
15 #define __ ACCESS_MASM(masm) 15 #define __ ACCESS_MASM(masm)
16 16
17 static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm, 17 static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
18 Code::Flags flags, StubCache::Table table, 18 StubCache::Table table, Register receiver, Register name,
19 Register receiver, Register name,
20 // The offset is scaled by 4, based on 19 // The offset is scaled by 4, based on
21 // kCacheIndexShift, which is two bits 20 // kCacheIndexShift, which is two bits
22 Register offset) { 21 Register offset) {
23 // We need to scale up the pointer by 2 when the offset is scaled by less 22 // We need to scale up the pointer by 2 when the offset is scaled by less
24 // than the pointer size. 23 // than the pointer size.
25 DCHECK(kPointerSize == kInt64Size 24 DCHECK(kPointerSize == kInt64Size
26 ? kPointerSizeLog2 == StubCache::kCacheIndexShift + 1 25 ? kPointerSizeLog2 == StubCache::kCacheIndexShift + 1
27 : kPointerSizeLog2 == StubCache::kCacheIndexShift); 26 : kPointerSizeLog2 == StubCache::kCacheIndexShift);
28 ScaleFactor scale_factor = kPointerSize == kInt64Size ? times_2 : times_1; 27 ScaleFactor scale_factor = kPointerSize == kInt64Size ? times_2 : times_1;
29 28
(...skipping 20 matching lines...) Expand all
50 kPointerSize * 2); 49 kPointerSize * 2);
51 __ movp(kScratchRegister, 50 __ movp(kScratchRegister,
52 Operand(kScratchRegister, offset, scale_factor, kPointerSize * 2)); 51 Operand(kScratchRegister, offset, scale_factor, kPointerSize * 2));
53 __ cmpp(kScratchRegister, FieldOperand(receiver, HeapObject::kMapOffset)); 52 __ cmpp(kScratchRegister, FieldOperand(receiver, HeapObject::kMapOffset));
54 __ j(not_equal, &miss); 53 __ j(not_equal, &miss);
55 54
56 // Get the code entry from the cache. 55 // Get the code entry from the cache.
57 __ LoadAddress(kScratchRegister, value_offset); 56 __ LoadAddress(kScratchRegister, value_offset);
58 __ movp(kScratchRegister, Operand(kScratchRegister, offset, scale_factor, 0)); 57 __ movp(kScratchRegister, Operand(kScratchRegister, offset, scale_factor, 0));
59 58
59 #ifdef DEBUG
60 // Check that the flags match what we're looking for. 60 // Check that the flags match what we're looking for.
61 Code::Flags flags = Code::RemoveHolderFromFlags(
62 Code::ComputeHandlerFlags(stub_cache->ic_kind()));
61 __ movl(offset, FieldOperand(kScratchRegister, Code::kFlagsOffset)); 63 __ movl(offset, FieldOperand(kScratchRegister, Code::kFlagsOffset));
62 __ andp(offset, Immediate(~Code::kFlagsNotUsedInLookup)); 64 __ andp(offset, Immediate(~Code::kFlagsNotUsedInLookup));
63 __ cmpl(offset, Immediate(flags)); 65 __ cmpl(offset, Immediate(flags));
64 __ j(not_equal, &miss); 66 __ Check(equal, kUnexpectedValue);
65 67
66 #ifdef DEBUG
67 if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) { 68 if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
68 __ jmp(&miss); 69 __ jmp(&miss);
69 } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) { 70 } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
70 __ jmp(&miss); 71 __ jmp(&miss);
71 } 72 }
72 #endif 73 #endif
73 74
74 // Jump to the first instruction in the code stub. 75 // Jump to the first instruction in the code stub.
75 __ addp(kScratchRegister, Immediate(Code::kHeaderSize - kHeapObjectTag)); 76 __ addp(kScratchRegister, Immediate(Code::kHeaderSize - kHeapObjectTag));
76 __ jmp(kScratchRegister); 77 __ jmp(kScratchRegister);
77 78
78 __ bind(&miss); 79 __ bind(&miss);
79 } 80 }
80 81
81 void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver, 82 void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver,
82 Register name, Register scratch, Register extra, 83 Register name, Register scratch, Register extra,
83 Register extra2, Register extra3) { 84 Register extra2, Register extra3) {
84 Code::Flags flags =
85 Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(ic_kind_));
86
87 Label miss; 85 Label miss;
88 USE(extra); // The register extra is not used on the X64 platform. 86 USE(extra); // The register extra is not used on the X64 platform.
89 USE(extra2); // The register extra2 is not used on the X64 platform. 87 USE(extra2); // The register extra2 is not used on the X64 platform.
90 USE(extra3); // The register extra3 is not used on the X64 platform. 88 USE(extra3); // The register extra3 is not used on the X64 platform.
91 // Make sure that code is valid. The multiplying code relies on the 89 // Make sure that code is valid. The multiplying code relies on the
92 // entry size being 3 * kPointerSize. 90 // entry size being 3 * kPointerSize.
93 DCHECK(sizeof(Entry) == 3 * kPointerSize); 91 DCHECK(sizeof(Entry) == 3 * kPointerSize);
94 92
95 // Make sure that there are no register conflicts. 93 // Make sure that there are no register conflicts.
96 DCHECK(!scratch.is(receiver)); 94 DCHECK(!scratch.is(receiver));
(...skipping 25 matching lines...) Expand all
122 Counters* counters = masm->isolate()->counters(); 120 Counters* counters = masm->isolate()->counters();
123 __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1); 121 __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1);
124 122
125 // Check that the receiver isn't a smi. 123 // Check that the receiver isn't a smi.
126 __ JumpIfSmi(receiver, &miss); 124 __ JumpIfSmi(receiver, &miss);
127 125
128 // Get the map of the receiver and compute the hash. 126 // Get the map of the receiver and compute the hash.
129 __ movl(scratch, FieldOperand(name, Name::kHashFieldOffset)); 127 __ movl(scratch, FieldOperand(name, Name::kHashFieldOffset));
130 // Use only the low 32 bits of the map pointer. 128 // Use only the low 32 bits of the map pointer.
131 __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset)); 129 __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
132 __ xorp(scratch, Immediate(flags));
133 // We mask out the last two bits because they are not part of the hash and 130 // We mask out the last two bits because they are not part of the hash and
134 // they are always 01 for maps. Also in the two 'and' instructions below. 131 // they are always 01 for maps. Also in the two 'and' instructions below.
135 __ andp(scratch, Immediate((kPrimaryTableSize - 1) << kCacheIndexShift)); 132 __ andp(scratch, Immediate((kPrimaryTableSize - 1) << kCacheIndexShift));
136 133
137 // Probe the primary table. 134 // Probe the primary table.
138 ProbeTable(this, masm, flags, kPrimary, receiver, name, scratch); 135 ProbeTable(this, masm, kPrimary, receiver, name, scratch);
139 136
140 // Primary miss: Compute hash for secondary probe. 137 // Primary miss: Compute hash for secondary probe.
141 __ movl(scratch, FieldOperand(name, Name::kHashFieldOffset)); 138 __ movl(scratch, FieldOperand(name, Name::kHashFieldOffset));
142 __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset)); 139 __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
143 __ xorp(scratch, Immediate(flags));
144 __ andp(scratch, Immediate((kPrimaryTableSize - 1) << kCacheIndexShift)); 140 __ andp(scratch, Immediate((kPrimaryTableSize - 1) << kCacheIndexShift));
145 __ subl(scratch, name); 141 __ subl(scratch, name);
146 __ addl(scratch, Immediate(flags)); 142 __ addl(scratch, Immediate(kSecondaryMagic));
147 __ andp(scratch, Immediate((kSecondaryTableSize - 1) << kCacheIndexShift)); 143 __ andp(scratch, Immediate((kSecondaryTableSize - 1) << kCacheIndexShift));
148 144
149 // Probe the secondary table. 145 // Probe the secondary table.
150 ProbeTable(this, masm, flags, kSecondary, receiver, name, scratch); 146 ProbeTable(this, masm, kSecondary, receiver, name, scratch);
151 147
152 // Cache miss: Fall-through and let caller handle the miss by 148 // Cache miss: Fall-through and let caller handle the miss by
153 // entering the runtime system. 149 // entering the runtime system.
154 __ bind(&miss); 150 __ bind(&miss);
155 __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1); 151 __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1);
156 } 152 }
157 153
158 154
159 #undef __ 155 #undef __
160 } // namespace internal 156 } // namespace internal
161 } // namespace v8 157 } // namespace v8
162 158
163 #endif // V8_TARGET_ARCH_X64 159 #endif // V8_TARGET_ARCH_X64
OLDNEW
« no previous file with comments | « src/ic/stub-cache.cc ('k') | src/ic/x87/stub-cache-x87.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698