OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_X64 | 7 #if V8_TARGET_ARCH_X64 |
8 | 8 |
9 #include "src/arguments.h" | 9 #include "src/arguments.h" |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
11 #include "src/ic-inl.h" | 11 #include "src/ic-inl.h" |
12 #include "src/stub-cache.h" | 12 #include "src/stub-cache.h" |
13 | 13 |
14 namespace v8 { | 14 namespace v8 { |
15 namespace internal { | 15 namespace internal { |
16 | 16 |
17 #define __ ACCESS_MASM(masm) | 17 #define __ ACCESS_MASM(masm) |
18 | 18 |
19 | 19 |
20 static void ProbeTable(Isolate* isolate, | 20 static void ProbeTable(Isolate* isolate, |
21 MacroAssembler* masm, | 21 MacroAssembler* masm, |
22 Code::Flags flags, | 22 Code::Flags flags, |
23 StubCache::Table table, | 23 StubCache::Table table, |
24 Register receiver, | 24 Register receiver, |
25 Register name, | 25 Register name, |
26 // The offset is scaled by 4, based on | 26 // The offset is scaled by 4, based on |
27 // kHeapObjectTagSize, which is two bits | 27 // kCacheIndexShift, which is two bits |
28 Register offset) { | 28 Register offset) { |
29 // We need to scale up the pointer by 2 when the offset is scaled by less | 29 // We need to scale up the pointer by 2 when the offset is scaled by less |
30 // than the pointer size. | 30 // than the pointer size. |
31 ASSERT(kPointerSize == kInt64Size | 31 ASSERT(kPointerSize == kInt64Size |
32 ? kPointerSizeLog2 == kHeapObjectTagSize + 1 | 32 ? kPointerSizeLog2 == StubCache::kCacheIndexShift + 1 |
33 : kPointerSizeLog2 == kHeapObjectTagSize); | 33 : kPointerSizeLog2 == StubCache::kCacheIndexShift); |
34 ScaleFactor scale_factor = kPointerSize == kInt64Size ? times_2 : times_1; | 34 ScaleFactor scale_factor = kPointerSize == kInt64Size ? times_2 : times_1; |
35 | 35 |
36 ASSERT_EQ(3 * kPointerSize, sizeof(StubCache::Entry)); | 36 ASSERT_EQ(3 * kPointerSize, sizeof(StubCache::Entry)); |
37 // The offset register holds the entry offset times four (due to masking | 37 // The offset register holds the entry offset times four (due to masking |
38 // and shifting optimizations). | 38 // and shifting optimizations). |
39 ExternalReference key_offset(isolate->stub_cache()->key_reference(table)); | 39 ExternalReference key_offset(isolate->stub_cache()->key_reference(table)); |
40 ExternalReference value_offset(isolate->stub_cache()->value_reference(table)); | 40 ExternalReference value_offset(isolate->stub_cache()->value_reference(table)); |
41 Label miss; | 41 Label miss; |
42 | 42 |
43 // Multiply by 3 because there are 3 fields per entry (name, code, map). | 43 // Multiply by 3 because there are 3 fields per entry (name, code, map). |
(...skipping 124 matching lines...) |
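Editor's note on the ProbeTable hunk above: the offset register already holds the table index shifted left by kCacheIndexShift (two bits), each StubCache::Entry is three pointers wide, and the comment on line 43 says the offset is then multiplied by three before being indexed with scale_factor. A minimal sketch of that address arithmetic, assuming kCacheIndexShift == 2 and 8-byte pointers on x64 (everything except kCacheIndexShift and the three-pointer entry layout is illustrative):

#include <cassert>
#include <cstdint>

int main() {
  const uint32_t kCacheIndexShift = 2;           // assumed: the two-bit scale named above
  const uint32_t kPointerSize = 8;               // x64 pointer size
  const uint32_t kEntrySize = 3 * kPointerSize;  // entry holds name, code, map

  uint32_t index = 17;                           // arbitrary table slot
  uint32_t offset = index << kCacheIndexShift;   // what the hash mask leaves in the register
  uint32_t scale_factor = 2;                     // times_2, since pointers are wider than the shift

  // offset * 3, indexed with times_2, lands exactly on slot `index` of the table.
  assert(offset * 3 * scale_factor == index * kEntrySize);
  return 0;
}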
168 // Check that the receiver isn't a smi. | 168 // Check that the receiver isn't a smi. |
169 __ JumpIfSmi(receiver, &miss); | 169 __ JumpIfSmi(receiver, &miss); |
170 | 170 |
171 // Get the map of the receiver and compute the hash. | 171 // Get the map of the receiver and compute the hash. |
172 __ movl(scratch, FieldOperand(name, Name::kHashFieldOffset)); | 172 __ movl(scratch, FieldOperand(name, Name::kHashFieldOffset)); |
173 // Use only the low 32 bits of the map pointer. | 173 // Use only the low 32 bits of the map pointer. |
174 __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset)); | 174 __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset)); |
175 __ xorp(scratch, Immediate(flags)); | 175 __ xorp(scratch, Immediate(flags)); |
176 // We mask out the last two bits because they are not part of the hash and | 176 // We mask out the last two bits because they are not part of the hash and |
177 // they are always 01 for maps. Also in the two 'and' instructions below. | 177 // they are always 01 for maps. Also in the two 'and' instructions below. |
178 __ andp(scratch, Immediate((kPrimaryTableSize - 1) << kHeapObjectTagSize)); | 178 __ andp(scratch, Immediate((kPrimaryTableSize - 1) << kCacheIndexShift)); |
179 | 179 |
180 // Probe the primary table. | 180 // Probe the primary table. |
181 ProbeTable(isolate, masm, flags, kPrimary, receiver, name, scratch); | 181 ProbeTable(isolate, masm, flags, kPrimary, receiver, name, scratch); |
182 | 182 |
183 // Primary miss: Compute hash for secondary probe. | 183 // Primary miss: Compute hash for secondary probe. |
184 __ movl(scratch, FieldOperand(name, Name::kHashFieldOffset)); | 184 __ movl(scratch, FieldOperand(name, Name::kHashFieldOffset)); |
185 __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset)); | 185 __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset)); |
186 __ xorp(scratch, Immediate(flags)); | 186 __ xorp(scratch, Immediate(flags)); |
187 __ andp(scratch, Immediate((kPrimaryTableSize - 1) << kHeapObjectTagSize)); | 187 __ andp(scratch, Immediate((kPrimaryTableSize - 1) << kCacheIndexShift)); |
188 __ subl(scratch, name); | 188 __ subl(scratch, name); |
189 __ addl(scratch, Immediate(flags)); | 189 __ addl(scratch, Immediate(flags)); |
190 __ andp(scratch, Immediate((kSecondaryTableSize - 1) << kHeapObjectTagSize)); | 190 __ andp(scratch, Immediate((kSecondaryTableSize - 1) << kCacheIndexShift)); |
191 | 191 |
192 // Probe the secondary table. | 192 // Probe the secondary table. |
193 ProbeTable(isolate, masm, flags, kSecondary, receiver, name, scratch); | 193 ProbeTable(isolate, masm, flags, kSecondary, receiver, name, scratch); |
194 | 194 |
195 // Cache miss: Fall-through and let caller handle the miss by | 195 // Cache miss: Fall-through and let caller handle the miss by |
196 // entering the runtime system. | 196 // entering the runtime system. |
197 __ bind(&miss); | 197 __ bind(&miss); |
198 __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1); | 198 __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1); |
199 } | 199 } |
200 | 200 |
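Editor's note: the probe offsets computed by the masm code above reduce to plain integer arithmetic. Here is a hedged C++ sketch of that sequence (add, xor, mask for the primary probe; sub, add, mask for the secondary probe); the function and parameter names are illustrative, and only the order of operations is taken from the hunk above:

#include <cstdint>

// Primary probe: hash field of the name plus the low 32 bits of the map,
// xor'ed with the code flags, masked to a multiple of 1 << kCacheIndexShift.
uint32_t PrimaryOffset(uint32_t name_hash_field, uint32_t map_low32,
                       uint32_t flags, uint32_t table_size,
                       uint32_t cache_index_shift) {
  uint32_t hash = name_hash_field + map_low32;            // movl + addl
  hash ^= flags;                                          // xorp
  return hash & ((table_size - 1) << cache_index_shift);  // andp
}

// Secondary probe: start from the primary offset, subtract the low 32 bits
// of the name, add the flags back in, and mask with the secondary table size.
uint32_t SecondaryOffset(uint32_t primary_offset, uint32_t name_low32,
                         uint32_t flags, uint32_t table_size,
                         uint32_t cache_index_shift) {
  uint32_t hash = primary_offset - name_low32;            // subl
  hash += flags;                                          // addl
  return hash & ((table_size - 1) << cache_index_shift);  // andp
}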
(...skipping 1240 matching lines...) |
1441 // ----------------------------------- | 1441 // ----------------------------------- |
1442 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss); | 1442 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss); |
1443 } | 1443 } |
1444 | 1444 |
1445 | 1445 |
1446 #undef __ | 1446 #undef __ |
1447 | 1447 |
1448 } } // namespace v8::internal | 1448 } } // namespace v8::internal |
1449 | 1449 |
1450 #endif // V8_TARGET_ARCH_X64 | 1450 #endif // V8_TARGET_ARCH_X64 |