OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_X64 | 7 #if V8_TARGET_ARCH_X64 |
8 | 8 |
9 #include "src/codegen.h" | 9 #include "src/codegen.h" |
10 #include "src/ic/ic.h" | 10 #include "src/ic/ic.h" |
11 #include "src/ic/ic-compiler.h" | 11 #include "src/ic/ic-compiler.h" |
12 #include "src/ic/stub-cache.h" | 12 #include "src/ic/stub-cache.h" |
13 | 13 |
14 namespace v8 { | 14 namespace v8 { |
15 namespace internal { | 15 namespace internal { |
16 | 16 |
17 // ---------------------------------------------------------------------------- | 17 // ---------------------------------------------------------------------------- |
18 // Static IC stub generators. | 18 // Static IC stub generators. |
19 // | 19 // |
20 | 20 |
21 #define __ ACCESS_MASM(masm) | 21 #define __ ACCESS_MASM(masm) |
22 | 22 |
23 | |
24 static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm, Register type, | |
25 Label* global_object) { | |
26 // Register usage: | |
27 // type: holds the receiver instance type on entry. | |
28 __ cmpb(type, Immediate(JS_GLOBAL_OBJECT_TYPE)); | |
29 __ j(equal, global_object); | |
30 __ cmpb(type, Immediate(JS_BUILTINS_OBJECT_TYPE)); | |
31 __ j(equal, global_object); | |
32 __ cmpb(type, Immediate(JS_GLOBAL_PROXY_TYPE)); | |
33 __ j(equal, global_object); | |
34 } | |
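Note: the removed helper above is a plain three-way instance-type comparison. A C++ sketch for reference; the enumerators below are placeholders for this sketch, not V8's real InstanceType values.

```cpp
// Sketch only: placeholder enumerators, not V8's InstanceType constants.
enum GlobalInstanceTypeSketch {
  JS_GLOBAL_OBJECT_TYPE_SKETCH,
  JS_BUILTINS_OBJECT_TYPE_SKETCH,
  JS_GLOBAL_PROXY_TYPE_SKETCH
};

// Mirrors the removed helper: take the global_object path when the
// receiver's instance type is any of the three global-object types.
inline bool IsGlobalObjectType(int type) {
  return type == JS_GLOBAL_OBJECT_TYPE_SKETCH ||
         type == JS_BUILTINS_OBJECT_TYPE_SKETCH ||
         type == JS_GLOBAL_PROXY_TYPE_SKETCH;
}
```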
35 | |
36 | |
37 // Helper function used to load a property from a dictionary backing storage. | 23 // Helper function used to load a property from a dictionary backing storage. |
38 // This function may return false negatives, so miss_label | 24 // This function may return false negatives, so miss_label |
39 // must always call a backup property load that is complete. | 25 // must always call a backup property load that is complete. |
40 // This function is safe to call if name is not an internalized string, | 26 // This function is safe to call if name is not an internalized string, |
41 // and will jump to the miss_label in that case. | 27 // and will jump to the miss_label in that case. |
42 // The generated code assumes that the receiver has slow properties, | 28 // The generated code assumes that the receiver has slow properties, |
43 // is not a global object and does not have interceptors. | 29 // is not a global object and does not have interceptors. |
44 static void GenerateDictionaryLoad(MacroAssembler* masm, Label* miss_label, | 30 static void GenerateDictionaryLoad(MacroAssembler* masm, Label* miss_label, |
45 Register elements, Register name, | 31 Register elements, Register name, |
46 Register r0, Register r1, Register result) { | 32 Register r0, Register r1, Register result) { |
(...skipping 220 matching lines...) |
267 // bit test is enough. | 253 // bit test is enough. |
268 STATIC_ASSERT(kNotInternalizedTag != 0); | 254 STATIC_ASSERT(kNotInternalizedTag != 0); |
269 __ testb(FieldOperand(map, Map::kInstanceTypeOffset), | 255 __ testb(FieldOperand(map, Map::kInstanceTypeOffset), |
270 Immediate(kIsNotInternalizedMask)); | 256 Immediate(kIsNotInternalizedMask)); |
271 __ j(not_zero, not_unique); | 257 __ j(not_zero, not_unique); |
272 | 258 |
273 __ bind(&unique); | 259 __ bind(&unique); |
274 } | 260 } |
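Note: the single testb above is the whole uniqueness check for string keys. In C++ terms it is one masked bit, as in the sketch below; the mask is passed as a parameter rather than quoting V8's constant.

```cpp
#include <cstdint>

// Sketch of the bit test emitted above: since kNotInternalizedTag != 0,
// an internalized string is recognized by the mask bit being clear.
// The mask value is a parameter here, not V8's actual constant.
inline bool NameIsNotUnique(uint8_t instance_type,
                            uint8_t is_not_internalized_mask) {
  return (instance_type & is_not_internalized_mask) != 0;  // -> not_unique
}
```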
275 | 261 |
276 | 262 |
277 void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) { | 263 void KeyedLoadIC::GenerateMegamorphic(MacroAssembler* masm) { |
278 // The return address is on the stack. | 264 // The return address is on the stack. |
279 Label slow, check_name, index_smi, index_name, property_array_property; | 265 Label slow, check_name, index_smi, index_name, property_array_property; |
280 Label probe_dictionary, check_number_dictionary; | 266 Label probe_dictionary, check_number_dictionary; |
281 | 267 |
282 Register receiver = LoadDescriptor::ReceiverRegister(); | 268 Register receiver = LoadDescriptor::ReceiverRegister(); |
283 Register key = LoadDescriptor::NameRegister(); | 269 Register key = LoadDescriptor::NameRegister(); |
284 DCHECK(receiver.is(rdx)); | 270 DCHECK(receiver.is(rdx)); |
285 DCHECK(key.is(rcx)); | 271 DCHECK(key.is(rcx)); |
286 | 272 |
287 // Check that the key is a smi. | 273 // Check that the key is a smi. |
(...skipping 37 matching lines...) |
325 GenerateKeyedLoadReceiverCheck(masm, receiver, rax, Map::kHasNamedInterceptor, | 311 GenerateKeyedLoadReceiverCheck(masm, receiver, rax, Map::kHasNamedInterceptor, |
326 &slow); | 312 &slow); |
327 | 313 |
328 // If the receiver is a fast-case object, check the keyed lookup | 314 // If the receiver is a fast-case object, check the keyed lookup |
329 // cache. Otherwise probe the dictionary leaving result in key. | 315 // cache. Otherwise probe the dictionary leaving result in key. |
330 __ movp(rbx, FieldOperand(receiver, JSObject::kPropertiesOffset)); | 316 __ movp(rbx, FieldOperand(receiver, JSObject::kPropertiesOffset)); |
331 __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset), | 317 __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset), |
332 Heap::kHashTableMapRootIndex); | 318 Heap::kHashTableMapRootIndex); |
333 __ j(equal, &probe_dictionary); | 319 __ j(equal, &probe_dictionary); |
334 | 320 |
335 // Load the map of the receiver, compute the keyed lookup cache hash | |
336 // based on 32 bits of the map pointer and the string hash. | |
337 __ movp(rbx, FieldOperand(receiver, HeapObject::kMapOffset)); | |
338 __ movl(rax, rbx); | |
339 __ shrl(rax, Immediate(KeyedLookupCache::kMapHashShift)); | |
340 __ movl(rdi, FieldOperand(key, String::kHashFieldOffset)); | |
341 __ shrl(rdi, Immediate(String::kHashShift)); | |
342 __ xorp(rax, rdi); | |
343 int mask = (KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask); | |
344 __ andp(rax, Immediate(mask)); | |
345 | 321 |
346 // Load the key (consisting of map and internalized string) from the cache and | 322 Code::Flags flags = Code::RemoveTypeAndHolderFromFlags( |
347 // check for match. | 323 Code::ComputeHandlerFlags(Code::LOAD_IC)); |
348 Label load_in_object_property; | 324 masm->isolate()->stub_cache()->GenerateProbe( |
349 static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket; | 325 masm, Code::LOAD_IC, flags, false, receiver, key, rbx, no_reg); |
350 Label hit_on_nth_entry[kEntriesPerBucket]; | 326 // Cache miss. |
351 ExternalReference cache_keys = | 327 GenerateMiss(masm); |
352 ExternalReference::keyed_lookup_cache_keys(masm->isolate()); | |
353 | |
354 for (int i = 0; i < kEntriesPerBucket - 1; i++) { | |
355 Label try_next_entry; | |
356 __ movp(rdi, rax); | |
357 __ shlp(rdi, Immediate(kPointerSizeLog2 + 1)); | |
358 __ LoadAddress(kScratchRegister, cache_keys); | |
359 int off = kPointerSize * i * 2; | |
360 __ cmpp(rbx, Operand(kScratchRegister, rdi, times_1, off)); | |
361 __ j(not_equal, &try_next_entry); | |
362 __ cmpp(key, Operand(kScratchRegister, rdi, times_1, off + kPointerSize)); | |
363 __ j(equal, &hit_on_nth_entry[i]); | |
364 __ bind(&try_next_entry); | |
365 } | |
366 | |
367 int off = kPointerSize * (kEntriesPerBucket - 1) * 2; | |
368 __ cmpp(rbx, Operand(kScratchRegister, rdi, times_1, off)); | |
369 __ j(not_equal, &slow); | |
370 __ cmpp(key, Operand(kScratchRegister, rdi, times_1, off + kPointerSize)); | |
371 __ j(not_equal, &slow); | |
372 | |
373 // Get field offset, which is a 32-bit integer. | |
374 ExternalReference cache_field_offsets = | |
375 ExternalReference::keyed_lookup_cache_field_offsets(masm->isolate()); | |
376 | |
377 // Hit on nth entry. | |
378 for (int i = kEntriesPerBucket - 1; i >= 0; i--) { | |
379 __ bind(&hit_on_nth_entry[i]); | |
380 if (i != 0) { | |
381 __ addl(rax, Immediate(i)); | |
382 } | |
383 __ LoadAddress(kScratchRegister, cache_field_offsets); | |
384 __ movl(rdi, Operand(kScratchRegister, rax, times_4, 0)); | |
385 __ movzxbp(rax, FieldOperand(rbx, Map::kInObjectPropertiesOffset)); | |
386 __ subp(rdi, rax); | |
387 __ j(above_equal, &property_array_property); | |
388 if (i != 0) { | |
389 __ jmp(&load_in_object_property); | |
390 } | |
391 } | |
392 | |
393 // Load in-object property. | |
394 __ bind(&load_in_object_property); | |
395 __ movzxbp(rax, FieldOperand(rbx, Map::kInstanceSizeOffset)); | |
396 __ addp(rax, rdi); | |
397 __ movp(rax, FieldOperand(receiver, rax, times_pointer_size, 0)); | |
398 __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1); | |
399 __ ret(0); | |
400 | |
401 // Load property array property. | |
402 __ bind(&property_array_property); | |
403 __ movp(rax, FieldOperand(receiver, JSObject::kPropertiesOffset)); | |
404 __ movp(rax, | |
405 FieldOperand(rax, rdi, times_pointer_size, FixedArray::kHeaderSize)); | |
406 __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1); | |
407 __ ret(0); | |
408 | 328 |
409 // Do a quick inline probe of the receiver's dictionary, if it | 329 // Do a quick inline probe of the receiver's dictionary, if it |
410 // exists. | 330 // exists. |
411 __ bind(&probe_dictionary); | 331 __ bind(&probe_dictionary); |
412 // rbx: elements | 332 // rbx: elements |
413 | 333 |
414 __ movp(rax, FieldOperand(receiver, JSObject::kMapOffset)); | 334 __ movp(rax, FieldOperand(receiver, JSObject::kMapOffset)); |
415 __ movb(rax, FieldOperand(rax, Map::kInstanceTypeOffset)); | 335 __ movb(rax, FieldOperand(rax, Map::kInstanceTypeOffset)); |
416 GenerateGlobalInstanceTypeCheck(masm, rax, &slow); | |
417 | 336 |
418 GenerateDictionaryLoad(masm, &slow, rbx, key, rax, rdi, rax); | 337 GenerateDictionaryLoad(masm, &slow, rbx, key, rax, rdi, rax); |
419 __ IncrementCounter(counters->keyed_load_generic_symbol(), 1); | 338 __ IncrementCounter(counters->keyed_load_generic_symbol(), 1); |
420 __ ret(0); | 339 __ ret(0); |
421 | 340 |
422 __ bind(&index_name); | 341 __ bind(&index_name); |
423 __ IndexFromHash(rbx, key); | 342 __ IndexFromHash(rbx, key); |
424 __ jmp(&index_smi); | 343 __ jmp(&index_smi); |
425 } | 344 } |
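Note on this hunk: the OLD side probed a keyed lookup cache (reached through ExternalReference, keyed by a (map, name) pair) before falling back to the dictionary, while the NEW side replaces that probe with a megamorphic stub cache probe and a miss call. A rough C++ sketch of the deleted probe logic follows; the constants and struct layout are illustrative assumptions, and only the hash-and-scan shape mirrors the removed assembly.

```cpp
#include <cstdint>

// Illustrative stand-in for the removed keyed lookup cache probe. All
// constants and member names here are assumptions for the sketch.
struct KeyedLookupCacheSketch {
  static const int kEntriesPerBucket = 4;   // assumed bucket size
  static const int kCapacity = 256;         // assumed power-of-two capacity
  static const int kMapHashShift = 5;       // assumed shift amount

  struct Key { const void* map; const void* name; };
  Key keys_[kCapacity];
  int field_offsets_[kCapacity];

  // Mirrors the removed hash computation: combine 32 bits of the map
  // pointer with the string hash, then mask to a bucket-aligned index.
  static uint32_t Hash(const void* map, uint32_t name_hash) {
    uint32_t h = static_cast<uint32_t>(reinterpret_cast<uintptr_t>(map));
    h = (h >> kMapHashShift) ^ name_hash;
    return h & static_cast<uint32_t>((kCapacity - 1) & ~(kEntriesPerBucket - 1));
  }

  // Mirrors the removed bucket scan: compare (map, name) against each entry
  // in the bucket and return the cached field offset, or -1 for the slow path.
  int Lookup(const void* map, const void* name, uint32_t name_hash) const {
    uint32_t index = Hash(map, name_hash);
    for (int i = 0; i < kEntriesPerBucket; i++) {
      if (keys_[index + i].map == map && keys_[index + i].name == name) {
        return field_offsets_[index + i];
      }
    }
    return -1;  // cache miss -> &slow in the generated code
  }
};
```

The removed tail of the hunk then used that cached offset either as an in-object field index or as an index into the properties FixedArray, which is the in-object/property-array split visible in the deleted labels above.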
426 | 345 |
(...skipping 586 matching lines...) |
1013 Condition cc = | 932 Condition cc = |
1014 (check == ENABLE_INLINED_SMI_CHECK) | 933 (check == ENABLE_INLINED_SMI_CHECK) |
1015 ? (*jmp_address == Assembler::kJncShortOpcode ? not_zero : zero) | 934 ? (*jmp_address == Assembler::kJncShortOpcode ? not_zero : zero) |
1016 : (*jmp_address == Assembler::kJnzShortOpcode ? not_carry : carry); | 935 : (*jmp_address == Assembler::kJnzShortOpcode ? not_carry : carry); |
1017 *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc); | 936 *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc); |
1018 } | 937 } |
1019 } | 938 } |
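Note: the cc selection above only ever rewrites the condition part of a short Jcc opcode byte. A standalone sketch of that byte patch follows; the opcode values are the standard x86 short-Jcc encodings (0x70 | condition) and are stated here as assumptions of the sketch, not quoted from V8 headers.

```cpp
#include <cstdint>

// Standard x86 condition codes used below (assumed for this sketch).
enum ConditionSketch { carry = 2, not_carry = 3, zero = 4, not_zero = 5 };

// Returns the patched opcode byte: toggling the inlined smi check flips the
// short jump between the carry-based and zero-based condition.
uint8_t PatchedShortJcc(uint8_t old_opcode, bool enable_inlined_smi_check) {
  const uint8_t kJccShortPrefix = 0x70;
  const uint8_t kJncShortOpcode = 0x73;  // jnc/jae rel8
  const uint8_t kJnzShortOpcode = 0x75;  // jnz/jne rel8
  ConditionSketch cc =
      enable_inlined_smi_check
          ? (old_opcode == kJncShortOpcode ? not_zero : zero)
          : (old_opcode == kJnzShortOpcode ? not_carry : carry);
  return static_cast<uint8_t>(kJccShortPrefix | cc);
}
```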
1020 } // namespace v8::internal | 939 } // namespace v8::internal |
1021 | 940 |
1022 #endif // V8_TARGET_ARCH_X64 | 941 #endif // V8_TARGET_ARCH_X64 |