OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_IA32 | 7 #if V8_TARGET_ARCH_IA32 |
8 | 8 |
9 #include "src/codegen.h" | 9 #include "src/codegen.h" |
10 #include "src/ic/ic.h" | 10 #include "src/ic/ic.h" |
(...skipping 312 matching lines...)
323 Handle<Map> fixed_array_map(masm->isolate()->heap()->fixed_array_map()); | 323 Handle<Map> fixed_array_map(masm->isolate()->heap()->fixed_array_map()); |
324 __ CheckMap(backing_store, fixed_array_map, slow_case, DONT_DO_SMI_CHECK); | 324 __ CheckMap(backing_store, fixed_array_map, slow_case, DONT_DO_SMI_CHECK); |
325 __ mov(scratch, FieldOperand(backing_store, FixedArray::kLengthOffset)); | 325 __ mov(scratch, FieldOperand(backing_store, FixedArray::kLengthOffset)); |
326 __ cmp(key, scratch); | 326 __ cmp(key, scratch); |
327 __ j(greater_equal, slow_case); | 327 __ j(greater_equal, slow_case); |
328 return FieldOperand(backing_store, key, times_half_pointer_size, | 328 return FieldOperand(backing_store, key, times_half_pointer_size, |
329 FixedArray::kHeaderSize); | 329 FixedArray::kHeaderSize); |
330 } | 330 } |
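For readers unfamiliar with ia32 smi tagging, the times_half_pointer_size scale in the returned operand works because a smi already encodes index * 2, so a scale of 2 yields index * kPointerSize. A minimal sketch of the address arithmetic, assuming 32-bit smi tagging, a one-byte heap-object tag, and an 8-byte FixedArray header; the names and constants below are illustrative, not taken from V8 headers:

#include <cstdint>

// Assumed constants for this sketch only.
constexpr uintptr_t kHeapObjectTag = 1;          // heap pointers carry a 1 tag
constexpr uintptr_t kFixedArrayHeaderSize = 8;   // map word + length word
constexpr uintptr_t kPointerSize = 4;

// Address of backing_store[index] while the key is still smi-tagged
// (key_smi == index << 1): scaling the smi by 2 gives index * kPointerSize,
// which is what times_half_pointer_size expresses in the FieldOperand above.
uintptr_t ElementAddress(uintptr_t backing_store, int32_t key_smi) {
  return backing_store - kHeapObjectTag + kFixedArrayHeaderSize +
         static_cast<uintptr_t>(key_smi) * (kPointerSize / 2);
}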
331 | 331 |
332 | 332 |
333 void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) { | 333 void KeyedLoadIC::GenerateMegamorphic(MacroAssembler* masm) { |
334 // The return address is on the stack. | 334 // The return address is on the stack. |
335 Label slow, check_name, index_smi, index_name, property_array_property; | 335 Label slow, check_name, index_smi, index_name, property_array_property; |
336 Label probe_dictionary, check_number_dictionary; | 336 Label probe_dictionary, check_number_dictionary; |
337 | 337 |
338 Register receiver = LoadDescriptor::ReceiverRegister(); | 338 Register receiver = LoadDescriptor::ReceiverRegister(); |
339 Register key = LoadDescriptor::NameRegister(); | 339 Register key = LoadDescriptor::NameRegister(); |
340 DCHECK(receiver.is(edx)); | 340 DCHECK(receiver.is(edx)); |
341 DCHECK(key.is(ecx)); | 341 DCHECK(key.is(ecx)); |
342 | 342 |
343 // Check that the key is a smi. | 343 // Check that the key is a smi. |
(...skipping 41 matching lines...)
385 // Slow case: jump to runtime. | 385 // Slow case: jump to runtime. |
386 __ IncrementCounter(counters->keyed_load_generic_slow(), 1); | 386 __ IncrementCounter(counters->keyed_load_generic_slow(), 1); |
387 GenerateRuntimeGetProperty(masm); | 387 GenerateRuntimeGetProperty(masm); |
388 | 388 |
389 __ bind(&check_name); | 389 __ bind(&check_name); |
390 GenerateKeyNameCheck(masm, key, eax, ebx, &index_name, &slow); | 390 GenerateKeyNameCheck(masm, key, eax, ebx, &index_name, &slow); |
391 | 391 |
392 GenerateKeyedLoadReceiverCheck(masm, receiver, eax, Map::kHasNamedInterceptor, | 392 GenerateKeyedLoadReceiverCheck(masm, receiver, eax, Map::kHasNamedInterceptor, |
393 &slow); | 393 &slow); |
394 | 394 |
395 // If the receiver is a fast-case object, check the keyed lookup | 395 // If the receiver is a fast-case object, check the stub cache. Otherwise |
396 // cache. Otherwise probe the dictionary. | 396 // probe the dictionary. |
397 __ mov(ebx, FieldOperand(receiver, JSObject::kPropertiesOffset)); | 397 __ mov(ebx, FieldOperand(receiver, JSObject::kPropertiesOffset)); |
398 __ cmp(FieldOperand(ebx, HeapObject::kMapOffset), | 398 __ cmp(FieldOperand(ebx, HeapObject::kMapOffset), |
399 Immediate(isolate->factory()->hash_table_map())); | 399 Immediate(isolate->factory()->hash_table_map())); |
400 __ j(equal, &probe_dictionary); | 400 __ j(equal, &probe_dictionary); |
401 | 401 |
402 // The receiver's map is still in eax, compute the keyed lookup cache hash | 402 Code::Flags flags = Code::RemoveTypeAndHolderFromFlags( |
403 // based on 32 bits of the map pointer and the string hash. | 403 Code::ComputeHandlerFlags(Code::LOAD_IC)); |
404 if (FLAG_debug_code) { | 404 masm->isolate()->stub_cache()->GenerateProbe( |
405 __ cmp(eax, FieldOperand(receiver, HeapObject::kMapOffset)); | 405 masm, Code::LOAD_IC, flags, false, receiver, key, ebx, no_reg); |
406 __ Check(equal, kMapIsNoLongerInEax); | 406 // Cache miss. |
407 } | 407 GenerateMiss(masm); |
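The replacement on the right swaps the hand-rolled keyed lookup cache probe (the removed lines that follow) for a megamorphic stub cache probe keyed on the receiver's map and the name, falling through to GenerateMiss on a miss. A minimal sketch of what such a probe amounts to, with an assumed single-table layout rather than the real StubCache internals (which hash differently and use primary and secondary tables):

#include <cstdint>

// Assumed entry layout, for illustration only.
struct StubCacheEntry {
  const void* map;      // receiver map
  const void* name;     // property name (internalized, compared by pointer)
  const void* handler;  // compiled load handler to tail-call
};

const void* Probe(const StubCacheEntry* table, uintptr_t size_mask,
                  const void* map, const void* name) {
  uintptr_t index = (reinterpret_cast<uintptr_t>(map) ^
                     reinterpret_cast<uintptr_t>(name)) & size_mask;
  const StubCacheEntry& e = table[index];
  if (e.map == map && e.name == name) return e.handler;  // hit: run the handler
  return nullptr;  // miss: fall through to GenerateMiss / the runtime
}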
408 __ mov(ebx, eax); // Keep the map around for later. | |
409 __ shr(eax, KeyedLookupCache::kMapHashShift); | |
410 __ mov(edi, FieldOperand(key, String::kHashFieldOffset)); | |
411 __ shr(edi, String::kHashShift); | |
412 __ xor_(eax, edi); | |
413 __ and_(eax, KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask); | |
414 | |
415 // Load the key (consisting of map and internalized string) from the cache and | |
416 // check for match. | |
417 Label load_in_object_property; | |
418 static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket; | |
419 Label hit_on_nth_entry[kEntriesPerBucket]; | |
420 ExternalReference cache_keys = | |
421 ExternalReference::keyed_lookup_cache_keys(masm->isolate()); | |
422 | |
423 for (int i = 0; i < kEntriesPerBucket - 1; i++) { | |
424 Label try_next_entry; | |
425 __ mov(edi, eax); | |
426 __ shl(edi, kPointerSizeLog2 + 1); | |
427 if (i != 0) { | |
428 __ add(edi, Immediate(kPointerSize * i * 2)); | |
429 } | |
430 __ cmp(ebx, Operand::StaticArray(edi, times_1, cache_keys)); | |
431 __ j(not_equal, &try_next_entry); | |
432 __ add(edi, Immediate(kPointerSize)); | |
433 __ cmp(key, Operand::StaticArray(edi, times_1, cache_keys)); | |
434 __ j(equal, &hit_on_nth_entry[i]); | |
435 __ bind(&try_next_entry); | |
436 } | |
437 | |
438 __ lea(edi, Operand(eax, 1)); | |
439 __ shl(edi, kPointerSizeLog2 + 1); | |
440 __ add(edi, Immediate(kPointerSize * (kEntriesPerBucket - 1) * 2)); | |
441 __ cmp(ebx, Operand::StaticArray(edi, times_1, cache_keys)); | |
442 __ j(not_equal, &slow); | |
443 __ add(edi, Immediate(kPointerSize)); | |
444 __ cmp(key, Operand::StaticArray(edi, times_1, cache_keys)); | |
445 __ j(not_equal, &slow); | |
446 | |
447 // Get field offset. | |
448 // ebx : receiver's map | |
449 // eax : lookup cache index | |
450 ExternalReference cache_field_offsets = | |
451 ExternalReference::keyed_lookup_cache_field_offsets(masm->isolate()); | |
452 | |
453 // Hit on nth entry. | |
454 for (int i = kEntriesPerBucket - 1; i >= 0; i--) { | |
455 __ bind(&hit_on_nth_entry[i]); | |
456 if (i != 0) { | |
457 __ add(eax, Immediate(i)); | |
458 } | |
459 __ mov(edi, | |
460 Operand::StaticArray(eax, times_pointer_size, cache_field_offsets)); | |
461 __ movzx_b(eax, FieldOperand(ebx, Map::kInObjectPropertiesOffset)); | |
462 __ sub(edi, eax); | |
463 __ j(above_equal, &property_array_property); | |
464 if (i != 0) { | |
465 __ jmp(&load_in_object_property); | |
466 } | |
467 } | |
468 | |
469 // Load in-object property. | |
470 __ bind(&load_in_object_property); | |
471 __ movzx_b(eax, FieldOperand(ebx, Map::kInstanceSizeOffset)); | |
472 __ add(eax, edi); | |
473 __ mov(eax, FieldOperand(receiver, eax, times_pointer_size, 0)); | |
474 __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1); | |
475 __ ret(0); | |
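The split between load_in_object_property and property_array_property hinges on the subtraction a few lines up: the cached field index is rebased against the in-object property count, and a negative result addresses backward from the end of the object. A small worked model of that decision, with assumed units (indices and sizes in pointer-size words) rather than V8's actual helpers:

#include <cstdint>

struct FieldLocation {
  bool in_object;      // true: the field lives inside the JSObject itself
  int32_t word_index;  // word offset within the object or the properties array
};

// Mirrors the sub / j(above_equal) sequence: field_index counts all fast
// properties; indices below the in-object count are stored inside the object.
FieldLocation Locate(int32_t field_index, int32_t inobject_properties,
                     int32_t instance_size_in_words) {
  int32_t rebased = field_index - inobject_properties;
  if (rebased >= 0) return {false, rebased};  // property_array_property path
  // load_in_object_property path: rebased is negative, so the slot sits just
  // below the end of the object, where in-object fields are allocated.
  return {true, instance_size_in_words + rebased};
}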
476 | |
477 // Load property array property. | |
478 __ bind(&property_array_property); | |
479 __ mov(eax, FieldOperand(receiver, JSObject::kPropertiesOffset)); | |
480 __ mov(eax, | |
481 FieldOperand(eax, edi, times_pointer_size, FixedArray::kHeaderSize)); | |
482 __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1); | |
483 __ ret(0); | |
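Taken together, the removed block implemented a small direct-mapped cache from (map, name) to a field index: the hash mixes bits of the map pointer with the name's hash, and a parallel array holds the field offsets. A rough model of that lookup, assuming a simplified layout rather than the exact KeyedLookupCache structure:

#include <cstdint>

struct CacheKey {
  const void* map;
  const void* name;  // internalized string, so pointer comparison suffices
};

// keys[] and field_offsets[] are parallel arrays; each bucket holds
// entries_per_bucket consecutive slots, matching the unrolled loops above.
int Lookup(const CacheKey* keys, const int* field_offsets,
           int entries_per_bucket, uint32_t hash_mask,
           const void* map, uint32_t name_hash, const void* name) {
  uint32_t bucket =
      ((static_cast<uint32_t>(reinterpret_cast<uintptr_t>(map)) >> 2) ^
       name_hash) & hash_mask;
  for (int i = 0; i < entries_per_bucket; i++) {
    if (keys[bucket + i].map == map && keys[bucket + i].name == name) {
      return field_offsets[bucket + i];  // hit: index of the cached field
    }
  }
  return -1;  // miss: take the slow path
}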
484 | 408 |
485 // Do a quick inline probe of the receiver's dictionary, if it | 409 // Do a quick inline probe of the receiver's dictionary, if it |
486 // exists. | 410 // exists. |
487 __ bind(&probe_dictionary); | 411 __ bind(&probe_dictionary); |
488 | 412 |
489 __ mov(eax, FieldOperand(receiver, JSObject::kMapOffset)); | 413 __ mov(eax, FieldOperand(receiver, JSObject::kMapOffset)); |
490 __ movzx_b(eax, FieldOperand(eax, Map::kInstanceTypeOffset)); | 414 __ movzx_b(eax, FieldOperand(eax, Map::kInstanceTypeOffset)); |
491 GenerateGlobalInstanceTypeCheck(masm, eax, &slow); | 415 GenerateGlobalInstanceTypeCheck(masm, eax, &slow); |
492 | 416 |
493 GenerateDictionaryLoad(masm, &slow, ebx, key, eax, edi, eax); | 417 GenerateDictionaryLoad(masm, &slow, ebx, key, eax, edi, eax); |
(...skipping 524 matching lines...)
1018 Condition cc = | 942 Condition cc = |
1019 (check == ENABLE_INLINED_SMI_CHECK) | 943 (check == ENABLE_INLINED_SMI_CHECK) |
1020 ? (*jmp_address == Assembler::kJncShortOpcode ? not_zero : zero) | 944 ? (*jmp_address == Assembler::kJncShortOpcode ? not_zero : zero) |
1021 : (*jmp_address == Assembler::kJnzShortOpcode ? not_carry : carry); | 945 : (*jmp_address == Assembler::kJnzShortOpcode ? not_carry : carry); |
1022 *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc); | 946 *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc); |
1023 } | 947 } |
1024 } | 948 } |
1025 } // namespace v8::internal | 949 } // namespace v8::internal |
1026 | 950 |
1027 #endif // V8_TARGET_ARCH_IA32 | 951 #endif // V8_TARGET_ARCH_IA32 |
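The PatchInlinedSmiCheck tail shown above flips a short conditional jump between a carry-based and a zero-based test when the inlined smi check is toggled. A minimal sketch of the opcode rewrite, assuming the standard x86 short-jcc encoding (0x70 | condition code); the constants are spelled out here for illustration:

#include <cstdint>

// Assumed x86 condition codes, matching the usual encoding:
// carry = 2, not_carry = 3, zero = 4, not_zero = 5; short jcc is 0x70 | cc.
uint8_t PatchShortJcc(uint8_t old_opcode, bool enable_inlined_smi_check) {
  constexpr uint8_t kJccShortPrefix = 0x70;
  constexpr uint8_t kCarry = 0x2, kNotCarry = 0x3, kZero = 0x4, kNotZero = 0x5;
  constexpr uint8_t kJncShortOpcode = kJccShortPrefix | kNotCarry;  // 0x73
  constexpr uint8_t kJnzShortOpcode = kJccShortPrefix | kNotZero;   // 0x75
  uint8_t cc;
  if (enable_inlined_smi_check) {
    // jnc -> jnz, jc -> jz: keep the branch polarity, test the zero flag.
    cc = (old_opcode == kJncShortOpcode) ? kNotZero : kZero;
  } else {
    // jnz -> jnc, jz -> jc: switch back to testing the carry flag.
    cc = (old_opcode == kJnzShortOpcode) ? kNotCarry : kCarry;
  }
  return static_cast<uint8_t>(kJccShortPrefix | cc);
}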