| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #if V8_TARGET_ARCH_X64 | 7 #if V8_TARGET_ARCH_X64 |
| 8 | 8 |
| 9 #include "src/codegen.h" | 9 #include "src/codegen.h" |
| 10 #include "src/ic/ic.h" | 10 #include "src/ic/ic.h" |
| (...skipping 590 matching lines...) |
| 601 kCheckMap, kDontIncrementLength); | 601 kCheckMap, kDontIncrementLength); |
| 602 KeyedStoreGenerateMegamorphicHelper(masm, &fast_object_grow, | 602 KeyedStoreGenerateMegamorphicHelper(masm, &fast_object_grow, |
| 603 &fast_double_grow, &slow, kDontCheckMap, | 603 &fast_double_grow, &slow, kDontCheckMap, |
| 604 kIncrementLength); | 604 kIncrementLength); |
| 605 | 605 |
| 606 __ bind(&miss); | 606 __ bind(&miss); |
| 607 GenerateMiss(masm); | 607 GenerateMiss(masm); |
| 608 } | 608 } |
| 609 | 609 |
| 610 | 610 |
| 611 static Operand GenerateMappedArgumentsLookup( | |
| 612 MacroAssembler* masm, Register object, Register key, Register scratch1, | |
| 613 Register scratch2, Register scratch3, Label* unmapped_case, | |
| 614 Label* slow_case) { | |
| 615 Heap* heap = masm->isolate()->heap(); | |
| 616 | |
| 617 // Check that the receiver is a JSObject. Because of the elements | |
| 618 // map check later, we do not need to check for interceptors or | |
| 619 // whether it requires access checks. | |
| 620 __ JumpIfSmi(object, slow_case); | |
| 621 // Check that the object is some kind of JS receiver. | |
| 622 __ CmpObjectType(object, FIRST_JS_RECEIVER_TYPE, scratch1); | |
| 623 __ j(below, slow_case); | |
| 624 | |
| 625 // Check that the key is a non-negative smi. | |
| 626 Condition check = masm->CheckNonNegativeSmi(key); | |
| 627 __ j(NegateCondition(check), slow_case); | |
| 628 | |
| 629 // Load the elements into scratch1 and check their map; on a | |
| 630 // mismatch, bail out to the slow case. | |
| 631 Handle<Map> arguments_map(heap->sloppy_arguments_elements_map()); | |
| 632 __ movp(scratch1, FieldOperand(object, JSObject::kElementsOffset)); | |
| 633 __ CheckMap(scratch1, arguments_map, slow_case, DONT_DO_SMI_CHECK); | |
| 634 | |
| 635 // Check that the key is in the mapped range; if not, jump to the unmapped lookup (parameter map still in scratch1). | |
| 636 __ movp(scratch2, FieldOperand(scratch1, FixedArray::kLengthOffset)); | |
| 637 __ SmiSubConstant(scratch2, scratch2, Smi::FromInt(2)); | |
| 638 __ cmpp(key, scratch2); | |
| 639 __ j(greater_equal, unmapped_case); | |
| 640 | |
| 641 // Load element index and check whether it is the hole. | |
| 642 const int kHeaderSize = FixedArray::kHeaderSize + 2 * kPointerSize; | |
| 643 __ SmiToInteger64(scratch3, key); | |
| 644 __ movp(scratch2, | |
| 645 FieldOperand(scratch1, scratch3, times_pointer_size, kHeaderSize)); | |
| 646 __ CompareRoot(scratch2, Heap::kTheHoleValueRootIndex); | |
| 647 __ j(equal, unmapped_case); | |
| 648 | |
| 649 // Compute the operand for the context slot and return it. We can | |
| 650 // reuse scratch1 because we do not jump to the unmapped lookup | |
| 651 // (which requires the parameter map in scratch1). | |
| 652 __ movp(scratch1, FieldOperand(scratch1, FixedArray::kHeaderSize)); | |
| 653 __ SmiToInteger64(scratch3, scratch2); | |
| 654 return FieldOperand(scratch1, scratch3, times_pointer_size, | |
| 655 Context::kHeaderSize); | |
| 656 } | |
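For readers of this deleted hunk, here is a minimal standalone C++ model of the mapped lookup the assembly above emits. It is illustrative only, not V8 API; kHole, ParameterMap, and MappedLookup are invented names. A sloppy-arguments object's elements array is a "parameter map": slot 0 holds the context, slot 1 the backing store, and slots 2..N hold, per mapped parameter, the context index where its value lives, or the hole when that parameter is unmapped.

    #include <optional>
    #include <vector>

    constexpr int kHole = -1;  // stands in for V8's "the hole" sentinel

    // Standalone model of the sloppy-arguments "parameter map" elements.
    struct ParameterMap {
      std::vector<int> context;  // models the function's context slots
      std::vector<int> mapped;   // models slots [2..]; kHole = unmapped
      std::vector<int> backing;  // models the arguments backing store
    };

    // Mapped path: mirrors the range check against length - 2 and the hole
    // check before indirecting through the context. Modeled as a load for
    // simplicity; the deleted helper instead returns the slot Operand so
    // the store IC can write through it.
    std::optional<int> MappedLookup(const ParameterMap& pm, int key) {
      if (key < 0 || key >= static_cast<int>(pm.mapped.size()))
        return std::nullopt;                             // -> unmapped lookup
      if (pm.mapped[key] == kHole) return std::nullopt;  // -> unmapped lookup
      return pm.context[pm.mapped[key]];  // the value lives in the context
    }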
| 657 | |
| 658 | |
| 659 static Operand GenerateUnmappedArgumentsLookup(MacroAssembler* masm, | |
| 660 Register key, | |
| 661 Register parameter_map, | |
| 662 Register scratch, | |
| 663 Label* slow_case) { | |
| 664 // The element is in the arguments backing store, referenced by the | |
| 665 // second element of the parameter_map. The parameter_map register | |
| 666 // must be loaded with the parameter map of the arguments object and is | |
| 667 // overwritten. | |
| 668 const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize; | |
| 669 Register backing_store = parameter_map; | |
| 670 __ movp(backing_store, FieldOperand(parameter_map, kBackingStoreOffset)); | |
| 671 Handle<Map> fixed_array_map(masm->isolate()->heap()->fixed_array_map()); | |
| 672 __ CheckMap(backing_store, fixed_array_map, slow_case, DONT_DO_SMI_CHECK); | |
| 673 __ movp(scratch, FieldOperand(backing_store, FixedArray::kLengthOffset)); | |
| 674 __ cmpp(key, scratch); | |
| 675 __ j(greater_equal, slow_case); | |
| 676 __ SmiToInteger64(scratch, key); | |
| 677 return FieldOperand(backing_store, scratch, times_pointer_size, | |
| 678 FixedArray::kHeaderSize); | |
| 679 } | |
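The matching half of the model above, under the same assumptions: the backing store sits in slot 1 of the parameter map, keys are bounds-checked against its length, and anything out of range falls through to the slow case (the IC miss).

    // Unmapped path: a plain bounds-checked read from the backing store.
    std::optional<int> UnmappedLookup(const ParameterMap& pm, int key) {
      if (key < 0 || key >= static_cast<int>(pm.backing.size()))
        return std::nullopt;  // -> slow case
      return pm.backing[key];
    }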
| 680 | |
| 681 | |
| 682 void KeyedStoreIC::GenerateSloppyArguments(MacroAssembler* masm) { | |
| 683 // The return address is on the stack. | |
| 684 Label slow, notin; | |
| 685 Register receiver = StoreDescriptor::ReceiverRegister(); | |
| 686 Register name = StoreDescriptor::NameRegister(); | |
| 687 Register value = StoreDescriptor::ValueRegister(); | |
| 688 DCHECK(receiver.is(rdx)); | |
| 689 DCHECK(name.is(rcx)); | |
| 690 DCHECK(value.is(rax)); | |
| 691 | |
| 692 Operand mapped_location = GenerateMappedArgumentsLookup( | |
| 693 masm, receiver, name, rbx, rdi, r8, ¬in, &slow); | |
| 694 __ movp(mapped_location, value); | |
| 695 __ leap(r9, mapped_location); | |
| 696 __ movp(r8, value); | |
| 697 __ RecordWrite(rbx, r9, r8, kDontSaveFPRegs, EMIT_REMEMBERED_SET, | |
| 698 INLINE_SMI_CHECK); | |
| 699 __ Ret(); | |
| 700 __ bind(¬in); | |
| 701 // The unmapped lookup expects that the parameter map is in rbx. | |
| 702 Operand unmapped_location = | |
| 703 GenerateUnmappedArgumentsLookup(masm, name, rbx, rdi, &slow); | |
| 704 __ movp(unmapped_location, value); | |
| 705 __ leap(r9, unmapped_location); | |
| 706 __ movp(r8, value); | |
| 707 __ RecordWrite(rbx, r9, r8, kDontSaveFPRegs, EMIT_REMEMBERED_SET, | |
| 708 INLINE_SMI_CHECK); | |
| 709 __ Ret(); | |
| 710 __ bind(&slow); | |
| 711 GenerateMiss(masm); | |
| 712 } | |
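Both stores in the deleted function follow the same store-then-barrier idiom, sketched below as a helper. The name StoreWithWriteBarrier is invented here for illustration; the MacroAssembler calls are exactly the ones the hunk uses. The slot address is materialized with leap, the value is copied because RecordWrite clobbers its address and value registers, and INLINE_SMI_CHECK lets the barrier skip smi values, which the GC never needs to record.

    // Hypothetical helper capturing the repeated sequence above.
    static void StoreWithWriteBarrier(MacroAssembler* masm, Register object,
                                      const Operand& slot, Register value,
                                      Register address_scratch,
                                      Register value_scratch) {
      masm->movp(slot, value);            // the actual store
      masm->leap(address_scratch, slot);  // slot address for the barrier
      masm->movp(value_scratch, value);   // RecordWrite clobbers this copy
      masm->RecordWrite(object, address_scratch, value_scratch, kDontSaveFPRegs,
                        EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
    }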
| 713 | |
| 714 | |
| 715 void LoadIC::GenerateNormal(MacroAssembler* masm) { | 611 void LoadIC::GenerateNormal(MacroAssembler* masm) { |
| 716 Register dictionary = rax; | 612 Register dictionary = rax; |
| 717 DCHECK(!dictionary.is(LoadDescriptor::ReceiverRegister())); | 613 DCHECK(!dictionary.is(LoadDescriptor::ReceiverRegister())); |
| 718 DCHECK(!dictionary.is(LoadDescriptor::NameRegister())); | 614 DCHECK(!dictionary.is(LoadDescriptor::NameRegister())); |
| 719 | 615 |
| 720 Label slow; | 616 Label slow; |
| 721 | 617 |
| 722 __ movp(dictionary, FieldOperand(LoadDescriptor::ReceiverRegister(), | 618 __ movp(dictionary, FieldOperand(LoadDescriptor::ReceiverRegister(), |
| 723 JSObject::kPropertiesOffset)); | 619 JSObject::kPropertiesOffset)); |
| 724 GenerateDictionaryLoad(masm, &slow, dictionary, | 620 GenerateDictionaryLoad(masm, &slow, dictionary, |
| (...skipping 225 matching lines...) |
| 950 Condition cc = | 846 Condition cc = |
| 951 (check == ENABLE_INLINED_SMI_CHECK) | 847 (check == ENABLE_INLINED_SMI_CHECK) |
| 952 ? (*jmp_address == Assembler::kJncShortOpcode ? not_zero : zero) | 848 ? (*jmp_address == Assembler::kJncShortOpcode ? not_zero : zero) |
| 953 : (*jmp_address == Assembler::kJnzShortOpcode ? not_carry : carry); | 849 : (*jmp_address == Assembler::kJnzShortOpcode ? not_carry : carry); |
| 954 *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc); | 850 *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc); |
| 955 } | 851 } |
| 956 } | 852 } |
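Context for the condition-flipping logic at the end of this hunk: on x64 a short Jcc is encoded as a single opcode byte (0x70 | condition code) followed by a rel8 displacement, so enabling or disabling the inlined smi check rewrites exactly one byte in place. A minimal standalone illustration (plain C++, not V8 code; the byte values match Assembler::kJncShortOpcode and kJnzShortOpcode):

    #include <cassert>
    #include <cstdint>

    int main() {
      uint8_t code[] = {0x73, 0x10};  // jnc +0x10 (carry-based placeholder)
      const uint8_t kJccShortPrefix = 0x70;
      const uint8_t not_zero = 0x5;   // x64 condition code for jnz
      code[0] = kJccShortPrefix | not_zero;  // patched in place to jnz +0x10
      assert(code[0] == 0x75);        // kJnzShortOpcode
      return 0;
    }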
| 957 } // namespace v8::internal | 853 } // namespace v8::internal |
| 958 | 854 |
| 959 #endif // V8_TARGET_ARCH_X64 | 855 #endif // V8_TARGET_ARCH_X64 |