| OLD | NEW |
| 1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 6519 matching lines...) |
| 6530 } | 6530 } |
| 6531 | 6531 |
| 6532 cc_reg_ = eq; | 6532 cc_reg_ = eq; |
| 6533 ASSERT(has_cc() && frame_->height() == original_height); | 6533 ASSERT(has_cc() && frame_->height() == original_height); |
| 6534 } | 6534 } |
| 6535 | 6535 |
| 6536 | 6536 |
| 6537 class DeferredReferenceGetNamedValue: public DeferredCode { | 6537 class DeferredReferenceGetNamedValue: public DeferredCode { |
| 6538 public: | 6538 public: |
| 6539 explicit DeferredReferenceGetNamedValue(Register receiver, | 6539 explicit DeferredReferenceGetNamedValue(Register receiver, |
| 6540 Handle<String> name) | 6540 Handle<String> name, |
| 6541 : receiver_(receiver), name_(name) { | 6541 bool is_contextual) |
| 6542 set_comment("[ DeferredReferenceGetNamedValue"); | 6542 : receiver_(receiver), |
| 6543 name_(name), |
| 6544 is_contextual_(is_contextual), |
| 6545 is_dont_delete_(false) { |
| 6546 set_comment(is_contextual |
| 6547 ? "[ DeferredReferenceGetNamedValue (contextual)" |
| 6548 : "[ DeferredReferenceGetNamedValue"); |
| 6543 } | 6549 } |
| 6544 | 6550 |
| 6545 virtual void Generate(); | 6551 virtual void Generate(); |
| 6546 | 6552 |
| 6553 void set_is_dont_delete(bool value) { |
| 6554 ASSERT(is_contextual_); |
| 6555 is_dont_delete_ = value; |
| 6556 } |
| 6557 |
| 6547 private: | 6558 private: |
| 6548 Register receiver_; | 6559 Register receiver_; |
| 6549 Handle<String> name_; | 6560 Handle<String> name_; |
| 6561 bool is_contextual_; |
| 6562 bool is_dont_delete_; |
| 6550 }; | 6563 }; |
| 6551 | 6564 |
| 6552 | 6565 |
| 6553 // Convention for this is that on entry the receiver is in a register that | 6566 // Convention for this is that on entry the receiver is in a register that |
| 6554 // is not used by the stack. On exit the answer is found in that same | 6567 // is not used by the stack. On exit the answer is found in that same |
| 6555 // register and the stack has the same height. | 6568 // register and the stack has the same height. |
| 6556 void DeferredReferenceGetNamedValue::Generate() { | 6569 void DeferredReferenceGetNamedValue::Generate() { |
| 6557 #ifdef DEBUG | 6570 #ifdef DEBUG |
| 6558 int expected_height = frame_state()->frame()->height(); | 6571 int expected_height = frame_state()->frame()->height(); |
| 6559 #endif | 6572 #endif |
| 6560 VirtualFrame copied_frame(*frame_state()->frame()); | 6573 VirtualFrame copied_frame(*frame_state()->frame()); |
| 6561 copied_frame.SpillAll(); | 6574 copied_frame.SpillAll(); |
| 6562 | 6575 |
| 6563 Register scratch1 = VirtualFrame::scratch0(); | 6576 Register scratch1 = VirtualFrame::scratch0(); |
| 6564 Register scratch2 = VirtualFrame::scratch1(); | 6577 Register scratch2 = VirtualFrame::scratch1(); |
| 6565 ASSERT(!receiver_.is(scratch1) && !receiver_.is(scratch2)); | 6578 ASSERT(!receiver_.is(scratch1) && !receiver_.is(scratch2)); |
| 6566 __ DecrementCounter(&Counters::named_load_inline, 1, scratch1, scratch2); | 6579 __ DecrementCounter(&Counters::named_load_inline, 1, scratch1, scratch2); |
| 6567 __ IncrementCounter(&Counters::named_load_inline_miss, 1, scratch1, scratch2); | 6580 __ IncrementCounter(&Counters::named_load_inline_miss, 1, scratch1, scratch2); |
| 6568 | 6581 |
| 6569 // Ensure receiver in r0 and name in r2 to match load ic calling convention. | 6582 // Ensure receiver in r0 and name in r2 to match load ic calling convention. |
| 6570 __ Move(r0, receiver_); | 6583 __ Move(r0, receiver_); |
| 6571 __ mov(r2, Operand(name_)); | 6584 __ mov(r2, Operand(name_)); |
| 6572 | 6585 |
| 6573 // The rest of the instructions in the deferred code must be together. | 6586 // The rest of the instructions in the deferred code must be together. |
| 6574 { Assembler::BlockConstPoolScope block_const_pool(masm_); | 6587 { Assembler::BlockConstPoolScope block_const_pool(masm_); |
| 6575 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize)); | 6588 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize)); |
| 6576 __ Call(ic, RelocInfo::CODE_TARGET); | 6589 RelocInfo::Mode mode = is_contextual_ |
| 6577 // The call must be followed by a nop(1) instruction to indicate that the | 6590 ? RelocInfo::CODE_TARGET_CONTEXT |
| 6578 // in-object has been inlined. | 6591 : RelocInfo::CODE_TARGET; |
| 6579 __ nop(PROPERTY_ACCESS_INLINED); | 6592 __ Call(ic, mode); |
| 6593 // We must mark the code just after the call with the correct marker. |
| 6594 MacroAssembler::NopMarkerTypes code_marker; |
| 6595 if (is_contextual_) { |
| 6596 code_marker = is_dont_delete_ |
| 6597 ? MacroAssembler::PROPERTY_ACCESS_INLINED_CONTEXT_DONT_DELETE |
| 6598 : MacroAssembler::PROPERTY_ACCESS_INLINED_CONTEXT; |
| 6599 } else { |
| 6600 code_marker = MacroAssembler::PROPERTY_ACCESS_INLINED; |
| 6601 } |
| 6602 __ MarkCode(code_marker); |
| 6580 | 6603 |
| 6581 // At this point the answer is in r0. We move it to the expected register | 6604 // At this point the answer is in r0. We move it to the expected register |
| 6582 // if necessary. | 6605 // if necessary. |
| 6583 __ Move(receiver_, r0); | 6606 __ Move(receiver_, r0); |
| 6584 | 6607 |
| 6585 // Now go back to the frame that we entered with. This will not overwrite | 6608 // Now go back to the frame that we entered with. This will not overwrite |
| 6586 // the receiver register since that register was not in use when we came | 6609 // the receiver register since that register was not in use when we came |
| 6587 // in. The instructions emitted by this merge are skipped over by the | 6610 // in. The instructions emitted by this merge are skipped over by the |
| 6588 // inline load patching mechanism when looking for the branch instruction | 6611 // inline load patching mechanism when looking for the branch instruction |
| 6589 // that tells it where the code to patch is. | 6612 // that tells it where the code to patch is. |
| (...skipping 43 matching lines...) |
| 6633 __ Swap(r0, r1, ip); | 6656 __ Swap(r0, r1, ip); |
| 6634 } | 6657 } |
| 6635 | 6658 |
| 6636 // The rest of the instructions in the deferred code must be together. | 6659 // The rest of the instructions in the deferred code must be together. |
| 6637 { Assembler::BlockConstPoolScope block_const_pool(masm_); | 6660 { Assembler::BlockConstPoolScope block_const_pool(masm_); |
| 6638 // Call keyed load IC. It has the arguments key and receiver in r0 and r1. | 6661 // Call keyed load IC. It has the arguments key and receiver in r0 and r1. |
| 6639 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize)); | 6662 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize)); |
| 6640 __ Call(ic, RelocInfo::CODE_TARGET); | 6663 __ Call(ic, RelocInfo::CODE_TARGET); |
| 6641 // The call must be followed by a nop instruction to indicate that the | 6664 // The call must be followed by a nop instruction to indicate that the |
| 6642 // keyed load has been inlined. | 6665 // keyed load has been inlined. |
| 6643 __ nop(PROPERTY_ACCESS_INLINED); | 6666 __ MarkCode(MacroAssembler::PROPERTY_ACCESS_INLINED); |
| 6644 | 6667 |
| 6645 // Now go back to the frame that we entered with. This will not overwrite | 6668 // Now go back to the frame that we entered with. This will not overwrite |
| 6646 // the receiver or key registers since they were not in use when we came | 6669 // the receiver or key registers since they were not in use when we came |
| 6647 // in. The instructions emitted by this merge are skipped over by the | 6670 // in. The instructions emitted by this merge are skipped over by the |
| 6648 // inline load patching mechanism when looking for the branch instruction | 6671 // inline load patching mechanism when looking for the branch instruction |
| 6649 // that tells it where the code to patch is. | 6672 // that tells it where the code to patch is. |
| 6650 copied_frame.MergeTo(frame_state()->frame()); | 6673 copied_frame.MergeTo(frame_state()->frame()); |
| 6651 | 6674 |
| 6652 // Block the constant pool for one more instruction after leaving this | 6675 // Block the constant pool for one more instruction after leaving this |
| 6653 // constant pool block scope to include the branch instruction ending the | 6676 // constant pool block scope to include the branch instruction ending the |
| (...skipping 36 matching lines...) |
| 6690 ASSERT(receiver_.is(r2)); | 6713 ASSERT(receiver_.is(r2)); |
| 6691 | 6714 |
| 6692 // The rest of the instructions in the deferred code must be together. | 6715 // The rest of the instructions in the deferred code must be together. |
| 6693 { Assembler::BlockConstPoolScope block_const_pool(masm_); | 6716 { Assembler::BlockConstPoolScope block_const_pool(masm_); |
| 6694 // Call keyed store IC. It has the arguments value, key and receiver in r0, | 6717 // Call keyed store IC. It has the arguments value, key and receiver in r0, |
| 6695 // r1 and r2. | 6718 // r1 and r2. |
| 6696 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize)); | 6719 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize)); |
| 6697 __ Call(ic, RelocInfo::CODE_TARGET); | 6720 __ Call(ic, RelocInfo::CODE_TARGET); |
| 6698 // The call must be followed by a nop instruction to indicate that the | 6721 // The call must be followed by a nop instruction to indicate that the |
| 6699 // keyed store has been inlined. | 6722 // keyed store has been inlined. |
| 6700 __ nop(PROPERTY_ACCESS_INLINED); | 6723 __ MarkCode(MacroAssembler::PROPERTY_ACCESS_INLINED); |
| 6701 | 6724 |
| 6702 // Block the constant pool for one more instruction after leaving this | 6725 // Block the constant pool for one more instruction after leaving this |
| 6703 // constant pool block scope to include the branch instruction ending the | 6726 // constant pool block scope to include the branch instruction ending the |
| 6704 // deferred code. | 6727 // deferred code. |
| 6705 __ BlockConstPoolFor(1); | 6728 __ BlockConstPoolFor(1); |
| 6706 } | 6729 } |
| 6707 } | 6730 } |
| 6708 | 6731 |
| 6709 | 6732 |
| 6710 class DeferredReferenceSetNamedValue: public DeferredCode { | 6733 class DeferredReferenceSetNamedValue: public DeferredCode { |
| (...skipping 27 matching lines...) |
| 6738 __ mov(r2, Operand(name_)); | 6761 __ mov(r2, Operand(name_)); |
| 6739 | 6762 |
| 6740 // The rest of the instructions in the deferred code must be together. | 6763 // The rest of the instructions in the deferred code must be together. |
| 6741 { Assembler::BlockConstPoolScope block_const_pool(masm_); | 6764 { Assembler::BlockConstPoolScope block_const_pool(masm_); |
| 6742 // Call keyed store IC. It has the arguments value, key and receiver in r0, | 6765 // Call keyed store IC. It has the arguments value, key and receiver in r0, |
| 6743 // r1 and r2. | 6766 // r1 and r2. |
| 6744 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize)); | 6767 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize)); |
| 6745 __ Call(ic, RelocInfo::CODE_TARGET); | 6768 __ Call(ic, RelocInfo::CODE_TARGET); |
| 6746 // The call must be followed by a nop instruction to indicate that the | 6769 // The call must be followed by a nop instruction to indicate that the |
| 6747 // named store has been inlined. | 6770 // named store has been inlined. |
| 6748 __ nop(PROPERTY_ACCESS_INLINED); | 6771 __ MarkCode(MacroAssembler::PROPERTY_ACCESS_INLINED); |
| 6749 | 6772 |
| 6750 // Go back to the frame we entered with. The instructions | 6773 // Go back to the frame we entered with. The instructions |
| 6751 // generated by this merge are skipped over by the inline store | 6774 // generated by this merge are skipped over by the inline store |
| 6752 // patching mechanism when looking for the branch instruction that | 6775 // patching mechanism when looking for the branch instruction that |
| 6753 // tells it where the code to patch is. | 6776 // tells it where the code to patch is. |
| 6754 copied_frame.MergeTo(frame_state()->frame()); | 6777 copied_frame.MergeTo(frame_state()->frame()); |
| 6755 | 6778 |
| 6756 // Block the constant pool for one more instruction after leaving this | 6779 // Block the constant pool for one more instruction after leaving this |
| 6757 // constant pool block scope to include the branch instruction ending the | 6780 // constant pool block scope to include the branch instruction ending the |
| 6758 // deferred code. | 6781 // deferred code. |
| 6759 __ BlockConstPoolFor(1); | 6782 __ BlockConstPoolFor(1); |
| 6760 } | 6783 } |
| 6761 } | 6784 } |
| 6762 | 6785 |
| 6763 | 6786 |
| 6764 // Consumes the top of stack (the receiver) and pushes the result instead. | 6787 // Consumes the top of stack (the receiver) and pushes the result instead. |
| 6765 void CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) { | 6788 void CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) { |
| 6766 if (is_contextual || scope()->is_global_scope() || loop_nesting() == 0) { | 6789 bool contextual_load_in_builtin = |
| 6790 is_contextual && |
| 6791 (Bootstrapper::IsActive() || |
| 6792 (!info_->closure().is_null() && info_->closure()->IsBuiltin())); |
| 6793 |
| 6794 if (scope()->is_global_scope() || |
| 6795 loop_nesting() == 0 || |
| 6796 contextual_load_in_builtin) { |
| 6767 Comment cmnt(masm(), "[ Load from named Property"); | 6797 Comment cmnt(masm(), "[ Load from named Property"); |
| 6768 // Setup the name register and call load IC. | 6798 // Setup the name register and call load IC. |
| 6769 frame_->CallLoadIC(name, | 6799 frame_->CallLoadIC(name, |
| 6770 is_contextual | 6800 is_contextual |
| 6771 ? RelocInfo::CODE_TARGET_CONTEXT | 6801 ? RelocInfo::CODE_TARGET_CONTEXT |
| 6772 : RelocInfo::CODE_TARGET); | 6802 : RelocInfo::CODE_TARGET); |
| 6773 frame_->EmitPush(r0); // Push answer. | 6803 frame_->EmitPush(r0); // Push answer. |
| 6774 } else { | 6804 } else { |
| 6775 // Inline the in-object property case. | 6805 // Inline the in-object property case. |
| 6776 Comment cmnt(masm(), "[ Inlined named property load"); | 6806 Comment cmnt(masm(), is_contextual |
| 6807 ? "[ Inlined contextual property load" |
| 6808 : "[ Inlined named property load"); |
| 6777 | 6809 |
| 6778 // Counter will be decremented in the deferred code. Placed here to avoid | 6810 // Counter will be decremented in the deferred code. Placed here to avoid |
| 6779 // having it in the instruction stream below where patching will occur. | 6811 // having it in the instruction stream below where patching will occur. |
| 6780 __ IncrementCounter(&Counters::named_load_inline, 1, | 6812 if (is_contextual) { |
| 6781 frame_->scratch0(), frame_->scratch1()); | 6813 __ IncrementCounter(&Counters::named_load_global_inline, 1, |
| 6814 frame_->scratch0(), frame_->scratch1()); |
| 6815 } else { |
| 6816 __ IncrementCounter(&Counters::named_load_inline, 1, |
| 6817 frame_->scratch0(), frame_->scratch1()); |
| 6818 } |
| 6782 | 6819 |
| 6783 // The following instructions are the inlined load of an in-object property. | 6820 // The following instructions are the inlined load of an in-object property. |
| 6784 // Parts of this code is patched, so the exact instructions generated needs | 6821 // Parts of this code is patched, so the exact instructions generated needs |
| 6785 // to be fixed. Therefore the instruction pool is blocked when generating | 6822 // to be fixed. Therefore the instruction pool is blocked when generating |
| 6786 // this code | 6823 // this code |
| 6787 | 6824 |
| 6788 // Load the receiver from the stack. | 6825 // Load the receiver from the stack. |
| 6789 Register receiver = frame_->PopToRegister(); | 6826 Register receiver = frame_->PopToRegister(); |
| 6790 | 6827 |
| 6791 DeferredReferenceGetNamedValue* deferred = | 6828 DeferredReferenceGetNamedValue* deferred = |
| 6792 new DeferredReferenceGetNamedValue(receiver, name); | 6829 new DeferredReferenceGetNamedValue(receiver, name, is_contextual); |
| 6830 |
| 6831 bool is_dont_delete = false; |
| 6832 if (is_contextual) { |
| 6833 if (!info_->closure().is_null()) { |
| 6834 // When doing lazy compilation we can check if the global cell |
| 6835 // already exists and use its "don't delete" status as a hint. |
| 6836 AssertNoAllocation no_gc; |
| 6837 v8::internal::GlobalObject* global_object = |
| 6838 info_->closure()->context()->global(); |
| 6839 LookupResult lookup; |
| 6840 global_object->LocalLookupRealNamedProperty(*name, &lookup); |
| 6841 if (lookup.IsProperty() && lookup.type() == NORMAL) { |
| 6842 ASSERT(lookup.holder() == global_object); |
| 6843 ASSERT(global_object->property_dictionary()->ValueAt( |
| 6844 lookup.GetDictionaryEntry())->IsJSGlobalPropertyCell()); |
| 6845 is_dont_delete = lookup.IsDontDelete(); |
| 6846 } |
| 6847 } |
| 6848 if (is_dont_delete) { |
| 6849 __ IncrementCounter(&Counters::dont_delete_hint_hit, 1, |
| 6850 frame_->scratch0(), frame_->scratch1()); |
| 6851 } |
| 6852 } |
| 6853 |
| 6854 { Assembler::BlockConstPoolScope block_const_pool(masm_); |
| 6855 if (!is_contextual) { |
| 6856 // Check that the receiver is a heap object. |
| 6857 __ tst(receiver, Operand(kSmiTagMask)); |
| 6858 deferred->Branch(eq); |
| 6859 } |
| 6860 |
| 6861 // Check for the_hole_value if necessary. |
| 6862 // Below we rely on the number of instructions generated, and we can't |
| 6863 // cope with the Check macro which does not generate a fixed number of |
| 6864 // instructions. |
| 6865 Label skip, check_the_hole, cont; |
| 6866 if (FLAG_debug_code && is_contextual && is_dont_delete) { |
| 6867 __ b(&skip); |
| 6868 __ bind(&check_the_hole); |
| 6869 __ Check(ne, "DontDelete cells can't contain the hole"); |
| 6870 __ b(&cont); |
| 6871 __ bind(&skip); |
| 6872 } |
| 6793 | 6873 |
| 6794 #ifdef DEBUG | 6874 #ifdef DEBUG |
| 6795 int kInlinedNamedLoadInstructions = 7; | 6875 int InlinedNamedLoadInstructions = 5; |
| 6796 Label check_inlined_codesize; | 6876 Label check_inlined_codesize; |
| 6797 masm_->bind(&check_inlined_codesize); | 6877 masm_->bind(&check_inlined_codesize); |
| 6798 #endif | 6878 #endif |
| 6799 | 6879 |
| 6800 { Assembler::BlockConstPoolScope block_const_pool(masm_); | |
| 6801 // Check that the receiver is a heap object. | |
| 6802 __ tst(receiver, Operand(kSmiTagMask)); | |
| 6803 deferred->Branch(eq); | |
| 6804 | |
| 6805 Register scratch = VirtualFrame::scratch0(); | 6880 Register scratch = VirtualFrame::scratch0(); |
| 6806 Register scratch2 = VirtualFrame::scratch1(); | 6881 Register scratch2 = VirtualFrame::scratch1(); |
| 6807 | 6882 |
| 6808 // Check the map. The null map used below is patched by the inline cache | 6883 // Check the map. The null map used below is patched by the inline cache |
| 6809 // code. Therefore we can't use a LoadRoot call. | 6884 // code. Therefore we can't use a LoadRoot call. |
| 6810 __ ldr(scratch, FieldMemOperand(receiver, HeapObject::kMapOffset)); | 6885 __ ldr(scratch, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
| 6811 __ mov(scratch2, Operand(Factory::null_value())); | 6886 __ mov(scratch2, Operand(Factory::null_value())); |
| 6812 __ cmp(scratch, scratch2); | 6887 __ cmp(scratch, scratch2); |
| 6813 deferred->Branch(ne); | 6888 deferred->Branch(ne); |
| 6814 | 6889 |
| 6815 // Initially use an invalid index. The index will be patched by the | 6890 if (is_contextual) { |
| 6816 // inline cache code. | 6891 #ifdef DEBUG |
| 6817 __ ldr(receiver, MemOperand(receiver, 0)); | 6892 InlinedNamedLoadInstructions += 1; |
| 6893 #endif |
| 6894 // Load the (initially invalid) cell and get its value. |
| 6895 masm()->mov(receiver, Operand(Factory::null_value())); |
| 6896 __ ldr(receiver, |
| 6897 FieldMemOperand(receiver, JSGlobalPropertyCell::kValueOffset)); |
| 6898 |
| 6899 deferred->set_is_dont_delete(is_dont_delete); |
| 6900 |
| 6901 if (!is_dont_delete) { |
| 6902 #ifdef DEBUG |
| 6903 InlinedNamedLoadInstructions += 3; |
| 6904 #endif |
| 6905 __ cmp(receiver, Operand(Factory::the_hole_value())); |
| 6906 deferred->Branch(eq); |
| 6907 } else if (FLAG_debug_code) { |
| 6908 #ifdef DEBUG |
| 6909 InlinedNamedLoadInstructions += 3; |
| 6910 #endif |
| 6911 __ cmp(receiver, Operand(Factory::the_hole_value())); |
| 6912 __ b(&check_the_hole, eq); |
| 6913 __ bind(&cont); |
| 6914 } |
| 6915 } else { |
| 6916 // Initially use an invalid index. The index will be patched by the |
| 6917 // inline cache code. |
| 6918 __ ldr(receiver, MemOperand(receiver, 0)); |
| 6919 } |
| 6818 | 6920 |
| 6819 // Make sure that the expected number of instructions are generated. | 6921 // Make sure that the expected number of instructions are generated. |
| 6820 ASSERT_EQ(kInlinedNamedLoadInstructions, | 6922 // If the code before is updated, the offsets in ic-arm.cc |
| 6923 // LoadIC::PatchInlinedContextualLoad and PatchInlinedLoad need |
| 6924 // to be updated. |
| 6925 ASSERT_EQ(InlinedNamedLoadInstructions, |
| 6821 masm_->InstructionsGeneratedSince(&check_inlined_codesize)); | 6926 masm_->InstructionsGeneratedSince(&check_inlined_codesize)); |
| 6822 } | 6927 } |
| 6823 | 6928 |
| 6824 deferred->BindExit(); | 6929 deferred->BindExit(); |
| 6825 // At this point the receiver register has the result, either from the | 6930 // At this point the receiver register has the result, either from the |
| 6826 // deferred code or from the inlined code. | 6931 // deferred code or from the inlined code. |
| 6827 frame_->EmitPush(receiver); | 6932 frame_->EmitPush(receiver); |
| 6828 } | 6933 } |
| 6829 } | 6934 } |
| 6830 | 6935 |
| (...skipping 449 matching lines...) |
| 7280 BinaryOpIC::GetName(runtime_operands_type_)); | 7385 BinaryOpIC::GetName(runtime_operands_type_)); |
| 7281 return name_; | 7386 return name_; |
| 7282 } | 7387 } |
| 7283 | 7388 |
| 7284 | 7389 |
| 7285 #undef __ | 7390 #undef __ |
| 7286 | 7391 |
| 7287 } } // namespace v8::internal | 7392 } } // namespace v8::internal |
| 7288 | 7393 |
| 7289 #endif // V8_TARGET_ARCH_ARM | 7394 #endif // V8_TARGET_ARCH_ARM |
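
The deferred paths in this patch all end their IC call with a marker (formerly a bare `nop(PROPERTY_ACCESS_INLINED)`, now `MarkCode(...)` with a per-kind marker type), and the inlined fast path is emitted under a blocked constant pool with a fixed instruction count, so the patching code referenced in the new comments (LoadIC::PatchInlinedLoad / PatchInlinedContextualLoad in ic-arm.cc) can find the call site and rewrite the map constant and load offset at fixed distances from it. Below is a minimal, self-contained sketch of that marker-and-patch idea only; it is not V8 code, and every name, encoding, and offset in it is invented for illustration.

```cpp
// Hypothetical, simplified model of marker-based call-site patching.
// Not V8 code: names, encodings, and offsets are invented to illustrate
// how a marker nop after an IC call lets a patcher find and rewrite the
// fixed-length inlined load sequence that precedes it.
#include <cstdint>
#include <cstdio>
#include <vector>

using Instr = uint32_t;  // pretend instructions are plain 32-bit words

// A marker nop: an architectural no-op whose shift field encodes what
// kind of inlined access precedes the call.
constexpr Instr kNop = 0xE1A00000u;  // mov r0, r0
constexpr Instr MarkerNop(uint32_t type) { return kNop | (type << 7); }

constexpr uint32_t kPropertyAccessInlined = 1;

struct Assembler {
  std::vector<Instr> code;
  void Emit(Instr i) { code.push_back(i); }
  void Call(uint32_t target) { Emit(0xEB000000u | (target & 0xFFFFFFu)); }
  void MarkCode(uint32_t type) { Emit(MarkerNop(type)); }
};

// Walk back from the word after the call, verify the marker, and rewrite
// the "null map" constant at a fixed distance from the call site.
bool PatchInlinedLoad(std::vector<Instr>& code, size_t return_pc,
                      Instr new_map_constant) {
  if (return_pc >= code.size() || return_pc < 5) return false;
  if (code[return_pc] != MarkerNop(kPropertyAccessInlined)) return false;
  size_t map_constant_index = return_pc - 1 /* call */ - 3;  // fixed layout
  code[map_constant_index] = new_map_constant;
  return true;
}

int main() {
  Assembler masm;
  masm.Emit(0xE5901000u);  // ldr: load the receiver's map      (index 0)
  masm.Emit(0xE59F2000u);  // ldr: "null map" constant to patch  (index 1)
  masm.Emit(0xE1510002u);  // cmp: map check                     (index 2)
  masm.Emit(0xE5900000u);  // ldr: property load, also patchable (index 3)
  masm.Call(0x1234);       // call the LoadIC stub               (index 4)
  masm.MarkCode(kPropertyAccessInlined);  // marker              (index 5)

  size_t return_pc = masm.code.size() - 1;  // word following the call
  bool ok = PatchInlinedLoad(masm.code, return_pc, 0xCAFEF00Du);
  std::printf("patched map constant: %s\n", ok ? "yes" : "no");
  return 0;
}
```

In the real patch the marker type additionally distinguishes contextual loads and "don't delete" global cells (PROPERTY_ACCESS_INLINED_CONTEXT and PROPERTY_ACCESS_INLINED_CONTEXT_DONT_DELETE), which is why DeferredReferenceGetNamedValue now carries is_contextual_ and is_dont_delete_ and why the DEBUG instruction count (InlinedNamedLoadInstructions) varies with those flags.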