| OLD | NEW |
| 1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 3681 matching lines...) |
| 3692 // Load the literals array of the function. | 3692 // Load the literals array of the function. |
| 3693 __ ldr(tos, FieldMemOperand(tos, JSFunction::kLiteralsOffset)); | 3693 __ ldr(tos, FieldMemOperand(tos, JSFunction::kLiteralsOffset)); |
| 3694 frame_->EmitPush(tos); | 3694 frame_->EmitPush(tos); |
| 3695 frame_->EmitPush(Operand(Smi::FromInt(node->literal_index()))); | 3695 frame_->EmitPush(Operand(Smi::FromInt(node->literal_index()))); |
| 3696 frame_->EmitPush(Operand(node->constant_elements())); | 3696 frame_->EmitPush(Operand(node->constant_elements())); |
| 3697 int length = node->values()->length(); | 3697 int length = node->values()->length(); |
| 3698 if (node->constant_elements()->map() == HEAP->fixed_cow_array_map()) { | 3698 if (node->constant_elements()->map() == HEAP->fixed_cow_array_map()) { |
| 3699 FastCloneShallowArrayStub stub( | 3699 FastCloneShallowArrayStub stub( |
| 3700 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length); | 3700 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length); |
| 3701 frame_->CallStub(&stub, 3); | 3701 frame_->CallStub(&stub, 3); |
| 3702 __ IncrementCounter(COUNTERS->cow_arrays_created_stub(), 1, r1, r2); | 3702 __ IncrementCounter(masm_->isolate()->counters()->cow_arrays_created_stub(), |
| 3703 1, r1, r2); |
| 3703 } else if (node->depth() > 1) { | 3704 } else if (node->depth() > 1) { |
| 3704 frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3); | 3705 frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3); |
| 3705 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) { | 3706 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) { |
| 3706 frame_->CallRuntime(Runtime::kCreateArrayLiteralShallow, 3); | 3707 frame_->CallRuntime(Runtime::kCreateArrayLiteralShallow, 3); |
| 3707 } else { | 3708 } else { |
| 3708 FastCloneShallowArrayStub stub( | 3709 FastCloneShallowArrayStub stub( |
| 3709 FastCloneShallowArrayStub::CLONE_ELEMENTS, length); | 3710 FastCloneShallowArrayStub::CLONE_ELEMENTS, length); |
| 3710 frame_->CallStub(&stub, 3); | 3711 frame_->CallStub(&stub, 3); |
| 3711 } | 3712 } |
| 3712 frame_->EmitPush(r0); // save the result | 3713 frame_->EmitPush(r0); // save the result |
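Note on the hunk above: the recurring change in this CL is mechanical — counter lookups move from the process-global `COUNTERS` macro to counters reached through the assembler's isolate (`masm_->isolate()->counters()->...`), so each isolate keeps its own statistics. The sketch below is not V8 source; it is a minimal stand-in model of that access path, and every type and method name in it is assumed for illustration only.

```cpp
#include <cstdio>

// Hypothetical stand-ins (not V8 source) for the types involved.
struct StatsCounter {
  int value = 0;
};

struct Counters {
  StatsCounter* cow_arrays_created_stub() { return &cow_arrays_created_stub_; }
  StatsCounter cow_arrays_created_stub_;
};

struct Isolate {
  Counters* counters() { return &counters_; }  // per-isolate, not process-global
  Counters counters_;
};

class MacroAssembler {
 public:
  explicit MacroAssembler(Isolate* isolate) : isolate_(isolate) {}
  Isolate* isolate() const { return isolate_; }
  // The real IncrementCounter emits code that bumps the counter cell using two
  // scratch registers; here we only model the effect on the counter value.
  void IncrementCounter(StatsCounter* counter, int delta) { counter->value += delta; }

 private:
  Isolate* isolate_;
};

int main() {
  Isolate isolate;
  MacroAssembler masm(&isolate);
  // Old style:  __ IncrementCounter(COUNTERS->cow_arrays_created_stub(), 1, r1, r2);
  // New style:  __ IncrementCounter(masm_->isolate()->counters()->..., 1, r1, r2);
  masm.IncrementCounter(masm.isolate()->counters()->cow_arrays_created_stub(), 1);
  std::printf("cow_arrays_created_stub = %d\n",
              isolate.counters()->cow_arrays_created_stub()->value);
  return 0;
}
```

Routing the lookup through the isolate (rather than a global) is what makes the remaining hunks in this file look the way they do; only line wrapping differs from call site to call site.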
| (...skipping 2868 matching lines...) |
| 6581 void DeferredReferenceGetNamedValue::Generate() { | 6582 void DeferredReferenceGetNamedValue::Generate() { |
| 6582 #ifdef DEBUG | 6583 #ifdef DEBUG |
| 6583 int expected_height = frame_state()->frame()->height(); | 6584 int expected_height = frame_state()->frame()->height(); |
| 6584 #endif | 6585 #endif |
| 6585 VirtualFrame copied_frame(*frame_state()->frame()); | 6586 VirtualFrame copied_frame(*frame_state()->frame()); |
| 6586 copied_frame.SpillAll(); | 6587 copied_frame.SpillAll(); |
| 6587 | 6588 |
| 6588 Register scratch1 = VirtualFrame::scratch0(); | 6589 Register scratch1 = VirtualFrame::scratch0(); |
| 6589 Register scratch2 = VirtualFrame::scratch1(); | 6590 Register scratch2 = VirtualFrame::scratch1(); |
| 6590 ASSERT(!receiver_.is(scratch1) && !receiver_.is(scratch2)); | 6591 ASSERT(!receiver_.is(scratch1) && !receiver_.is(scratch2)); |
| 6591 __ DecrementCounter(COUNTERS->named_load_inline(), 1, scratch1, scratch2); | 6592 __ DecrementCounter(masm_->isolate()->counters()->named_load_inline(), |
| 6592 __ IncrementCounter(COUNTERS->named_load_inline_miss(), 1, | 6593 1, scratch1, scratch2); |
| 6593 scratch1, scratch2); | 6594 __ IncrementCounter(masm_->isolate()->counters()->named_load_inline_miss(), |
| 6595 1, scratch1, scratch2); |
| 6594 | 6596 |
| 6595 // Ensure receiver in r0 and name in r2 to match load ic calling convention. | 6597 // Ensure receiver in r0 and name in r2 to match load ic calling convention. |
| 6596 __ Move(r0, receiver_); | 6598 __ Move(r0, receiver_); |
| 6597 __ mov(r2, Operand(name_)); | 6599 __ mov(r2, Operand(name_)); |
| 6598 | 6600 |
| 6599 // The rest of the instructions in the deferred code must be together. | 6601 // The rest of the instructions in the deferred code must be together. |
| 6600 { Assembler::BlockConstPoolScope block_const_pool(masm_); | 6602 { Assembler::BlockConstPoolScope block_const_pool(masm_); |
| 6601 Handle<Code> ic(Isolate::Current()->builtins()->builtin( | 6603 Handle<Code> ic(Isolate::Current()->builtins()->builtin( |
| 6602 Builtins::LoadIC_Initialize)); | 6604 Builtins::LoadIC_Initialize)); |
| 6603 RelocInfo::Mode mode = is_contextual_ | 6605 RelocInfo::Mode mode = is_contextual_ |
| (...skipping 50 matching lines...) |
| 6654 // in r0. | 6656 // in r0. |
| 6655 void DeferredReferenceGetKeyedValue::Generate() { | 6657 void DeferredReferenceGetKeyedValue::Generate() { |
| 6656 ASSERT((key_.is(r0) && receiver_.is(r1)) || | 6658 ASSERT((key_.is(r0) && receiver_.is(r1)) || |
| 6657 (key_.is(r1) && receiver_.is(r0))); | 6659 (key_.is(r1) && receiver_.is(r0))); |
| 6658 | 6660 |
| 6659 VirtualFrame copied_frame(*frame_state()->frame()); | 6661 VirtualFrame copied_frame(*frame_state()->frame()); |
| 6660 copied_frame.SpillAll(); | 6662 copied_frame.SpillAll(); |
| 6661 | 6663 |
| 6662 Register scratch1 = VirtualFrame::scratch0(); | 6664 Register scratch1 = VirtualFrame::scratch0(); |
| 6663 Register scratch2 = VirtualFrame::scratch1(); | 6665 Register scratch2 = VirtualFrame::scratch1(); |
| 6664 __ DecrementCounter(COUNTERS->keyed_load_inline(), 1, scratch1, scratch2); | 6666 __ DecrementCounter(masm_->isolate()->counters()->keyed_load_inline(), |
| 6665 __ IncrementCounter(COUNTERS->keyed_load_inline_miss(), | 6667 1, scratch1, scratch2); |
| 6666 1, scratch1, scratch2); | 6668 __ IncrementCounter(masm_->isolate()->counters()->keyed_load_inline_miss(), |
| 6669 1, scratch1, scratch2); |
| 6667 | 6670 |
| 6668 // Ensure key in r0 and receiver in r1 to match keyed load ic calling | 6671 // Ensure key in r0 and receiver in r1 to match keyed load ic calling |
| 6669 // convention. | 6672 // convention. |
| 6670 if (key_.is(r1)) { | 6673 if (key_.is(r1)) { |
| 6671 __ Swap(r0, r1, ip); | 6674 __ Swap(r0, r1, ip); |
| 6672 } | 6675 } |
| 6673 | 6676 |
| 6674 // The rest of the instructions in the deferred code must be together. | 6677 // The rest of the instructions in the deferred code must be together. |
| 6675 { Assembler::BlockConstPoolScope block_const_pool(masm_); | 6678 { Assembler::BlockConstPoolScope block_const_pool(masm_); |
| 6676 // Call keyed load IC. It has the arguments key and receiver in r0 and r1. | 6679 // Call keyed load IC. It has the arguments key and receiver in r0 and r1. |
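Each of these deferred `Generate()` bodies follows the same bookkeeping pattern: the inline fast path optimistically counted a hit, and the miss path decrements that counter and increments the matching `*_miss` counter before falling back to the generic IC. Below is a minimal sketch of that pattern using hypothetical stand-in types, not V8's StatsCounter machinery.

```cpp
#include <cstdio>

struct Counter {
  int value = 0;
  void Add(int delta) { value += delta; }
};

struct Counters {
  Counter inline_hits;    // e.g. keyed_load_inline
  Counter inline_misses;  // e.g. keyed_load_inline_miss
};

// Fast path: the hit is counted up front, before the patchable load sequence,
// so the counter bump never sits inside the instructions that get patched.
void EmitInlineFastPath(Counters* counters) { counters->inline_hits.Add(1); }

// Deferred (miss) path: undo the optimistic hit and record a miss instead,
// then fall back to the generic IC (not modelled here).
void EmitDeferredMissPath(Counters* counters) {
  counters->inline_hits.Add(-1);
  counters->inline_misses.Add(1);
}

int main() {
  Counters counters;
  EmitInlineFastPath(&counters);    // emitted with the main code
  EmitDeferredMissPath(&counters);  // emitted in the deferred block
  std::printf("hits=%d misses=%d\n",
              counters.inline_hits.value, counters.inline_misses.value);
  return 0;
}
```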
| (...skipping 38 matching lines...) |
| 6715 Register value_; | 6718 Register value_; |
| 6716 Register key_; | 6719 Register key_; |
| 6717 Register receiver_; | 6720 Register receiver_; |
| 6718 StrictModeFlag strict_mode_; | 6721 StrictModeFlag strict_mode_; |
| 6719 }; | 6722 }; |
| 6720 | 6723 |
| 6721 | 6724 |
| 6722 void DeferredReferenceSetKeyedValue::Generate() { | 6725 void DeferredReferenceSetKeyedValue::Generate() { |
| 6723 Register scratch1 = VirtualFrame::scratch0(); | 6726 Register scratch1 = VirtualFrame::scratch0(); |
| 6724 Register scratch2 = VirtualFrame::scratch1(); | 6727 Register scratch2 = VirtualFrame::scratch1(); |
| 6725 __ DecrementCounter(COUNTERS->keyed_store_inline(), 1, scratch1, scratch2); | 6728 __ DecrementCounter(masm_->isolate()->counters()->keyed_store_inline(), |
| 6726 __ IncrementCounter(COUNTERS->keyed_store_inline_miss(), | 6729 1, scratch1, scratch2); |
| 6730 __ IncrementCounter(masm_->isolate()->counters()->keyed_store_inline_miss(), |
| 6727 1, scratch1, scratch2); | 6731 1, scratch1, scratch2); |
| 6728 | 6732 |
| 6729 // Ensure value in r0, key in r1 and receiver in r2 to match keyed store ic | 6733 // Ensure value in r0, key in r1 and receiver in r2 to match keyed store ic |
| 6730 // calling convention. | 6734 // calling convention. |
| 6731 if (value_.is(r1)) { | 6735 if (value_.is(r1)) { |
| 6732 __ Swap(r0, r1, ip); | 6736 __ Swap(r0, r1, ip); |
| 6733 } | 6737 } |
| 6734 ASSERT(receiver_.is(r2)); | 6738 ASSERT(receiver_.is(r2)); |
| 6735 | 6739 |
| 6736 // The rest of the instructions in the deferred code must be together. | 6740 // The rest of the instructions in the deferred code must be together. |
| (...skipping 96 matching lines...) |
| 6833 frame_->EmitPush(r0); // Push answer. | 6837 frame_->EmitPush(r0); // Push answer. |
| 6834 } else { | 6838 } else { |
| 6835 // Inline the in-object property case. | 6839 // Inline the in-object property case. |
| 6836 Comment cmnt(masm(), is_contextual | 6840 Comment cmnt(masm(), is_contextual |
| 6837 ? "[ Inlined contextual property load" | 6841 ? "[ Inlined contextual property load" |
| 6838 : "[ Inlined named property load"); | 6842 : "[ Inlined named property load"); |
| 6839 | 6843 |
| 6840 // Counter will be decremented in the deferred code. Placed here to avoid | 6844 // Counter will be decremented in the deferred code. Placed here to avoid |
| 6841 // having it in the instruction stream below where patching will occur. | 6845 // having it in the instruction stream below where patching will occur. |
| 6842 if (is_contextual) { | 6846 if (is_contextual) { |
| 6843 __ IncrementCounter(COUNTERS->named_load_global_inline(), 1, | 6847 __ IncrementCounter( |
| 6844 frame_->scratch0(), frame_->scratch1()); | 6848 masm_->isolate()->counters()->named_load_global_inline(), |
| 6849 1, frame_->scratch0(), frame_->scratch1()); |
| 6845 } else { | 6850 } else { |
| 6846 __ IncrementCounter(COUNTERS->named_load_inline(), 1, | 6851 __ IncrementCounter(masm_->isolate()->counters()->named_load_inline(), |
| 6847 frame_->scratch0(), frame_->scratch1()); | 6852 1, frame_->scratch0(), frame_->scratch1()); |
| 6848 } | 6853 } |
| 6849 | 6854 |
| 6850 // The following instructions are the inlined load of an in-object property. | 6855 // The following instructions are the inlined load of an in-object property. |
| 6851 // Parts of this code is patched, so the exact instructions generated needs | 6856 // Parts of this code is patched, so the exact instructions generated needs |
| 6852 // to be fixed. Therefore the instruction pool is blocked when generating | 6857 // to be fixed. Therefore the instruction pool is blocked when generating |
| 6853 // this code | 6858 // this code |
| 6854 | 6859 |
| 6855 // Load the receiver from the stack. | 6860 // Load the receiver from the stack. |
| 6856 Register receiver = frame_->PopToRegister(); | 6861 Register receiver = frame_->PopToRegister(); |
| 6857 | 6862 |
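The comments in this hunk stress that parts of the inlined load are patched later, so the emitted instruction sequence must keep a fixed shape; that is why the constant pool is blocked while it is generated. The following is an illustrative RAII sketch of that idea, assuming simplified Assembler methods rather than the real ARM assembler interface.

```cpp
#include <cassert>

// Illustrative sketch only: while a scope object is alive, the assembler must
// not flush its constant pool into the instruction stream, so the code emitted
// inside the scope keeps a contiguous, fixed layout that patching can rely on.
class Assembler {
 public:
  bool const_pool_blocked() const { return block_depth_ > 0; }
  void StartBlockConstPool() { ++block_depth_; }
  void EndBlockConstPool() { --block_depth_; }
  void CheckConstPool() {
    if (const_pool_blocked()) return;  // flushing is deferred until unblocked
    // ... emit the literal pool here ...
  }

 private:
  int block_depth_ = 0;
};

class BlockConstPoolScope {
 public:
  explicit BlockConstPoolScope(Assembler* assm) : assm_(assm) {
    assm_->StartBlockConstPool();
  }
  ~BlockConstPoolScope() { assm_->EndBlockConstPool(); }

 private:
  Assembler* assm_;
};

int main() {
  Assembler assm;
  {
    BlockConstPoolScope scope(&assm);
    assert(assm.const_pool_blocked());  // patched load sequence is emitted here
  }
  assert(!assm.const_pool_blocked());   // pool may be flushed again
  return 0;
}
```

The same reasoning explains why the counter increments sit above the blocked region in this file: their length can vary, so they must stay out of the patched instructions.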
| (...skipping 11 matching lines...) |
| 6869 LookupResult lookup; | 6874 LookupResult lookup; |
| 6870 global_object->LocalLookupRealNamedProperty(*name, &lookup); | 6875 global_object->LocalLookupRealNamedProperty(*name, &lookup); |
| 6871 if (lookup.IsProperty() && lookup.type() == NORMAL) { | 6876 if (lookup.IsProperty() && lookup.type() == NORMAL) { |
| 6872 ASSERT(lookup.holder() == global_object); | 6877 ASSERT(lookup.holder() == global_object); |
| 6873 ASSERT(global_object->property_dictionary()->ValueAt( | 6878 ASSERT(global_object->property_dictionary()->ValueAt( |
| 6874 lookup.GetDictionaryEntry())->IsJSGlobalPropertyCell()); | 6879 lookup.GetDictionaryEntry())->IsJSGlobalPropertyCell()); |
| 6875 is_dont_delete = lookup.IsDontDelete(); | 6880 is_dont_delete = lookup.IsDontDelete(); |
| 6876 } | 6881 } |
| 6877 } | 6882 } |
| 6878 if (is_dont_delete) { | 6883 if (is_dont_delete) { |
| 6879 __ IncrementCounter(COUNTERS->dont_delete_hint_hit(), 1, | 6884 __ IncrementCounter( |
| 6880 frame_->scratch0(), frame_->scratch1()); | 6885 masm_->isolate()->counters()->dont_delete_hint_hit(), |
| 6886 1, frame_->scratch0(), frame_->scratch1()); |
| 6881 } | 6887 } |
| 6882 } | 6888 } |
| 6883 | 6889 |
| 6884 { Assembler::BlockConstPoolScope block_const_pool(masm_); | 6890 { Assembler::BlockConstPoolScope block_const_pool(masm_); |
| 6885 if (!is_contextual) { | 6891 if (!is_contextual) { |
| 6886 // Check that the receiver is a heap object. | 6892 // Check that the receiver is a heap object. |
| 6887 __ tst(receiver, Operand(kSmiTagMask)); | 6893 __ tst(receiver, Operand(kSmiTagMask)); |
| 6888 deferred->Branch(eq); | 6894 deferred->Branch(eq); |
| 6889 } | 6895 } |
| 6890 | 6896 |
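The receiver check above (`tst receiver, #kSmiTagMask` followed by a branch on eq into the deferred code) relies on V8's pointer tagging: small integers carry a 0 in the low bit and heap object pointers a 1, so masking with 1 yields zero exactly for smis. A minimal sketch of that predicate, with the constants written out for illustration:

```cpp
#include <cassert>
#include <cstdint>

// Mirrors the tagging constants used by the 32-bit smi scheme.
constexpr intptr_t kSmiTag = 0;
constexpr intptr_t kSmiTagSize = 1;
constexpr intptr_t kSmiTagMask = (1 << kSmiTagSize) - 1;  // == 1

inline bool IsSmi(intptr_t tagged_value) {
  // Mirrors the 'tst' + branch-on-eq pair: the Z flag is set (branch taken on
  // the non-smi side inverted) exactly when the masked bits are zero.
  return (tagged_value & kSmiTagMask) == kSmiTag;
}

inline intptr_t TagSmi(intptr_t value) { return value << kSmiTagSize; }

int main() {
  assert(IsSmi(TagSmi(42)));       // smis stay on the inline fast path
  assert(!IsSmi(TagSmi(42) | 1));  // heap-object-style values take the deferred path
  return 0;
}
```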
| (...skipping 164 matching lines...) |
| 7055 void CodeGenerator::EmitKeyedLoad() { | 7061 void CodeGenerator::EmitKeyedLoad() { |
| 7056 if (loop_nesting() == 0) { | 7062 if (loop_nesting() == 0) { |
| 7057 Comment cmnt(masm_, "[ Load from keyed property"); | 7063 Comment cmnt(masm_, "[ Load from keyed property"); |
| 7058 frame_->CallKeyedLoadIC(); | 7064 frame_->CallKeyedLoadIC(); |
| 7059 } else { | 7065 } else { |
| 7060 // Inline the keyed load. | 7066 // Inline the keyed load. |
| 7061 Comment cmnt(masm_, "[ Inlined load from keyed property"); | 7067 Comment cmnt(masm_, "[ Inlined load from keyed property"); |
| 7062 | 7068 |
| 7063 // Counter will be decremented in the deferred code. Placed here to avoid | 7069 // Counter will be decremented in the deferred code. Placed here to avoid |
| 7064 // having it in the instruction stream below where patching will occur. | 7070 // having it in the instruction stream below where patching will occur. |
| 7065 __ IncrementCounter(COUNTERS->keyed_load_inline(), 1, | 7071 __ IncrementCounter(masm_->isolate()->counters()->keyed_load_inline(), |
| 7066 frame_->scratch0(), frame_->scratch1()); | 7072 1, frame_->scratch0(), frame_->scratch1()); |
| 7067 | 7073 |
| 7068 // Load the key and receiver from the stack. | 7074 // Load the key and receiver from the stack. |
| 7069 bool key_is_known_smi = frame_->KnownSmiAt(0); | 7075 bool key_is_known_smi = frame_->KnownSmiAt(0); |
| 7070 Register key = frame_->PopToRegister(); | 7076 Register key = frame_->PopToRegister(); |
| 7071 Register receiver = frame_->PopToRegister(key); | 7077 Register receiver = frame_->PopToRegister(key); |
| 7072 | 7078 |
| 7073 // The deferred code expects key and receiver in registers. | 7079 // The deferred code expects key and receiver in registers. |
| 7074 DeferredReferenceGetKeyedValue* deferred = | 7080 DeferredReferenceGetKeyedValue* deferred = |
| 7075 new DeferredReferenceGetKeyedValue(key, receiver); | 7081 new DeferredReferenceGetKeyedValue(key, receiver); |
| 7076 | 7082 |
| (...skipping 65 matching lines...) |
| 7142 if (loop_nesting() > 0 && key_type->IsLikelySmi()) { | 7148 if (loop_nesting() > 0 && key_type->IsLikelySmi()) { |
| 7143 // Inline the keyed store. | 7149 // Inline the keyed store. |
| 7144 Comment cmnt(masm_, "[ Inlined store to keyed property"); | 7150 Comment cmnt(masm_, "[ Inlined store to keyed property"); |
| 7145 | 7151 |
| 7146 Register scratch1 = VirtualFrame::scratch0(); | 7152 Register scratch1 = VirtualFrame::scratch0(); |
| 7147 Register scratch2 = VirtualFrame::scratch1(); | 7153 Register scratch2 = VirtualFrame::scratch1(); |
| 7148 Register scratch3 = r3; | 7154 Register scratch3 = r3; |
| 7149 | 7155 |
| 7150 // Counter will be decremented in the deferred code. Placed here to avoid | 7156 // Counter will be decremented in the deferred code. Placed here to avoid |
| 7151 // having it in the instruction stream below where patching will occur. | 7157 // having it in the instruction stream below where patching will occur. |
| 7152 __ IncrementCounter(COUNTERS->keyed_store_inline(), 1, | 7158 __ IncrementCounter(masm_->isolate()->counters()->keyed_store_inline(), |
| 7153 scratch1, scratch2); | 7159 1, scratch1, scratch2); |
| 7154 | 7160 |
| 7155 | 7161 |
| 7156 // Load the value, key and receiver from the stack. | 7162 // Load the value, key and receiver from the stack. |
| 7157 bool value_is_harmless = frame_->KnownSmiAt(0); | 7163 bool value_is_harmless = frame_->KnownSmiAt(0); |
| 7158 if (wb_info == NEVER_NEWSPACE) value_is_harmless = true; | 7164 if (wb_info == NEVER_NEWSPACE) value_is_harmless = true; |
| 7159 bool key_is_smi = frame_->KnownSmiAt(1); | 7165 bool key_is_smi = frame_->KnownSmiAt(1); |
| 7160 Register value = frame_->PopToRegister(); | 7166 Register value = frame_->PopToRegister(); |
| 7161 Register key = frame_->PopToRegister(value); | 7167 Register key = frame_->PopToRegister(value); |
| 7162 VirtualFrame::SpilledScope spilled(frame_); | 7168 VirtualFrame::SpilledScope spilled(frame_); |
| 7163 Register receiver = r2; | 7169 Register receiver = r2; |
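The `value_is_harmless` / `NEVER_NEWSPACE` logic above decides whether the inlined keyed store can skip write-barrier work: a value that is a known smi, or that the caller promises can never be a new-space object, cannot require remembering the store. The sketch below reuses the flag names from this file but is otherwise illustrative logic, not V8 source.

```cpp
#include <cassert>

// Hypothetical model of the hint consumed by the inlined keyed store: only a
// value that might be a pointer into the young generation needs the barrier.
enum WriteBarrierCharacter { UNLIKELY_SMI, LIKELY_SMI, NEVER_NEWSPACE };

bool ValueIsHarmless(bool value_is_known_smi, WriteBarrierCharacter wb_info) {
  bool harmless = value_is_known_smi;              // smis are never heap pointers
  if (wb_info == NEVER_NEWSPACE) harmless = true;  // promised not to be new-space
  return harmless;
}

int main() {
  assert(ValueIsHarmless(true, UNLIKELY_SMI));     // known smi: no barrier needed
  assert(ValueIsHarmless(false, NEVER_NEWSPACE));  // cannot point into new space
  assert(!ValueIsHarmless(false, LIKELY_SMI));     // might need the write barrier
  return 0;
}
```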
| (...skipping 256 matching lines...) |
| 7420 specialized_on_rhs_ ? "_ConstantRhs" : "", | 7426 specialized_on_rhs_ ? "_ConstantRhs" : "", |
| 7421 BinaryOpIC::GetName(runtime_operands_type_)); | 7427 BinaryOpIC::GetName(runtime_operands_type_)); |
| 7422 return name_; | 7428 return name_; |
| 7423 } | 7429 } |
| 7424 | 7430 |
| 7425 #undef __ | 7431 #undef __ |
| 7426 | 7432 |
| 7427 } } // namespace v8::internal | 7433 } } // namespace v8::internal |
| 7428 | 7434 |
| 7429 #endif // V8_TARGET_ARCH_ARM | 7435 #endif // V8_TARGET_ARCH_ARM |