| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #if V8_TARGET_ARCH_IA32 | 7 #if V8_TARGET_ARCH_IA32 |
| 8 | 8 |
| 9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
| 10 #include "src/bootstrapper.h" | 10 #include "src/bootstrapper.h" |
| (...skipping 3856 matching lines...) |
| 3867 // Registers: | 3867 // Registers: |
| 3868 // dictionary_: NameDictionary to probe. | 3868 // dictionary_: NameDictionary to probe. |
| 3869 // result_: used as scratch. | 3869 // result_: used as scratch. |
| 3870 // index_: will hold an index of entry if lookup is successful. | 3870 // index_: will hold an index of entry if lookup is successful. |
| 3871 // might alias with result_. | 3871 // might alias with result_. |
| 3872 // Returns: | 3872 // Returns: |
| 3873 // result_ is zero if lookup failed, non-zero otherwise. | 3873 // result_ is zero if lookup failed, non-zero otherwise. |
| 3874 | 3874 |
| 3875 Label in_dictionary, maybe_in_dictionary, not_in_dictionary; | 3875 Label in_dictionary, maybe_in_dictionary, not_in_dictionary; |
| 3876 | 3876 |
| 3877 Register scratch = result_; | 3877 Register scratch = result(); |
| 3878 | 3878 |
| 3879 __ mov(scratch, FieldOperand(dictionary_, kCapacityOffset)); | 3879 __ mov(scratch, FieldOperand(dictionary(), kCapacityOffset)); |
| 3880 __ dec(scratch); | 3880 __ dec(scratch); |
| 3881 __ SmiUntag(scratch); | 3881 __ SmiUntag(scratch); |
| 3882 __ push(scratch); | 3882 __ push(scratch); |
| 3883 | 3883 |
| 3884 // If names of slots in range from 1 to kProbes - 1 for the hash value are | 3884 // If names of slots in range from 1 to kProbes - 1 for the hash value are |
| 3885 // not equal to the name and kProbes-th slot is not used (its name is the | 3885 // not equal to the name and kProbes-th slot is not used (its name is the |
| 3886 // undefined value), it guarantees the hash table doesn't contain the | 3886 // undefined value), it guarantees the hash table doesn't contain the |
| 3887 // property. It's true even if some slots represent deleted properties | 3887 // property. It's true even if some slots represent deleted properties |
| 3888 // (their names are the null value). | 3888 // (their names are the null value). |
| 3889 for (int i = kInlinedProbes; i < kTotalProbes; i++) { | 3889 for (int i = kInlinedProbes; i < kTotalProbes; i++) { |
| 3890 // Compute the masked index: (hash + i + i * i) & mask. | 3890 // Compute the masked index: (hash + i + i * i) & mask. |
| 3891 __ mov(scratch, Operand(esp, 2 * kPointerSize)); | 3891 __ mov(scratch, Operand(esp, 2 * kPointerSize)); |
| 3892 if (i > 0) { | 3892 if (i > 0) { |
| 3893 __ add(scratch, Immediate(NameDictionary::GetProbeOffset(i))); | 3893 __ add(scratch, Immediate(NameDictionary::GetProbeOffset(i))); |
| 3894 } | 3894 } |
| 3895 __ and_(scratch, Operand(esp, 0)); | 3895 __ and_(scratch, Operand(esp, 0)); |
| 3896 | 3896 |
| 3897 // Scale the index by multiplying by the entry size. | 3897 // Scale the index by multiplying by the entry size. |
| 3898 DCHECK(NameDictionary::kEntrySize == 3); | 3898 DCHECK(NameDictionary::kEntrySize == 3); |
| 3899 __ lea(index_, Operand(scratch, scratch, times_2, 0)); // index *= 3. | 3899 __ lea(index(), Operand(scratch, scratch, times_2, 0)); // index *= 3. |
| 3900 | 3900 |
| 3901 // Having undefined at this place means the name is not contained. | 3901 // Having undefined at this place means the name is not contained. |
| 3902 DCHECK_EQ(kSmiTagSize, 1); | 3902 DCHECK_EQ(kSmiTagSize, 1); |
| 3903 __ mov(scratch, Operand(dictionary_, | 3903 __ mov(scratch, Operand(dictionary(), index(), times_pointer_size, |
| 3904 index_, | |
| 3905 times_pointer_size, | |
| 3906 kElementsStartOffset - kHeapObjectTag)); | 3904 kElementsStartOffset - kHeapObjectTag)); |
| 3907 __ cmp(scratch, isolate()->factory()->undefined_value()); | 3905 __ cmp(scratch, isolate()->factory()->undefined_value()); |
| 3908 __ j(equal, ¬_in_dictionary); | 3906 __ j(equal, ¬_in_dictionary); |
| 3909 | 3907 |
| 3911 // Stop if we found the property. | 3909 // Stop if we found the property. |
| 3911 __ cmp(scratch, Operand(esp, 3 * kPointerSize)); | 3909 __ cmp(scratch, Operand(esp, 3 * kPointerSize)); |
| 3912 __ j(equal, &in_dictionary); | 3910 __ j(equal, &in_dictionary); |
| 3913 | 3911 |
| 3914 if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) { | 3912 if (i != kTotalProbes - 1 && mode() == NEGATIVE_LOOKUP) { |
| 3915 // If we hit a key that is not a unique name during negative | 3913 // If we hit a key that is not a unique name during negative |
| 3916 // lookup we have to bail out as this key might be equal to the | 3914 // lookup we have to bail out as this key might be equal to the |
| 3917 // key we are looking for. | 3915 // key we are looking for. |
| 3918 | 3916 |
| 3919 // Check if the entry name is not a unique name. | 3917 // Check if the entry name is not a unique name. |
| 3920 __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); | 3918 __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); |
| 3921 __ JumpIfNotUniqueName(FieldOperand(scratch, Map::kInstanceTypeOffset), | 3919 __ JumpIfNotUniqueName(FieldOperand(scratch, Map::kInstanceTypeOffset), |
| 3922 &maybe_in_dictionary); | 3920 &maybe_in_dictionary); |
| 3923 } | 3921 } |
| 3924 } | 3922 } |
| 3925 | 3923 |
| 3926 __ bind(&maybe_in_dictionary); | 3924 __ bind(&maybe_in_dictionary); |
| 3927 // If we are doing negative lookup then probing failure should be | 3925 // If we are doing negative lookup then probing failure should be |
| 3928 // treated as a lookup success. For positive lookup probing failure | 3926 // treated as a lookup success. For positive lookup probing failure |
| 3929 // should be treated as lookup failure. | 3927 // should be treated as lookup failure. |
| 3930 if (mode_ == POSITIVE_LOOKUP) { | 3928 if (mode() == POSITIVE_LOOKUP) { |
| 3931 __ mov(result_, Immediate(0)); | 3929 __ mov(result(), Immediate(0)); |
| 3932 __ Drop(1); | 3930 __ Drop(1); |
| 3933 __ ret(2 * kPointerSize); | 3931 __ ret(2 * kPointerSize); |
| 3934 } | 3932 } |
| 3935 | 3933 |
| 3936 __ bind(&in_dictionary); | 3934 __ bind(&in_dictionary); |
| 3937 __ mov(result_, Immediate(1)); | 3935 __ mov(result(), Immediate(1)); |
| 3938 __ Drop(1); | 3936 __ Drop(1); |
| 3939 __ ret(2 * kPointerSize); | 3937 __ ret(2 * kPointerSize); |
| 3940 | 3938 |
| 3941 __ bind(¬_in_dictionary); | 3939 __ bind(¬_in_dictionary); |
| 3942 __ mov(result_, Immediate(0)); | 3940 __ mov(result(), Immediate(0)); |
| 3943 __ Drop(1); | 3941 __ Drop(1); |
| 3944 __ ret(2 * kPointerSize); | 3942 __ ret(2 * kPointerSize); |
| 3945 } | 3943 } |
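Reviewer note: for anyone reading this hunk without the dictionary code at hand, the loop above is a plain quadratic-probe search over a power-of-two table. Below is a minimal standalone C++ sketch of what the emitted code computes; every name in it is a hypothetical stand-in, and the probe offset follows the in-line comment "(hash + i + i * i) & mask" rather than the exact NameDictionary::GetProbeOffset() definition. The stub's loop starts at kInlinedProbes because the first few probes are inlined at the call site, and it scales the entry index by kEntrySize == 3 with a single `lea index, [scratch + scratch*2]`.

```cpp
#include <cstdint>
#include <vector>

// The three outcomes mirror the labels above: not_in_dictionary,
// in_dictionary, and maybe_in_dictionary (the bail-out to the runtime).
enum class LookupOutcome { kFound, kNotFound, kMaybe };

// Hypothetical entry: real entries also carry value and details words,
// which is where kEntrySize == 3 comes from.
struct Entry {
  const void* key;  // nullptr plays the role of the undefined sentinel
};

LookupOutcome Probe(const std::vector<Entry>& entries, uint32_t capacity,
                    uint32_t hash, const void* name, int total_probes) {
  const uint32_t mask = capacity - 1;  // capacity is a power of two
  for (int i = 0; i < total_probes; ++i) {
    // Quadratic probing, masked to the table size: (hash + i + i*i) & mask.
    uint32_t index = (hash + static_cast<uint32_t>(i + i * i)) & mask;
    const Entry& entry = entries[index];  // the stub multiplies index by 3 here
    if (entry.key == nullptr) return LookupOutcome::kNotFound;  // undefined slot
    if (entry.key == name) return LookupOutcome::kFound;
  }
  return LookupOutcome::kMaybe;  // probing budget exhausted: let the runtime decide
}
```

The maybe_in_dictionary path is also taken during a NEGATIVE_LOOKUP when a probed key is not a unique name, since such a key could still compare equal to the one being looked up.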
| 3946 | 3944 |
| 3947 | 3945 |
| 3948 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime( | 3946 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime( |
| 3949 Isolate* isolate) { | 3947 Isolate* isolate) { |
| 3950 StoreBufferOverflowStub stub(isolate, kDontSaveFPRegs); | 3948 StoreBufferOverflowStub stub(isolate, kDontSaveFPRegs); |
| 3951 stub.GetCode(); | 3949 stub.GetCode(); |
| 3952 StoreBufferOverflowStub stub2(isolate, kSaveFPRegs); | 3950 StoreBufferOverflowStub stub2(isolate, kSaveFPRegs); |
| 3953 stub2.GetCode(); | 3951 stub2.GetCode(); |
| 3954 } | 3952 } |
| 3955 | 3953 |
| 3956 | 3954 |
| 3957 // Takes the input in 3 registers: address_, value_, and object_. A pointer to | 3955 // Takes the input in 3 registers: address_, value_, and object_. A pointer to |
| 3958 // the value has just been written into the object; now this stub makes sure | 3956 // the value has just been written into the object; now this stub makes sure |
| 3959 // we keep the GC informed. The word in the object where the value has been | 3957 // we keep the GC informed. The word in the object where the value has been |
| 3960 // written is in the address register. | 3958 // written is in the address register. |
| 3961 void RecordWriteStub::Generate(MacroAssembler* masm) { | 3959 void RecordWriteStub::Generate(MacroAssembler* masm) { |
| 3962 Label skip_to_incremental_noncompacting; | 3960 Label skip_to_incremental_noncompacting; |
| 3963 Label skip_to_incremental_compacting; | 3961 Label skip_to_incremental_compacting; |
| 3964 | 3962 |
| 3965 // The first two instructions are generated with labels so as to get the | 3963 // The first two instructions are generated with labels so as to get the |
| 3966 // offset fixed up correctly by the bind(Label*) call. We patch it back and | 3964 // offset fixed up correctly by the bind(Label*) call. We patch it back and |
| 3967 // forth between a compare instruction (a nop in this position) and the | 3965 // forth between a compare instruction (a nop in this position) and the |
| 3968 // real branch when we start and stop incremental heap marking. | 3966 // real branch when we start and stop incremental heap marking. |
| 3969 __ jmp(&skip_to_incremental_noncompacting, Label::kNear); | 3967 __ jmp(&skip_to_incremental_noncompacting, Label::kNear); |
| 3970 __ jmp(&skip_to_incremental_compacting, Label::kFar); | 3968 __ jmp(&skip_to_incremental_compacting, Label::kFar); |
| 3971 | 3969 |
| 3972 if (remembered_set_action_ == EMIT_REMEMBERED_SET) { | 3970 if (remembered_set_action() == EMIT_REMEMBERED_SET) { |
| 3973 __ RememberedSetHelper(object_, | 3971 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), |
| 3974 address_, | |
| 3975 value_, | |
| 3976 save_fp_regs_mode_, | |
| 3977 MacroAssembler::kReturnAtEnd); | 3972 MacroAssembler::kReturnAtEnd); |
| 3978 } else { | 3973 } else { |
| 3979 __ ret(0); | 3974 __ ret(0); |
| 3980 } | 3975 } |
| 3981 | 3976 |
| 3982 __ bind(&skip_to_incremental_noncompacting); | 3977 __ bind(&skip_to_incremental_noncompacting); |
| 3983 GenerateIncremental(masm, INCREMENTAL); | 3978 GenerateIncremental(masm, INCREMENTAL); |
| 3984 | 3979 |
| 3985 __ bind(&skip_to_incremental_compacting); | 3980 __ bind(&skip_to_incremental_compacting); |
| 3986 GenerateIncremental(masm, INCREMENTAL_COMPACTION); | 3981 GenerateIncremental(masm, INCREMENTAL_COMPACTION); |
| 3987 | 3982 |
| 3988 // Initial mode of the stub is expected to be STORE_BUFFER_ONLY. | 3983 // Initial mode of the stub is expected to be STORE_BUFFER_ONLY. |
| 3989 // Will be checked in IncrementalMarking::ActivateGeneratedStub. | 3984 // Will be checked in IncrementalMarking::ActivateGeneratedStub. |
| 3990 masm->set_byte_at(0, kTwoByteNopInstruction); | 3985 masm->set_byte_at(0, kTwoByteNopInstruction); |
| 3991 masm->set_byte_at(2, kFiveByteNopInstruction); | 3986 masm->set_byte_at(2, kFiveByteNopInstruction); |
| 3992 } | 3987 } |
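Reviewer note on the "patch back and forth" comment above: the stub is emitted with two real jmp instructions so that bind() fixes up their displacements, and the final two set_byte_at() calls then overwrite the opcode bytes with same-length compare instructions, which behave as nops here. When incremental marking is (de)activated, the runtime flips those opcode bytes back and forth. The sketch below is illustrative only, not the actual RecordWriteStub::Patch(); the byte values are the standard x86 encodings (jmp rel8/rel32 vs. cmp al/eax with an immediate), which is what makes the trick work: each "nop" occupies exactly as many bytes as the jump it replaces, so the displacements emitted at generation time stay valid.

```cpp
#include <cstdint>

// Hypothetical mirror of the stub's three modes.
enum class StubMode { kStoreBufferOnly, kIncremental, kIncrementalCompaction };

void PatchRecordWriteEntry(uint8_t* code, StubMode mode) {
  const uint8_t kShortJmp = 0xEB;  // jmp rel8       (2 bytes)
  const uint8_t kShortNop = 0x3C;  // cmp al, imm8   (2 bytes, acts as a nop)
  const uint8_t kNearJmp  = 0xE9;  // jmp rel32      (5 bytes)
  const uint8_t kNearNop  = 0x3D;  // cmp eax, imm32 (5 bytes, acts as a nop)
  switch (mode) {
    case StubMode::kStoreBufferOnly:        // neither jump taken: plain store-buffer path
      code[0] = kShortNop;
      code[2] = kNearNop;
      break;
    case StubMode::kIncremental:            // take the first (non-compacting) jump
      code[0] = kShortJmp;
      break;
    case StubMode::kIncrementalCompaction:  // skip the first, take the second jump
      code[0] = kShortNop;
      code[2] = kNearJmp;
      break;
  }
}
```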
| 3993 | 3988 |
| 3994 | 3989 |
| 3995 void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) { | 3990 void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) { |
| 3996 regs_.Save(masm); | 3991 regs_.Save(masm); |
| 3997 | 3992 |
| 3998 if (remembered_set_action_ == EMIT_REMEMBERED_SET) { | 3993 if (remembered_set_action() == EMIT_REMEMBERED_SET) { |
| 3999 Label dont_need_remembered_set; | 3994 Label dont_need_remembered_set; |
| 4000 | 3995 |
| 4001 __ mov(regs_.scratch0(), Operand(regs_.address(), 0)); | 3996 __ mov(regs_.scratch0(), Operand(regs_.address(), 0)); |
| 4002 __ JumpIfNotInNewSpace(regs_.scratch0(), // Value. | 3997 __ JumpIfNotInNewSpace(regs_.scratch0(), // Value. |
| 4003 regs_.scratch0(), | 3998 regs_.scratch0(), |
| 4004 &dont_need_remembered_set); | 3999 &dont_need_remembered_set); |
| 4005 | 4000 |
| 4006 __ CheckPageFlag(regs_.object(), | 4001 __ CheckPageFlag(regs_.object(), |
| 4007 regs_.scratch0(), | 4002 regs_.scratch0(), |
| 4008 1 << MemoryChunk::SCAN_ON_SCAVENGE, | 4003 1 << MemoryChunk::SCAN_ON_SCAVENGE, |
| 4009 not_zero, | 4004 not_zero, |
| 4010 &dont_need_remembered_set); | 4005 &dont_need_remembered_set); |
| 4011 | 4006 |
| 4012 // First notify the incremental marker if necessary, then update the | 4007 // First notify the incremental marker if necessary, then update the |
| 4013 // remembered set. | 4008 // remembered set. |
| 4014 CheckNeedsToInformIncrementalMarker( | 4009 CheckNeedsToInformIncrementalMarker( |
| 4015 masm, | 4010 masm, |
| 4016 kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, | 4011 kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, |
| 4017 mode); | 4012 mode); |
| 4018 InformIncrementalMarker(masm); | 4013 InformIncrementalMarker(masm); |
| 4019 regs_.Restore(masm); | 4014 regs_.Restore(masm); |
| 4020 __ RememberedSetHelper(object_, | 4015 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), |
| 4021 address_, | |
| 4022 value_, | |
| 4023 save_fp_regs_mode_, | |
| 4024 MacroAssembler::kReturnAtEnd); | 4016 MacroAssembler::kReturnAtEnd); |
| 4025 | 4017 |
| 4026 __ bind(&dont_need_remembered_set); | 4018 __ bind(&dont_need_remembered_set); |
| 4027 } | 4019 } |
| 4028 | 4020 |
| 4029 CheckNeedsToInformIncrementalMarker( | 4021 CheckNeedsToInformIncrementalMarker( |
| 4030 masm, | 4022 masm, |
| 4031 kReturnOnNoNeedToInformIncrementalMarker, | 4023 kReturnOnNoNeedToInformIncrementalMarker, |
| 4032 mode); | 4024 mode); |
| 4033 InformIncrementalMarker(masm); | 4025 InformIncrementalMarker(masm); |
| 4034 regs_.Restore(masm); | 4026 regs_.Restore(masm); |
| 4035 __ ret(0); | 4027 __ ret(0); |
| 4036 } | 4028 } |
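Reviewer note: the fast path at the top of GenerateIncremental only skips the remembered-set update when the store cannot create an old-to-new pointer that the store buffer would need to record. A hedged restatement in plain C++, with made-up flag constants standing in for the real MemoryChunk bits tested by JumpIfNotInNewSpace and CheckPageFlag:

```cpp
#include <cstdint>

// Illustrative flag bits only; the real ones live on MemoryChunk.
constexpr uint32_t kInNewSpace     = 1u << 0;
constexpr uint32_t kScanOnScavenge = 1u << 1;

// The stub jumps to dont_need_remembered_set when either condition fails.
bool NeedsRememberedSetEntry(uint32_t value_page_flags,
                             uint32_t object_page_flags) {
  return (value_page_flags & kInNewSpace) != 0 &&     // value lives in new space
         (object_page_flags & kScanOnScavenge) == 0;  // page isn't scanned wholesale anyway
}
```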
| 4037 | 4029 |
| 4038 | 4030 |
| 4039 void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) { | 4031 void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) { |
| 4040 regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_); | 4032 regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode()); |
| 4041 int argument_count = 3; | 4033 int argument_count = 3; |
| 4042 __ PrepareCallCFunction(argument_count, regs_.scratch0()); | 4034 __ PrepareCallCFunction(argument_count, regs_.scratch0()); |
| 4043 __ mov(Operand(esp, 0 * kPointerSize), regs_.object()); | 4035 __ mov(Operand(esp, 0 * kPointerSize), regs_.object()); |
| 4044 __ mov(Operand(esp, 1 * kPointerSize), regs_.address()); // Slot. | 4036 __ mov(Operand(esp, 1 * kPointerSize), regs_.address()); // Slot. |
| 4045 __ mov(Operand(esp, 2 * kPointerSize), | 4037 __ mov(Operand(esp, 2 * kPointerSize), |
| 4046 Immediate(ExternalReference::isolate_address(isolate()))); | 4038 Immediate(ExternalReference::isolate_address(isolate()))); |
| 4047 | 4039 |
| 4048 AllowExternalCallThatCantCauseGC scope(masm); | 4040 AllowExternalCallThatCantCauseGC scope(masm); |
| 4049 __ CallCFunction( | 4041 __ CallCFunction( |
| 4050 ExternalReference::incremental_marking_record_write_function(isolate()), | 4042 ExternalReference::incremental_marking_record_write_function(isolate()), |
| 4051 argument_count); | 4043 argument_count); |
| 4052 | 4044 |
| 4053 regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_); | 4045 regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode()); |
| 4054 } | 4046 } |
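Reviewer note: InformIncrementalMarker sets up an ordinary cdecl call with three pointer-sized stack arguments. The declaration below is a shape sketch only (the concrete callee is whatever ExternalReference::incremental_marking_record_write_function resolves to); the parameter order simply mirrors the three stores above.

```cpp
// Opaque forward declarations; not the real V8 types or API.
struct HeapObject;
struct Object;
struct Isolate;

// Assumed argument mapping:
//   [esp + 0 * kPointerSize] -> object   (the object that was written into)
//   [esp + 1 * kPointerSize] -> slot     (address of the field that now holds the value)
//   [esp + 2 * kPointerSize] -> isolate  (lets the callee reach the heap and marker)
extern "C" void IncrementalMarkingRecordWrite(HeapObject* object, Object** slot,
                                              Isolate* isolate);
```

The AllowExternalCallThatCantCauseGC scope documents the accompanying contract: the callee must not trigger a GC, so the raw pointers stashed by SaveCallerSaveRegisters remain valid across the call.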
| 4055 | 4047 |
| 4056 | 4048 |
| 4057 void RecordWriteStub::CheckNeedsToInformIncrementalMarker( | 4049 void RecordWriteStub::CheckNeedsToInformIncrementalMarker( |
| 4058 MacroAssembler* masm, | 4050 MacroAssembler* masm, |
| 4059 OnNoNeedToInformIncrementalMarker on_no_need, | 4051 OnNoNeedToInformIncrementalMarker on_no_need, |
| 4060 Mode mode) { | 4052 Mode mode) { |
| 4061 Label object_is_black, need_incremental, need_incremental_pop_object; | 4053 Label object_is_black, need_incremental, need_incremental_pop_object; |
| 4062 | 4054 |
| 4063 __ mov(regs_.scratch0(), Immediate(~Page::kPageAlignmentMask)); | 4055 __ mov(regs_.scratch0(), Immediate(~Page::kPageAlignmentMask)); |
| (...skipping 10 matching lines...) |
| 4074 // Let's look at the color of the object: If it is not black we don't have | 4066 // Let's look at the color of the object: If it is not black we don't have |
| 4075 // to inform the incremental marker. | 4067 // to inform the incremental marker. |
| 4076 __ JumpIfBlack(regs_.object(), | 4068 __ JumpIfBlack(regs_.object(), |
| 4077 regs_.scratch0(), | 4069 regs_.scratch0(), |
| 4078 regs_.scratch1(), | 4070 regs_.scratch1(), |
| 4079 &object_is_black, | 4071 &object_is_black, |
| 4080 Label::kNear); | 4072 Label::kNear); |
| 4081 | 4073 |
| 4082 regs_.Restore(masm); | 4074 regs_.Restore(masm); |
| 4083 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) { | 4075 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) { |
| 4084 __ RememberedSetHelper(object_, | 4076 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), |
| 4085 address_, | |
| 4086 value_, | |
| 4087 save_fp_regs_mode_, | |
| 4088 MacroAssembler::kReturnAtEnd); | 4077 MacroAssembler::kReturnAtEnd); |
| 4089 } else { | 4078 } else { |
| 4090 __ ret(0); | 4079 __ ret(0); |
| 4091 } | 4080 } |
| 4092 | 4081 |
| 4093 __ bind(&object_is_black); | 4082 __ bind(&object_is_black); |
| 4094 | 4083 |
| 4095 // Get the value from the slot. | 4084 // Get the value from the slot. |
| 4096 __ mov(regs_.scratch0(), Operand(regs_.address(), 0)); | 4085 __ mov(regs_.scratch0(), Operand(regs_.address(), 0)); |
| 4097 | 4086 |
| (...skipping 24 matching lines...) |
| 4122 __ push(regs_.object()); | 4111 __ push(regs_.object()); |
| 4123 __ EnsureNotWhite(regs_.scratch0(), // The value. | 4112 __ EnsureNotWhite(regs_.scratch0(), // The value. |
| 4124 regs_.scratch1(), // Scratch. | 4113 regs_.scratch1(), // Scratch. |
| 4125 regs_.object(), // Scratch. | 4114 regs_.object(), // Scratch. |
| 4126 &need_incremental_pop_object, | 4115 &need_incremental_pop_object, |
| 4127 Label::kNear); | 4116 Label::kNear); |
| 4128 __ pop(regs_.object()); | 4117 __ pop(regs_.object()); |
| 4129 | 4118 |
| 4130 regs_.Restore(masm); | 4119 regs_.Restore(masm); |
| 4131 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) { | 4120 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) { |
| 4132 __ RememberedSetHelper(object_, | 4121 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), |
| 4133 address_, | |
| 4134 value_, | |
| 4135 save_fp_regs_mode_, | |
| 4136 MacroAssembler::kReturnAtEnd); | 4122 MacroAssembler::kReturnAtEnd); |
| 4137 } else { | 4123 } else { |
| 4138 __ ret(0); | 4124 __ ret(0); |
| 4139 } | 4125 } |
| 4140 | 4126 |
| 4141 __ bind(&need_incremental_pop_object); | 4127 __ bind(&need_incremental_pop_object); |
| 4142 __ pop(regs_.object()); | 4128 __ pop(regs_.object()); |
| 4143 | 4129 |
| 4144 __ bind(&need_incremental); | 4130 __ bind(&need_incremental); |
| 4145 | 4131 |
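Reviewer note on the color checks in this function: conceptually, the incremental marker only has to be told about a store when a black (already-marked) object would end up pointing at a white (unmarked) value, since that would break the tri-color invariant. A hedged one-line restatement, with a hypothetical helper standing in for the JumpIfBlack / EnsureNotWhite pair:

```cpp
// Illustrative only; the real checks operate on mark bits in the page header.
enum class Color { kWhite, kGrey, kBlack };

bool MustInformIncrementalMarker(Color object_color, Color value_color) {
  // Grey objects will be revisited anyway; only a black object pointing at a
  // white value needs the marker's attention.
  return object_color == Color::kBlack && value_color == Color::kWhite;
}
```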
| (...skipping 619 matching lines...) |
| 4765 Operand(ebp, 7 * kPointerSize), | 4751 Operand(ebp, 7 * kPointerSize), |
| 4766 NULL); | 4752 NULL); |
| 4767 } | 4753 } |
| 4768 | 4754 |
| 4769 | 4755 |
| 4770 #undef __ | 4756 #undef __ |
| 4771 | 4757 |
| 4772 } } // namespace v8::internal | 4758 } } // namespace v8::internal |
| 4773 | 4759 |
| 4774 #endif // V8_TARGET_ARCH_IA32 | 4760 #endif // V8_TARGET_ARCH_IA32 |