OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_ARM64 | 7 #if V8_TARGET_ARCH_ARM64 |
8 | 8 |
9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
(...skipping 4017 matching lines...) |
4028 BinaryOpWithAllocationSiteStub stub(isolate(), state()); | 4028 BinaryOpWithAllocationSiteStub stub(isolate(), state()); |
4029 __ TailCallStub(&stub); | 4029 __ TailCallStub(&stub); |
4030 } | 4030 } |
4031 | 4031 |
4032 | 4032 |
4033 void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) { | 4033 void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) { |
4034 // We need some extra registers for this stub, they have been allocated | 4034 // We need some extra registers for this stub, they have been allocated |
4035 // but we need to save them before using them. | 4035 // but we need to save them before using them. |
4036 regs_.Save(masm); | 4036 regs_.Save(masm); |
4037 | 4037 |
4038 if (remembered_set_action_ == EMIT_REMEMBERED_SET) { | 4038 if (remembered_set_action() == EMIT_REMEMBERED_SET) { |
4039 Label dont_need_remembered_set; | 4039 Label dont_need_remembered_set; |
4040 | 4040 |
4041 Register value = regs_.scratch0(); | 4041 Register val = regs_.scratch0(); |
4042 __ Ldr(value, MemOperand(regs_.address())); | 4042 __ Ldr(val, MemOperand(regs_.address())); |
4043 __ JumpIfNotInNewSpace(value, &dont_need_remembered_set); | 4043 __ JumpIfNotInNewSpace(val, &dont_need_remembered_set); |
4044 | 4044 |
4045 __ CheckPageFlagSet(regs_.object(), | 4045 __ CheckPageFlagSet(regs_.object(), val, 1 << MemoryChunk::SCAN_ON_SCAVENGE, |
4046 value, | |
4047 1 << MemoryChunk::SCAN_ON_SCAVENGE, | |
4048 &dont_need_remembered_set); | 4046 &dont_need_remembered_set); |
4049 | 4047 |
4050 // First notify the incremental marker if necessary, then update the | 4048 // First notify the incremental marker if necessary, then update the |
4051 // remembered set. | 4049 // remembered set. |
4052 CheckNeedsToInformIncrementalMarker( | 4050 CheckNeedsToInformIncrementalMarker( |
4053 masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode); | 4051 masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode); |
4054 InformIncrementalMarker(masm); | 4052 InformIncrementalMarker(masm); |
4055 regs_.Restore(masm); // Restore the extra scratch registers we used. | 4053 regs_.Restore(masm); // Restore the extra scratch registers we used. |
4056 | 4054 |
4057 __ RememberedSetHelper(object_, | 4055 __ RememberedSetHelper(object(), address(), |
4058 address_, | 4056 value(), // scratch1 |
4059 value_, // scratch1 | 4057 save_fp_regs_mode(), MacroAssembler::kReturnAtEnd); |
4060 save_fp_regs_mode_, | |
4061 MacroAssembler::kReturnAtEnd); | |
4062 | 4058 |
4063 __ Bind(&dont_need_remembered_set); | 4059 __ Bind(&dont_need_remembered_set); |
4064 } | 4060 } |
4065 | 4061 |
4066 CheckNeedsToInformIncrementalMarker( | 4062 CheckNeedsToInformIncrementalMarker( |
4067 masm, kReturnOnNoNeedToInformIncrementalMarker, mode); | 4063 masm, kReturnOnNoNeedToInformIncrementalMarker, mode); |
4068 InformIncrementalMarker(masm); | 4064 InformIncrementalMarker(masm); |
4069 regs_.Restore(masm); // Restore the extra scratch registers we used. | 4065 regs_.Restore(masm); // Restore the extra scratch registers we used. |
4070 __ Ret(); | 4066 __ Ret(); |
4071 } | 4067 } |
4072 | 4068 |
4073 | 4069 |
4074 void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) { | 4070 void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) { |
4075 regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_); | 4071 regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode()); |
4076 Register address = | 4072 Register address = |
4077 x0.Is(regs_.address()) ? regs_.scratch0() : regs_.address(); | 4073 x0.Is(regs_.address()) ? regs_.scratch0() : regs_.address(); |
4078 DCHECK(!address.Is(regs_.object())); | 4074 DCHECK(!address.Is(regs_.object())); |
4079 DCHECK(!address.Is(x0)); | 4075 DCHECK(!address.Is(x0)); |
4080 __ Mov(address, regs_.address()); | 4076 __ Mov(address, regs_.address()); |
4081 __ Mov(x0, regs_.object()); | 4077 __ Mov(x0, regs_.object()); |
4082 __ Mov(x1, address); | 4078 __ Mov(x1, address); |
4083 __ Mov(x2, ExternalReference::isolate_address(isolate())); | 4079 __ Mov(x2, ExternalReference::isolate_address(isolate())); |
4084 | 4080 |
4085 AllowExternalCallThatCantCauseGC scope(masm); | 4081 AllowExternalCallThatCantCauseGC scope(masm); |
4086 ExternalReference function = | 4082 ExternalReference function = |
4087 ExternalReference::incremental_marking_record_write_function( | 4083 ExternalReference::incremental_marking_record_write_function( |
4088 isolate()); | 4084 isolate()); |
4089 __ CallCFunction(function, 3, 0); | 4085 __ CallCFunction(function, 3, 0); |
4090 | 4086 |
4091 regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_); | 4087 regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode()); |
4092 } | 4088 } |
4093 | 4089 |
4094 | 4090 |
4095 void RecordWriteStub::CheckNeedsToInformIncrementalMarker( | 4091 void RecordWriteStub::CheckNeedsToInformIncrementalMarker( |
4096 MacroAssembler* masm, | 4092 MacroAssembler* masm, |
4097 OnNoNeedToInformIncrementalMarker on_no_need, | 4093 OnNoNeedToInformIncrementalMarker on_no_need, |
4098 Mode mode) { | 4094 Mode mode) { |
4099 Label on_black; | 4095 Label on_black; |
4100 Label need_incremental; | 4096 Label need_incremental; |
4101 Label need_incremental_pop_scratch; | 4097 Label need_incremental_pop_scratch; |
4102 | 4098 |
4103 Register mem_chunk = regs_.scratch0(); | 4099 Register mem_chunk = regs_.scratch0(); |
4104 Register counter = regs_.scratch1(); | 4100 Register counter = regs_.scratch1(); |
4105 __ Bic(mem_chunk, regs_.object(), Page::kPageAlignmentMask); | 4101 __ Bic(mem_chunk, regs_.object(), Page::kPageAlignmentMask); |
4106 __ Ldr(counter, | 4102 __ Ldr(counter, |
4107 MemOperand(mem_chunk, MemoryChunk::kWriteBarrierCounterOffset)); | 4103 MemOperand(mem_chunk, MemoryChunk::kWriteBarrierCounterOffset)); |
4108 __ Subs(counter, counter, 1); | 4104 __ Subs(counter, counter, 1); |
4109 __ Str(counter, | 4105 __ Str(counter, |
4110 MemOperand(mem_chunk, MemoryChunk::kWriteBarrierCounterOffset)); | 4106 MemOperand(mem_chunk, MemoryChunk::kWriteBarrierCounterOffset)); |
4111 __ B(mi, &need_incremental); | 4107 __ B(mi, &need_incremental); |
4112 | 4108 |
4113 // If the object is not black we don't have to inform the incremental marker. | 4109 // If the object is not black we don't have to inform the incremental marker. |
4114 __ JumpIfBlack(regs_.object(), regs_.scratch0(), regs_.scratch1(), &on_black); | 4110 __ JumpIfBlack(regs_.object(), regs_.scratch0(), regs_.scratch1(), &on_black); |
4115 | 4111 |
4116 regs_.Restore(masm); // Restore the extra scratch registers we used. | 4112 regs_.Restore(masm); // Restore the extra scratch registers we used. |
4117 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) { | 4113 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) { |
4118 __ RememberedSetHelper(object_, | 4114 __ RememberedSetHelper(object(), address(), |
4119 address_, | 4115 value(), // scratch1 |
4120 value_, // scratch1 | 4116 save_fp_regs_mode(), MacroAssembler::kReturnAtEnd); |
4121 save_fp_regs_mode_, | |
4122 MacroAssembler::kReturnAtEnd); | |
4123 } else { | 4117 } else { |
4124 __ Ret(); | 4118 __ Ret(); |
4125 } | 4119 } |
4126 | 4120 |
4127 __ Bind(&on_black); | 4121 __ Bind(&on_black); |
4128 // Get the value from the slot. | 4122 // Get the value from the slot. |
4129 Register value = regs_.scratch0(); | 4123 Register val = regs_.scratch0(); |
4130 __ Ldr(value, MemOperand(regs_.address())); | 4124 __ Ldr(val, MemOperand(regs_.address())); |
4131 | 4125 |
4132 if (mode == INCREMENTAL_COMPACTION) { | 4126 if (mode == INCREMENTAL_COMPACTION) { |
4133 Label ensure_not_white; | 4127 Label ensure_not_white; |
4134 | 4128 |
4135 __ CheckPageFlagClear(value, | 4129 __ CheckPageFlagClear(val, regs_.scratch1(), |
4136 regs_.scratch1(), | |
4137 MemoryChunk::kEvacuationCandidateMask, | 4130 MemoryChunk::kEvacuationCandidateMask, |
4138 &ensure_not_white); | 4131 &ensure_not_white); |
4139 | 4132 |
4140 __ CheckPageFlagClear(regs_.object(), | 4133 __ CheckPageFlagClear(regs_.object(), |
4141 regs_.scratch1(), | 4134 regs_.scratch1(), |
4142 MemoryChunk::kSkipEvacuationSlotsRecordingMask, | 4135 MemoryChunk::kSkipEvacuationSlotsRecordingMask, |
4143 &need_incremental); | 4136 &need_incremental); |
4144 | 4137 |
4145 __ Bind(&ensure_not_white); | 4138 __ Bind(&ensure_not_white); |
4146 } | 4139 } |
4147 | 4140 |
4148 // We need extra registers for this, so we push the object and the address | 4141 // We need extra registers for this, so we push the object and the address |
4149 // register temporarily. | 4142 // register temporarily. |
4150 __ Push(regs_.address(), regs_.object()); | 4143 __ Push(regs_.address(), regs_.object()); |
4151 __ EnsureNotWhite(value, | 4144 __ EnsureNotWhite(val, |
4152 regs_.scratch1(), // Scratch. | 4145 regs_.scratch1(), // Scratch. |
4153 regs_.object(), // Scratch. | 4146 regs_.object(), // Scratch. |
4154 regs_.address(), // Scratch. | 4147 regs_.address(), // Scratch. |
4155 regs_.scratch2(), // Scratch. | 4148 regs_.scratch2(), // Scratch. |
4156 &need_incremental_pop_scratch); | 4149 &need_incremental_pop_scratch); |
4157 __ Pop(regs_.object(), regs_.address()); | 4150 __ Pop(regs_.object(), regs_.address()); |
4158 | 4151 |
4159 regs_.Restore(masm); // Restore the extra scratch registers we used. | 4152 regs_.Restore(masm); // Restore the extra scratch registers we used. |
4160 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) { | 4153 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) { |
4161 __ RememberedSetHelper(object_, | 4154 __ RememberedSetHelper(object(), address(), |
4162 address_, | 4155 value(), // scratch1 |
4163 value_, // scratch1 | 4156 save_fp_regs_mode(), MacroAssembler::kReturnAtEnd); |
4164 save_fp_regs_mode_, | |
4165 MacroAssembler::kReturnAtEnd); | |
4166 } else { | 4157 } else { |
4167 __ Ret(); | 4158 __ Ret(); |
4168 } | 4159 } |
4169 | 4160 |
4170 __ Bind(&need_incremental_pop_scratch); | 4161 __ Bind(&need_incremental_pop_scratch); |
4171 __ Pop(regs_.object(), regs_.address()); | 4162 __ Pop(regs_.object(), regs_.address()); |
4172 | 4163 |
4173 __ Bind(&need_incremental); | 4164 __ Bind(&need_incremental); |
4174 // Fall through when we need to inform the incremental marker. | 4165 // Fall through when we need to inform the incremental marker. |
4175 } | 4166 } |
4176 | 4167 |
4177 | 4168 |
4178 void RecordWriteStub::Generate(MacroAssembler* masm) { | 4169 void RecordWriteStub::Generate(MacroAssembler* masm) { |
4179 Label skip_to_incremental_noncompacting; | 4170 Label skip_to_incremental_noncompacting; |
4180 Label skip_to_incremental_compacting; | 4171 Label skip_to_incremental_compacting; |
4181 | 4172 |
4182 // We patch these two first instructions back and forth between a nop and | 4173 // We patch these two first instructions back and forth between a nop and |
4183 // real branch when we start and stop incremental heap marking. | 4174 // real branch when we start and stop incremental heap marking. |
4184 // Initially the stub is expected to be in STORE_BUFFER_ONLY mode, so 2 nops | 4175 // Initially the stub is expected to be in STORE_BUFFER_ONLY mode, so 2 nops |
4185 // are generated. | 4176 // are generated. |
4186 // See RecordWriteStub::Patch for details. | 4177 // See RecordWriteStub::Patch for details. |
4187 { | 4178 { |
4188 InstructionAccurateScope scope(masm, 2); | 4179 InstructionAccurateScope scope(masm, 2); |
4189 __ adr(xzr, &skip_to_incremental_noncompacting); | 4180 __ adr(xzr, &skip_to_incremental_noncompacting); |
4190 __ adr(xzr, &skip_to_incremental_compacting); | 4181 __ adr(xzr, &skip_to_incremental_compacting); |
4191 } | 4182 } |
4192 | 4183 |
4193 if (remembered_set_action_ == EMIT_REMEMBERED_SET) { | 4184 if (remembered_set_action() == EMIT_REMEMBERED_SET) { |
4194 __ RememberedSetHelper(object_, | 4185 __ RememberedSetHelper(object(), address(), |
4195 address_, | 4186 value(), // scratch1 |
4196 value_, // scratch1 | 4187 save_fp_regs_mode(), MacroAssembler::kReturnAtEnd); |
4197 save_fp_regs_mode_, | |
4198 MacroAssembler::kReturnAtEnd); | |
4199 } | 4188 } |
4200 __ Ret(); | 4189 __ Ret(); |
4201 | 4190 |
4202 __ Bind(&skip_to_incremental_noncompacting); | 4191 __ Bind(&skip_to_incremental_noncompacting); |
4203 GenerateIncremental(masm, INCREMENTAL); | 4192 GenerateIncremental(masm, INCREMENTAL); |
4204 | 4193 |
4205 __ Bind(&skip_to_incremental_compacting); | 4194 __ Bind(&skip_to_incremental_compacting); |
4206 GenerateIncremental(masm, INCREMENTAL_COMPACTION); | 4195 GenerateIncremental(masm, INCREMENTAL_COMPACTION); |
4207 } | 4196 } |
4208 | 4197 |
(...skipping 414 matching lines...) |
4623 __ Ldr(entry_key, FieldMemOperand(index, kElementsStartOffset)); | 4612 __ Ldr(entry_key, FieldMemOperand(index, kElementsStartOffset)); |
4624 | 4613 |
4625 // Having undefined at this place means the name is not contained. | 4614 // Having undefined at this place means the name is not contained. |
4626 __ Cmp(entry_key, undefined); | 4615 __ Cmp(entry_key, undefined); |
4627 __ B(eq, &not_in_dictionary); | 4616 __ B(eq, &not_in_dictionary); |
4628 | 4617 |
4629 // Stop if found the property. | 4618 // Stop if found the property. |
4630 __ Cmp(entry_key, key); | 4619 __ Cmp(entry_key, key); |
4631 __ B(eq, &in_dictionary); | 4620 __ B(eq, &in_dictionary); |
4632 | 4621 |
4633 if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) { | 4622 if (i != kTotalProbes - 1 && mode() == NEGATIVE_LOOKUP) { |
4634 // Check if the entry name is not a unique name. | 4623 // Check if the entry name is not a unique name. |
4635 __ Ldr(entry_key, FieldMemOperand(entry_key, HeapObject::kMapOffset)); | 4624 __ Ldr(entry_key, FieldMemOperand(entry_key, HeapObject::kMapOffset)); |
4636 __ Ldrb(entry_key, FieldMemOperand(entry_key, Map::kInstanceTypeOffset)); | 4625 __ Ldrb(entry_key, FieldMemOperand(entry_key, Map::kInstanceTypeOffset)); |
4637 __ JumpIfNotUniqueName(entry_key, &maybe_in_dictionary); | 4626 __ JumpIfNotUniqueName(entry_key, &maybe_in_dictionary); |
4638 } | 4627 } |
4639 } | 4628 } |
4640 | 4629 |
4641 __ Bind(&maybe_in_dictionary); | 4630 __ Bind(&maybe_in_dictionary); |
4642 // If we are doing negative lookup then probing failure should be | 4631 // If we are doing negative lookup then probing failure should be |
4643 // treated as a lookup success. For positive lookup, probing failure | 4632 // treated as a lookup success. For positive lookup, probing failure |
4644 // should be treated as lookup failure. | 4633 // should be treated as lookup failure. |
4645 if (mode_ == POSITIVE_LOOKUP) { | 4634 if (mode() == POSITIVE_LOOKUP) { |
4646 __ Mov(result, 0); | 4635 __ Mov(result, 0); |
4647 __ Ret(); | 4636 __ Ret(); |
4648 } | 4637 } |
4649 | 4638 |
4650 __ Bind(&in_dictionary); | 4639 __ Bind(&in_dictionary); |
4651 __ Mov(result, 1); | 4640 __ Mov(result, 1); |
4652 __ Ret(); | 4641 __ Ret(); |
4653 | 4642 |
4654 __ Bind(&not_in_dictionary); | 4643 __ Bind(&not_in_dictionary); |
4655 __ Mov(result, 0); | 4644 __ Mov(result, 0); |
(...skipping 474 matching lines...) |
5130 MemOperand(fp, 6 * kPointerSize), | 5119 MemOperand(fp, 6 * kPointerSize), |
5131 NULL); | 5120 NULL); |
5132 } | 5121 } |
5133 | 5122 |
5134 | 5123 |
5135 #undef __ | 5124 #undef __ |
5136 | 5125 |
5137 } } // namespace v8::internal | 5126 } } // namespace v8::internal |
5138 | 5127 |
5139 #endif // V8_TARGET_ARCH_ARM64 | 5128 #endif // V8_TARGET_ARCH_ARM64 |
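
Note on the recurring change in this diff: direct member reads such as remembered_set_action_, save_fp_regs_mode_, object_, and mode_ are replaced by accessor calls (remembered_set_action(), save_fp_regs_mode(), object(), mode()), consistent with stub state being packed into a single key that can be round-tripped through state(), as in the BinaryOpWithAllocationSiteStub construction near the top of this hunk. The sketch below is a minimal, self-contained illustration of that pattern only; the BitFieldSketch helper, the class name, and the bit layout are assumptions for illustration, not code from this CL.

// Minimal sketch (not V8's actual code): stub state packed into one key,
// read back through accessors instead of separate member fields.
#include <cassert>
#include <cstdint>

enum RememberedSetAction { EMIT_REMEMBERED_SET, OMIT_REMEMBERED_SET };
enum SaveFPRegsMode { kDontSaveFPRegs, kSaveFPRegs };

// Hypothetical bit-field helper in the spirit of a BitField template:
// encodes a value into a shifted bit range and decodes it back out.
template <typename T, int kShift, int kSize>
struct BitFieldSketch {
  static const uint32_t kMask = ((1u << kSize) - 1) << kShift;
  static uint32_t encode(T value) {
    return static_cast<uint32_t>(value) << kShift;
  }
  static T decode(uint32_t key) { return static_cast<T>((key & kMask) >> kShift); }
};

class RecordWriteStubSketch {
 public:
  RecordWriteStubSketch(RememberedSetAction action, SaveFPRegsMode fp_mode)
      : minor_key_(RememberedSetActionBits::encode(action) |
                   SaveFPRegsModeBits::encode(fp_mode)) {}

  // Accessors decode on demand, so there are no per-field members
  // to keep in sync with the key.
  RememberedSetAction remembered_set_action() const {
    return RememberedSetActionBits::decode(minor_key_);
  }
  SaveFPRegsMode save_fp_regs_mode() const {
    return SaveFPRegsModeBits::decode(minor_key_);
  }

 private:
  typedef BitFieldSketch<RememberedSetAction, 0, 1> RememberedSetActionBits;
  typedef BitFieldSketch<SaveFPRegsMode, 1, 1> SaveFPRegsModeBits;
  uint32_t minor_key_;  // All stub state lives in this one key.
};

int main() {
  RecordWriteStubSketch stub(EMIT_REMEMBERED_SET, kSaveFPRegs);
  assert(stub.remembered_set_action() == EMIT_REMEMBERED_SET);
  assert(stub.save_fp_regs_mode() == kSaveFPRegs);
  return 0;
}

Storing everything in one key means a stub can be reconstructed from that key alone, which is what TailCallStub-style re-instantiation from state() relies on.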