Diff: src/arm/code-stubs-arm.cc

Issue 535733004: Minor-key-ify remaining code stubs. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 6 years, 3 months ago
  // Copyright 2012 the V8 project authors. All rights reserved.
  // Use of this source code is governed by a BSD-style license that can be
  // found in the LICENSE file.

  #include "src/v8.h"

  #if V8_TARGET_ARCH_ARM

  #include "src/base/bits.h"
  #include "src/bootstrapper.h"
(...skipping 128 matching lines...)
        __ push(descriptor->GetEnvironmentParameterRegister(i));
      }
      ExternalReference miss = descriptor->miss_handler();
      __ CallExternalReference(miss, param_count);
    }

    __ Ret();
  }

- // Takes a Smi and converts to an IEEE 64 bit floating point value in two
- // registers. The format is 1 sign bit, 11 exponent bits (biased 1023) and
- // 52 fraction bits (20 in the first word, 32 in the second). Zeros is a
- // scratch register. Destroys the source register. No GC occurs during this
- // stub so you don't have to set up the frame.
- class ConvertToDoubleStub : public PlatformCodeStub {
-  public:
-   ConvertToDoubleStub(Isolate* isolate,
-                       Register result_reg_1,
-                       Register result_reg_2,
-                       Register source_reg,
-                       Register scratch_reg)
-       : PlatformCodeStub(isolate),
-         result1_(result_reg_1),
-         result2_(result_reg_2),
-         source_(source_reg),
-         zeros_(scratch_reg) { }
-
-  private:
-   Register result1_;
-   Register result2_;
-   Register source_;
-   Register zeros_;
-
-   // Minor key encoding in 16 bits.
-   class ModeBits: public BitField<OverwriteMode, 0, 2> {};
-   class OpBits: public BitField<Token::Value, 2, 14> {};
-
-   Major MajorKey() const { return ConvertToDouble; }
-   uint32_t MinorKey() const {
-     // Encode the parameters in a unique 16 bit value.
-     return result1_.code() +
-            (result2_.code() << 4) +
-            (source_.code() << 8) +
-            (zeros_.code() << 12);
-   }
-
-   void Generate(MacroAssembler* masm);
- };
-
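The removed stub's MinorKey() above packs its four ARM register codes into a 16-bit value, four bits apiece; addition and bitwise-or are interchangeable there because the 4-bit slices never overlap. A standalone sketch of that packing and the reverse decode, with illustrative helper names that are not part of V8:

    #include <cassert>
    #include <cstdint>

    // Illustrative only: pack four 4-bit ARM register codes (r0-r15) into a
    // 16-bit minor key, mirroring ConvertToDoubleStub::MinorKey() above.
    static uint32_t PackMinorKey(int result1, int result2, int source, int zeros) {
      assert(result1 < 16 && result2 < 16 && source < 16 && zeros < 16);
      return result1 | (result2 << 4) | (source << 8) | (zeros << 12);
    }

    // The inverse: recover each register code from its 4-bit slice.
    static void UnpackMinorKey(uint32_t key, int* result1, int* result2,
                               int* source, int* zeros) {
      *result1 = key & 0xF;
      *result2 = (key >> 4) & 0xF;
      *source  = (key >> 8) & 0xF;
      *zeros   = (key >> 12) & 0xF;
    }

Because the minor key uniquely identifies the register assignment, two instantiations with the same registers share one generated stub in the code cache.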
-
- void ConvertToDoubleStub::Generate(MacroAssembler* masm) {
-   Register exponent = result1_;
-   Register mantissa = result2_;
-
-   Label not_special;
-   __ SmiUntag(source_);
-   // Move sign bit from source to destination. This works because the sign bit
-   // in the exponent word of the double has the same position and polarity as
-   // the 2's complement sign bit in a Smi.
-   STATIC_ASSERT(HeapNumber::kSignMask == 0x80000000u);
-   __ and_(exponent, source_, Operand(HeapNumber::kSignMask), SetCC);
-   // Subtract from 0 if source was negative.
-   __ rsb(source_, source_, Operand::Zero(), LeaveCC, ne);
-
-   // We have -1, 0 or 1, which we treat specially. Register source_ contains
-   // absolute value: it is either equal to 1 (special case of -1 and 1),
-   // greater than 1 (not a special case) or less than 1 (special case of 0).
-   __ cmp(source_, Operand(1));
-   __ b(gt, &not_special);
-
-   // For 1 or -1 we need to or in the 0 exponent (biased to 1023).
-   const uint32_t exponent_word_for_1 =
-       HeapNumber::kExponentBias << HeapNumber::kExponentShift;
-   __ orr(exponent, exponent, Operand(exponent_word_for_1), LeaveCC, eq);
-   // 1, 0 and -1 all have 0 for the second word.
-   __ mov(mantissa, Operand::Zero());
-   __ Ret();
-
-   __ bind(&not_special);
-   __ clz(zeros_, source_);
-   // Compute exponent and or it into the exponent register.
-   // We use mantissa as a scratch register here. Use a fudge factor to
-   // divide the constant 31 + HeapNumber::kExponentBias, 0x41d, into two parts
-   // that fit in the ARM's constant field.
-   int fudge = 0x400;
-   __ rsb(mantissa, zeros_, Operand(31 + HeapNumber::kExponentBias - fudge));
-   __ add(mantissa, mantissa, Operand(fudge));
-   __ orr(exponent,
-          exponent,
-          Operand(mantissa, LSL, HeapNumber::kExponentShift));
-   // Shift up the source chopping the top bit off.
-   __ add(zeros_, zeros_, Operand(1));
-   // This wouldn't work for 1.0 or -1.0 as the shift would be 32 which means 0.
-   __ mov(source_, Operand(source_, LSL, zeros_));
-   // Compute lower part of fraction (last 12 bits).
-   __ mov(mantissa, Operand(source_, LSL, HeapNumber::kMantissaBitsInTopWord));
-   // And the top (top 20 bits).
-   __ orr(exponent,
-          exponent,
-          Operand(source_, LSR, 32 - HeapNumber::kMantissaBitsInTopWord));
-   __ Ret();
- }
-
-
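For orientation, the two words the removed stub produced are simply the halves of the IEEE 754 bit pattern: one sign bit, 11 biased exponent bits and the top 20 mantissa bits in the high word, the remaining 32 mantissa bits in the low word. A host-side sketch that computes the same split for an untagged Smi value (an int32) by letting the compiler do the conversion; a minimal illustration, not V8 code:

    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    // Illustrative only: the two 32-bit words of the IEEE 754 double that
    // represents an int32, i.e. the result ConvertToDoubleStub computed with
    // clz and shifts on ARM.
    static void Int32ToDoubleWords(int32_t value, uint32_t* hi, uint32_t* lo) {
      double d = static_cast<double>(value);
      uint64_t bits;
      std::memcpy(&bits, &d, sizeof(bits));     // type-pun safely via memcpy
      *hi = static_cast<uint32_t>(bits >> 32);  // sign, exponent, top 20 mantissa bits
      *lo = static_cast<uint32_t>(bits);        // low 32 mantissa bits
    }

    int main() {
      uint32_t hi, lo;
      Int32ToDoubleWords(1, &hi, &lo);    // hi == 0x3ff00000 (biased exponent 1023), lo == 0
      std::printf("%08x %08x\n", hi, lo);
      Int32ToDoubleWords(-5, &hi, &lo);   // hi == 0xc0140000 (sign bit set), lo == 0
      std::printf("%08x %08x\n", hi, lo);
      return 0;
    }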
  void DoubleToIStub::Generate(MacroAssembler* masm) {
    Label out_of_range, only_low, negate, done;
    Register input_reg = source();
    Register result_reg = destination();
    DCHECK(is_truncating());

    int double_offset = offset();
    // Account for saved regs if input is sp.
    if (input_reg.is(sp)) double_offset += 3 * kPointerSize;

(...skipping 101 matching lines...)
  }


  // See comment for class.
  void WriteInt32ToHeapNumberStub::Generate(MacroAssembler* masm) {
    Label max_negative_int;
    // the_int_ has the answer which is a signed int32 but not a Smi.
    // We test for the special value that has a different exponent. This test
    // has the neat side effect of setting the flags according to the sign.
    STATIC_ASSERT(HeapNumber::kSignMask == 0x80000000u);
-   __ cmp(the_int_, Operand(0x80000000u));
+   __ cmp(the_int(), Operand(0x80000000u));
    __ b(eq, &max_negative_int);
    // Set up the correct exponent in scratch_. All non-Smi int32s have the same.
    // A non-Smi integer is 1.xxx * 2^30 so the exponent is 30 (biased).
    uint32_t non_smi_exponent =
        (HeapNumber::kExponentBias + 30) << HeapNumber::kExponentShift;
-   __ mov(scratch_, Operand(non_smi_exponent));
+   __ mov(scratch(), Operand(non_smi_exponent));
    // Set the sign bit in scratch_ if the value was negative.
-   __ orr(scratch_, scratch_, Operand(HeapNumber::kSignMask), LeaveCC, cs);
+   __ orr(scratch(), scratch(), Operand(HeapNumber::kSignMask), LeaveCC, cs);
    // Subtract from 0 if the value was negative.
-   __ rsb(the_int_, the_int_, Operand::Zero(), LeaveCC, cs);
+   __ rsb(the_int(), the_int(), Operand::Zero(), LeaveCC, cs);
    // We should be masking the implict first digit of the mantissa away here,
    // but it just ends up combining harmlessly with the last digit of the
    // exponent that happens to be 1. The sign bit is 0 so we shift 10 to get
    // the most significant 1 to hit the last bit of the 12 bit sign and exponent.
    DCHECK(((1 << HeapNumber::kExponentShift) & non_smi_exponent) != 0);
    const int shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 2;
-   __ orr(scratch_, scratch_, Operand(the_int_, LSR, shift_distance));
-   __ str(scratch_, FieldMemOperand(the_heap_number_,
-                                    HeapNumber::kExponentOffset));
-   __ mov(scratch_, Operand(the_int_, LSL, 32 - shift_distance));
-   __ str(scratch_, FieldMemOperand(the_heap_number_,
-                                    HeapNumber::kMantissaOffset));
+   __ orr(scratch(), scratch(), Operand(the_int(), LSR, shift_distance));
+   __ str(scratch(),
+          FieldMemOperand(the_heap_number(), HeapNumber::kExponentOffset));
+   __ mov(scratch(), Operand(the_int(), LSL, 32 - shift_distance));
+   __ str(scratch(),
+          FieldMemOperand(the_heap_number(), HeapNumber::kMantissaOffset));
    __ Ret();

    __ bind(&max_negative_int);
    // The max negative int32 is stored as a positive number in the mantissa of
    // a double because it uses a sign bit instead of using two's complement.
    // The actual mantissa bits stored are all 0 because the implicit most
    // significant 1 bit is not stored.
    non_smi_exponent += 1 << HeapNumber::kExponentShift;
    __ mov(ip, Operand(HeapNumber::kSignMask | non_smi_exponent));
-   __ str(ip, FieldMemOperand(the_heap_number_, HeapNumber::kExponentOffset));
+   __ str(ip, FieldMemOperand(the_heap_number(), HeapNumber::kExponentOffset));
    __ mov(ip, Operand::Zero());
-   __ str(ip, FieldMemOperand(the_heap_number_, HeapNumber::kMantissaOffset));
+   __ str(ip, FieldMemOperand(the_heap_number(), HeapNumber::kMantissaOffset));
    __ Ret();
  }


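This hunk shows the pattern the CL applies throughout: per-stub data members (the_int_, scratch_, the_heap_number_, and later mode_, save_fp_regs_mode_, ...) are replaced by accessors that decode the stub's 32-bit minor key on demand. A rough, self-contained sketch of the idea, with a BitField helper in the spirit of src/utils.h and hypothetical field names rather than the actual V8 declarations:

    #include <cstdint>

    // Simplified BitField-style helper: a <shift, size> slice of a 32-bit key.
    template <class T, int shift, int size>
    struct BitField {
      static const uint32_t kMask = ((1u << size) - 1) << shift;
      static uint32_t encode(T value) { return static_cast<uint32_t>(value) << shift; }
      static T decode(uint32_t key) { return static_cast<T>((key & kMask) >> shift); }
    };

    enum SaveFPRegsMode { kDontSaveFPRegs, kSaveFPRegs };

    // Hypothetical stub: all configuration lives in the minor key and is
    // decoded by accessors instead of being stored in data members.
    class ExampleStub {
     public:
      ExampleStub(int object_reg_code, SaveFPRegsMode fp_mode)
          : minor_key_(ObjectBits::encode(object_reg_code) |
                       SaveFPRegsModeBits::encode(fp_mode)) {}

      int object_reg_code() const { return ObjectBits::decode(minor_key_); }
      SaveFPRegsMode save_fp_regs_mode() const {
        return SaveFPRegsModeBits::decode(minor_key_);
      }

     private:
      class ObjectBits : public BitField<int, 0, 4> {};
      class SaveFPRegsModeBits : public BitField<SaveFPRegsMode, 4, 1> {};

      uint32_t minor_key_;
    };

The real stubs define their own BitField layouts, so the widths and positions above are illustrative only; the point is that the minor key alone now fully describes the generated code.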
  // Handle the case where the lhs and rhs are the same object.
  // Equality is almost reflexive (everything but NaN), so this is a test
  // for "identity and not NaN".
  static void EmitIdenticalObjectComparison(MacroAssembler* masm,
                                            Label* slow,
                                            Condition cond) {
(...skipping 3591 matching lines...)
      __ ldr(entry_key, FieldMemOperand(index, kElementsStartOffset));

      // Having undefined at this place means the name is not contained.
      __ cmp(entry_key, Operand(undefined));
      __ b(eq, &not_in_dictionary);

      // Stop if found the property.
      __ cmp(entry_key, Operand(key));
      __ b(eq, &in_dictionary);

-     if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) {
+     if (i != kTotalProbes - 1 && mode() == NEGATIVE_LOOKUP) {
        // Check if the entry name is not a unique name.
        __ ldr(entry_key, FieldMemOperand(entry_key, HeapObject::kMapOffset));
        __ ldrb(entry_key,
                FieldMemOperand(entry_key, Map::kInstanceTypeOffset));
        __ JumpIfNotUniqueName(entry_key, &maybe_in_dictionary);
      }
    }

    __ bind(&maybe_in_dictionary);
    // If we are doing negative lookup then probing failure should be
    // treated as a lookup success. For positive lookup probing failure
    // should be treated as lookup failure.
-   if (mode_ == POSITIVE_LOOKUP) {
+   if (mode() == POSITIVE_LOOKUP) {
      __ mov(result, Operand::Zero());
      __ Ret();
    }

    __ bind(&in_dictionary);
    __ mov(result, Operand(1));
    __ Ret();

    __ bind(&not_in_dictionary);
    __ mov(result, Operand::Zero());
(...skipping 25 matching lines...)
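The maybe_in_dictionary case above encodes a mode-dependent convention: once the inline probes give up, a NEGATIVE_LOOKUP stub reports the name as (possibly) present, while a POSITIVE_LOOKUP stub reports failure. A tiny host-side restatement of that decision (a hypothetical helper, not V8 code):

    // Illustrative only: how the stub's result is chosen once probing stops,
    // mirroring the maybe_in_dictionary handling above.
    enum LookupMode { POSITIVE_LOOKUP, NEGATIVE_LOOKUP };
    enum ProbeOutcome { FOUND_KEY, FOUND_UNDEFINED, GAVE_UP };

    static bool ReportFound(LookupMode mode, ProbeOutcome outcome) {
      switch (outcome) {
        case FOUND_KEY:       return true;   // the key is in the dictionary
        case FOUND_UNDEFINED: return false;  // a hole proves the key is absent
        case GAVE_UP:
          // Out of inline probes: a negative lookup cannot prove absence, so it
          // conservatively reports "found"; a positive lookup reports failure.
          return mode == NEGATIVE_LOOKUP;
      }
      return false;
    }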
    // real branch when we start and stop incremental heap marking.
    // See RecordWriteStub::Patch for details.
    {
      // Block literal pool emission, as the position of these two instructions
      // is assumed by the patching code.
      Assembler::BlockConstPoolScope block_const_pool(masm);
      __ b(&skip_to_incremental_noncompacting);
      __ b(&skip_to_incremental_compacting);
    }

-   if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
-     __ RememberedSetHelper(object_,
-                            address_,
-                            value_,
-                            save_fp_regs_mode_,
-                            MacroAssembler::kReturnAtEnd);
+   if (remembered_set_action() == EMIT_REMEMBERED_SET) {
+     __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
+                            MacroAssembler::kReturnAtEnd);
    }
    __ Ret();

    __ bind(&skip_to_incremental_noncompacting);
    GenerateIncremental(masm, INCREMENTAL);

    __ bind(&skip_to_incremental_compacting);
    GenerateIncremental(masm, INCREMENTAL_COMPACTION);

    // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
    // Will be checked in IncrementalMarking::ActivateGeneratedStub.
    DCHECK(Assembler::GetBranchOffset(masm->instr_at(0)) < (1 << 12));
    DCHECK(Assembler::GetBranchOffset(masm->instr_at(4)) < (1 << 12));
    PatchBranchIntoNop(masm, 0);
    PatchBranchIntoNop(masm, Assembler::kInstrSize);
  }


  void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
    regs_.Save(masm);

-   if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
+   if (remembered_set_action() == EMIT_REMEMBERED_SET) {
      Label dont_need_remembered_set;

      __ ldr(regs_.scratch0(), MemOperand(regs_.address(), 0));
      __ JumpIfNotInNewSpace(regs_.scratch0(),  // Value.
                             regs_.scratch0(),
                             &dont_need_remembered_set);

      __ CheckPageFlag(regs_.object(),
                       regs_.scratch0(),
                       1 << MemoryChunk::SCAN_ON_SCAVENGE,
                       ne,
                       &dont_need_remembered_set);

      // First notify the incremental marker if necessary, then update the
      // remembered set.
      CheckNeedsToInformIncrementalMarker(
          masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode);
      InformIncrementalMarker(masm);
      regs_.Restore(masm);
-     __ RememberedSetHelper(object_,
-                            address_,
-                            value_,
-                            save_fp_regs_mode_,
-                            MacroAssembler::kReturnAtEnd);
+     __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
+                            MacroAssembler::kReturnAtEnd);

      __ bind(&dont_need_remembered_set);
    }

    CheckNeedsToInformIncrementalMarker(
        masm, kReturnOnNoNeedToInformIncrementalMarker, mode);
    InformIncrementalMarker(masm);
    regs_.Restore(masm);
    __ Ret();
  }


  void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
-   regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_);
+   regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode());
    int argument_count = 3;
    __ PrepareCallCFunction(argument_count, regs_.scratch0());
    Register address =
        r0.is(regs_.address()) ? regs_.scratch0() : regs_.address();
    DCHECK(!address.is(regs_.object()));
    DCHECK(!address.is(r0));
    __ Move(address, regs_.address());
    __ Move(r0, regs_.object());
    __ Move(r1, address);
    __ mov(r2, Operand(ExternalReference::isolate_address(isolate())));

    AllowExternalCallThatCantCauseGC scope(masm);
    __ CallCFunction(
        ExternalReference::incremental_marking_record_write_function(isolate()),
        argument_count);
-   regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);
+   regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode());
  }


  void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
      MacroAssembler* masm,
      OnNoNeedToInformIncrementalMarker on_no_need,
      Mode mode) {
    Label on_black;
    Label need_incremental;
    Label need_incremental_pop_scratch;

    __ and_(regs_.scratch0(), regs_.object(), Operand(~Page::kPageAlignmentMask));
    __ ldr(regs_.scratch1(),
           MemOperand(regs_.scratch0(),
                      MemoryChunk::kWriteBarrierCounterOffset));
    __ sub(regs_.scratch1(), regs_.scratch1(), Operand(1), SetCC);
    __ str(regs_.scratch1(),
           MemOperand(regs_.scratch0(),
                      MemoryChunk::kWriteBarrierCounterOffset));
    __ b(mi, &need_incremental);

    // Let's look at the color of the object: If it is not black we don't have
    // to inform the incremental marker.
    __ JumpIfBlack(regs_.object(), regs_.scratch0(), regs_.scratch1(), &on_black);

    regs_.Restore(masm);
    if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
-     __ RememberedSetHelper(object_,
-                            address_,
-                            value_,
-                            save_fp_regs_mode_,
-                            MacroAssembler::kReturnAtEnd);
+     __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
+                            MacroAssembler::kReturnAtEnd);
    } else {
      __ Ret();
    }

    __ bind(&on_black);

    // Get the value from the slot.
    __ ldr(regs_.scratch0(), MemOperand(regs_.address(), 0));

(...skipping 20 matching lines...)
    __ Push(regs_.object(), regs_.address());
    __ EnsureNotWhite(regs_.scratch0(),  // The value.
                      regs_.scratch1(),  // Scratch.
                      regs_.object(),    // Scratch.
                      regs_.address(),   // Scratch.
                      &need_incremental_pop_scratch);
    __ Pop(regs_.object(), regs_.address());

    regs_.Restore(masm);
    if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
-     __ RememberedSetHelper(object_,
-                            address_,
-                            value_,
-                            save_fp_regs_mode_,
-                            MacroAssembler::kReturnAtEnd);
+     __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
+                            MacroAssembler::kReturnAtEnd);
    } else {
      __ Ret();
    }

    __ bind(&need_incremental_pop_scratch);
    __ Pop(regs_.object(), regs_.address());

    __ bind(&need_incremental);

(...skipping 597 matching lines...)
                             MemOperand(fp, 6 * kPointerSize),
                             NULL);
  }


  #undef __

  } }  // namespace v8::internal

  #endif  // V8_TARGET_ARCH_ARM