Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(15)

Side by Side Diff: src/mips/code-stubs-mips.cc

Issue 536193002: MIPS: Minor-key-ify remaining code stubs. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 6 years, 3 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/mips/code-stubs-mips.h ('k') | src/mips64/code-stubs-mips64.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/v8.h" 5 #include "src/v8.h"
6 6
7 #if V8_TARGET_ARCH_MIPS 7 #if V8_TARGET_ARCH_MIPS
8 8
9 #include "src/base/bits.h" 9 #include "src/base/bits.h"
10 #include "src/bootstrapper.h" 10 #include "src/bootstrapper.h"
(...skipping 127 matching lines...) Expand 10 before | Expand all | Expand 10 after
138 MemOperand(sp, (param_count-1-i) * kPointerSize)); 138 MemOperand(sp, (param_count-1-i) * kPointerSize));
139 } 139 }
140 ExternalReference miss = descriptor->miss_handler(); 140 ExternalReference miss = descriptor->miss_handler();
141 __ CallExternalReference(miss, param_count); 141 __ CallExternalReference(miss, param_count);
142 } 142 }
143 143
144 __ Ret(); 144 __ Ret();
145 } 145 }
146 146
147 147
148 // Takes a Smi and converts to an IEEE 64 bit floating point value in two
149 // registers. The format is 1 sign bit, 11 exponent bits (biased 1023) and
150 // 52 fraction bits (20 in the first word, 32 in the second). Zeros is a
151 // scratch register. Destroys the source register. No GC occurs during this
152 // stub so you don't have to set up the frame.
153 class ConvertToDoubleStub : public PlatformCodeStub {
154 public:
155 ConvertToDoubleStub(Isolate* isolate,
156 Register result_reg_1,
157 Register result_reg_2,
158 Register source_reg,
159 Register scratch_reg)
160 : PlatformCodeStub(isolate),
161 result1_(result_reg_1),
162 result2_(result_reg_2),
163 source_(source_reg),
164 zeros_(scratch_reg) { }
165
166 private:
167 Register result1_;
168 Register result2_;
169 Register source_;
170 Register zeros_;
171
172 // Minor key encoding in 16 bits.
173 class ModeBits: public BitField<OverwriteMode, 0, 2> {};
174 class OpBits: public BitField<Token::Value, 2, 14> {};
175
176 Major MajorKey() const { return ConvertToDouble; }
177 uint32_t MinorKey() const {
178 // Encode the parameters in a unique 16 bit value.
179 return result1_.code() +
180 (result2_.code() << 4) +
181 (source_.code() << 8) +
182 (zeros_.code() << 12);
183 }
184
185 void Generate(MacroAssembler* masm);
186 };
187
188
// Emits code that converts the Smi in source_ into an IEEE 754 double held
// in two GPRs (exponent word + mantissa word). Clobbers source_ and zeros_.
// Returns via a delay-slot Ret, so the final instruction after each
// __ Ret(USE_DELAY_SLOT) still executes.
void ConvertToDoubleStub::Generate(MacroAssembler* masm) {
  Register exponent, mantissa;
  // The exponent word is the high word of the double, so which result
  // register receives it depends on the target's endianness.
  if (kArchEndian == kLittle) {
    exponent = result1_;
    mantissa = result2_;
  } else {
    exponent = result2_;
    mantissa = result1_;
  }
  Label not_special;
  // Convert from Smi to integer.
  __ sra(source_, source_, kSmiTagSize);
  // Move sign bit from source to destination. This works because the sign bit
  // in the exponent word of the double has the same position and polarity as
  // the 2's complement sign bit in a Smi.
  STATIC_ASSERT(HeapNumber::kSignMask == 0x80000000u);
  __ And(exponent, source_, Operand(HeapNumber::kSignMask));
  // Subtract from 0 if source was negative.
  __ subu(at, zero_reg, source_);
  // Conditional move: take the negated value only when the sign bit was set,
  // leaving source_ holding the absolute value.
  __ Movn(source_, at, exponent);

  // We have -1, 0 or 1, which we treat specially. Register source_ contains
  // absolute value: it is either equal to 1 (special case of -1 and 1),
  // greater than 1 (not a special case) or less than 1 (special case of 0).
  __ Branch(&not_special, gt, source_, Operand(1));

  // For 1 or -1 we need to or in the 0 exponent (biased to 1023).
  const uint32_t exponent_word_for_1 =
      HeapNumber::kExponentBias << HeapNumber::kExponentShift;
  // Safe to use 'at' as dest reg here.
  __ Or(at, exponent, Operand(exponent_word_for_1));
  __ Movn(exponent, at, source_);  // Write exp when source not 0.
  // 1, 0 and -1 all have 0 for the second word.
  __ Ret(USE_DELAY_SLOT);
  __ mov(mantissa, zero_reg);  // Executes in the branch delay slot.

  __ bind(&not_special);
  // Count leading zeros.
  // Gets the wrong answer for 0, but we already checked for that case above.
  __ Clz(zeros_, source_);
  // Compute exponent and or it into the exponent register.
  // We use mantissa as a scratch register here.
  __ li(mantissa, Operand(31 + HeapNumber::kExponentBias));
  __ subu(mantissa, mantissa, zeros_);
  __ sll(mantissa, mantissa, HeapNumber::kExponentShift);
  __ Or(exponent, exponent, mantissa);

  // Shift up the source chopping the top bit off (the implicit leading 1 of
  // the mantissa is not stored).
  __ Addu(zeros_, zeros_, Operand(1));
  // This wouldn't work for 1.0 or -1.0 as the shift would be 32 which means 0.
  __ sllv(source_, source_, zeros_);
  // Compute lower part of fraction (last 12 bits).
  __ sll(mantissa, source_, HeapNumber::kMantissaBitsInTopWord);
  // And the top (top 20 bits).
  __ srl(source_, source_, 32 - HeapNumber::kMantissaBitsInTopWord);

  __ Ret(USE_DELAY_SLOT);
  __ or_(exponent, exponent, source_);  // Executes in the branch delay slot.
}
248
249
250 void DoubleToIStub::Generate(MacroAssembler* masm) { 148 void DoubleToIStub::Generate(MacroAssembler* masm) {
251 Label out_of_range, only_low, negate, done; 149 Label out_of_range, only_low, negate, done;
252 Register input_reg = source(); 150 Register input_reg = source();
253 Register result_reg = destination(); 151 Register result_reg = destination();
254 152
255 int double_offset = offset(); 153 int double_offset = offset();
256 // Account for saved regs if input is sp. 154 // Account for saved regs if input is sp.
257 if (input_reg.is(sp)) double_offset += 3 * kPointerSize; 155 if (input_reg.is(sp)) double_offset += 3 * kPointerSize;
258 156
259 Register scratch = 157 Register scratch =
(...skipping 136 matching lines...) Expand 10 before | Expand all | Expand 10 after
396 } 294 }
397 295
398 296
// See comment for class, this does NOT work for int32's that are in Smi range.
// Writes the int32 in the_int() into the_heap_number() as an IEEE 754 double
// (separate exponent-word and mantissa-word stores). Clobbers scratch() and
// sign(); 0x80000000 (INT32_MIN) needs its own path because its absolute
// value does not fit in 31 bits.
void WriteInt32ToHeapNumberStub::Generate(MacroAssembler* masm) {
  Label max_negative_int;
  // the_int() has the answer which is a signed int32 but not a Smi.
  // We test for the special value that has a different exponent.
  STATIC_ASSERT(HeapNumber::kSignMask == 0x80000000u);
  // Test sign, and save for later conditionals.
  __ And(sign(), the_int(), Operand(0x80000000u));
  __ Branch(&max_negative_int, eq, the_int(), Operand(0x80000000u));

  // Set up the correct exponent in scratch(). All non-Smi int32s have the
  // same. A non-Smi integer is 1.xxx * 2^30 so the exponent is 30 (biased).
  uint32_t non_smi_exponent =
      (HeapNumber::kExponentBias + 30) << HeapNumber::kExponentShift;
  __ li(scratch(), Operand(non_smi_exponent));
  // Set the sign bit in scratch() if the value was negative.
  __ or_(scratch(), scratch(), sign());
  // Subtract from 0 if the value was negative.
  __ subu(at, zero_reg, the_int());
  __ Movn(the_int(), at, sign());
  // We should be masking the implicit first digit of the mantissa away here,
  // but it just ends up combining harmlessly with the last digit of the
  // exponent that happens to be 1. The sign bit is 0 so we shift 10 to get
  // the most significant 1 to hit the last bit of the 12 bit sign and exponent.
  DCHECK(((1 << HeapNumber::kExponentShift) & non_smi_exponent) != 0);
  const int shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 2;
  __ srl(at, the_int(), shift_distance);
  __ or_(scratch(), scratch(), at);
  __ sw(scratch(), FieldMemOperand(the_heap_number(),
                                   HeapNumber::kExponentOffset));
  // The bits shifted out of the top word become the low mantissa word.
  __ sll(scratch(), the_int(), 32 - shift_distance);
  __ Ret(USE_DELAY_SLOT);
  __ sw(scratch(), FieldMemOperand(the_heap_number(),
                                   HeapNumber::kMantissaOffset));

  __ bind(&max_negative_int);
  // The max negative int32 is stored as a positive number in the mantissa of
  // a double because it uses a sign bit instead of using two's complement.
  // The actual mantissa bits stored are all 0 because the implicit most
  // significant 1 bit is not stored.
  non_smi_exponent += 1 << HeapNumber::kExponentShift;
  __ li(scratch(), Operand(HeapNumber::kSignMask | non_smi_exponent));
  __ sw(scratch(),
        FieldMemOperand(the_heap_number(), HeapNumber::kExponentOffset));
  __ mov(scratch(), zero_reg);
  __ Ret(USE_DELAY_SLOT);
  __ sw(scratch(),
        FieldMemOperand(the_heap_number(), HeapNumber::kMantissaOffset));
}
448 346
449 347
450 // Handle the case where the lhs and rhs are the same object. 348 // Handle the case where the lhs and rhs are the same object.
451 // Equality is almost reflexive (everything but NaN), so this is a test 349 // Equality is almost reflexive (everything but NaN), so this is a test
452 // for "identity and not NaN". 350 // for "identity and not NaN".
453 static void EmitIdenticalObjectComparison(MacroAssembler* masm, 351 static void EmitIdenticalObjectComparison(MacroAssembler* masm,
454 Label* slow, 352 Label* slow,
455 Condition cc) { 353 Condition cc) {
456 Label not_identical; 354 Label not_identical;
(...skipping 3764 matching lines...) Expand 10 before | Expand all | Expand 10 after
4221 __ sll(index, index, 2); 4119 __ sll(index, index, 2);
4222 __ Addu(index, index, dictionary); 4120 __ Addu(index, index, dictionary);
4223 __ lw(entry_key, FieldMemOperand(index, kElementsStartOffset)); 4121 __ lw(entry_key, FieldMemOperand(index, kElementsStartOffset));
4224 4122
4225 // Having undefined at this place means the name is not contained. 4123 // Having undefined at this place means the name is not contained.
4226 __ Branch(&not_in_dictionary, eq, entry_key, Operand(undefined)); 4124 __ Branch(&not_in_dictionary, eq, entry_key, Operand(undefined));
4227 4125
4228 // Stop if found the property. 4126 // Stop if found the property.
4229 __ Branch(&in_dictionary, eq, entry_key, Operand(key)); 4127 __ Branch(&in_dictionary, eq, entry_key, Operand(key));
4230 4128
4231 if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) { 4129 if (i != kTotalProbes - 1 && mode() == NEGATIVE_LOOKUP) {
4232 // Check if the entry name is not a unique name. 4130 // Check if the entry name is not a unique name.
4233 __ lw(entry_key, FieldMemOperand(entry_key, HeapObject::kMapOffset)); 4131 __ lw(entry_key, FieldMemOperand(entry_key, HeapObject::kMapOffset));
4234 __ lbu(entry_key, 4132 __ lbu(entry_key,
4235 FieldMemOperand(entry_key, Map::kInstanceTypeOffset)); 4133 FieldMemOperand(entry_key, Map::kInstanceTypeOffset));
4236 __ JumpIfNotUniqueName(entry_key, &maybe_in_dictionary); 4134 __ JumpIfNotUniqueName(entry_key, &maybe_in_dictionary);
4237 } 4135 }
4238 } 4136 }
4239 4137
4240 __ bind(&maybe_in_dictionary); 4138 __ bind(&maybe_in_dictionary);
4241 // If we are doing negative lookup then probing failure should be 4139 // If we are doing negative lookup then probing failure should be
4242 // treated as a lookup success. For positive lookup probing failure 4140 // treated as a lookup success. For positive lookup probing failure
4243 // should be treated as lookup failure. 4141 // should be treated as lookup failure.
4244 if (mode_ == POSITIVE_LOOKUP) { 4142 if (mode() == POSITIVE_LOOKUP) {
4245 __ Ret(USE_DELAY_SLOT); 4143 __ Ret(USE_DELAY_SLOT);
4246 __ mov(result, zero_reg); 4144 __ mov(result, zero_reg);
4247 } 4145 }
4248 4146
4249 __ bind(&in_dictionary); 4147 __ bind(&in_dictionary);
4250 __ Ret(USE_DELAY_SLOT); 4148 __ Ret(USE_DELAY_SLOT);
4251 __ li(result, 1); 4149 __ li(result, 1);
4252 4150
4253 __ bind(&not_in_dictionary); 4151 __ bind(&not_in_dictionary);
4254 __ Ret(USE_DELAY_SLOT); 4152 __ Ret(USE_DELAY_SLOT);
(...skipping 23 matching lines...) Expand all
4278 // get the offset fixed up correctly by the bind(Label*) call. We patch it 4176 // get the offset fixed up correctly by the bind(Label*) call. We patch it
4279 // back and forth between a "bne zero_reg, zero_reg, ..." (a nop in this 4177 // back and forth between a "bne zero_reg, zero_reg, ..." (a nop in this
4280 // position) and the "beq zero_reg, zero_reg, ..." when we start and stop 4178 // position) and the "beq zero_reg, zero_reg, ..." when we start and stop
4281 // incremental heap marking. 4179 // incremental heap marking.
4282 // See RecordWriteStub::Patch for details. 4180 // See RecordWriteStub::Patch for details.
4283 __ beq(zero_reg, zero_reg, &skip_to_incremental_noncompacting); 4181 __ beq(zero_reg, zero_reg, &skip_to_incremental_noncompacting);
4284 __ nop(); 4182 __ nop();
4285 __ beq(zero_reg, zero_reg, &skip_to_incremental_compacting); 4183 __ beq(zero_reg, zero_reg, &skip_to_incremental_compacting);
4286 __ nop(); 4184 __ nop();
4287 4185
4288 if (remembered_set_action_ == EMIT_REMEMBERED_SET) { 4186 if (remembered_set_action() == EMIT_REMEMBERED_SET) {
4289 __ RememberedSetHelper(object_, 4187 __ RememberedSetHelper(object(),
4290 address_, 4188 address(),
4291 value_, 4189 value(),
4292 save_fp_regs_mode_, 4190 save_fp_regs_mode(),
4293 MacroAssembler::kReturnAtEnd); 4191 MacroAssembler::kReturnAtEnd);
4294 } 4192 }
4295 __ Ret(); 4193 __ Ret();
4296 4194
4297 __ bind(&skip_to_incremental_noncompacting); 4195 __ bind(&skip_to_incremental_noncompacting);
4298 GenerateIncremental(masm, INCREMENTAL); 4196 GenerateIncremental(masm, INCREMENTAL);
4299 4197
4300 __ bind(&skip_to_incremental_compacting); 4198 __ bind(&skip_to_incremental_compacting);
4301 GenerateIncremental(masm, INCREMENTAL_COMPACTION); 4199 GenerateIncremental(masm, INCREMENTAL_COMPACTION);
4302 4200
4303 // Initial mode of the stub is expected to be STORE_BUFFER_ONLY. 4201 // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
4304 // Will be checked in IncrementalMarking::ActivateGeneratedStub. 4202 // Will be checked in IncrementalMarking::ActivateGeneratedStub.
4305 4203
4306 PatchBranchIntoNop(masm, 0); 4204 PatchBranchIntoNop(masm, 0);
4307 PatchBranchIntoNop(masm, 2 * Assembler::kInstrSize); 4205 PatchBranchIntoNop(masm, 2 * Assembler::kInstrSize);
4308 } 4206 }
4309 4207
4310 4208
// Write-barrier slow path used while incremental marking is active.
// Saves the stub's register set, optionally records the slot in the
// remembered set (skipped when the value is not in new space or the page is
// already scan-on-scavenge), informs the incremental marker, then restores
// registers and returns.
void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
  regs_.Save(masm);

  if (remembered_set_action() == EMIT_REMEMBERED_SET) {
    Label dont_need_remembered_set;

    // Load the value stored in the slot being recorded.
    __ lw(regs_.scratch0(), MemOperand(regs_.address(), 0));
    __ JumpIfNotInNewSpace(regs_.scratch0(),  // Value.
                           regs_.scratch0(),
                           &dont_need_remembered_set);

    // Pages already marked scan-on-scavenge don't need individual slot
    // entries in the remembered set.
    __ CheckPageFlag(regs_.object(),
                     regs_.scratch0(),
                     1 << MemoryChunk::SCAN_ON_SCAVENGE,
                     ne,
                     &dont_need_remembered_set);

    // First notify the incremental marker if necessary, then update the
    // remembered set.
    CheckNeedsToInformIncrementalMarker(
        masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode);
    InformIncrementalMarker(masm);
    regs_.Restore(masm);
    // RememberedSetHelper returns to the caller (kReturnAtEnd).
    __ RememberedSetHelper(object(),
                           address(),
                           value(),
                           save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);

    __ bind(&dont_need_remembered_set);
  }

  // No remembered-set work needed: just inform the marker and return.
  CheckNeedsToInformIncrementalMarker(
      masm, kReturnOnNoNeedToInformIncrementalMarker, mode);
  InformIncrementalMarker(masm);
  regs_.Restore(masm);
  __ Ret();
}
4349 4247
4350 4248
// Calls the C++ incremental-marking record-write function with
// (object, slot address, isolate) in a0, a1, a2 per the MIPS C calling
// convention. Caller-save registers are preserved around the call.
void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
  regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode());
  int argument_count = 3;
  __ PrepareCallCFunction(argument_count, regs_.scratch0());
  // If the slot address currently lives in a0 it would be clobbered when a0
  // receives the object, so stash it in scratch0 first.
  Register address =
      a0.is(regs_.address()) ? regs_.scratch0() : regs_.address();
  DCHECK(!address.is(regs_.object()));
  DCHECK(!address.is(a0));
  __ Move(address, regs_.address());
  __ Move(a0, regs_.object());   // Argument 1: the object.
  __ Move(a1, address);          // Argument 2: the slot address.
  __ li(a2, Operand(ExternalReference::isolate_address(isolate())));

  // The record-write function must not trigger GC (registers hold raw
  // pointers across the call).
  AllowExternalCallThatCantCauseGC scope(masm);
  __ CallCFunction(
      ExternalReference::incremental_marking_record_write_function(isolate()),
      argument_count);
  regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode());
}
4370 4268
4371 4269
4372 void RecordWriteStub::CheckNeedsToInformIncrementalMarker( 4270 void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
4373 MacroAssembler* masm, 4271 MacroAssembler* masm,
4374 OnNoNeedToInformIncrementalMarker on_no_need, 4272 OnNoNeedToInformIncrementalMarker on_no_need,
4375 Mode mode) { 4273 Mode mode) {
4376 Label on_black; 4274 Label on_black;
4377 Label need_incremental; 4275 Label need_incremental;
4378 Label need_incremental_pop_scratch; 4276 Label need_incremental_pop_scratch;
4379 4277
4380 __ And(regs_.scratch0(), regs_.object(), Operand(~Page::kPageAlignmentMask)); 4278 __ And(regs_.scratch0(), regs_.object(), Operand(~Page::kPageAlignmentMask));
4381 __ lw(regs_.scratch1(), 4279 __ lw(regs_.scratch1(),
4382 MemOperand(regs_.scratch0(), 4280 MemOperand(regs_.scratch0(),
4383 MemoryChunk::kWriteBarrierCounterOffset)); 4281 MemoryChunk::kWriteBarrierCounterOffset));
4384 __ Subu(regs_.scratch1(), regs_.scratch1(), Operand(1)); 4282 __ Subu(regs_.scratch1(), regs_.scratch1(), Operand(1));
4385 __ sw(regs_.scratch1(), 4283 __ sw(regs_.scratch1(),
4386 MemOperand(regs_.scratch0(), 4284 MemOperand(regs_.scratch0(),
4387 MemoryChunk::kWriteBarrierCounterOffset)); 4285 MemoryChunk::kWriteBarrierCounterOffset));
4388 __ Branch(&need_incremental, lt, regs_.scratch1(), Operand(zero_reg)); 4286 __ Branch(&need_incremental, lt, regs_.scratch1(), Operand(zero_reg));
4389 4287
4390 // Let's look at the color of the object: If it is not black we don't have 4288 // Let's look at the color of the object: If it is not black we don't have
4391 // to inform the incremental marker. 4289 // to inform the incremental marker.
4392 __ JumpIfBlack(regs_.object(), regs_.scratch0(), regs_.scratch1(), &on_black); 4290 __ JumpIfBlack(regs_.object(), regs_.scratch0(), regs_.scratch1(), &on_black);
4393 4291
4394 regs_.Restore(masm); 4292 regs_.Restore(masm);
4395 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) { 4293 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
4396 __ RememberedSetHelper(object_, 4294 __ RememberedSetHelper(object(),
4397 address_, 4295 address(),
4398 value_, 4296 value(),
4399 save_fp_regs_mode_, 4297 save_fp_regs_mode(),
4400 MacroAssembler::kReturnAtEnd); 4298 MacroAssembler::kReturnAtEnd);
4401 } else { 4299 } else {
4402 __ Ret(); 4300 __ Ret();
4403 } 4301 }
4404 4302
4405 __ bind(&on_black); 4303 __ bind(&on_black);
4406 4304
4407 // Get the value from the slot. 4305 // Get the value from the slot.
4408 __ lw(regs_.scratch0(), MemOperand(regs_.address(), 0)); 4306 __ lw(regs_.scratch0(), MemOperand(regs_.address(), 0));
4409 4307
(...skipping 20 matching lines...) Expand all
4430 __ Push(regs_.object(), regs_.address()); 4328 __ Push(regs_.object(), regs_.address());
4431 __ EnsureNotWhite(regs_.scratch0(), // The value. 4329 __ EnsureNotWhite(regs_.scratch0(), // The value.
4432 regs_.scratch1(), // Scratch. 4330 regs_.scratch1(), // Scratch.
4433 regs_.object(), // Scratch. 4331 regs_.object(), // Scratch.
4434 regs_.address(), // Scratch. 4332 regs_.address(), // Scratch.
4435 &need_incremental_pop_scratch); 4333 &need_incremental_pop_scratch);
4436 __ Pop(regs_.object(), regs_.address()); 4334 __ Pop(regs_.object(), regs_.address());
4437 4335
4438 regs_.Restore(masm); 4336 regs_.Restore(masm);
4439 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) { 4337 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
4440 __ RememberedSetHelper(object_, 4338 __ RememberedSetHelper(object(),
4441 address_, 4339 address(),
4442 value_, 4340 value(),
4443 save_fp_regs_mode_, 4341 save_fp_regs_mode(),
4444 MacroAssembler::kReturnAtEnd); 4342 MacroAssembler::kReturnAtEnd);
4445 } else { 4343 } else {
4446 __ Ret(); 4344 __ Ret();
4447 } 4345 }
4448 4346
4449 __ bind(&need_incremental_pop_scratch); 4347 __ bind(&need_incremental_pop_scratch);
4450 __ Pop(regs_.object(), regs_.address()); 4348 __ Pop(regs_.object(), regs_.address());
4451 4349
4452 __ bind(&need_incremental); 4350 __ bind(&need_incremental);
4453 4351
(...skipping 587 matching lines...) Expand 10 before | Expand all | Expand 10 after
5041 MemOperand(fp, 6 * kPointerSize), 4939 MemOperand(fp, 6 * kPointerSize),
5042 NULL); 4940 NULL);
5043 } 4941 }
5044 4942
5045 4943
5046 #undef __ 4944 #undef __
5047 4945
5048 } } // namespace v8::internal 4946 } } // namespace v8::internal
5049 4947
5050 #endif // V8_TARGET_ARCH_MIPS 4948 #endif // V8_TARGET_ARCH_MIPS
OLDNEW
« no previous file with comments | « src/mips/code-stubs-mips.h ('k') | src/mips64/code-stubs-mips64.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698