Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(307)

Side by Side Diff: src/mips64/code-stubs-mips64.cc

Issue 536193002: MIPS: Minor-key-ify remaining code stubs. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 6 years, 3 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/mips64/code-stubs-mips64.h ('k') | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/v8.h" 5 #include "src/v8.h"
6 6
7 #if V8_TARGET_ARCH_MIPS64 7 #if V8_TARGET_ARCH_MIPS64
8 8
9 #include "src/bootstrapper.h" 9 #include "src/bootstrapper.h"
10 #include "src/code-stubs.h" 10 #include "src/code-stubs.h"
(...skipping 125 matching lines...) Expand 10 before | Expand all | Expand 10 after
136 MemOperand(sp, (param_count-1-i) * kPointerSize)); 136 MemOperand(sp, (param_count-1-i) * kPointerSize));
137 } 137 }
138 ExternalReference miss = descriptor->miss_handler(); 138 ExternalReference miss = descriptor->miss_handler();
139 __ CallExternalReference(miss, param_count); 139 __ CallExternalReference(miss, param_count);
140 } 140 }
141 141
142 __ Ret(); 142 __ Ret();
143 } 143 }
144 144
145 145
146 // Takes a Smi and converts to an IEEE 64 bit floating point value in two
147 // registers. The format is 1 sign bit, 11 exponent bits (biased 1023) and
148 // 52 fraction bits (20 in the first word, 32 in the second). Zeros is a
149 // scratch register. Destroys the source register. No GC occurs during this
150 // stub so you don't have to set up the frame.
151 class ConvertToDoubleStub : public PlatformCodeStub {
152 public:
153 ConvertToDoubleStub(Isolate* isolate,
154 Register result_reg_1,
155 Register result_reg_2,
156 Register source_reg,
157 Register scratch_reg)
158 : PlatformCodeStub(isolate),
159 result1_(result_reg_1),
160 result2_(result_reg_2),
161 source_(source_reg),
162 zeros_(scratch_reg) { }
163
164 private:
165 Register result1_;
166 Register result2_;
167 Register source_;
168 Register zeros_;
169
170 // Minor key encoding in 16 bits.
171 class ModeBits: public BitField<OverwriteMode, 0, 2> {};
172 class OpBits: public BitField<Token::Value, 2, 14> {};
173
174 Major MajorKey() const { return ConvertToDouble; }
175 uint32_t MinorKey() const {
176 // Encode the parameters in a unique 16 bit value.
177 return result1_.code() +
178 (result2_.code() << 4) +
179 (source_.code() << 8) +
180 (zeros_.code() << 12);
181 }
182
183 void Generate(MacroAssembler* masm);
184 };
185
186
187 void ConvertToDoubleStub::Generate(MacroAssembler* masm) {
188 #ifndef BIG_ENDIAN_FLOATING_POINT
189 Register exponent = result1_;
190 Register mantissa = result2_;
191 #else
192 Register exponent = result2_;
193 Register mantissa = result1_;
194 #endif
195 Label not_special;
196 // Convert from Smi to integer.
197 __ SmiUntag(source_);
198 // Move sign bit from source to destination. This works because the sign bit
199 // in the exponent word of the double has the same position and polarity as
200 // the 2's complement sign bit in a Smi.
201 STATIC_ASSERT(HeapNumber::kSignMask == 0x80000000u);
202 __ And(exponent, source_, Operand(HeapNumber::kSignMask));
203 // Subtract from 0 if source was negative.
204 __ subu(at, zero_reg, source_);
205 __ Movn(source_, at, exponent);
206
207 // We have -1, 0 or 1, which we treat specially. Register source_ contains
208 // absolute value: it is either equal to 1 (special case of -1 and 1),
209 // greater than 1 (not a special case) or less than 1 (special case of 0).
210 __ Branch(&not_special, gt, source_, Operand(1));
211
212 // For 1 or -1 we need to or in the 0 exponent (biased to 1023).
213 const uint32_t exponent_word_for_1 =
214 HeapNumber::kExponentBias << HeapNumber::kExponentShift;
215 // Safe to use 'at' as dest reg here.
216 __ Or(at, exponent, Operand(exponent_word_for_1));
217 __ Movn(exponent, at, source_); // Write exp when source not 0.
218 // 1, 0 and -1 all have 0 for the second word.
219 __ Ret(USE_DELAY_SLOT);
220 __ mov(mantissa, zero_reg);
221
222 __ bind(&not_special);
223 // Count leading zeros.
224 // Gets the wrong answer for 0, but we already checked for that case above.
225 __ Clz(zeros_, source_);
226 // Compute exponent and or it into the exponent register.
227 // We use mantissa as a scratch register here.
228 __ li(mantissa, Operand(31 + HeapNumber::kExponentBias));
229 __ subu(mantissa, mantissa, zeros_);
230 __ sll(mantissa, mantissa, HeapNumber::kExponentShift);
231 __ Or(exponent, exponent, mantissa);
232
233 // Shift up the source chopping the top bit off.
234 __ Addu(zeros_, zeros_, Operand(1));
235 // This wouldn't work for 1.0 or -1.0 as the shift would be 32 which means 0.
236 __ sllv(source_, source_, zeros_);
237 // Compute lower part of fraction (last 12 bits).
238 __ sll(mantissa, source_, HeapNumber::kMantissaBitsInTopWord);
239 // And the top (top 20 bits).
240 __ srl(source_, source_, 32 - HeapNumber::kMantissaBitsInTopWord);
241
242 __ Ret(USE_DELAY_SLOT);
243 __ or_(exponent, exponent, source_);
244 }
245
246
247 void DoubleToIStub::Generate(MacroAssembler* masm) { 146 void DoubleToIStub::Generate(MacroAssembler* masm) {
248 Label out_of_range, only_low, negate, done; 147 Label out_of_range, only_low, negate, done;
249 Register input_reg = source(); 148 Register input_reg = source();
250 Register result_reg = destination(); 149 Register result_reg = destination();
251 150
252 int double_offset = offset(); 151 int double_offset = offset();
253 // Account for saved regs if input is sp. 152 // Account for saved regs if input is sp.
254 if (input_reg.is(sp)) double_offset += 3 * kPointerSize; 153 if (input_reg.is(sp)) double_offset += 3 * kPointerSize;
255 154
256 Register scratch = 155 Register scratch =
(...skipping 133 matching lines...) Expand 10 before | Expand all | Expand 10 after
390 } 289 }
391 290
392 291
393 // See comment for class, this does NOT work for int32's that are in Smi range. 292 // See comment for class, this does NOT work for int32's that are in Smi range.
394 void WriteInt32ToHeapNumberStub::Generate(MacroAssembler* masm) { 293 void WriteInt32ToHeapNumberStub::Generate(MacroAssembler* masm) {
395 Label max_negative_int; 294 Label max_negative_int;
396 // the_int_ has the answer which is a signed int32 but not a Smi. 295 // the_int_ has the answer which is a signed int32 but not a Smi.
397 // We test for the special value that has a different exponent. 296 // We test for the special value that has a different exponent.
398 STATIC_ASSERT(HeapNumber::kSignMask == 0x80000000u); 297 STATIC_ASSERT(HeapNumber::kSignMask == 0x80000000u);
399 // Test sign, and save for later conditionals. 298 // Test sign, and save for later conditionals.
400 __ And(sign_, the_int_, Operand(0x80000000u)); 299 __ And(sign(), the_int(), Operand(0x80000000u));
401 __ Branch(&max_negative_int, eq, the_int_, Operand(0x80000000u)); 300 __ Branch(&max_negative_int, eq, the_int(), Operand(0x80000000u));
402 301
403 // Set up the correct exponent in scratch_. All non-Smi int32s have the same. 302 // Set up the correct exponent in scratch_. All non-Smi int32s have the same.
404 // A non-Smi integer is 1.xxx * 2^30 so the exponent is 30 (biased). 303 // A non-Smi integer is 1.xxx * 2^30 so the exponent is 30 (biased).
405 uint32_t non_smi_exponent = 304 uint32_t non_smi_exponent =
406 (HeapNumber::kExponentBias + 30) << HeapNumber::kExponentShift; 305 (HeapNumber::kExponentBias + 30) << HeapNumber::kExponentShift;
407 __ li(scratch_, Operand(non_smi_exponent)); 306 __ li(scratch(), Operand(non_smi_exponent));
408 // Set the sign bit in scratch_ if the value was negative. 307 // Set the sign bit in scratch_ if the value was negative.
409 __ or_(scratch_, scratch_, sign_); 308 __ or_(scratch(), scratch(), sign());
410 // Subtract from 0 if the value was negative. 309 // Subtract from 0 if the value was negative.
411 __ subu(at, zero_reg, the_int_); 310 __ subu(at, zero_reg, the_int());
412 __ Movn(the_int_, at, sign_); 311 __ Movn(the_int(), at, sign());
 413 // We should be masking the implicit first digit of the mantissa away here, 312 // We should be masking the implicit first digit of the mantissa away here,
414 // but it just ends up combining harmlessly with the last digit of the 313 // but it just ends up combining harmlessly with the last digit of the
415 // exponent that happens to be 1. The sign bit is 0 so we shift 10 to get 314 // exponent that happens to be 1. The sign bit is 0 so we shift 10 to get
416 // the most significant 1 to hit the last bit of the 12 bit sign and exponent. 315 // the most significant 1 to hit the last bit of the 12 bit sign and exponent.
417 DCHECK(((1 << HeapNumber::kExponentShift) & non_smi_exponent) != 0); 316 DCHECK(((1 << HeapNumber::kExponentShift) & non_smi_exponent) != 0);
418 const int shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 2; 317 const int shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 2;
419 __ srl(at, the_int_, shift_distance); 318 __ srl(at, the_int(), shift_distance);
420 __ or_(scratch_, scratch_, at); 319 __ or_(scratch(), scratch(), at);
421 __ sw(scratch_, FieldMemOperand(the_heap_number_, 320 __ sw(scratch(), FieldMemOperand(the_heap_number(),
422 HeapNumber::kExponentOffset)); 321 HeapNumber::kExponentOffset));
423 __ sll(scratch_, the_int_, 32 - shift_distance); 322 __ sll(scratch(), the_int(), 32 - shift_distance);
424 __ Ret(USE_DELAY_SLOT); 323 __ Ret(USE_DELAY_SLOT);
425 __ sw(scratch_, FieldMemOperand(the_heap_number_, 324 __ sw(scratch(), FieldMemOperand(the_heap_number(),
426 HeapNumber::kMantissaOffset)); 325 HeapNumber::kMantissaOffset));
427 326
428 __ bind(&max_negative_int); 327 __ bind(&max_negative_int);
429 // The max negative int32 is stored as a positive number in the mantissa of 328 // The max negative int32 is stored as a positive number in the mantissa of
430 // a double because it uses a sign bit instead of using two's complement. 329 // a double because it uses a sign bit instead of using two's complement.
431 // The actual mantissa bits stored are all 0 because the implicit most 330 // The actual mantissa bits stored are all 0 because the implicit most
432 // significant 1 bit is not stored. 331 // significant 1 bit is not stored.
433 non_smi_exponent += 1 << HeapNumber::kExponentShift; 332 non_smi_exponent += 1 << HeapNumber::kExponentShift;
434 __ li(scratch_, Operand(HeapNumber::kSignMask | non_smi_exponent)); 333 __ li(scratch(), Operand(HeapNumber::kSignMask | non_smi_exponent));
435 __ sw(scratch_, 334 __ sw(scratch(),
436 FieldMemOperand(the_heap_number_, HeapNumber::kExponentOffset)); 335 FieldMemOperand(the_heap_number(), HeapNumber::kExponentOffset));
437 __ mov(scratch_, zero_reg); 336 __ mov(scratch(), zero_reg);
438 __ Ret(USE_DELAY_SLOT); 337 __ Ret(USE_DELAY_SLOT);
439 __ sw(scratch_, 338 __ sw(scratch(),
440 FieldMemOperand(the_heap_number_, HeapNumber::kMantissaOffset)); 339 FieldMemOperand(the_heap_number(), HeapNumber::kMantissaOffset));
441 } 340 }
442 341
443 342
444 // Handle the case where the lhs and rhs are the same object. 343 // Handle the case where the lhs and rhs are the same object.
445 // Equality is almost reflexive (everything but NaN), so this is a test 344 // Equality is almost reflexive (everything but NaN), so this is a test
446 // for "identity and not NaN". 345 // for "identity and not NaN".
447 static void EmitIdenticalObjectComparison(MacroAssembler* masm, 346 static void EmitIdenticalObjectComparison(MacroAssembler* masm,
448 Label* slow, 347 Label* slow,
449 Condition cc) { 348 Condition cc) {
450 Label not_identical; 349 Label not_identical;
(...skipping 3806 matching lines...) Expand 10 before | Expand all | Expand 10 after
4257 __ dsll(index, index, kPointerSizeLog2); 4156 __ dsll(index, index, kPointerSizeLog2);
4258 __ Daddu(index, index, dictionary); 4157 __ Daddu(index, index, dictionary);
4259 __ ld(entry_key, FieldMemOperand(index, kElementsStartOffset)); 4158 __ ld(entry_key, FieldMemOperand(index, kElementsStartOffset));
4260 4159
4261 // Having undefined at this place means the name is not contained. 4160 // Having undefined at this place means the name is not contained.
4262 __ Branch(&not_in_dictionary, eq, entry_key, Operand(undefined)); 4161 __ Branch(&not_in_dictionary, eq, entry_key, Operand(undefined));
4263 4162
4264 // Stop if found the property. 4163 // Stop if found the property.
4265 __ Branch(&in_dictionary, eq, entry_key, Operand(key)); 4164 __ Branch(&in_dictionary, eq, entry_key, Operand(key));
4266 4165
4267 if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) { 4166 if (i != kTotalProbes - 1 && mode() == NEGATIVE_LOOKUP) {
4268 // Check if the entry name is not a unique name. 4167 // Check if the entry name is not a unique name.
4269 __ ld(entry_key, FieldMemOperand(entry_key, HeapObject::kMapOffset)); 4168 __ ld(entry_key, FieldMemOperand(entry_key, HeapObject::kMapOffset));
4270 __ lbu(entry_key, 4169 __ lbu(entry_key,
4271 FieldMemOperand(entry_key, Map::kInstanceTypeOffset)); 4170 FieldMemOperand(entry_key, Map::kInstanceTypeOffset));
4272 __ JumpIfNotUniqueName(entry_key, &maybe_in_dictionary); 4171 __ JumpIfNotUniqueName(entry_key, &maybe_in_dictionary);
4273 } 4172 }
4274 } 4173 }
4275 4174
4276 __ bind(&maybe_in_dictionary); 4175 __ bind(&maybe_in_dictionary);
4277 // If we are doing negative lookup then probing failure should be 4176 // If we are doing negative lookup then probing failure should be
4278 // treated as a lookup success. For positive lookup probing failure 4177 // treated as a lookup success. For positive lookup probing failure
4279 // should be treated as lookup failure. 4178 // should be treated as lookup failure.
4280 if (mode_ == POSITIVE_LOOKUP) { 4179 if (mode() == POSITIVE_LOOKUP) {
4281 __ Ret(USE_DELAY_SLOT); 4180 __ Ret(USE_DELAY_SLOT);
4282 __ mov(result, zero_reg); 4181 __ mov(result, zero_reg);
4283 } 4182 }
4284 4183
4285 __ bind(&in_dictionary); 4184 __ bind(&in_dictionary);
4286 __ Ret(USE_DELAY_SLOT); 4185 __ Ret(USE_DELAY_SLOT);
4287 __ li(result, 1); 4186 __ li(result, 1);
4288 4187
4289 __ bind(&not_in_dictionary); 4188 __ bind(&not_in_dictionary);
4290 __ Ret(USE_DELAY_SLOT); 4189 __ Ret(USE_DELAY_SLOT);
(...skipping 23 matching lines...) Expand all
4314 // get the offset fixed up correctly by the bind(Label*) call. We patch it 4213 // get the offset fixed up correctly by the bind(Label*) call. We patch it
4315 // back and forth between a "bne zero_reg, zero_reg, ..." (a nop in this 4214 // back and forth between a "bne zero_reg, zero_reg, ..." (a nop in this
4316 // position) and the "beq zero_reg, zero_reg, ..." when we start and stop 4215 // position) and the "beq zero_reg, zero_reg, ..." when we start and stop
4317 // incremental heap marking. 4216 // incremental heap marking.
4318 // See RecordWriteStub::Patch for details. 4217 // See RecordWriteStub::Patch for details.
4319 __ beq(zero_reg, zero_reg, &skip_to_incremental_noncompacting); 4218 __ beq(zero_reg, zero_reg, &skip_to_incremental_noncompacting);
4320 __ nop(); 4219 __ nop();
4321 __ beq(zero_reg, zero_reg, &skip_to_incremental_compacting); 4220 __ beq(zero_reg, zero_reg, &skip_to_incremental_compacting);
4322 __ nop(); 4221 __ nop();
4323 4222
4324 if (remembered_set_action_ == EMIT_REMEMBERED_SET) { 4223 if (remembered_set_action() == EMIT_REMEMBERED_SET) {
4325 __ RememberedSetHelper(object_, 4224 __ RememberedSetHelper(object(),
4326 address_, 4225 address(),
4327 value_, 4226 value(),
4328 save_fp_regs_mode_, 4227 save_fp_regs_mode(),
4329 MacroAssembler::kReturnAtEnd); 4228 MacroAssembler::kReturnAtEnd);
4330 } 4229 }
4331 __ Ret(); 4230 __ Ret();
4332 4231
4333 __ bind(&skip_to_incremental_noncompacting); 4232 __ bind(&skip_to_incremental_noncompacting);
4334 GenerateIncremental(masm, INCREMENTAL); 4233 GenerateIncremental(masm, INCREMENTAL);
4335 4234
4336 __ bind(&skip_to_incremental_compacting); 4235 __ bind(&skip_to_incremental_compacting);
4337 GenerateIncremental(masm, INCREMENTAL_COMPACTION); 4236 GenerateIncremental(masm, INCREMENTAL_COMPACTION);
4338 4237
4339 // Initial mode of the stub is expected to be STORE_BUFFER_ONLY. 4238 // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
4340 // Will be checked in IncrementalMarking::ActivateGeneratedStub. 4239 // Will be checked in IncrementalMarking::ActivateGeneratedStub.
4341 4240
4342 PatchBranchIntoNop(masm, 0); 4241 PatchBranchIntoNop(masm, 0);
4343 PatchBranchIntoNop(masm, 2 * Assembler::kInstrSize); 4242 PatchBranchIntoNop(masm, 2 * Assembler::kInstrSize);
4344 } 4243 }
4345 4244
4346 4245
4347 void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) { 4246 void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
4348 regs_.Save(masm); 4247 regs_.Save(masm);
4349 4248
4350 if (remembered_set_action_ == EMIT_REMEMBERED_SET) { 4249 if (remembered_set_action() == EMIT_REMEMBERED_SET) {
4351 Label dont_need_remembered_set; 4250 Label dont_need_remembered_set;
4352 4251
4353 __ ld(regs_.scratch0(), MemOperand(regs_.address(), 0)); 4252 __ ld(regs_.scratch0(), MemOperand(regs_.address(), 0));
4354 __ JumpIfNotInNewSpace(regs_.scratch0(), // Value. 4253 __ JumpIfNotInNewSpace(regs_.scratch0(), // Value.
4355 regs_.scratch0(), 4254 regs_.scratch0(),
4356 &dont_need_remembered_set); 4255 &dont_need_remembered_set);
4357 4256
4358 __ CheckPageFlag(regs_.object(), 4257 __ CheckPageFlag(regs_.object(),
4359 regs_.scratch0(), 4258 regs_.scratch0(),
4360 1 << MemoryChunk::SCAN_ON_SCAVENGE, 4259 1 << MemoryChunk::SCAN_ON_SCAVENGE,
4361 ne, 4260 ne,
4362 &dont_need_remembered_set); 4261 &dont_need_remembered_set);
4363 4262
4364 // First notify the incremental marker if necessary, then update the 4263 // First notify the incremental marker if necessary, then update the
4365 // remembered set. 4264 // remembered set.
4366 CheckNeedsToInformIncrementalMarker( 4265 CheckNeedsToInformIncrementalMarker(
4367 masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode); 4266 masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode);
4368 InformIncrementalMarker(masm); 4267 InformIncrementalMarker(masm);
4369 regs_.Restore(masm); 4268 regs_.Restore(masm);
4370 __ RememberedSetHelper(object_, 4269 __ RememberedSetHelper(object(),
4371 address_, 4270 address(),
4372 value_, 4271 value(),
4373 save_fp_regs_mode_, 4272 save_fp_regs_mode(),
4374 MacroAssembler::kReturnAtEnd); 4273 MacroAssembler::kReturnAtEnd);
4375 4274
4376 __ bind(&dont_need_remembered_set); 4275 __ bind(&dont_need_remembered_set);
4377 } 4276 }
4378 4277
4379 CheckNeedsToInformIncrementalMarker( 4278 CheckNeedsToInformIncrementalMarker(
4380 masm, kReturnOnNoNeedToInformIncrementalMarker, mode); 4279 masm, kReturnOnNoNeedToInformIncrementalMarker, mode);
4381 InformIncrementalMarker(masm); 4280 InformIncrementalMarker(masm);
4382 regs_.Restore(masm); 4281 regs_.Restore(masm);
4383 __ Ret(); 4282 __ Ret();
4384 } 4283 }
4385 4284
4386 4285
4387 void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) { 4286 void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
4388 regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_); 4287 regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode());
4389 int argument_count = 3; 4288 int argument_count = 3;
4390 __ PrepareCallCFunction(argument_count, regs_.scratch0()); 4289 __ PrepareCallCFunction(argument_count, regs_.scratch0());
4391 Register address = 4290 Register address =
4392 a0.is(regs_.address()) ? regs_.scratch0() : regs_.address(); 4291 a0.is(regs_.address()) ? regs_.scratch0() : regs_.address();
4393 DCHECK(!address.is(regs_.object())); 4292 DCHECK(!address.is(regs_.object()));
4394 DCHECK(!address.is(a0)); 4293 DCHECK(!address.is(a0));
4395 __ Move(address, regs_.address()); 4294 __ Move(address, regs_.address());
4396 __ Move(a0, regs_.object()); 4295 __ Move(a0, regs_.object());
4397 __ Move(a1, address); 4296 __ Move(a1, address);
4398 __ li(a2, Operand(ExternalReference::isolate_address(isolate()))); 4297 __ li(a2, Operand(ExternalReference::isolate_address(isolate())));
4399 4298
4400 AllowExternalCallThatCantCauseGC scope(masm); 4299 AllowExternalCallThatCantCauseGC scope(masm);
4401 __ CallCFunction( 4300 __ CallCFunction(
4402 ExternalReference::incremental_marking_record_write_function(isolate()), 4301 ExternalReference::incremental_marking_record_write_function(isolate()),
4403 argument_count); 4302 argument_count);
4404 regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_); 4303 regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode());
4405 } 4304 }
4406 4305
4407 4306
4408 void RecordWriteStub::CheckNeedsToInformIncrementalMarker( 4307 void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
4409 MacroAssembler* masm, 4308 MacroAssembler* masm,
4410 OnNoNeedToInformIncrementalMarker on_no_need, 4309 OnNoNeedToInformIncrementalMarker on_no_need,
4411 Mode mode) { 4310 Mode mode) {
4412 Label on_black; 4311 Label on_black;
4413 Label need_incremental; 4312 Label need_incremental;
4414 Label need_incremental_pop_scratch; 4313 Label need_incremental_pop_scratch;
4415 4314
4416 __ And(regs_.scratch0(), regs_.object(), Operand(~Page::kPageAlignmentMask)); 4315 __ And(regs_.scratch0(), regs_.object(), Operand(~Page::kPageAlignmentMask));
4417 __ ld(regs_.scratch1(), 4316 __ ld(regs_.scratch1(),
4418 MemOperand(regs_.scratch0(), 4317 MemOperand(regs_.scratch0(),
4419 MemoryChunk::kWriteBarrierCounterOffset)); 4318 MemoryChunk::kWriteBarrierCounterOffset));
4420 __ Dsubu(regs_.scratch1(), regs_.scratch1(), Operand(1)); 4319 __ Dsubu(regs_.scratch1(), regs_.scratch1(), Operand(1));
4421 __ sd(regs_.scratch1(), 4320 __ sd(regs_.scratch1(),
4422 MemOperand(regs_.scratch0(), 4321 MemOperand(regs_.scratch0(),
4423 MemoryChunk::kWriteBarrierCounterOffset)); 4322 MemoryChunk::kWriteBarrierCounterOffset));
4424 __ Branch(&need_incremental, lt, regs_.scratch1(), Operand(zero_reg)); 4323 __ Branch(&need_incremental, lt, regs_.scratch1(), Operand(zero_reg));
4425 4324
4426 // Let's look at the color of the object: If it is not black we don't have 4325 // Let's look at the color of the object: If it is not black we don't have
4427 // to inform the incremental marker. 4326 // to inform the incremental marker.
4428 __ JumpIfBlack(regs_.object(), regs_.scratch0(), regs_.scratch1(), &on_black); 4327 __ JumpIfBlack(regs_.object(), regs_.scratch0(), regs_.scratch1(), &on_black);
4429 4328
4430 regs_.Restore(masm); 4329 regs_.Restore(masm);
4431 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) { 4330 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
4432 __ RememberedSetHelper(object_, 4331 __ RememberedSetHelper(object(),
4433 address_, 4332 address(),
4434 value_, 4333 value(),
4435 save_fp_regs_mode_, 4334 save_fp_regs_mode(),
4436 MacroAssembler::kReturnAtEnd); 4335 MacroAssembler::kReturnAtEnd);
4437 } else { 4336 } else {
4438 __ Ret(); 4337 __ Ret();
4439 } 4338 }
4440 4339
4441 __ bind(&on_black); 4340 __ bind(&on_black);
4442 4341
4443 // Get the value from the slot. 4342 // Get the value from the slot.
4444 __ ld(regs_.scratch0(), MemOperand(regs_.address(), 0)); 4343 __ ld(regs_.scratch0(), MemOperand(regs_.address(), 0));
4445 4344
(...skipping 20 matching lines...) Expand all
4466 __ Push(regs_.object(), regs_.address()); 4365 __ Push(regs_.object(), regs_.address());
4467 __ EnsureNotWhite(regs_.scratch0(), // The value. 4366 __ EnsureNotWhite(regs_.scratch0(), // The value.
4468 regs_.scratch1(), // Scratch. 4367 regs_.scratch1(), // Scratch.
4469 regs_.object(), // Scratch. 4368 regs_.object(), // Scratch.
4470 regs_.address(), // Scratch. 4369 regs_.address(), // Scratch.
4471 &need_incremental_pop_scratch); 4370 &need_incremental_pop_scratch);
4472 __ Pop(regs_.object(), regs_.address()); 4371 __ Pop(regs_.object(), regs_.address());
4473 4372
4474 regs_.Restore(masm); 4373 regs_.Restore(masm);
4475 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) { 4374 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
4476 __ RememberedSetHelper(object_, 4375 __ RememberedSetHelper(object(),
4477 address_, 4376 address(),
4478 value_, 4377 value(),
4479 save_fp_regs_mode_, 4378 save_fp_regs_mode(),
4480 MacroAssembler::kReturnAtEnd); 4379 MacroAssembler::kReturnAtEnd);
4481 } else { 4380 } else {
4482 __ Ret(); 4381 __ Ret();
4483 } 4382 }
4484 4383
4485 __ bind(&need_incremental_pop_scratch); 4384 __ bind(&need_incremental_pop_scratch);
4486 __ Pop(regs_.object(), regs_.address()); 4385 __ Pop(regs_.object(), regs_.address());
4487 4386
4488 __ bind(&need_incremental); 4387 __ bind(&need_incremental);
4489 4388
(...skipping 587 matching lines...) Expand 10 before | Expand all | Expand 10 after
5077 MemOperand(fp, 6 * kPointerSize), 4976 MemOperand(fp, 6 * kPointerSize),
5078 NULL); 4977 NULL);
5079 } 4978 }
5080 4979
5081 4980
5082 #undef __ 4981 #undef __
5083 4982
5084 } } // namespace v8::internal 4983 } } // namespace v8::internal
5085 4984
5086 #endif // V8_TARGET_ARCH_MIPS64 4985 #endif // V8_TARGET_ARCH_MIPS64
OLDNEW
« no previous file with comments | « src/mips64/code-stubs-mips64.h ('k') | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698