OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 320 matching lines...)
331 __ Set(tos_, Immediate(0)); | 331 __ Set(tos_, Immediate(0)); |
332 __ ret(1 * kPointerSize); | 332 __ ret(1 * kPointerSize); |
333 __ bind(&not_heap_number); | 333 __ bind(&not_heap_number); |
334 } | 334 } |
335 | 335 |
336 __ bind(&patch); | 336 __ bind(&patch); |
337 GenerateTypeTransition(masm); | 337 GenerateTypeTransition(masm); |
338 } | 338 } |
339 | 339 |
340 | 340 |
| 341 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { |
| 342 // We don't allow a GC during a store buffer overflow so there is no need to |
| 343 // store the registers in any particular way, but we do have to store and |
| 344 // restore them. |
| 345 __ pushad(); |
| 346 if (save_doubles_ == kSaveFPRegs) { |
| 347 CpuFeatures::Scope scope(SSE2); |
| 348 __ sub(Operand(esp), Immediate(kDoubleSize * XMMRegister::kNumRegisters)); |
| 349 for (int i = 0; i < XMMRegister::kNumRegisters; i++) { |
| 350 XMMRegister reg = XMMRegister::from_code(i); |
| 351 __ movdbl(Operand(esp, i * kDoubleSize), reg); |
| 352 } |
| 353 } |
| 354 const int argument_count = 1; |
| 355 |
| 356 AllowExternalCallThatCantCauseGC scope(masm); |
| 357 __ PrepareCallCFunction(argument_count, ecx); |
| 358 __ mov(Operand(esp, 0 * kPointerSize), |
| 359 Immediate(ExternalReference::isolate_address())); |
| 360 __ CallCFunction( |
| 361 ExternalReference::store_buffer_overflow_function(masm->isolate()), |
| 362 argument_count); |
| 363 if (save_doubles_ == kSaveFPRegs) { |
| 364 CpuFeatures::Scope scope(SSE2); |
| 365 for (int i = 0; i < XMMRegister::kNumRegisters; i++) { |
| 366 XMMRegister reg = XMMRegister::from_code(i); |
| 367 __ movdbl(reg, Operand(esp, i * kDoubleSize)); |
| 368 } |
| 369 __ add(Operand(esp), Immediate(kDoubleSize * XMMRegister::kNumRegisters)); |
| 370 } |
| 371 __ popad(); |
| 372 __ ret(0); |
| 373 } |
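
[Note] The new stub above saves every general-purpose register with pushad (and the XMM registers when kSaveFPRegs is requested), calls a C function with the isolate as its only argument, and restores everything before returning. A minimal standalone sketch of the store-buffer idea this services; StoreBufferSketch and kCapacity are illustrative names, not V8's types:

    #include <cstddef>

    // Illustrative only: a fixed-size buffer of slot addresses whose
    // overflow handler plays the role the generated stub plays for V8's
    // store buffer.
    class StoreBufferSketch {
     public:
      static const size_t kCapacity = 1024;  // hypothetical capacity

      void RecordSlot(void** slot) {
        slots_[top_++] = slot;
        // When the buffer fills, processing must happen immediately, from
        // the middle of generated code; hence the stub saves and restores
        // all registers around the C call and forbids GC while it runs.
        if (top_ == kCapacity) HandleOverflow();
      }

     private:
      void HandleOverflow() { top_ = 0; }  // compact/process entries here

      void** slots_[kCapacity];
      size_t top_ = 0;
    };
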
| 374 |
| 375 |
341 void ToBooleanStub::CheckOddball(MacroAssembler* masm, | 376 void ToBooleanStub::CheckOddball(MacroAssembler* masm, |
342 Type type, | 377 Type type, |
343 Heap::RootListIndex value, | 378 Heap::RootListIndex value, |
344 bool result) { | 379 bool result) { |
345 const Register argument = eax; | 380 const Register argument = eax; |
346 if (types_.Contains(type)) { | 381 if (types_.Contains(type)) { |
347 // If we see an expected oddball, return its ToBoolean value in tos_. | 382 // If we see an expected oddball, return its ToBoolean value in tos_. |
348 Label different_value; | 383 Label different_value; |
349 __ CompareRoot(argument, value); | 384 __ CompareRoot(argument, value); |
350 __ j(not_equal, &different_value, Label::kNear); | 385 __ j(not_equal, &different_value, Label::kNear); |
(...skipping 3253 matching lines...)
3604 | 3639 |
3605 // ebx: last_match_info backing store (FixedArray) | 3640 // ebx: last_match_info backing store (FixedArray) |
3606 // edx: number of capture registers | 3641 // edx: number of capture registers |
3607 // Store the capture count. | 3642 // Store the capture count. |
3608 __ SmiTag(edx); // Number of capture registers to smi. | 3643 __ SmiTag(edx); // Number of capture registers to smi. |
3609 __ mov(FieldOperand(ebx, RegExpImpl::kLastCaptureCountOffset), edx); | 3644 __ mov(FieldOperand(ebx, RegExpImpl::kLastCaptureCountOffset), edx); |
3610 __ SmiUntag(edx); // Number of capture registers back from smi. | 3645 __ SmiUntag(edx); // Number of capture registers back from smi. |
3611 // Store last subject and last input. | 3646 // Store last subject and last input. |
3612 __ mov(eax, Operand(esp, kSubjectOffset)); | 3647 __ mov(eax, Operand(esp, kSubjectOffset)); |
3613 __ mov(FieldOperand(ebx, RegExpImpl::kLastSubjectOffset), eax); | 3648 __ mov(FieldOperand(ebx, RegExpImpl::kLastSubjectOffset), eax); |
3614 __ mov(ecx, ebx); | 3649 __ RecordWriteField(ebx, |
3615 __ RecordWrite(ecx, RegExpImpl::kLastSubjectOffset, eax, edi); | 3650 RegExpImpl::kLastSubjectOffset, |
| 3651 eax, |
| 3652 edi, |
| 3653 kDontSaveFPRegs); |
3616 __ mov(eax, Operand(esp, kSubjectOffset)); | 3654 __ mov(eax, Operand(esp, kSubjectOffset)); |
3617 __ mov(FieldOperand(ebx, RegExpImpl::kLastInputOffset), eax); | 3655 __ mov(FieldOperand(ebx, RegExpImpl::kLastInputOffset), eax); |
3618 __ mov(ecx, ebx); | 3656 __ RecordWriteField(ebx, |
3619 __ RecordWrite(ecx, RegExpImpl::kLastInputOffset, eax, edi); | 3657 RegExpImpl::kLastInputOffset, |
| 3658 eax, |
| 3659 edi, |
| 3660 kDontSaveFPRegs); |
3620 | 3661 |
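
[Note] The replacement of RecordWrite with RecordWriteField in this hunk is the point of the change: the stores of eax into the last-subject and last-input fields must stay visible to the GC. A rough standalone sketch of what a generational write barrier checks; Object and g_remembered are stand-ins, not V8's API:

    #include <cstddef>

    // Stand-in object: real heaps derive this from the page a pointer is on.
    struct Object { bool in_new_space; };

    static Object* g_remembered[64];  // toy remembered set
    static size_t g_remembered_count = 0;

    void WriteField(Object* host, Object** slot, Object* value) {
      *slot = value;  // the plain store: mov FieldOperand(ebx, offset), eax
      // Only an old->new pointer can be missed by a minor GC, so only that
      // case needs recording. RecordWriteField additionally informs the
      // incremental marker; kDontSaveFPRegs just says the slow path need
      // not preserve XMM registers.
      if (!host->in_new_space && value->in_new_space) {
        g_remembered[g_remembered_count++] = host;
      }
    }
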
3621 // Get the static offsets vector filled by the native regexp code. | 3662 // Get the static offsets vector filled by the native regexp code. |
3622 ExternalReference address_of_static_offsets_vector = | 3663 ExternalReference address_of_static_offsets_vector = |
3623 ExternalReference::address_of_static_offsets_vector(masm->isolate()); | 3664 ExternalReference::address_of_static_offsets_vector(masm->isolate()); |
3624 __ mov(ecx, Immediate(address_of_static_offsets_vector)); | 3665 __ mov(ecx, Immediate(address_of_static_offsets_vector)); |
3625 | 3666 |
3626 // ebx: last_match_info backing store (FixedArray) | 3667 // ebx: last_match_info backing store (FixedArray) |
3627 // ecx: offsets vector | 3668 // ecx: offsets vector |
3628 // edx: number of capture registers | 3669 // edx: number of capture registers |
3629 Label next_capture, done; | 3670 Label next_capture, done; |
(...skipping 666 matching lines...)
4296 return (!save_doubles_ || ISOLATE->fp_stubs_generated()) && | 4337 return (!save_doubles_ || ISOLATE->fp_stubs_generated()) && |
4297 result_size_ == 1; | 4338 result_size_ == 1; |
4298 } | 4339 } |
4299 | 4340 |
4300 | 4341 |
4301 void CodeStub::GenerateStubsAheadOfTime() { | 4342 void CodeStub::GenerateStubsAheadOfTime() { |
4302 } | 4343 } |
4303 | 4344 |
4304 | 4345 |
4305 void CodeStub::GenerateFPStubs() { | 4346 void CodeStub::GenerateFPStubs() { |
4306 CEntryStub save_doubles(1); | 4347 CEntryStub save_doubles(1, kSaveFPRegs); |
4307 save_doubles.SaveDoubles(); | |
4308 Handle<Code> code = save_doubles.GetCode(); | 4348 Handle<Code> code = save_doubles.GetCode(); |
4309 code->GetIsolate()->set_fp_stubs_generated(true); | 4349 code->GetIsolate()->set_fp_stubs_generated(true); |
4310 } | 4350 } |
4311 | 4351 |
4312 | 4352 |
4313 void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) { | 4353 void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) { |
4314 __ Throw(eax); | 4354 __ Throw(eax); |
4315 } | 4355 } |
4316 | 4356 |
4317 | 4357 |
(...skipping 75 matching lines...)
4393 Label okay; | 4433 Label okay; |
4394 __ cmp(edx, Operand::StaticVariable(pending_exception_address)); | 4434 __ cmp(edx, Operand::StaticVariable(pending_exception_address)); |
4395 // Cannot use a check here, as it attempts to generate a call into the runtime. | 4435 // Cannot use a check here, as it attempts to generate a call into the runtime. |
4396 __ j(equal, &okay, Label::kNear); | 4436 __ j(equal, &okay, Label::kNear); |
4397 __ int3(); | 4437 __ int3(); |
4398 __ bind(&okay); | 4438 __ bind(&okay); |
4399 __ pop(edx); | 4439 __ pop(edx); |
4400 } | 4440 } |
4401 | 4441 |
4402 // Exit the JavaScript to C++ exit frame. | 4442 // Exit the JavaScript to C++ exit frame. |
4403 __ LeaveExitFrame(save_doubles_); | 4443 __ LeaveExitFrame(save_doubles_ == kSaveFPRegs); |
4404 __ ret(0); | 4444 __ ret(0); |
4405 | 4445 |
4406 // Handling of failure. | 4446 // Handling of failure. |
4407 __ bind(&failure_returned); | 4447 __ bind(&failure_returned); |
4408 | 4448 |
4409 Label retry; | 4449 Label retry; |
4410 // If the returned exception is RETRY_AFTER_GC, continue at the retry label. | 4450 // If the returned exception is RETRY_AFTER_GC, continue at the retry label. |
4411 STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0); | 4451 STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0); |
4412 __ test(eax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize)); | 4452 __ test(eax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize)); |
4413 __ j(zero, &retry, Label::kNear); | 4453 __ j(zero, &retry, Label::kNear); |
(...skipping 34 matching lines...)
4448 // ebp: frame pointer (restored after C call) | 4488 // ebp: frame pointer (restored after C call) |
4449 // esp: stack pointer (restored after C call) | 4489 // esp: stack pointer (restored after C call) |
4450 // esi: current context (C callee-saved) | 4490 // esi: current context (C callee-saved) |
4451 // edi: JS function of the caller (C callee-saved) | 4491 // edi: JS function of the caller (C callee-saved) |
4452 | 4492 |
4453 // NOTE: Invocations of builtins may return failure objects instead | 4493 // NOTE: Invocations of builtins may return failure objects instead |
4454 // of a proper result. The builtin entry handles this by performing | 4494 // of a proper result. The builtin entry handles this by performing |
4455 // a garbage collection and retrying the builtin (twice). | 4495 // a garbage collection and retrying the builtin (twice). |
4456 | 4496 |
4457 // Enter the exit frame that transitions from JavaScript to C++. | 4497 // Enter the exit frame that transitions from JavaScript to C++. |
4458 __ EnterExitFrame(save_doubles_); | 4498 __ EnterExitFrame(save_doubles_ == kSaveFPRegs); |
4459 | 4499 |
4460 // eax: result parameter for PerformGC, if any (setup below) | 4500 // eax: result parameter for PerformGC, if any (setup below) |
4461 // ebx: pointer to builtin function (C callee-saved) | 4501 // ebx: pointer to builtin function (C callee-saved) |
4462 // ebp: frame pointer (restored after C call) | 4502 // ebp: frame pointer (restored after C call) |
4463 // esp: stack pointer (restored after C call) | 4503 // esp: stack pointer (restored after C call) |
4464 // edi: number of arguments including receiver (C callee-saved) | 4504 // edi: number of arguments including receiver (C callee-saved) |
4465 // esi: argv pointer (C callee-saved) | 4505 // esi: argv pointer (C callee-saved) |
4466 | 4506 |
4467 Label throw_normal_exception; | 4507 Label throw_normal_exception; |
4468 Label throw_termination_exception; | 4508 Label throw_termination_exception; |
(...skipping 2062 matching lines...)
6531 __ Drop(1); | 6571 __ Drop(1); |
6532 __ ret(2 * kPointerSize); | 6572 __ ret(2 * kPointerSize); |
6533 | 6573 |
6534 __ bind(&not_in_dictionary); | 6574 __ bind(&not_in_dictionary); |
6535 __ mov(result_, Immediate(0)); | 6575 __ mov(result_, Immediate(0)); |
6536 __ Drop(1); | 6576 __ Drop(1); |
6537 __ ret(2 * kPointerSize); | 6577 __ ret(2 * kPointerSize); |
6538 } | 6578 } |
6539 | 6579 |
6540 | 6580 |
| 6581 struct AheadOfTimeWriteBarrierStubList { |
| 6582 Register object, value, address; |
| 6583 RememberedSetAction action; |
| 6584 }; |
| 6585 |
| 6586 |
| 6587 struct AheadOfTimeWriteBarrierStubList kAheadOfTime[] = { |
| 6588 // Used in RegExpExecStub. |
| 6589 { ebx, eax, edi, EMIT_REMEMBERED_SET }, |
| 6590 // Used in CompileArrayPushCall. |
| 6591 { ebx, ecx, edx, EMIT_REMEMBERED_SET }, |
| 6592 // Used in CompileStoreGlobal. |
| 6593 { ebx, ecx, edx, OMIT_REMEMBERED_SET }, |
| 6594 // Used in StoreStubCompiler::CompileStoreField and |
| 6595 // KeyedStoreStubCompiler::CompileStoreField via GenerateStoreField. |
| 6596 { edx, ecx, ebx, EMIT_REMEMBERED_SET }, |
| 6597 // GenerateStoreField calls the stub with two different permutations of |
| 6598 // registers. This is the second. |
| 6599 { ebx, ecx, edx, EMIT_REMEMBERED_SET }, |
| 6600 // StoreIC::GenerateNormal via GenerateDictionaryStore. |
| 6601 { ebx, edi, edx, EMIT_REMEMBERED_SET }, |
| 6602 // KeyedStoreIC::GenerateGeneric. |
 | 6603 { ebx, edx, ecx, EMIT_REMEMBERED_SET }, |
 | 6604 // KeyedStoreStubCompiler::GenerateStoreFastElement. |
 | 6605 { edi, edx, ecx, EMIT_REMEMBERED_SET }, |
 | 6606 // Null termination. |
 | 6607 { no_reg, no_reg, no_reg, EMIT_REMEMBERED_SET } |
| 6608 }; |
| 6609 |
| 6610 |
| 6611 bool RecordWriteStub::CompilingCallsToThisStubIsGCSafe() { |
| 6612 for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime; |
| 6613 !entry->object.is(no_reg); |
| 6614 entry++) { |
| 6615 if (object_.is(entry->object) && |
| 6616 value_.is(entry->value) && |
| 6617 address_.is(entry->address) && |
| 6618 remembered_set_action_ == entry->action && |
| 6619 save_fp_regs_mode_ == kDontSaveFPRegs) { |
| 6620 return true; |
| 6621 } |
| 6622 } |
| 6623 return false; |
| 6624 } |
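
[Note] A call site compiled before its matching stub exists would not be GC safe, so the stub configurations used by pregenerated code are listed in kAheadOfTime and scanned up to a no_reg sentinel. The same sentinel-terminated-table pattern in standalone form; Entry, kNoReg, and the values are illustrative:

    #include <cstdio>

    struct Entry { int object, value, address; };
    const int kNoReg = -1;  // plays the role of no_reg

    const Entry kTable[] = {
      { 1, 0, 7 },
      { 1, 2, 3 },
      { kNoReg, kNoReg, kNoReg }  // null termination ends the scan
    };

    // Mirrors CompilingCallsToThisStubIsGCSafe: linear scan to the sentinel.
    bool Contains(int object, int value, int address) {
      for (const Entry* e = kTable; e->object != kNoReg; e++) {
        if (e->object == object && e->value == value &&
            e->address == address) {
          return true;
        }
      }
      return false;
    }

    int main() { std::printf("%d\n", Contains(1, 2, 3)); }  // prints 1
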
| 6625 |
| 6626 |
| 6627 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime() { |
| 6628 StoreBufferOverflowStub stub1(kDontSaveFPRegs); |
| 6629 stub1.GetCode(); |
| 6630 StoreBufferOverflowStub stub2(kSaveFPRegs); |
| 6631 stub2.GetCode(); |
| 6632 } |
| 6633 |
| 6634 |
| 6635 void RecordWriteStub::GenerateFixedRegStubsAheadOfTime() { |
| 6636 for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime; |
| 6637 !entry->object.is(no_reg); |
| 6638 entry++) { |
| 6639 RecordWriteStub stub(entry->object, |
| 6640 entry->value, |
| 6641 entry->address, |
| 6642 entry->action, |
| 6643 kDontSaveFPRegs); |
| 6644 stub.GetCode(); |
| 6645 } |
| 6646 } |
| 6647 |
| 6648 |
 | 6649 // Takes the input in 3 registers: address_, value_, and object_. A pointer to |
 | 6650 // the value has just been written into the object; now this stub makes sure |
| 6651 // we keep the GC informed. The word in the object where the value has been |
| 6652 // written is in the address register. |
| 6653 void RecordWriteStub::Generate(MacroAssembler* masm) { |
| 6654 Label skip_to_incremental_noncompacting; |
| 6655 Label skip_to_incremental_compacting; |
| 6656 |
| 6657 // The first two instructions are generated with labels so as to get the |
| 6658 // offset fixed up correctly by the bind(Label*) call. We patch it back and |
 | 6659 // forth between a compare instruction (a nop in this position) and the |
| 6660 // real branch when we start and stop incremental heap marking. |
| 6661 __ jmp(&skip_to_incremental_noncompacting, Label::kNear); |
| 6662 __ jmp(&skip_to_incremental_compacting, Label::kFar); |
| 6663 |
| 6664 if (remembered_set_action_ == EMIT_REMEMBERED_SET) { |
| 6665 __ RememberedSetHelper( |
| 6666 address_, value_, save_fp_regs_mode_, MacroAssembler::kReturnAtEnd); |
| 6667 } else { |
| 6668 __ ret(0); |
| 6669 } |
| 6670 |
| 6671 __ bind(&skip_to_incremental_noncompacting); |
| 6672 GenerateIncremental(masm, INCREMENTAL); |
| 6673 |
| 6674 __ bind(&skip_to_incremental_compacting); |
| 6675 GenerateIncremental(masm, INCREMENTAL_COMPACTION); |
| 6676 |
| 6677 // Initial mode of the stub is expected to be STORE_BUFFER_ONLY. |
| 6678 // Will be checked in IncrementalMarking::ActivateGeneratedStub. |
| 6679 masm->set_byte_at(0, kTwoByteNopInstruction); |
| 6680 masm->set_byte_at(2, kFiveByteNopInstruction); |
| 6681 } |
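
[Note] The two jumps emitted at the top of Generate are overwritten at the end with same-length "nops", so the stub starts life in STORE_BUFFER_ONLY mode. The nop encodings are compare-with-immediate opcodes whose immediate bytes swallow the dead jump offsets, which is why a single byte write per slot switches modes. A sketch of the mode switch; the patcher function is hypothetical, and the opcode values follow the ia32 ISA (they appear to match the stub's kTwoByteNopInstruction/kFiveByteNopInstruction constants):

    #include <cstdint>

    const uint8_t kTwoByteJump  = 0xEB;  // jmp rel8   (2 bytes total)
    const uint8_t kTwoByteNop   = 0x3C;  // cmp al, imm8: imm8 = dead rel8
    const uint8_t kFiveByteJump = 0xE9;  // jmp rel32  (5 bytes total)
    const uint8_t kFiveByteNop  = 0x3D;  // cmp eax, imm32: imm32 = dead rel32

    enum Mode { STORE_BUFFER_ONLY, INCREMENTAL, INCREMENTAL_COMPACTION };

    // Hypothetical patcher: selects which path the stub takes at run time.
    void SetMode(uint8_t* stub_start, Mode mode) {
      stub_start[0] = (mode == INCREMENTAL) ? kTwoByteJump : kTwoByteNop;
      stub_start[2] =
          (mode == INCREMENTAL_COMPACTION) ? kFiveByteJump : kFiveByteNop;
    }
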
| 6682 |
| 6683 |
| 6684 void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) { |
| 6685 regs_.Save(masm); |
| 6686 |
| 6687 if (remembered_set_action_ == EMIT_REMEMBERED_SET) { |
| 6688 Label dont_need_remembered_set; |
| 6689 |
| 6690 __ mov(regs_.scratch0(), Operand(regs_.address(), 0)); |
| 6691 __ JumpIfNotInNewSpace(regs_.scratch0(), |
| 6692 regs_.scratch0(), |
| 6693 &dont_need_remembered_set); |
| 6694 |
| 6695 __ CheckPageFlag(regs_.object(), |
| 6696 regs_.scratch0(), |
| 6697 1 << MemoryChunk::SCAN_ON_SCAVENGE, |
| 6698 not_zero, |
| 6699 &dont_need_remembered_set); |
| 6700 |
| 6701 // First notify the incremental marker if necessary, then update the |
| 6702 // remembered set. |
| 6703 CheckNeedsToInformIncrementalMarker( |
| 6704 masm, |
| 6705 kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, |
| 6706 mode); |
| 6707 InformIncrementalMarker(masm, mode); |
| 6708 regs_.Restore(masm); |
| 6709 __ RememberedSetHelper( |
| 6710 address_, value_, save_fp_regs_mode_, MacroAssembler::kReturnAtEnd); |
| 6711 |
| 6712 __ bind(&dont_need_remembered_set); |
| 6713 } |
| 6714 |
| 6715 CheckNeedsToInformIncrementalMarker( |
| 6716 masm, |
| 6717 kReturnOnNoNeedToInformIncrementalMarker, |
| 6718 mode); |
| 6719 InformIncrementalMarker(masm, mode); |
| 6720 regs_.Restore(masm); |
| 6721 __ ret(0); |
| 6722 } |
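
[Note] Before paying for a remembered-set entry, the code above loads the stored value and takes two early exits. Roughly, in standalone form; Page and its flags are stand-ins, not V8's MemoryChunk:

    // Stand-ins for the page-header bits the generated code inspects.
    struct Page { bool in_new_space; bool scan_on_scavenge; };
    struct Obj { Page* page; };

    bool NeedsRememberedSetEntry(Obj* host, Obj* value) {
      // JumpIfNotInNewSpace: only old->new pointers matter to a scavenge.
      if (!value->page->in_new_space) return false;
      // SCAN_ON_SCAVENGE: the whole page is rescanned anyway, so recording
      // the individual slot would be redundant.
      if (host->page->scan_on_scavenge) return false;
      return true;
    }
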
| 6723 |
| 6724 |
| 6725 void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm, Mode mode) { |
| 6726 regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_); |
| 6727 int argument_count = 3; |
| 6728 __ PrepareCallCFunction(argument_count, regs_.scratch0()); |
| 6729 __ mov(Operand(esp, 0 * kPointerSize), regs_.object()); |
| 6730 if (mode == INCREMENTAL_COMPACTION) { |
| 6731 __ mov(Operand(esp, 1 * kPointerSize), regs_.address()); // Slot. |
| 6732 } else { |
| 6733 ASSERT(mode == INCREMENTAL); |
| 6734 __ mov(regs_.scratch0(), Operand(regs_.address(), 0)); |
| 6735 __ mov(Operand(esp, 1 * kPointerSize), regs_.scratch0()); // Value. |
| 6736 } |
| 6737 __ mov(Operand(esp, 2 * kPointerSize), |
| 6738 Immediate(ExternalReference::isolate_address())); |
| 6739 |
| 6740 AllowExternalCallThatCantCauseGC scope(masm); |
| 6741 if (mode == INCREMENTAL_COMPACTION) { |
| 6742 __ CallCFunction( |
| 6743 ExternalReference::incremental_evacuation_record_write_function( |
| 6744 masm->isolate()), |
| 6745 argument_count); |
| 6746 } else { |
| 6747 ASSERT(mode == INCREMENTAL); |
| 6748 __ CallCFunction( |
| 6749 ExternalReference::incremental_marking_record_write_function( |
| 6750 masm->isolate()), |
| 6751 argument_count); |
| 6752 } |
| 6753 regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_); |
| 6754 } |
| 6755 |
| 6756 |
| 6757 void RecordWriteStub::CheckNeedsToInformIncrementalMarker( |
| 6758 MacroAssembler* masm, |
| 6759 OnNoNeedToInformIncrementalMarker on_no_need, |
| 6760 Mode mode) { |
| 6761 Label object_is_black, need_incremental, need_incremental_pop_object; |
| 6762 |
 | 6763 // Let's look at the color of the object: if it is not black, we don't have |
| 6764 // to inform the incremental marker. |
| 6765 __ JumpIfBlack(regs_.object(), |
| 6766 regs_.scratch0(), |
| 6767 regs_.scratch1(), |
| 6768 &object_is_black, |
| 6769 Label::kNear); |
| 6770 |
| 6771 regs_.Restore(masm); |
| 6772 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) { |
| 6773 __ RememberedSetHelper( |
| 6774 address_, value_, save_fp_regs_mode_, MacroAssembler::kReturnAtEnd); |
| 6775 } else { |
| 6776 __ ret(0); |
| 6777 } |
| 6778 |
| 6779 __ bind(&object_is_black); |
| 6780 |
| 6781 // Get the value from the slot. |
| 6782 __ mov(regs_.scratch0(), Operand(regs_.address(), 0)); |
| 6783 |
| 6784 if (mode == INCREMENTAL_COMPACTION) { |
| 6785 Label ensure_not_white; |
| 6786 |
| 6787 __ CheckPageFlag(regs_.scratch0(), // Contains value. |
| 6788 regs_.scratch1(), // Scratch. |
| 6789 MemoryChunk::kEvacuationCandidateMask, |
| 6790 zero, |
| 6791 &ensure_not_white, |
| 6792 Label::kNear); |
| 6793 |
| 6794 __ CheckPageFlag(regs_.object(), |
| 6795 regs_.scratch1(), // Scratch. |
| 6796 MemoryChunk::kSkipEvacuationSlotsRecordingMask, |
| 6797 not_zero, |
| 6798 &ensure_not_white, |
| 6799 Label::kNear); |
| 6800 |
| 6801 __ jmp(&need_incremental); |
| 6802 |
| 6803 __ bind(&ensure_not_white); |
| 6804 } |
| 6805 |
| 6806 // We need an extra register for this, so we push the object register |
| 6807 // temporarily. |
| 6808 __ push(regs_.object()); |
| 6809 __ EnsureNotWhite(regs_.scratch0(), // The value. |
| 6810 regs_.scratch1(), // Scratch. |
| 6811 regs_.object(), // Scratch. |
| 6812 &need_incremental_pop_object, |
| 6813 Label::kNear); |
| 6814 __ pop(regs_.object()); |
| 6815 |
| 6816 regs_.Restore(masm); |
| 6817 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) { |
| 6818 __ RememberedSetHelper( |
| 6819 address_, value_, save_fp_regs_mode_, MacroAssembler::kReturnAtEnd); |
| 6820 } else { |
| 6821 __ ret(0); |
| 6822 } |
| 6823 |
| 6824 __ bind(&need_incremental_pop_object); |
| 6825 __ pop(regs_.object()); |
| 6826 |
| 6827 __ bind(&need_incremental); |
| 6828 |
| 6829 // Fall through when we need to inform the incremental marker. |
| 6830 } |
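
[Note] CheckNeedsToInformIncrementalMarker encodes the tri-color invariant: only a store that puts a white (unvisited) value into a black (fully scanned) object, or whose slot must be recorded for compaction, requires the slow path. Approximately, with illustrative types; Color and the flags are not V8's marking-bitmap API:

    enum Color { WHITE, GREY, BLACK };

    struct Marked {
      Color color;
      bool on_evacuation_candidate_page;  // value side, compaction only
      bool skips_slot_recording;          // host side, compaction only
    };

    // True when the incremental marker must be informed (the fall-through
    // path at the end of the function above).
    bool NeedsToInformMarker(Marked* host, Marked* value, bool compacting) {
      if (host->color != BLACK) return false;  // JumpIfBlack not taken
      if (compacting && value->on_evacuation_candidate_page &&
          !host->skips_slot_recording) {
        return true;  // the slot must be recorded so evacuation can fix it
      }
      return value->color == WHITE;  // EnsureNotWhite's slow-path condition
    }
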
| 6831 |
| 6832 |
6541 #undef __ | 6833 #undef __ |
6542 | 6834 |
6543 } } // namespace v8::internal | 6835 } } // namespace v8::internal |
6544 | 6836 |
6545 #endif // V8_TARGET_ARCH_IA32 | 6837 #endif // V8_TARGET_ARCH_IA32 |