Chromium Code Reviews

Index: src/ia32/code-stubs-ia32.cc
diff --git a/src/ia32/code-stubs-ia32.cc b/src/ia32/code-stubs-ia32.cc
index 59538db13fa7a1d9c4f092befa768d1d40d1333b..d9a40fd0fd8511a6ff07a48a68ae3edd9ffa79a6 100644
--- a/src/ia32/code-stubs-ia32.cc
+++ b/src/ia32/code-stubs-ia32.cc
@@ -6235,18 +6235,15 @@ void StringDictionaryLookupStub::Generate(MacroAssembler* masm) {
// we keep the GC informed. The word in the object where the value has been
// written is in the address register.
void RecordWriteStub::Generate(MacroAssembler* masm) {
-  Label skip_non_incremental_part;
+  Label skip_to_incremental_noncompacting;
+  Label skip_to_incremental_compacting;
  // The first instruction is generated as a label so as to get the offset
Erik Corry, 2011/07/04 11:04:11:
first instruction is -> first two instructions are

Vyacheslav Egorov (Chromium), 2011/08/05 12:50:28:
Done.
  // fixed up correctly by the bind(Label*) call. We patch it back and forth
  // between a 2-byte compare instruction (a nop in this position) and the real
  // branch when we start and stop incremental heap marking.
-  __ jmp(&skip_non_incremental_part, Label::kNear);
-  if (!masm->isolate()->heap()->incremental_marking()->IsMarking()) {
-    ASSERT(masm->byte_at(masm->pc_offset() - 2) ==
-           kSkipNonIncrementalPartInstruction);
-    masm->set_byte_at(masm->pc_offset() - 2, kTwoByteNopInstruction);
-  }
+  __ jmp(&skip_to_incremental_noncompacting, Label::kNear);
+  __ jmp(&skip_to_incremental_compacting, Label::kFar);
  if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
    __ RememberedSetHelper(
@@ -6255,12 +6252,26 @@ void RecordWriteStub::Generate(MacroAssembler* masm) {
    __ ret(0);
  }
-  __ bind(&skip_non_incremental_part);
-  GenerateIncremental(masm);
+  __ bind(&skip_to_incremental_noncompacting);
+  GenerateIncremental(masm, kWithoutEvacuationCandidates);
+
+  __ bind(&skip_to_incremental_compacting);
+  GenerateIncremental(masm, kWithEvacuationCandidates);
+
+  if (!masm->isolate()->heap()->incremental_marking()->IsMarking()) {
+    ASSERT(masm->byte_at(0) == kTwoByteJumpInstruction);
+    masm->set_byte_at(0, kTwoByteNopInstruction);
+  }
+
+  if (!masm->isolate()->heap()->incremental_marking()->IsMarking()) {
+    ASSERT(masm->byte_at(2) == kFiveByteJumpInstruction);
+    masm->set_byte_at(2, kFiveByteNopInstruction);
+  }
}
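For readers new to the trick being reviewed here: the stub now starts with a 2-byte near jump at offset 0 and a 5-byte far jump at offset 2, and the collector later flips each of them between the jump opcode and a same-length cmp-with-immediate that behaves as a nop, so the write barrier can switch between store-buffer-only, incremental, and incremental-compacting behavior without regenerating code. Below is a minimal standalone sketch of that patching; the opcode values and the mode names (STORE_BUFFER_ONLY and friends) are illustrative assumptions, not taken from this patch, and in the patch the two incremental flavors are generated via kWithoutEvacuationCandidates / kWithEvacuationCandidates.

#include <cstdint>

// Illustrative opcode constants (assumed): on ia32 a "cmp al, imm8" is 2 bytes
// and a "cmp eax, imm32" is 5 bytes, so each can overlay a jump of the same
// length and act as a nop that merely consumes the old jump displacement.
const uint8_t kTwoByteNop = 0x3C;    // cmp al, imm8
const uint8_t kTwoByteJump = 0xEB;   // jmp rel8
const uint8_t kFiveByteNop = 0x3D;   // cmp eax, imm32
const uint8_t kFiveByteJump = 0xE9;  // jmp rel32

enum StubMode { STORE_BUFFER_ONLY, INCREMENTAL, INCREMENTAL_COMPACTING };

// Flip the first two instructions of an already generated stub. The jump
// displacements at offsets 1 and 3..6 are left in place; when the opcodes are
// the cmp variants those bytes are simply consumed as immediates.
void PatchRecordWriteStub(uint8_t* code, StubMode mode) {
  switch (mode) {
    case STORE_BUFFER_ONLY:
      code[0] = kTwoByteNop;    // fall through both jumps into the
      code[2] = kFiveByteNop;   // remembered-set-only tail
      break;
    case INCREMENTAL:
      code[0] = kTwoByteJump;   // take the near jump at offset 0
      break;
    case INCREMENTAL_COMPACTING:
      code[0] = kTwoByteNop;    // skip the near jump...
      code[2] = kFiveByteJump;  // ...and take the far jump at offset 2
      break;
  }
}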
-void RecordWriteStub::GenerateIncremental(MacroAssembler* masm) {
+void RecordWriteStub::GenerateIncremental(MacroAssembler* masm,
+                                          EvacuationState evacuation_state) {
  regs_.Save(masm);
  if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
@@ -6273,15 +6284,17 @@ void RecordWriteStub::GenerateIncremental(MacroAssembler* masm) {
    __ CheckPageFlag(regs_.object(),
                     regs_.scratch0(),
-                     MemoryChunk::SCAN_ON_SCAVENGE,
+                     1 << MemoryChunk::SCAN_ON_SCAVENGE,
                     not_zero,
                     &dont_need_remembered_set);
    // First notify the incremental marker if necessary, then update the
    // remembered set.
    CheckNeedsToInformIncrementalMarker(
-        masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker);
-    InformIncrementalMarker(masm);
+        masm,
+        kUpdateRememberedSetOnNoNeedToInformIncrementalMarker,
+        evacuation_state);
+    InformIncrementalMarker(masm, evacuation_state);
    regs_.Restore(masm);
    __ RememberedSetHelper(
        address_, value_, save_fp_regs_mode_, MacroAssembler::kReturnAtEnd);
@@ -6290,36 +6303,62 @@ void RecordWriteStub::GenerateIncremental(MacroAssembler* masm) {
  }
  CheckNeedsToInformIncrementalMarker(
-      masm, kReturnOnNoNeedToInformIncrementalMarker);
-  InformIncrementalMarker(masm);
+      masm,
+      kReturnOnNoNeedToInformIncrementalMarker,
+      evacuation_state);
+  InformIncrementalMarker(masm, evacuation_state);
  regs_.Restore(masm);
  __ ret(0);
}
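One subtle fix above is 1 << MemoryChunk::SCAN_ON_SCAVENGE: the flag argument of CheckPageFlag is a bit mask that gets tested against the flags word in the page header, whereas SCAN_ON_SCAVENGE is a bit position, so passing the enum value directly would test the wrong bit(s). A rough, self-contained sketch of the test the macro instruction performs; the page size, header layout, and bit position here are assumptions for illustration, not V8's actual values:

#include <cstdint>

// Assumed layout: every heap page is aligned to kPageSize and starts with a
// chunk header whose first word holds the page flags.
const uintptr_t kPageSize = 1 << 20;  // hypothetical 1 MB pages
const uintptr_t kPageAlignmentMask = kPageSize - 1;

enum PageFlagBit { SCAN_ON_SCAVENGE = 3 /* hypothetical bit position */ };

struct ChunkHeader {
  uintptr_t flags;  // bit i set <=> flag i is on for this page
};

// What CheckPageFlag(object, scratch, mask, cc, label) boils down to: mask the
// object's address down to its page header, test `mask` against the flags
// word, and branch on the result.
bool PageFlagSet(uintptr_t object_address, uintptr_t mask) {
  const ChunkHeader* chunk = reinterpret_cast<const ChunkHeader*>(
      object_address & ~kPageAlignmentMask);
  return (chunk->flags & mask) != 0;
}

// Correct usage for the hunk above: PageFlagSet(addr, 1u << SCAN_ON_SCAVENGE).
// Passing SCAN_ON_SCAVENGE itself (3 in this sketch) would test bits 0 and 1.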
-void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
+void RecordWriteStub::InformIncrementalMarker(
+    MacroAssembler* masm,
+    RecordWriteStub::EvacuationState evacuation_state) {
  regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_);
  int argument_count = 3;
  __ PrepareCallCFunction(argument_count, regs_.scratch0());
  __ mov(Operand(esp, 0 * kPointerSize), regs_.object());
-  __ mov(regs_.scratch0(), Operand(regs_.address(), 0));
-  __ mov(Operand(esp, 1 * kPointerSize), regs_.scratch0());  // Value.
+  switch (evacuation_state) {
Erik Corry, 2011/07/04 11:04:11:
This switch introduces a lot of boilerplate compar…

Vyacheslav Egorov (Chromium), 2011/08/05 12:50:28:
Done.
+    case kWithEvacuationCandidates:
+      __ mov(Operand(esp, 1 * kPointerSize), regs_.address());  // Slot.
+      break;
+    case kWithoutEvacuationCandidates:
+      __ mov(regs_.scratch0(), Operand(regs_.address(), 0));
+      __ mov(Operand(esp, 1 * kPointerSize), regs_.scratch0());  // Value.
+      break;
+    default:
+      UNREACHABLE();
+  }
  __ mov(Operand(esp, 2 * kPointerSize),
         Immediate(ExternalReference::isolate_address()));
  // TODO(gc): Create a fast version of this C function that does not duplicate
  // the checks done in the stub.
-  __ CallCFunction(
-      ExternalReference::incremental_marking_record_write_function(
-          masm->isolate()),
-      argument_count);
+  switch (evacuation_state) {
Erik Corry, 2011/07/04 11:04:11:
And here.

Vyacheslav Egorov (Chromium), 2011/08/05 12:50:28:
Done.
+    case kWithEvacuationCandidates:
+      __ CallCFunction(
+          ExternalReference::incremental_evacuation_record_write_function(
+              masm->isolate()),
+          argument_count);
+      break;
+    case kWithoutEvacuationCandidates:
+      __ CallCFunction(
+          ExternalReference::incremental_marking_record_write_function(
+              masm->isolate()),
+          argument_count);
+      break;
+    default:
+      UNREACHABLE();
+  }
  regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);
}
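Both external references above resolve to C functions that receive the three arguments set up at esp + 0/1/2 * kPointerSize: the object, then either the stored value (non-compacting case) or the slot address (compacting case), then the isolate. The stubs below are purely hypothetical shapes to make that calling convention concrete; the real entry points live elsewhere in V8 and are not part of this diff.

#include <cstdio>

// Hypothetical C-side entry points, for illustration only; the argument order
// mirrors the three stack slots filled in InformIncrementalMarker above.
extern "C" void IncrementalMarkingRecordWrite(void* object, void* value,
                                              void* isolate) {
  // Non-compacting marking: receives the stored *value*.
  std::printf("mark value %p stored into %p\n", value, object);
  (void)isolate;
}

extern "C" void IncrementalEvacuationRecordWrite(void* object, void** slot,
                                                 void* isolate) {
  // Compacting case: receives the *slot address*, so the collector can record
  // the slot and fix it up if the value is evacuated.
  std::printf("record slot %p of %p\n", static_cast<void*>(slot), object);
  (void)isolate;
}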
void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
    MacroAssembler* masm,
-    RecordWriteStub::OnNoNeedToInformIncrementalMarker on_no_need) {
-  Label object_is_black, need_incremental;
+    RecordWriteStub::OnNoNeedToInformIncrementalMarker on_no_need,
+    RecordWriteStub::EvacuationState evacuation_state) {
+  Label object_is_black, need_incremental, need_incremental_pop_object;
  // Let's look at the color of the object: If it is not black we don't have
  // to inform the incremental marker.
@@ -6342,16 +6381,38 @@ void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
  // Get the value from the slot.
  __ mov(regs_.scratch0(), Operand(regs_.address(), 0));
+  if (evacuation_state == kWithEvacuationCandidates) {
+    Label ensure_not_white;
+
+    __ CheckPageFlag(regs_.scratch0(),  // Contains value.
+                     regs_.scratch1(),  // Scratch.
+                     MemoryChunk::kEvacuationCandidateMask,
+                     zero,
+                     &ensure_not_white,
+                     Label::kNear);
+
+    __ CheckPageFlag(regs_.object(),
+                     regs_.scratch1(),  // Scratch.
+                     MemoryChunk::kEvacuationCandidateOrNewSpaceMask,
+                     not_zero,
+                     &ensure_not_white,
+                     Label::kNear);
+
+    __ jmp(&need_incremental);
+
+    __ bind(&ensure_not_white);
+  }
+
  // We need an extra register for this, so we push the object register
  // temporarily.
  __ push(regs_.object());
  __ EnsureNotWhite(regs_.scratch0(),  // The value.
                    regs_.scratch1(),  // Scratch.
                    regs_.object(),  // Scratch.
-                    &need_incremental,
+                    &need_incremental_pop_object,
                    Label::kNear);
-
  __ pop(regs_.object());
+
  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(
@@ -6360,9 +6421,11 @@ void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
    __ ret(0);
  }
-  __ bind(&need_incremental);
+  __ bind(&need_incremental_pop_object);
  __ pop(regs_.object());
+  __ bind(&need_incremental);
+
  // Fall through when we need to inform the incremental marker.
}
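To summarize the control flow the hunk above adds for the compacting case: after a store of value into a slot of object, the marker only needs to be informed when the holder is already black and either the value may still be white or the slot has to be recorded because the value sits on an evacuation candidate page (and the holder itself is not on an evacuation candidate or in new space). The sketch below is a plain-C++ restatement for illustration only; the predicates are assumed helpers, not V8 functions, and the stub's actual white check (EnsureNotWhite) additionally special-cases data objects.

// Illustrative restatement of CheckNeedsToInformIncrementalMarker's decision.
enum Need { kNoNeed, kInformIncrementalMarker };

Need Classify(bool object_is_black,
              bool value_is_white,
              bool value_on_evacuation_candidate,
              bool object_in_evacuation_candidate_or_new_space,
              bool compacting) {
  if (!object_is_black) {
    return kNoNeed;  // A grey holder will be rescanned; a white one is not yet live.
  }
  if (compacting &&
      value_on_evacuation_candidate &&
      !object_in_evacuation_candidate_or_new_space) {
    // The slot must be recorded so it can be updated when the value moves.
    return kInformIncrementalMarker;
  }
  if (value_is_white) {
    // Preserve the tri-color invariant: a black object must not point to white.
    return kInformIncrementalMarker;
  }
  return kNoNeed;  // Caller then updates the remembered set or just returns,
                   // depending on on_no_need.
}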