Index: src/ia32/code-stubs-ia32.cc
===================================================================
--- src/ia32/code-stubs-ia32.cc	(revision 9327)
+++ src/ia32/code-stubs-ia32.cc	(working copy)
@@ -338,6 +338,43 @@
 }
 
 
+void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
+  // We don't allow a GC during a store buffer overflow so there is no need to
+  // store the registers in any particular way, but we do have to store and
+  // restore them.
+  __ pushad();
+  if (save_doubles_ == kSaveFPRegs) {
+    CpuFeatures::Scope scope(SSE2);
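+    // pushad above does not cover the XMM registers, so spill them manually.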
+    __ sub(Operand(esp), Immediate(kDoubleSize * XMMRegister::kNumRegisters));
+    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
+      XMMRegister reg = XMMRegister::from_code(i);
+      __ movdbl(Operand(esp, i * kDoubleSize), reg);
+    }
+  }
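+  // The C function below takes a single argument: the isolate.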
+  const int argument_count = 1;
+
+  AllowExternalCallThatCantCauseGC scope(masm);
+  __ PrepareCallCFunction(argument_count, ecx);
+  __ mov(Operand(esp, 0 * kPointerSize),
+         Immediate(ExternalReference::isolate_address()));
+  __ CallCFunction(
+      ExternalReference::store_buffer_overflow_function(masm->isolate()),
+      argument_count);
+  if (save_doubles_ == kSaveFPRegs) {
+    CpuFeatures::Scope scope(SSE2);
+    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
+      XMMRegister reg = XMMRegister::from_code(i);
+      __ movdbl(reg, Operand(esp, i * kDoubleSize));
+    }
+    __ add(Operand(esp), Immediate(kDoubleSize * XMMRegister::kNumRegisters));
+  }
+  __ popad();
+  __ ret(0);
+}
+
+
 void ToBooleanStub::CheckOddball(MacroAssembler* masm,
                                  Type type,
                                  Heap::RootListIndex value,
@@ -3611,12 +3646,18 @@
   // Store last subject and last input.
   __ mov(eax, Operand(esp, kSubjectOffset));
   __ mov(FieldOperand(ebx, RegExpImpl::kLastSubjectOffset), eax);
-  __ mov(ecx, ebx);
-  __ RecordWrite(ecx, RegExpImpl::kLastSubjectOffset, eax, edi);
+  __ RecordWriteField(ebx,
+                      RegExpImpl::kLastSubjectOffset,
+                      eax,
+                      edi,
+                      kDontSaveFPRegs);
   __ mov(eax, Operand(esp, kSubjectOffset));
   __ mov(FieldOperand(ebx, RegExpImpl::kLastInputOffset), eax);
-  __ mov(ecx, ebx);
-  __ RecordWrite(ecx, RegExpImpl::kLastInputOffset, eax, edi);
+  __ RecordWriteField(ebx,
+                      RegExpImpl::kLastInputOffset,
+                      eax,
+                      edi,
+                      kDontSaveFPRegs);
 
   // Get the static offsets vector filled by the native regexp code.
   ExternalReference address_of_static_offsets_vector =
@@ -4303,8 +4344,7 @@
 
 
 void CodeStub::GenerateFPStubs() {
-  CEntryStub save_doubles(1);
-  save_doubles.SaveDoubles();
+  CEntryStub save_doubles(1, kSaveFPRegs);
   Handle<Code> code = save_doubles.GetCode();
   code->GetIsolate()->set_fp_stubs_generated(true);
 }
@@ -4400,7 +4440,7 @@
   }
 
   // Exit the JavaScript to C++ exit frame.
-  __ LeaveExitFrame(save_doubles_);
+  __ LeaveExitFrame(save_doubles_ == kSaveFPRegs);
   __ ret(0);
 
   // Handling of failure.
@@ -4455,7 +4495,7 @@
   // a garbage collection and retrying the builtin (twice).
 
   // Enter the exit frame that transitions from JavaScript to C++.
-  __ EnterExitFrame(save_doubles_);
+  __ EnterExitFrame(save_doubles_ == kSaveFPRegs);
 
   // eax: result parameter for PerformGC, if any (setup below)
   // ebx: pointer to builtin function  (C callee-saved)
@@ -6538,6 +6578,272 @@
 }
 
 
+struct AheadOfTimeWriteBarrierStubList {
+  Register object, value, address;
+  RememberedSetAction action;
+};
+
+
+struct AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
+  // Used in RegExpExecStub.
+  { ebx, eax, edi, EMIT_REMEMBERED_SET },
+  // Used in CompileArrayPushCall.
+  { ebx, ecx, edx, EMIT_REMEMBERED_SET },
+  // Used in CompileStoreGlobal.
+  { ebx, ecx, edx, OMIT_REMEMBERED_SET },
+  // Used in StoreStubCompiler::CompileStoreField and
+  // KeyedStoreStubCompiler::CompileStoreField via GenerateStoreField.
+  { edx, ecx, ebx, EMIT_REMEMBERED_SET },
+  // GenerateStoreField calls the stub with two different permutations of
+  // registers.  This is the second.
+  { ebx, ecx, edx, EMIT_REMEMBERED_SET },
+  // StoreIC::GenerateNormal via GenerateDictionaryStore.
+  { ebx, edi, edx, EMIT_REMEMBERED_SET },
+  // KeyedStoreIC::GenerateGeneric.
+  { ebx, edx, ecx, EMIT_REMEMBERED_SET },
+  // KeyedStoreStubCompiler::GenerateStoreFastElement.
+  { edi, edx, ecx, EMIT_REMEMBERED_SET },
+  // Null termination.
+  { no_reg, no_reg, no_reg, EMIT_REMEMBERED_SET }
+};
+
+
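+// Compiling a call to this stub cannot trigger a GC when an identical stub
+// has already been generated ahead of time (see the kAheadOfTime list above).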
+bool RecordWriteStub::CompilingCallsToThisStubIsGCSafe() {
+  for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
+       !entry->object.is(no_reg);
+       entry++) {
+    if (object_.is(entry->object) &&
+        value_.is(entry->value) &&
+        address_.is(entry->address) &&
+        remembered_set_action_ == entry->action &&
+        save_fp_regs_mode_ == kDontSaveFPRegs) {
+      return true;
+    }
+  }
+  return false;
+}
+
+
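+// Pre-generate both variants of the store buffer overflow stub so that they
+// never have to be compiled lazily at a point where a GC is not allowed.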
+void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime() {
+  StoreBufferOverflowStub stub1(kDontSaveFPRegs);
+  stub1.GetCode();
+  StoreBufferOverflowStub stub2(kSaveFPRegs);
+  stub2.GetCode();
+}
+
+
+void RecordWriteStub::GenerateFixedRegStubsAheadOfTime() {
+  for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
+       !entry->object.is(no_reg);
+       entry++) {
+    RecordWriteStub stub(entry->object,
+                         entry->value,
+                         entry->address,
+                         entry->action,
+                         kDontSaveFPRegs);
+    stub.GetCode();
+  }
+}
+
+
+// Takes the input in 3 registers: address_, value_ and object_.  A pointer
+// to the value has just been written into the object, and now this stub
+// makes sure we keep the GC informed.  The word in the object where the
+// value has been written is in the address register.
+void RecordWriteStub::Generate(MacroAssembler* masm) {
+  Label skip_to_incremental_noncompacting;
+  Label skip_to_incremental_compacting;
+
+  // The first two instructions are generated with labels so as to get the
+  // offset fixed up correctly by the bind(Label*) call.  We patch them back
+  // and forth between compare instructions (nops in this position) and the
+  // real branches when we start and stop incremental heap marking.
+  __ jmp(&skip_to_incremental_noncompacting, Label::kNear);
+  __ jmp(&skip_to_incremental_compacting, Label::kFar);
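+  // The near jump above is 2 bytes long and the far jump 5 bytes, which is
+  // what the nop patching at the end of this function relies on.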
+
+  if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
+    __ RememberedSetHelper(
+        address_, value_, save_fp_regs_mode_, MacroAssembler::kReturnAtEnd);
+  } else {
+    __ ret(0);
+  }
+
+  __ bind(&skip_to_incremental_noncompacting);
+  GenerateIncremental(masm, INCREMENTAL);
+
+  __ bind(&skip_to_incremental_compacting);
+  GenerateIncremental(masm, INCREMENTAL_COMPACTION);
+
+  // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
+  // Will be checked in IncrementalMarking::ActivateGeneratedStub.
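+  // Patch the two jumps emitted above into nops of matching length.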
+  masm->set_byte_at(0, kTwoByteNopInstruction);
+  masm->set_byte_at(2, kFiveByteNopInstruction);
+}
+
+
+void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
+  regs_.Save(masm);
+
+  if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
+    Label dont_need_remembered_set;
+
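+    // Only pointers to new space need a remembered set entry.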
+    __ mov(regs_.scratch0(), Operand(regs_.address(), 0));
+    __ JumpIfNotInNewSpace(regs_.scratch0(),
+                           regs_.scratch0(),
+                           &dont_need_remembered_set);
+
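+    // Pages that are scanned in their entirety on scavenge do not need
+    // individual remembered set entries either.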
+    __ CheckPageFlag(regs_.object(),
+                     regs_.scratch0(),
+                     1 << MemoryChunk::SCAN_ON_SCAVENGE,
+                     not_zero,
+                     &dont_need_remembered_set);
+
+    // First notify the incremental marker if necessary, then update the
+    // remembered set.
+    CheckNeedsToInformIncrementalMarker(
+        masm,
+        kUpdateRememberedSetOnNoNeedToInformIncrementalMarker,
+        mode);
+    InformIncrementalMarker(masm, mode);
+    regs_.Restore(masm);
+    __ RememberedSetHelper(
+        address_, value_, save_fp_regs_mode_, MacroAssembler::kReturnAtEnd);
+
+    __ bind(&dont_need_remembered_set);
+  }
+
+  CheckNeedsToInformIncrementalMarker(
+      masm,
+      kReturnOnNoNeedToInformIncrementalMarker,
+      mode);
+  InformIncrementalMarker(masm, mode);
+  regs_.Restore(masm);
+  __ ret(0);
+}
+
+
+void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm, Mode mode) {
+  regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_);
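+  // The C function takes three arguments: the object, the slot address (or
+  // the value when only marking), and the isolate.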
+  int argument_count = 3;
+  __ PrepareCallCFunction(argument_count, regs_.scratch0());
+  __ mov(Operand(esp, 0 * kPointerSize), regs_.object());
+  if (mode == INCREMENTAL_COMPACTION) {
+    __ mov(Operand(esp, 1 * kPointerSize), regs_.address());  // Slot.
+  } else {
+    ASSERT(mode == INCREMENTAL);
+    __ mov(regs_.scratch0(), Operand(regs_.address(), 0));
+    __ mov(Operand(esp, 1 * kPointerSize), regs_.scratch0());  // Value.
+  }
+  __ mov(Operand(esp, 2 * kPointerSize),
+         Immediate(ExternalReference::isolate_address()));
+
+  AllowExternalCallThatCantCauseGC scope(masm);
+  if (mode == INCREMENTAL_COMPACTION) {
+    __ CallCFunction(
+        ExternalReference::incremental_evacuation_record_write_function(
+            masm->isolate()),
+        argument_count);
+  } else {
+    ASSERT(mode == INCREMENTAL);
+    __ CallCFunction(
+        ExternalReference::incremental_marking_record_write_function(
+            masm->isolate()),
+        argument_count);
+  }
+  regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);
+}
+
+
+void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
+    MacroAssembler* masm,
+    OnNoNeedToInformIncrementalMarker on_no_need,
+    Mode mode) {
+  Label object_is_black, need_incremental, need_incremental_pop_object;
+
+  // Let's look at the color of the object: if it is not black, we don't have
+  // to inform the incremental marker.
+  __ JumpIfBlack(regs_.object(),
+                 regs_.scratch0(),
+                 regs_.scratch1(),
+                 &object_is_black,
+                 Label::kNear);
+
+  regs_.Restore(masm);
+  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
+    __ RememberedSetHelper(
+        address_, value_, save_fp_regs_mode_, MacroAssembler::kReturnAtEnd);
+  } else {
+    __ ret(0);
+  }
+
+  __ bind(&object_is_black);
+
+  // Get the value from the slot.
+  __ mov(regs_.scratch0(), Operand(regs_.address(), 0));
+
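+  // When compacting, a slot whose value lives on an evacuation candidate page
+  // must also be recorded, unless slot recording is skipped for this object.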
+  if (mode == INCREMENTAL_COMPACTION) {
+    Label ensure_not_white;
+
+    __ CheckPageFlag(regs_.scratch0(),  // Contains value.
+                     regs_.scratch1(),  // Scratch.
+                     MemoryChunk::kEvacuationCandidateMask,
+                     zero,
+                     &ensure_not_white,
+                     Label::kNear);
+
+    __ CheckPageFlag(regs_.object(),
+                     regs_.scratch1(),  // Scratch.
+                     MemoryChunk::kSkipEvacuationSlotsRecordingMask,
+                     not_zero,
+                     &ensure_not_white,
+                     Label::kNear);
+
+    __ jmp(&need_incremental);
+
+    __ bind(&ensure_not_white);
+  }
+
+  // We need an extra register for this, so we push the object register
+  // temporarily.
+  __ push(regs_.object());
+  __ EnsureNotWhite(regs_.scratch0(),  // The value.
+                    regs_.scratch1(),  // Scratch.
+                    regs_.object(),  // Scratch.
+                    &need_incremental_pop_object,
+                    Label::kNear);
+  __ pop(regs_.object());
+
+  regs_.Restore(masm);
+  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
+    __ RememberedSetHelper(
+        address_, value_, save_fp_regs_mode_, MacroAssembler::kReturnAtEnd);
+  } else {
+    __ ret(0);
+  }
+
+  __ bind(&need_incremental_pop_object);
+  __ pop(regs_.object());
+
+  __ bind(&need_incremental);
+
+  // Fall through when we need to inform the incremental marker.
+}
+
+
 #undef __
 
 } }  // namespace v8::internal