Index: src/x64/lithium-codegen-x64.cc
diff --git a/src/x64/lithium-codegen-x64.cc b/src/x64/lithium-codegen-x64.cc
index 8a5f09a9b1602a33297c975a1bc51abe1def254b..ad72095cd0cbb6427c838454925903f80094fd3a 100644
--- a/src/x64/lithium-codegen-x64.cc
+++ b/src/x64/lithium-codegen-x64.cc
@@ -191,10 +191,13 @@ bool LCodeGen::GeneratePrologue() {
   int heap_slots = info_->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
   if (heap_slots > 0) {
     Comment(";;; Allocate local context");
+    bool need_write_barrier = true;
     // Argument to NewContext is the function, which is still in rdi.
     if (heap_slots <= FastNewContextStub::kMaximumSlots) {
       FastNewContextStub stub(isolate(), heap_slots);
       __ CallStub(&stub);
+      // Result of FastNewContextStub is always in new space.
+      need_write_barrier = false;
     } else {
       __ Push(rdi);
       __ CallRuntime(Runtime::kHiddenNewFunctionContext, 1);
@@ -218,7 +221,14 @@ bool LCodeGen::GeneratePrologue() {
         int context_offset = Context::SlotOffset(var->index());
         __ movp(Operand(rsi, context_offset), rax);
         // Update the write barrier. This clobbers rax and rbx.
-        __ RecordWriteContextSlot(rsi, context_offset, rax, rbx, kSaveFPRegs);
+        if (need_write_barrier) {
+          __ RecordWriteContextSlot(rsi, context_offset, rax, rbx, kSaveFPRegs);
+        } else if (FLAG_debug_code) {
+          Label done;
+          __ JumpIfInNewSpace(rsi, rax, &done, Label::kNear);
+          __ Abort(kExpectedNewSpaceObject);
+          __ bind(&done);
+        }
       }
     }
     Comment(";;; End allocate local context");