Index: src/ia32/lithium-codegen-ia32.cc
diff --git a/src/ia32/lithium-codegen-ia32.cc b/src/ia32/lithium-codegen-ia32.cc
index eb31d5aeeb339890dc7eb03f6f7b1f822d703720..ab4b10f18ee642a6f1c6238bf1a46da4c4540122 100644
--- a/src/ia32/lithium-codegen-ia32.cc
+++ b/src/ia32/lithium-codegen-ia32.cc
@@ -411,17 +411,18 @@ void LCodeGen::AddToTranslation(Translation* translation,
 void LCodeGen::CallCode(Handle<Code> code,
                         RelocInfo::Mode mode,
                         LInstruction* instr,
-                        bool adjusted) {
+                        ContextMode context_mode,
+                        SafepointMode safepoint_mode) {
   ASSERT(instr != NULL);
   LPointerMap* pointers = instr->pointer_map();
   RecordPosition(pointers->position());
-  if (!adjusted) {
+  if (context_mode == RESTORE_CONTEXT) {
     __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
   }
   __ call(code, mode);
-  RegisterLazyDeoptimization(instr);
+  RegisterLazyDeoptimization(instr, safepoint_mode);
   // Signal that we don't inline smi code before these stubs in the
   // optimizing code generator.
@@ -435,13 +436,13 @@ void LCodeGen::CallCode(Handle<Code> code,
 void LCodeGen::CallRuntime(const Runtime::Function* fun,
                            int argc,
                            LInstruction* instr,
-                           bool adjusted) {
+                           ContextMode context_mode) {
   ASSERT(instr != NULL);
   ASSERT(instr->HasPointerMap());
   LPointerMap* pointers = instr->pointer_map();
   RecordPosition(pointers->position());
-  if (!adjusted) {
+  if (context_mode == RESTORE_CONTEXT) {
     __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
   }
   __ CallRuntime(fun, argc);
@@ -450,7 +451,8 @@ void LCodeGen::CallRuntime(const Runtime::Function* fun,
 }
-void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr) {
+void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr,
+                                          SafepointMode safepoint_mode) {
   // Create the environment to bailout to. If the call has side effects
   // execution has to continue after the call otherwise execution can continue
   // from a previous bailout point repeating the call.
@@ -462,8 +464,16 @@ void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr) {
   }
   RegisterEnvironmentForDeoptimization(deoptimization_environment);
-  RecordSafepoint(instr->pointer_map(),
-                  deoptimization_environment->deoptimization_index());
+  if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
+    RecordSafepoint(instr->pointer_map(),
+                    deoptimization_environment->deoptimization_index());
+  } else {
+    ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
+    RecordSafepointWithRegisters(
+        instr->pointer_map(),
+        0,
+        deoptimization_environment->deoptimization_index());
+  }
 }
@@ -1143,7 +1153,7 @@ void LCodeGen::DoBitNotI(LBitNotI* instr) {
 void LCodeGen::DoThrow(LThrow* instr) {
   __ push(ToOperand(instr->InputAt(0)));
-  CallRuntime(Runtime::kThrow, 1, instr, false);
+  CallRuntime(Runtime::kThrow, 1, instr, RESTORE_CONTEXT);
   if (FLAG_debug_code) {
     Comment("Unreachable code.");
@@ -1218,7 +1228,7 @@ void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
   ASSERT(ToRegister(instr->result()).is(eax));
   TypeRecordingBinaryOpStub stub(instr->op(), NO_OVERWRITE);
-  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
+  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
 }
@@ -1949,7 +1959,11 @@ void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
   int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta;
   __ mov(temp, Immediate(delta));
   __ StoreToSafepointRegisterSlot(temp, temp);
-  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
+  CallCode(stub.GetCode(),
+           RelocInfo::CODE_TARGET,
+           instr,
+           RESTORE_CONTEXT,
+           RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
   // Put the result value into the eax slot and restore all registers.
   __ StoreToSafepointRegisterSlot(eax, eax);
   __ PopSafepointRegisters();
@@ -1980,7 +1994,7 @@ void LCodeGen::DoCmpT(LCmpT* instr) {
   Token::Value op = instr->op();
   Handle<Code> ic = CompareIC::GetUninitialized(op);
-  CallCode(ic, RelocInfo::CODE_TARGET, instr, false);
+  CallCode(ic, RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
   Condition condition = ComputeCompareCondition(op);
   if (op == Token::GT || op == Token::LTE) {
@@ -2003,7 +2017,7 @@ void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) {
   int false_block = chunk_->LookupDestination(instr->false_block_id());
   Handle<Code> ic = CompareIC::GetUninitialized(op);
-  CallCode(ic, RelocInfo::CODE_TARGET, instr, false);
+  CallCode(ic, RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
   // The compare stub expects compare condition and the input operands
   // reversed for GT and LTE.
@@ -2135,7 +2149,7 @@ void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
     ASSERT(instr->hydrogen()->need_generic());
     __ mov(ecx, name);
     Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
-    CallCode(ic, RelocInfo::CODE_TARGET, instr, false);
+    CallCode(ic, RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
   } else {
     NearLabel done;
     for (int i = 0; i < map_count - 1; ++i) {
@@ -2157,7 +2171,7 @@ void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
       __ bind(&generic);
      __ mov(ecx, name);
       Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
-      CallCode(ic, RelocInfo::CODE_TARGET, instr, false);
+      CallCode(ic, RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
     } else {
       DeoptimizeIf(not_equal, instr->environment());
       EmitLoadField(result, object, map, name);
@@ -2779,7 +2793,7 @@ void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
   ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
   TranscendentalCacheStub stub(TranscendentalCache::LOG,
                                TranscendentalCacheStub::UNTAGGED);
-  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
+  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
 }
@@ -2787,7 +2801,7 @@ void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
   ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
   TranscendentalCacheStub stub(TranscendentalCache::COS,
                                TranscendentalCacheStub::UNTAGGED);
-  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
+  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
 }
@@ -2795,7 +2809,7 @@ void LCodeGen::DoMathSin(LUnaryMathOperation* instr) {
   ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
   TranscendentalCacheStub stub(TranscendentalCache::SIN,
                                TranscendentalCacheStub::UNTAGGED);
-  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
+  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
 }
@@ -2898,7 +2912,7 @@ void LCodeGen::DoCallNew(LCallNew* instr) {
 void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
-  CallRuntime(instr->function(), instr->arity(), instr, false);
+  CallRuntime(instr->function(), instr->arity(), instr, RESTORE_CONTEXT);
 }
@@ -3768,16 +3782,16 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
     FastCloneShallowArrayStub::Mode mode =
         FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
     FastCloneShallowArrayStub stub(mode, length);
-    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
+    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
   } else if (instr->hydrogen()->depth() > 1) {
-    CallRuntime(Runtime::kCreateArrayLiteral, 3, instr, false);
+    CallRuntime(Runtime::kCreateArrayLiteral, 3, instr, RESTORE_CONTEXT);
   } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
-    CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr, false);
+    CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr, RESTORE_CONTEXT);
   } else {
     FastCloneShallowArrayStub::Mode mode =
         FastCloneShallowArrayStub::CLONE_ELEMENTS;
     FastCloneShallowArrayStub stub(mode, length);
-    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
+    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
   }
 }
@@ -3834,7 +3848,7 @@ void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
   __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
   __ push(Immediate(instr->hydrogen()->pattern()));
   __ push(Immediate(instr->hydrogen()->flags()));
-  CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr, false);
+  CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr, RESTORE_CONTEXT);
   __ mov(ebx, eax);
   __ bind(&materialized);
@@ -3846,7 +3860,7 @@ void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
   __ bind(&runtime_allocate);
   __ push(ebx);
   __ push(Immediate(Smi::FromInt(size)));
-  CallRuntime(Runtime::kAllocateInNewSpace, 1, instr, false);
+  CallRuntime(Runtime::kAllocateInNewSpace, 1, instr, RESTORE_CONTEXT);
   __ pop(ebx);
   __ bind(&allocated);
@@ -3874,14 +3888,14 @@ void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
     FastNewClosureStub stub(
         shared_info->strict_mode() ? kStrictMode : kNonStrictMode);
     __ push(Immediate(shared_info));
-    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
+    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
   } else {
     __ push(Operand(ebp, StandardFrameConstants::kContextOffset));
     __ push(Immediate(shared_info));
     __ push(Immediate(pretenure
                       ? factory()->true_value()
                       : factory()->false_value()));
-    CallRuntime(Runtime::kNewClosure, 3, instr, false);
+    CallRuntime(Runtime::kNewClosure, 3, instr, RESTORE_CONTEXT);
   }
 }
@@ -3893,7 +3907,7 @@ void LCodeGen::DoTypeof(LTypeof* instr) {
   } else {
     __ push(ToOperand(input));
   }
-  CallRuntime(Runtime::kTypeof, 1, instr, false);
+  CallRuntime(Runtime::kTypeof, 1, instr, RESTORE_CONTEXT);
 }
@@ -4096,7 +4110,7 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
   __ j(above_equal, &done);
   StackCheckStub stub;
-  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
+  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
   __ bind(&done);
 }