Index: src/x64/full-codegen-x64.cc |
diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc |
index 66bc4ede96c85bce4ba491cc10f5ee88b71b3aef..724a7c598ac6fbfb520cb81f8397b9e0fd729fcf 100644 |
--- a/src/x64/full-codegen-x64.cc |
+++ b/src/x64/full-codegen-x64.cc |
@@ -1,4 +1,4 @@ |
-// Copyright 2010 the V8 project authors. All rights reserved. |
+// Copyright 2011 the V8 project authors. All rights reserved. |
// Redistribution and use in source and binary forms, with or without |
// modification, are permitted provided that the following conditions are |
// met: |
@@ -210,10 +210,17 @@ void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) { |
__ j(above_equal, &ok); |
StackCheckStub stub; |
__ CallStub(&stub); |
+ // Record a mapping of this PC offset to the OSR id. This is used to find |
+ // the AST id from the unoptimized code in order to use it as a key into |
+ // the deoptimization input data found in the optimized code. |
+ RecordStackCheck(stmt->OsrEntryId()); |
+ |
__ bind(&ok); |
PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); |
+ // Record a mapping of the OSR id to this PC. This is used if the OSR |
+ // entry becomes the target of a bailout. We don't expect it to be, but |
+ // we want it to work if it is. |
PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS); |
- RecordStackCheck(stmt->OsrEntryId()); |
} |
@@ -459,7 +466,10 @@ void FullCodeGenerator::StackValueContext::Plug(bool flag) const { |
void FullCodeGenerator::TestContext::Plug(bool flag) const { |
- codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL); |
+ codegen()->PrepareForBailoutBeforeSplit(TOS_REG, |
+ true, |
+ true_label_, |
+ false_label_); |
if (flag) { |
if (true_label_ != fall_through_) __ jmp(true_label_); |
} else { |
@@ -555,6 +565,25 @@ void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state, |
bool should_normalize, |
Label* if_true, |
Label* if_false) { |
+ // Only prepare for bailouts before splits if we're in a test |
+ // context. Otherwise, we let the Visit function deal with the |
+ // preparation to avoid preparing with the same AST id twice. |
+ if (!context()->IsTest() || !info_->IsOptimizable()) return; |
+ |
+ NearLabel skip; |
+ if (should_normalize) __ jmp(&skip); |
+ |
+ ForwardBailoutStack* current = forward_bailout_stack_; |
+ while (current != NULL) { |
+ PrepareForBailout(current->expr(), state); |
+ current = current->parent(); |
+ } |
+ |
+ if (should_normalize) { |
+ __ CompareRoot(rax, Heap::kTrueValueRootIndex); |
+ Split(equal, if_true, if_false, NULL); |
+ __ bind(&skip); |
+ } |
} |
@@ -669,8 +698,10 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) { |
Comment cmnt(masm_, "[ SwitchStatement"); |
Breakable nested_statement(this, stmt); |
SetStatementPosition(stmt); |
+ |
// Keep the switch value on the stack until a case matches. |
VisitForStackValue(stmt->tag()); |
+ PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); |
ZoneList<CaseClause*>* clauses = stmt->cases(); |
CaseClause* default_clause = NULL; // Can occur anywhere in the list. |
@@ -735,6 +766,7 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) { |
} |
__ bind(nested_statement.break_target()); |
+ PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS); |
} |
@@ -1224,6 +1256,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { |
if (property->emit_store()) { |
Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize)); |
EmitCallIC(ic, RelocInfo::CODE_TARGET); |
+ PrepareForBailoutForId(key->id(), NO_REGISTERS); |
} |
break; |
} |
@@ -1311,6 +1344,8 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { |
// Update the write barrier for the array store. |
__ RecordWrite(rbx, offset, result_register(), rcx); |
+ |
+ PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS); |
} |
if (result_saved) { |
@@ -1355,17 +1390,34 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) { |
VisitForStackValue(property->obj()); |
} |
break; |
- case KEYED_PROPERTY: |
+ case KEYED_PROPERTY: { |
if (expr->is_compound()) { |
- VisitForStackValue(property->obj()); |
- VisitForAccumulatorValue(property->key()); |
+ if (property->is_arguments_access()) { |
+ VariableProxy* obj_proxy = property->obj()->AsVariableProxy(); |
+ MemOperand slot_operand = |
+ EmitSlotSearch(obj_proxy->var()->AsSlot(), rcx); |
+ __ push(slot_operand); |
+ __ Move(rax, property->key()->AsLiteral()->handle()); |
+ } else { |
+ VisitForStackValue(property->obj()); |
+ VisitForAccumulatorValue(property->key()); |
+ } |
__ movq(rdx, Operand(rsp, 0)); |
__ push(rax); |
} else { |
- VisitForStackValue(property->obj()); |
- VisitForStackValue(property->key()); |
+ if (property->is_arguments_access()) { |
+ VariableProxy* obj_proxy = property->obj()->AsVariableProxy(); |
+ MemOperand slot_operand = |
+ EmitSlotSearch(obj_proxy->var()->AsSlot(), rcx); |
+ __ push(slot_operand); |
+ __ Push(property->key()->AsLiteral()->handle()); |
+ } else { |
+ VisitForStackValue(property->obj()); |
+ VisitForStackValue(property->key()); |
+ } |
} |
break; |
+ } |
} |
if (expr->is_compound()) { |
@@ -1383,6 +1435,12 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) { |
} |
} |
+ // For property compound assignments we need another deoptimization |
+ // point after the property load. |
+ if (property != NULL) { |
+ PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG); |
+ } |
+ |
Token::Value op = expr->binary_op(); |
ConstantOperand constant = ShouldInlineSmiCase(op) |
? GetConstantOperand(op, expr->target(), expr->value()) |
@@ -1408,6 +1466,8 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) { |
} else { |
EmitBinaryOp(op, mode); |
} |
+ // Deoptimization point in case the binary operation may have side effects. |
+ PrepareForBailout(expr->binary_operation(), TOS_REG); |
} else { |
VisitForAccumulatorValue(expr->value()); |
} |
@@ -1420,6 +1480,7 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) { |
case VARIABLE: |
EmitVariableAssignment(expr->target()->AsVariableProxy()->var(), |
expr->op()); |
+ PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); |
context()->Plug(rax); |
break; |
case NAMED_PROPERTY: |
@@ -1529,7 +1590,7 @@ void FullCodeGenerator::EmitBinaryOp(Token::Value op, |
} |
-void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_id) { |
+void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) { |
// Invalid left-hand sides are rewritten to have a 'throw |
// ReferenceError' on the left-hand side. |
if (!expr->IsValidLeftHandSide()) { |
@@ -1577,6 +1638,7 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_id) { |
break; |
} |
} |
+ PrepareForBailoutForId(bailout_ast_id, TOS_REG); |
context()->Plug(rax); |
} |
@@ -1688,6 +1750,7 @@ void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { |
__ pop(rax); |
__ Drop(1); |
} |
+ PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); |
context()->Plug(rax); |
} |
@@ -1726,6 +1789,7 @@ void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) { |
__ pop(rax); |
} |
+ PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); |
context()->Plug(rax); |
} |
@@ -1766,6 +1830,7 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr, |
InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP; |
Handle<Code> ic = StubCache::ComputeCallInitialize(arg_count, in_loop); |
EmitCallIC(ic, mode); |
+ RecordJSReturnSite(expr); |
// Restore context register. |
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
context()->Plug(rax); |
@@ -1799,6 +1864,7 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr, |
Handle<Code> ic = StubCache::ComputeKeyedCallInitialize(arg_count, in_loop); |
__ movq(rcx, Operand(rsp, (arg_count + 1) * kPointerSize)); // Key. |
EmitCallIC(ic, mode); |
+ RecordJSReturnSite(expr); |
// Restore context register. |
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
context()->DropAndPlug(1, rax); // Drop the key still on the stack. |
@@ -1819,6 +1885,7 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr) { |
InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP; |
CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE); |
__ CallStub(&stub); |
+ RecordJSReturnSite(expr); |
// Restore context register. |
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
// Discard the function left on TOS. |
@@ -1827,6 +1894,12 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr) { |
void FullCodeGenerator::VisitCall(Call* expr) { |
+#ifdef DEBUG |
+ // We want to verify that RecordJSReturnSite gets called on all paths |
+ // through this function. Avoid early returns. |
+ expr->return_is_recorded_ = false; |
+#endif |
+ |
Comment cmnt(masm_, "[ Call"); |
Expression* fun = expr->expression(); |
Variable* var = fun->AsVariableProxy()->AsVariable(); |
@@ -1834,7 +1907,7 @@ void FullCodeGenerator::VisitCall(Call* expr) { |
if (var != NULL && var->is_possibly_eval()) { |
// In a call to eval, we first call %ResolvePossiblyDirectEval to |
// resolve the function we need to call and the receiver of the |
- // call. The we call the resolved function using the given |
+ // call. Then we call the resolved function using the given |
// arguments. |
ZoneList<Expression*>* args = expr->arguments(); |
int arg_count = args->length(); |
@@ -1871,6 +1944,7 @@ void FullCodeGenerator::VisitCall(Call* expr) { |
InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP; |
CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE); |
__ CallStub(&stub); |
+ RecordJSReturnSite(expr); |
// Restore context register. |
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
context()->DropAndPlug(1, rax); |
@@ -1893,32 +1967,31 @@ void FullCodeGenerator::VisitCall(Call* expr) { |
&done); |
__ bind(&slow); |
- // Call the runtime to find the function to call (returned in rax) |
- // and the object holding it (returned in rdx). |
- __ push(context_register()); |
- __ Push(var->name()); |
- __ CallRuntime(Runtime::kLoadContextSlot, 2); |
- __ push(rax); // Function. |
- __ push(rdx); // Receiver. |
- |
- // If fast case code has been generated, emit code to push the |
- // function and receiver and have the slow path jump around this |
- // code. |
- if (done.is_linked()) { |
- NearLabel call; |
- __ jmp(&call); |
- __ bind(&done); |
- // Push function. |
- __ push(rax); |
- // Push global receiver. |
+ } |
+ // Call the runtime to find the function to call (returned in rax) |
+ // and the object holding it (returned in rdx). |
+ __ push(context_register()); |
+ __ Push(var->name()); |
+ __ CallRuntime(Runtime::kLoadContextSlot, 2); |
+ __ push(rax); // Function. |
+ __ push(rdx); // Receiver. |
+ |
+ // If fast case code has been generated, emit code to push the |
+ // function and receiver and have the slow path jump around this |
+ // code. |
+ if (done.is_linked()) { |
+ NearLabel call; |
+ __ jmp(&call); |
+ __ bind(&done); |
+ // Push function. |
+ __ push(rax); |
+ // Push global receiver. |
__ movq(rbx, GlobalObjectOperand()); |
__ push(FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset)); |
__ bind(&call); |
- } |
} |
EmitCallWithStub(expr); |
- |
} else if (fun->AsProperty() != NULL) { |
// Call to an object property. |
Property* prop = fun->AsProperty(); |
@@ -1932,24 +2005,23 @@ void FullCodeGenerator::VisitCall(Call* expr) { |
} else { |
// Call to a keyed property. |
// For a synthetic property use keyed load IC followed by function call, |
- // for a regular property use KeyedCallIC. |
+    // for a regular property use EmitKeyedCallWithIC. |
{ PreservePositionScope scope(masm()->positions_recorder()); |
VisitForStackValue(prop->obj()); |
} |
if (prop->is_synthetic()) { |
{ PreservePositionScope scope(masm()->positions_recorder()); |
VisitForAccumulatorValue(prop->key()); |
- __ movq(rdx, Operand(rsp, 0)); |
} |
// Record source code position for IC call. |
SetSourcePosition(prop->position()); |
+ __ pop(rdx); // We do not need to keep the receiver. |
+ |
Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize)); |
EmitCallIC(ic, RelocInfo::CODE_TARGET); |
- // Pop receiver. |
- __ pop(rbx); |
// Push result (function). |
__ push(rax); |
- // Push receiver object on stack. |
+      // Push global receiver. |
__ movq(rcx, GlobalObjectOperand()); |
__ push(FieldOperand(rcx, GlobalObject::kGlobalReceiverOffset)); |
EmitCallWithStub(expr); |
@@ -1960,7 +2032,7 @@ void FullCodeGenerator::VisitCall(Call* expr) { |
} else { |
// Call to some other expression. If the expression is an anonymous |
// function literal not called in a loop, mark it as one that should |
- // also use the fast code generator. |
+ // also use the full code generator. |
FunctionLiteral* lit = fun->AsFunctionLiteral(); |
if (lit != NULL && |
lit->name()->Equals(Heap::empty_string()) && |
@@ -1976,6 +2048,11 @@ void FullCodeGenerator::VisitCall(Call* expr) { |
// Emit function call. |
EmitCallWithStub(expr); |
} |
+ |
+#ifdef DEBUG |
+ // RecordJSReturnSite should have been called. |
+ ASSERT(expr->return_is_recorded_); |
+#endif |
} |
@@ -2023,6 +2100,7 @@ void FullCodeGenerator::EmitIsSmi(ZoneList<Expression*>* args) { |
context()->PrepareTest(&materialize_true, &materialize_false, |
&if_true, &if_false, &fall_through); |
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
__ JumpIfSmi(rax, if_true); |
__ jmp(if_false); |
@@ -2042,6 +2120,7 @@ void FullCodeGenerator::EmitIsNonNegativeSmi(ZoneList<Expression*>* args) { |
context()->PrepareTest(&materialize_true, &materialize_false, |
&if_true, &if_false, &fall_through); |
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
Condition non_negative_smi = masm()->CheckNonNegativeSmi(rax); |
Split(non_negative_smi, if_true, if_false, fall_through); |
@@ -2073,6 +2152,7 @@ void FullCodeGenerator::EmitIsObject(ZoneList<Expression*>* args) { |
__ cmpq(rbx, Immediate(FIRST_JS_OBJECT_TYPE)); |
__ j(below, if_false); |
__ cmpq(rbx, Immediate(LAST_JS_OBJECT_TYPE)); |
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
Split(below_equal, if_true, if_false, fall_through); |
context()->Plug(if_true, if_false); |
@@ -2093,6 +2173,7 @@ void FullCodeGenerator::EmitIsSpecObject(ZoneList<Expression*>* args) { |
__ JumpIfSmi(rax, if_false); |
__ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rbx); |
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
Split(above_equal, if_true, if_false, fall_through); |
context()->Plug(if_true, if_false); |
@@ -2115,6 +2196,7 @@ void FullCodeGenerator::EmitIsUndetectableObject(ZoneList<Expression*>* args) { |
__ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset)); |
__ testb(FieldOperand(rbx, Map::kBitFieldOffset), |
Immediate(1 << Map::kIsUndetectable)); |
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
Split(not_zero, if_true, if_false, fall_through); |
context()->Plug(if_true, if_false); |
@@ -2137,6 +2219,7 @@ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf( |
// Just indicate false, as %_IsStringWrapperSafeForDefaultValueOf() is only |
// used in a few functions in runtime.js which should not normally be hit by |
// this compiler. |
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
__ jmp(if_false); |
context()->Plug(if_true, if_false); |
} |
@@ -2156,6 +2239,7 @@ void FullCodeGenerator::EmitIsFunction(ZoneList<Expression*>* args) { |
__ JumpIfSmi(rax, if_false); |
__ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx); |
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
Split(equal, if_true, if_false, fall_through); |
context()->Plug(if_true, if_false); |
@@ -2176,6 +2260,7 @@ void FullCodeGenerator::EmitIsArray(ZoneList<Expression*>* args) { |
__ JumpIfSmi(rax, if_false); |
__ CmpObjectType(rax, JS_ARRAY_TYPE, rbx); |
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
Split(equal, if_true, if_false, fall_through); |
context()->Plug(if_true, if_false); |
@@ -2196,6 +2281,7 @@ void FullCodeGenerator::EmitIsRegExp(ZoneList<Expression*>* args) { |
__ JumpIfSmi(rax, if_false); |
__ CmpObjectType(rax, JS_REGEXP_TYPE, rbx); |
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
Split(equal, if_true, if_false, fall_through); |
context()->Plug(if_true, if_false); |
@@ -2227,6 +2313,7 @@ void FullCodeGenerator::EmitIsConstructCall(ZoneList<Expression*>* args) { |
__ bind(&check_frame_marker); |
__ SmiCompare(Operand(rax, StandardFrameConstants::kMarkerOffset), |
Smi::FromInt(StackFrame::CONSTRUCT)); |
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
Split(equal, if_true, if_false, fall_through); |
context()->Plug(if_true, if_false); |
@@ -2249,6 +2336,7 @@ void FullCodeGenerator::EmitObjectEquals(ZoneList<Expression*>* args) { |
__ pop(rbx); |
__ cmpq(rax, rbx); |
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
Split(equal, if_true, if_false, fall_through); |
context()->Plug(if_true, if_false); |
@@ -2822,6 +2910,7 @@ void FullCodeGenerator::EmitHasCachedArrayIndex(ZoneList<Expression*>* args) { |
__ testl(FieldOperand(rax, String::kHashFieldOffset), |
Immediate(String::kContainsCachedArrayIndexMask)); |
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
__ j(zero, if_true); |
__ jmp(if_false); |
@@ -2943,6 +3032,7 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { |
// Notice that the labels are swapped. |
context()->PrepareTest(&materialize_true, &materialize_false, |
&if_false, &if_true, &fall_through); |
+ if (context()->IsTest()) ForwardBailoutToChild(expr); |
VisitForControl(expr->expression(), if_true, if_false, fall_through); |
context()->Plug(if_false, if_true); // Labels swapped. |
break; |
@@ -3056,14 +3146,26 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { |
__ push(rax); // Copy of receiver, needed for later store. |
EmitNamedPropertyLoad(prop); |
} else { |
- VisitForStackValue(prop->obj()); |
- VisitForAccumulatorValue(prop->key()); |
+ if (prop->is_arguments_access()) { |
+ VariableProxy* obj_proxy = prop->obj()->AsVariableProxy(); |
+ MemOperand slot_operand = |
+ EmitSlotSearch(obj_proxy->var()->AsSlot(), rcx); |
+ __ push(slot_operand); |
+ __ Move(rax, prop->key()->AsLiteral()->handle()); |
+ } else { |
+ VisitForStackValue(prop->obj()); |
+ VisitForAccumulatorValue(prop->key()); |
+ } |
__ movq(rdx, Operand(rsp, 0)); // Leave receiver on stack |
__ push(rax); // Copy of key, needed for later store. |
EmitKeyedPropertyLoad(prop); |
} |
} |
+ // We need a second deoptimization point after loading the value |
+  // in case evaluating the property load may have a side effect. |
+ PrepareForBailout(expr->increment(), TOS_REG); |
+ |
// Call ToNumber only if operand is not a smi. |
NearLabel no_conversion; |
Condition is_smi; |
@@ -3133,6 +3235,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { |
{ EffectContext context(this); |
EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), |
Token::ASSIGN); |
+ PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); |
context.Plug(rax); |
} |
// For all contexts except kEffect: We have the result on |
@@ -3144,6 +3247,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { |
// Perform the assignment as if via '='. |
EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), |
Token::ASSIGN); |
+ PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); |
context()->Plug(rax); |
} |
break; |
@@ -3152,6 +3256,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { |
__ pop(rdx); |
Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize)); |
EmitCallIC(ic, RelocInfo::CODE_TARGET); |
+ PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); |
if (expr->is_postfix()) { |
if (!context()->IsEffect()) { |
context()->PlugTOS(); |
@@ -3166,6 +3271,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { |
__ pop(rdx); |
Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize)); |
EmitCallIC(ic, RelocInfo::CODE_TARGET); |
+ PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); |
if (expr->is_postfix()) { |
if (!context()->IsEffect()) { |
context()->PlugTOS(); |
@@ -3192,6 +3298,7 @@ void FullCodeGenerator::VisitForTypeofValue(Expression* expr) { |
// Use a regular load, not a contextual load, to avoid a reference |
// error. |
EmitCallIC(ic, RelocInfo::CODE_TARGET); |
+ PrepareForBailout(expr, TOS_REG); |
context()->Plug(rax); |
} else if (proxy != NULL && |
proxy->var()->AsSlot() != NULL && |
@@ -3207,12 +3314,13 @@ void FullCodeGenerator::VisitForTypeofValue(Expression* expr) { |
__ push(rsi); |
__ Push(proxy->name()); |
__ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2); |
+ PrepareForBailout(expr, TOS_REG); |
__ bind(&done); |
context()->Plug(rax); |
} else { |
// This expression cannot throw a reference error at the top level. |
- Visit(expr); |
+ context()->HandleExpression(expr); |
} |
} |
@@ -3237,6 +3345,7 @@ bool FullCodeGenerator::TryLiteralCompare(Token::Value op, |
{ AccumulatorValueContext context(this); |
VisitForTypeofValue(left_unary->expression()); |
} |
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
if (check->Equals(Heap::number_symbol())) { |
Condition is_smi = masm_->CheckSmi(rax); |
@@ -3330,6 +3439,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) { |
case Token::IN: |
VisitForStackValue(expr->right()); |
__ InvokeBuiltin(Builtins::IN, CALL_FUNCTION); |
+ PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL); |
__ CompareRoot(rax, Heap::kTrueValueRootIndex); |
Split(equal, if_true, if_false, fall_through); |
break; |
@@ -3338,6 +3448,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) { |
VisitForStackValue(expr->right()); |
InstanceofStub stub(InstanceofStub::kNoFlags); |
__ CallStub(&stub); |
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
__ testq(rax, rax); |
// The stub returns 0 for true. |
Split(zero, if_true, if_false, fall_through); |
@@ -3396,6 +3507,8 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) { |
: NO_COMPARE_FLAGS; |
CompareStub stub(cc, strict, flags); |
__ CallStub(&stub); |
+ |
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
__ testq(rax, rax); |
Split(cc, if_true, if_false, fall_through); |
} |
@@ -3417,6 +3530,7 @@ void FullCodeGenerator::VisitCompareToNull(CompareToNull* expr) { |
&if_true, &if_false, &fall_through); |
VisitForAccumulatorValue(expr->expression()); |
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
__ CompareRoot(rax, Heap::kNullValueRootIndex); |
if (expr->is_strict()) { |
Split(equal, if_true, if_false, fall_through); |