Chromium Code Reviews

Unified Diff: src/arm/full-codegen-arm.cc

Issue 6529055: [Isolates] Merge crankshaft (r5922 from bleeding_edge). (Closed)
Patch Set: Win32 port Created 9 years, 10 months ago
Index: src/arm/full-codegen-arm.cc
diff --git a/src/arm/full-codegen-arm.cc b/src/arm/full-codegen-arm.cc
index 4842cf9ff0c5c1d8169069c40ac5f08395928fa3..c97c5a4679978d3a905925074bb0c1bd3478b441 100644
--- a/src/arm/full-codegen-arm.cc
+++ b/src/arm/full-codegen-arm.cc
@@ -1,4 +1,4 @@
-// Copyright 2009 the V8 project authors. All rights reserved.
+// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -171,21 +171,20 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
}
}
+ if (FLAG_trace) {
+ __ CallRuntime(Runtime::kTraceEnter, 0);
+ }
+
// Check the stack for overflow or break request.
{ Comment cmnt(masm_, "[ Stack check");
- __ LoadRoot(r2, Heap::kStackLimitRootIndex);
- __ cmp(sp, Operand(r2));
+ PrepareForBailout(info->function(), NO_REGISTERS);
+ Label ok;
+ __ LoadRoot(ip, Heap::kStackLimitRootIndex);
+ __ cmp(sp, Operand(ip));
+ __ b(hs, &ok);
StackCheckStub stub;
- __ mov(ip,
- Operand(reinterpret_cast<intptr_t>(stub.GetCode().location()),
- RelocInfo::CODE_TARGET),
- LeaveCC,
- lo);
- __ Call(ip, lo);
- }
-
- if (FLAG_trace) {
- __ CallRuntime(Runtime::kTraceEnter, 0);
+ __ CallStub(&stub);
+ __ bind(&ok);
}
{ Comment cmnt(masm_, "[ Body");
@@ -200,6 +199,25 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
__ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
}
EmitReturnSequence();
+
+ // Force emit the constant pool, so it doesn't get emitted in the middle
+ // of the stack check table.
+ masm()->CheckConstPool(true, false);
+}
+
+
+void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) {
+ Comment cmnt(masm_, "[ Stack check");
+ Label ok;
+ __ LoadRoot(ip, Heap::kStackLimitRootIndex);
+ __ cmp(sp, Operand(ip));
+ __ b(hs, &ok);
+ StackCheckStub stub;
+ __ CallStub(&stub);
+ __ bind(&ok);
+ PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
+ PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
+ RecordStackCheck(stmt->OsrEntryId());
}
@@ -275,6 +293,7 @@ void FullCodeGenerator::StackValueContext::Plug(Slot* slot) const {
void FullCodeGenerator::TestContext::Plug(Slot* slot) const {
// For simplicity we always test the accumulator register.
codegen()->Move(result_register(), slot);
+ codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
codegen()->DoTest(true_label_, false_label_, fall_through_);
}
@@ -297,12 +316,16 @@ void FullCodeGenerator::StackValueContext::Plug(
void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
+ codegen()->PrepareForBailoutBeforeSplit(TOS_REG,
+ true,
+ true_label_,
+ false_label_);
if (index == Heap::kUndefinedValueRootIndex ||
index == Heap::kNullValueRootIndex ||
index == Heap::kFalseValueRootIndex) {
- __ b(false_label_);
+ if (false_label_ != fall_through_) __ b(false_label_);
} else if (index == Heap::kTrueValueRootIndex) {
- __ b(true_label_);
+ if (true_label_ != fall_through_) __ b(true_label_);
} else {
__ LoadRoot(result_register(), index);
codegen()->DoTest(true_label_, false_label_, fall_through_);
@@ -321,29 +344,34 @@ void FullCodeGenerator::AccumulatorValueContext::Plug(
void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
- // Immediates can be pushed directly.
+ // Immediates cannot be pushed directly.
__ mov(result_register(), Operand(lit));
__ push(result_register());
}
void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
+ codegen()->PrepareForBailoutBeforeSplit(TOS_REG,
+ true,
+ true_label_,
+ false_label_);
ASSERT(!lit->IsUndetectableObject()); // There are no undetectable literals.
if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
- __ b(false_label_);
+ if (false_label_ != fall_through_) __ b(false_label_);
} else if (lit->IsTrue() || lit->IsJSObject()) {
- __ b(true_label_);
+ if (true_label_ != fall_through_) __ b(true_label_);
} else if (lit->IsString()) {
if (String::cast(*lit)->length() == 0) {
-      __ b(false_label_);
+      if (false_label_ != fall_through_) __ b(false_label_);
} else {
- __ b(true_label_);
+ if (true_label_ != fall_through_) __ b(true_label_);
}
} else if (lit->IsSmi()) {
if (Smi::cast(*lit)->value() == 0) {
- __ b(false_label_);
+ if (false_label_ != fall_through_) __ b(false_label_);
} else {
- __ b(true_label_);
+ if (true_label_ != fall_through_) __ b(true_label_);
}
} else {
// For simplicity we always test the accumulator register.
@@ -383,13 +411,14 @@ void FullCodeGenerator::TestContext::DropAndPlug(int count,
// For simplicity we always test the accumulator register.
__ Drop(count);
__ Move(result_register(), reg);
+ codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
codegen()->DoTest(true_label_, false_label_, fall_through_);
}
void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
Label* materialize_false) const {
- ASSERT_EQ(materialize_true, materialize_false);
+ ASSERT(materialize_true == materialize_false);
__ bind(materialize_true);
}
@@ -424,8 +453,8 @@ void FullCodeGenerator::StackValueContext::Plug(
void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
Label* materialize_false) const {
- ASSERT(materialize_false == false_label_);
ASSERT(materialize_true == true_label_);
+ ASSERT(materialize_false == false_label_);
}
@@ -449,6 +478,10 @@ void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
void FullCodeGenerator::TestContext::Plug(bool flag) const {
+ codegen()->PrepareForBailoutBeforeSplit(TOS_REG,
+ true,
+ true_label_,
+ false_label_);
if (flag) {
if (true_label_ != fall_through_) __ b(true_label_);
} else {
@@ -529,6 +562,33 @@ void FullCodeGenerator::Move(Slot* dst,
}
+void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state,
+ bool should_normalize,
+ Label* if_true,
+ Label* if_false) {
+ // Only prepare for bailouts before splits if we're in a test
+ // context. Otherwise, we let the Visit function deal with the
+ // preparation to avoid preparing with the same AST id twice.
+ if (!context()->IsTest() || !info_->IsOptimizable()) return;
+
+ Label skip;
+ if (should_normalize) __ b(&skip);
+
+ ForwardBailoutStack* current = forward_bailout_stack_;
+ while (current != NULL) {
+ PrepareForBailout(current->expr(), state);
+ current = current->parent();
+ }
+
+ if (should_normalize) {
+ __ LoadRoot(ip, Heap::kTrueValueRootIndex);
+ __ cmp(r0, ip);
+ Split(eq, if_true, if_false, NULL);
+ __ bind(&skip);
+ }
+}
+
+
void FullCodeGenerator::EmitDeclaration(Variable* variable,
Variable::Mode mode,
FunctionLiteral* function) {
@@ -652,6 +712,8 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
// Keep the switch value on the stack until a case matches.
VisitForStackValue(stmt->tag());
+ PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
+
ZoneList<CaseClause*>* clauses = stmt->cases();
CaseClause* default_clause = NULL; // Can occur anywhere in the list.
@@ -717,6 +779,7 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
}
__ bind(nested_statement.break_target());
+ PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}
@@ -831,25 +894,17 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
EmitAssignment(stmt->each());
// Generate code for the body of the loop.
- Label stack_limit_hit, stack_check_done;
Visit(stmt->body());
- __ StackLimitCheck(&stack_limit_hit);
- __ bind(&stack_check_done);
-
// Generate code for going to the next element by incrementing
// the index (smi) stored on top of the stack.
__ bind(loop_statement.continue_target());
__ pop(r0);
__ add(r0, r0, Operand(Smi::FromInt(1)));
__ push(r0);
- __ b(&loop);
- // Slow case for the stack limit check.
- StackCheckStub stack_check_stub;
- __ bind(&stack_limit_hit);
- __ CallStub(&stack_check_stub);
- __ b(&stack_check_done);
+ EmitStackCheck(stmt);
+ __ b(&loop);
// Remove the pointers stored on the stack.
__ bind(loop_statement.break_target());
@@ -1200,13 +1255,16 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
// Fall through.
case ObjectLiteral::Property::COMPUTED:
if (key->handle()->IsSymbol()) {
- VisitForAccumulatorValue(value);
- __ mov(r2, Operand(key->handle()));
- __ ldr(r1, MemOperand(sp));
if (property->emit_store()) {
+ VisitForAccumulatorValue(value);
+ __ mov(r2, Operand(key->handle()));
+ __ ldr(r1, MemOperand(sp));
Handle<Code> ic(Isolate::Current()->builtins()->builtin(
Builtins::StoreIC_Initialize));
EmitCallIC(ic, RelocInfo::CODE_TARGET);
+ PrepareForBailoutForId(key->id(), NO_REGISTERS);
+ } else {
+ VisitForEffect(value);
}
break;
}
@@ -1301,6 +1359,8 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
// Update the write barrier for the array store with r0 as the scratch
// register.
__ RecordWrite(r1, Operand(offset), r2, result_register());
+
+ PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
}
if (result_saved) {
@@ -1347,13 +1407,27 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
break;
case KEYED_PROPERTY:
if (expr->is_compound()) {
- VisitForStackValue(property->obj());
- VisitForAccumulatorValue(property->key());
+ if (property->is_arguments_access()) {
+ VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
+ __ ldr(r0, EmitSlotSearch(obj_proxy->var()->AsSlot(), r0));
+ __ push(r0);
+ __ mov(r0, Operand(property->key()->AsLiteral()->handle()));
+ } else {
+ VisitForStackValue(property->obj());
+ VisitForAccumulatorValue(property->key());
+ }
__ ldr(r1, MemOperand(sp, 0));
__ push(r0);
} else {
- VisitForStackValue(property->obj());
- VisitForStackValue(property->key());
+ if (property->is_arguments_access()) {
+ VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
+ __ ldr(r1, EmitSlotSearch(obj_proxy->var()->AsSlot(), r0));
+ __ mov(r0, Operand(property->key()->AsLiteral()->handle()));
+ __ Push(r1, r0);
+ } else {
+ VisitForStackValue(property->obj());
+ VisitForStackValue(property->key());
+ }
}
break;
}
@@ -1373,6 +1447,12 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
}
}
+ // For property compound assignments we need another deoptimization
+ // point after the property load.
+ if (property != NULL) {
+ PrepareForBailoutForId(expr->compound_bailout_id(), TOS_REG);
+ }
+
Token::Value op = expr->binary_op();
ConstantOperand constant = ShouldInlineSmiCase(op)
? GetConstantOperand(op, expr->target(), expr->value())
@@ -1398,6 +1478,9 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
} else {
EmitBinaryOp(op, mode);
}
+
+ // Deoptimization point in case the binary operation may have side effects.
+ PrepareForBailout(expr->binary_operation(), TOS_REG);
} else {
VisitForAccumulatorValue(expr->value());
}
@@ -1689,13 +1772,14 @@ void FullCodeGenerator::VisitProperty(Property* expr) {
if (key->IsPropertyName()) {
VisitForAccumulatorValue(expr->obj());
EmitNamedPropertyLoad(expr);
+ context()->Plug(r0);
} else {
VisitForStackValue(expr->obj());
VisitForAccumulatorValue(expr->key());
__ pop(r1);
EmitKeyedPropertyLoad(expr);
+ context()->Plug(r0);
}
- context()->Plug(r0);
}
void FullCodeGenerator::EmitCallWithIC(Call* expr,
@@ -1717,6 +1801,7 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr,
Handle<Code> ic =
ISOLATE->stub_cache()->ComputeCallInitialize(arg_count, in_loop);
EmitCallIC(ic, mode);
+ RecordJSReturnSite(expr);
// Restore context register.
__ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
context()->Plug(r0);
@@ -1751,6 +1836,7 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
ISOLATE->stub_cache()->ComputeKeyedCallInitialize(arg_count, in_loop);
__ ldr(r2, MemOperand(sp, (arg_count + 1) * kPointerSize)); // Key.
EmitCallIC(ic, mode);
+ RecordJSReturnSite(expr);
// Restore context register.
__ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
context()->DropAndPlug(1, r0); // Drop the key still on the stack.
@@ -1771,6 +1857,7 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr) {
InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
__ CallStub(&stub);
+ RecordJSReturnSite(expr);
// Restore context register.
__ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
context()->DropAndPlug(1, r0);
@@ -1778,6 +1865,12 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr) {
void FullCodeGenerator::VisitCall(Call* expr) {
+#ifdef DEBUG
+ // We want to verify that RecordJSReturnSite gets called on all paths
+ // through this function. Avoid early returns.
+ expr->return_is_recorded_ = false;
+#endif
+
Comment cmnt(masm_, "[ Call");
Expression* fun = expr->expression();
Variable* var = fun->AsVariableProxy()->AsVariable();
@@ -1829,6 +1922,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
__ CallStub(&stub);
+ RecordJSReturnSite(expr);
// Restore context register.
__ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
context()->DropAndPlug(1, r0);
@@ -1934,6 +2028,11 @@ void FullCodeGenerator::VisitCall(Call* expr) {
// Emit function call.
EmitCallWithStub(expr);
}
+
+#ifdef DEBUG
+ // RecordJSReturnSite should have been called.
+ ASSERT(expr->return_is_recorded_);
+#endif
}
@@ -1982,8 +2081,9 @@ void FullCodeGenerator::EmitIsSmi(ZoneList<Expression*>* args) {
context()->PrepareTest(&materialize_true, &materialize_false,
&if_true, &if_false, &fall_through);
- __ BranchOnSmi(r0, if_true);
- __ b(if_false);
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
+ __ tst(r0, Operand(kSmiTagMask));
+ Split(eq, if_true, if_false, fall_through);
context()->Plug(if_true, if_false);
}
@@ -2001,6 +2101,7 @@ void FullCodeGenerator::EmitIsNonNegativeSmi(ZoneList<Expression*>* args) {
context()->PrepareTest(&materialize_true, &materialize_false,
&if_true, &if_false, &fall_through);
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
__ tst(r0, Operand(kSmiTagMask | 0x80000000));
Split(eq, if_true, if_false, fall_through);
@@ -2033,6 +2134,7 @@ void FullCodeGenerator::EmitIsObject(ZoneList<Expression*>* args) {
__ cmp(r1, Operand(FIRST_JS_OBJECT_TYPE));
__ b(lt, if_false);
__ cmp(r1, Operand(LAST_JS_OBJECT_TYPE));
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Split(le, if_true, if_false, fall_through);
context()->Plug(if_true, if_false);
@@ -2053,6 +2155,7 @@ void FullCodeGenerator::EmitIsSpecObject(ZoneList<Expression*>* args) {
__ BranchOnSmi(r0, if_false);
__ CompareObjectType(r0, r1, r1, FIRST_JS_OBJECT_TYPE);
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Split(ge, if_true, if_false, fall_through);
context()->Plug(if_true, if_false);
@@ -2075,6 +2178,7 @@ void FullCodeGenerator::EmitIsUndetectableObject(ZoneList<Expression*>* args) {
__ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
__ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset));
__ tst(r1, Operand(1 << Map::kIsUndetectable));
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Split(ne, if_true, if_false, fall_through);
context()->Plug(if_true, if_false);
@@ -2098,6 +2202,7 @@ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
// Just indicate false, as %_IsStringWrapperSafeForDefaultValueOf() is only
// used in a few functions in runtime.js which should not normally be hit by
// this compiler.
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
__ jmp(if_false);
context()->Plug(if_true, if_false);
}
@@ -2117,6 +2222,7 @@ void FullCodeGenerator::EmitIsFunction(ZoneList<Expression*>* args) {
__ BranchOnSmi(r0, if_false);
__ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Split(eq, if_true, if_false, fall_through);
context()->Plug(if_true, if_false);
@@ -2137,6 +2243,7 @@ void FullCodeGenerator::EmitIsArray(ZoneList<Expression*>* args) {
__ BranchOnSmi(r0, if_false);
__ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE);
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Split(eq, if_true, if_false, fall_through);
context()->Plug(if_true, if_false);
@@ -2157,6 +2264,7 @@ void FullCodeGenerator::EmitIsRegExp(ZoneList<Expression*>* args) {
__ BranchOnSmi(r0, if_false);
__ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Split(eq, if_true, if_false, fall_through);
context()->Plug(if_true, if_false);
@@ -2188,6 +2296,7 @@ void FullCodeGenerator::EmitIsConstructCall(ZoneList<Expression*>* args) {
__ bind(&check_frame_marker);
__ ldr(r1, MemOperand(r2, StandardFrameConstants::kMarkerOffset));
__ cmp(r1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Split(eq, if_true, if_false, fall_through);
context()->Plug(if_true, if_false);
@@ -2210,6 +2319,7 @@ void FullCodeGenerator::EmitObjectEquals(ZoneList<Expression*>* args) {
__ pop(r1);
__ cmp(r0, r1);
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Split(eq, if_true, if_false, fall_through);
context()->Plug(if_true, if_false);
@@ -2663,11 +2773,12 @@ void FullCodeGenerator::EmitCallFunction(ZoneList<Expression*>* args) {
void FullCodeGenerator::EmitRegExpConstructResult(ZoneList<Expression*>* args) {
+ RegExpConstructResultStub stub;
ASSERT(args->length() == 3);
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
VisitForStackValue(args->at(2));
- __ CallRuntime(Runtime::kRegExpConstructResult, 3);
+ __ CallStub(&stub);
context()->Plug(r0);
}
@@ -2786,9 +2897,8 @@ void FullCodeGenerator::EmitHasCachedArrayIndex(ZoneList<Expression*>* args) {
__ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
__ tst(r0, Operand(String::kContainsCachedArrayIndexMask));
-
- __ b(eq, if_true);
- __ b(if_false);
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
+ Split(eq, if_true, if_false, fall_through);
context()->Plug(if_true, if_false);
}
@@ -2912,6 +3022,7 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
// Notice that the labels are swapped.
context()->PrepareTest(&materialize_true, &materialize_false,
&if_false, &if_true, &fall_through);
+ if (context()->IsTest()) ForwardBailoutToChild(expr);
VisitForControl(expr->expression(), if_true, if_false, fall_through);
context()->Plug(if_false, if_true); // Labels swapped.
break;
@@ -3031,14 +3142,25 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
__ push(r0);
EmitNamedPropertyLoad(prop);
} else {
- VisitForStackValue(prop->obj());
- VisitForAccumulatorValue(prop->key());
+ if (prop->is_arguments_access()) {
+ VariableProxy* obj_proxy = prop->obj()->AsVariableProxy();
+ __ ldr(r0, EmitSlotSearch(obj_proxy->var()->AsSlot(), r0));
+ __ push(r0);
+ __ mov(r0, Operand(prop->key()->AsLiteral()->handle()));
+ } else {
+ VisitForStackValue(prop->obj());
+ VisitForAccumulatorValue(prop->key());
+ }
__ ldr(r1, MemOperand(sp, 0));
__ push(r0);
EmitKeyedPropertyLoad(prop);
}
}
+ // We need a second deoptimization point after loading the value
+ // in case evaluating the property load may have a side effect.
+ PrepareForBailout(expr->increment(), TOS_REG);
+
// Call ToNumber only if operand is not a smi.
Label no_conversion;
__ BranchOnSmi(r0, &no_conversion);
@@ -3081,6 +3203,10 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
__ sub(r0, r0, Operand(Smi::FromInt(count_value)));
}
__ mov(r1, Operand(Smi::FromInt(count_value)));
+
+ // Record position before stub call.
+ SetSourcePosition(expr->position());
+
GenericBinaryOpStub stub(Token::ADD, NO_OVERWRITE, r1, r0);
__ CallStub(&stub);
__ bind(&done);
@@ -3150,6 +3276,7 @@ void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
// Use a regular load, not a contextual load, to avoid a reference
// error.
EmitCallIC(ic, RelocInfo::CODE_TARGET);
+ PrepareForBailout(expr, TOS_REG);
context()->Plug(r0);
} else if (proxy != NULL &&
proxy->var()->AsSlot() != NULL &&
@@ -3165,12 +3292,13 @@ void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
__ mov(r0, Operand(proxy->name()));
__ Push(cp, r0);
__ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
+ PrepareForBailout(expr, TOS_REG);
__ bind(&done);
context()->Plug(r0);
} else {
// This expression cannot throw a reference error at the top level.
- Visit(expr);
+ context()->HandleExpression(expr);
}
}
@@ -3195,6 +3323,8 @@ bool FullCodeGenerator::TryLiteralCompare(Token::Value op,
{ AccumulatorValueContext context(this);
VisitForTypeofValue(left_unary->expression());
}
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
+
if (check->Equals(HEAP->number_symbol())) {
__ tst(r0, Operand(kSmiTagMask));
__ b(eq, if_true);
@@ -3298,6 +3428,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
case Token::IN:
VisitForStackValue(expr->right());
__ InvokeBuiltin(Builtins::IN, CALL_JS);
+ PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
__ LoadRoot(ip, Heap::kTrueValueRootIndex);
__ cmp(r0, ip);
Split(eq, if_true, if_false, fall_through);
@@ -3307,6 +3438,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
VisitForStackValue(expr->right());
InstanceofStub stub;
__ CallStub(&stub);
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
// The stub returns 0 for true.
__ tst(r0, r0);
Split(eq, if_true, if_false, fall_through);
@@ -3365,6 +3497,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
: NO_COMPARE_FLAGS;
CompareStub stub(cc, strict, flags, r1, r0);
__ CallStub(&stub);
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
__ cmp(r0, Operand(0, RelocInfo::NONE));
Split(cc, if_true, if_false, fall_through);
}
@@ -3386,6 +3519,7 @@ void FullCodeGenerator::VisitCompareToNull(CompareToNull* expr) {
&if_true, &if_false, &fall_through);
VisitForAccumulatorValue(expr->expression());
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
__ LoadRoot(r1, Heap::kNullValueRootIndex);
__ cmp(r0, r1);
if (expr->is_strict()) {