Chromium Code Reviews

Unified Diff: src/ia32/full-codegen-ia32.cc

Issue 6529055: [Isolates] Merge crankshaft (r5922 from bleeding_edge). (Closed)
Patch Set: Win32 port (created 9 years, 10 months ago)
Index: src/ia32/full-codegen-ia32.cc
diff --git a/src/ia32/full-codegen-ia32.cc b/src/ia32/full-codegen-ia32.cc
index 5c940e313a7e5afc4f32c0651af5998617834840..06e7612de82afe4e5dd847fe81265acb6493822a 100644
--- a/src/ia32/full-codegen-ia32.cc
+++ b/src/ia32/full-codegen-ia32.cc
@@ -168,7 +168,12 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
}
}
+ if (FLAG_trace) {
+ __ CallRuntime(Runtime::kTraceEnter, 0);
+ }
+
{ Comment cmnt(masm_, "[ Stack check");
+ PrepareForBailout(info->function(), NO_REGISTERS);
NearLabel ok;
ExternalReference stack_limit =
ExternalReference::address_of_stack_limit();
@@ -179,10 +184,6 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
__ bind(&ok);
}
- if (FLAG_trace) {
- __ CallRuntime(Runtime::kTraceEnter, 0);
- }
-
{ Comment cmnt(masm_, "[ Body");
ASSERT(loop_depth() == 0);
VisitStatements(function()->body());
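Note: for anyone new to the deopt machinery being merged here, PrepareForBailout and PrepareForBailoutForId record the current unoptimized-code offset against an AST id, so that optimized code can later deoptimize to exactly this point. A rough, illustrative sketch of the bookkeeping; the names below are approximations, not necessarily the exact ones in full-codegen.h:

  // Illustrative only: map an AST node's id to the current pc offset so
  // the deoptimizer knows where to re-enter unoptimized code.
  void FullCodeGenerator::PrepareForBailout(AstNode* node, State state) {
    BailoutEntry entry = { node->id(), masm_->pc_offset() };
    bailout_entries_.Add(entry);
    // 'state' records whether the top-of-stack value is live in eax
    // (TOS_REG) or no registers are live (NO_REGISTERS).
  }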
@@ -197,6 +198,27 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
}
+void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) {
+ Comment cmnt(masm_, "[ Stack check");
+ NearLabel ok;
+ ExternalReference stack_limit = ExternalReference::address_of_stack_limit();
+ __ cmp(esp, Operand::StaticVariable(stack_limit));
+ __ j(above_equal, &ok, taken);
+ StackCheckStub stub;
+ __ CallStub(&stub);
+ __ bind(&ok);
+ PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
+ PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
+ RecordStackCheck(stmt->OsrEntryId());
+ // Loop stack checks can be patched to perform on-stack
+ // replacement. In order to decide whether or not to perform OSR we
+ // embed the loop depth in a test instruction after the call so that
+ // the OSR builtin can extract it from the instruction stream.
+ ASSERT(loop_depth() > 0);
+ __ test(eax, Immediate(Min(loop_depth(), Code::kMaxLoopNestingMarker)));
+}
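Note: the loop-depth marker above works because "test eax, imm32" has a fixed ia32 encoding (opcode byte 0xA9 followed by a 32-bit little-endian immediate), so the OSR machinery can read the depth straight out of the instruction stream behind the stack-check call. A minimal, hypothetical decoder (not the actual V8 helper):

  #include <stdint.h>
  #include <string.h>

  // 'pc' points just past the stack-check call, i.e. at the test
  // instruction emitted by EmitStackCheck.
  static int ExtractLoopDepth(const uint8_t* pc) {
    if (pc[0] != 0xA9) return -1;             // not "test eax, imm32"
    uint32_t depth;
    memcpy(&depth, pc + 1, sizeof(depth));    // little-endian immediate
    return static_cast<int>(depth);
  }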
+
+
void FullCodeGenerator::EmitReturnSequence() {
Comment cmnt(masm_, "[ Return sequence");
if (return_label_.is_bound()) {
@@ -213,7 +235,7 @@ void FullCodeGenerator::EmitReturnSequence() {
Label check_exit_codesize;
masm_->bind(&check_exit_codesize);
#endif
- CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
+ SetSourcePosition(function()->end_position() - 1);
__ RecordJSReturn();
// Do not use the leave instruction here because it is too short to
// patch with the code required by the debugger.
@@ -266,6 +288,7 @@ void FullCodeGenerator::StackValueContext::Plug(Slot* slot) const {
void FullCodeGenerator::TestContext::Plug(Slot* slot) const {
// For simplicity we always test the accumulator register.
codegen()->Move(result_register(), slot);
+ codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
codegen()->DoTest(true_label_, false_label_, fall_through_);
}
@@ -309,22 +332,26 @@ void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
+ codegen()->PrepareForBailoutBeforeSplit(TOS_REG,
+ true,
+ true_label_,
+ false_label_);
ASSERT(!lit->IsUndetectableObject()); // There are no undetectable literals.
if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
- __ jmp(false_label_);
+ if (false_label_ != fall_through_) __ jmp(false_label_);
} else if (lit->IsTrue() || lit->IsJSObject()) {
- __ jmp(true_label_);
+ if (true_label_ != fall_through_) __ jmp(true_label_);
} else if (lit->IsString()) {
if (String::cast(*lit)->length() == 0) {
- __ jmp(false_label_);
+ if (false_label_ != fall_through_) __ jmp(false_label_);
} else {
- __ jmp(true_label_);
+ if (true_label_ != fall_through_) __ jmp(true_label_);
}
} else if (lit->IsSmi()) {
if (Smi::cast(*lit)->value() == 0) {
- __ jmp(false_label_);
+ if (false_label_ != fall_through_) __ jmp(false_label_);
} else {
- __ jmp(true_label_);
+ if (true_label_ != fall_through_) __ jmp(true_label_);
}
} else {
// For simplicity we always test the accumulator register.
@@ -364,13 +391,14 @@ void FullCodeGenerator::TestContext::DropAndPlug(int count,
// For simplicity we always test the accumulator register.
__ Drop(count);
__ Move(result_register(), reg);
+ codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
codegen()->DoTest(true_label_, false_label_, fall_through_);
}
void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
Label* materialize_false) const {
- ASSERT_EQ(materialize_true, materialize_false);
+ ASSERT(materialize_true == materialize_false);
__ bind(materialize_true);
}
@@ -403,8 +431,8 @@ void FullCodeGenerator::StackValueContext::Plug(
void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
Label* materialize_false) const {
- ASSERT(materialize_false == false_label_);
ASSERT(materialize_true == true_label_);
+ ASSERT(materialize_false == false_label_);
}
@@ -427,6 +455,10 @@ void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
void FullCodeGenerator::TestContext::Plug(bool flag) const {
+ codegen()->PrepareForBailoutBeforeSplit(TOS_REG,
+ true,
+ true_label_,
+ false_label_);
if (flag) {
if (true_label_ != fall_through_) __ jmp(true_label_);
} else {
@@ -518,6 +550,32 @@ void FullCodeGenerator::Move(Slot* dst,
}
+void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state,
+ bool should_normalize,
+ Label* if_true,
+ Label* if_false) {
+ // Only prepare for bailouts before splits if we're in a test
+ // context. Otherwise, we let the Visit function deal with the
+ // preparation to avoid preparing with the same AST id twice.
+ if (!context()->IsTest() || !info_->IsOptimizable()) return;
+
+ NearLabel skip;
+ if (should_normalize) __ jmp(&skip);
+
+ ForwardBailoutStack* current = forward_bailout_stack_;
+ while (current != NULL) {
+ PrepareForBailout(current->expr(), state);
+ current = current->parent();
+ }
+
+ if (should_normalize) {
+ __ cmp(eax, FACTORY->true_value());
+ Split(equal, if_true, if_false, NULL);
+ __ bind(&skip);
+ }
+}
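Note: the forward-bailout walk above assumes a simple parent-linked stack of pending expressions, pushed while visiting nested test contexts. Its declaration in full-codegen.h is along these lines (sketch; the real class may differ in detail):

  class ForwardBailoutStack BASE_EMBEDDED {
   public:
    ForwardBailoutStack(Expression* expr, ForwardBailoutStack* parent)
        : expr_(expr), parent_(parent) { }
    Expression* expr() const { return expr_; }
    ForwardBailoutStack* parent() const { return parent_; }
   private:
    Expression* const expr_;
    ForwardBailoutStack* const parent_;
  };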
+
+
void FullCodeGenerator::EmitDeclaration(Variable* variable,
Variable::Mode mode,
FunctionLiteral* function) {
@@ -629,6 +687,9 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
Comment cmnt(masm_, "[ SwitchStatement");
Breakable nested_statement(this, stmt);
SetStatementPosition(stmt);
+
+ PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
+
// Keep the switch value on the stack until a case matches.
VisitForStackValue(stmt->tag());
@@ -668,11 +729,12 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
__ bind(&slow_case);
}
- CompareFlags flags = inline_smi_code
- ? NO_SMI_COMPARE_IN_STUB
- : NO_COMPARE_FLAGS;
- CompareStub stub(equal, true, flags);
- __ CallStub(&stub);
+ // Record position before stub call for type feedback.
+ SetSourcePosition(clause->position());
+
+ Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
+ __ call(ic, RelocInfo::CODE_TARGET);
+
__ test(eax, Operand(eax));
__ j(not_equal, &next_test);
__ Drop(1); // Switch value is no longer needed.
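Note: switching from CompareStub to a compare IC is what enables type feedback here: the call site starts out generic and patches itself to a specialized stub once it has observed operand types. GetUninitialized is plausibly no more than (sketch; stub and state names assumed):

  Handle<Code> CompareIC::GetUninitialized(Token::Value op) {
    ICCompareStub stub(op, UNINITIALIZED);  // assumed stub class and state
    return stub.GetCode();
  }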
@@ -698,6 +760,7 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
}
__ bind(nested_statement.break_target());
+ PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}
@@ -852,24 +915,15 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
EmitAssignment(stmt->each());
// Generate code for the body of the loop.
- Label stack_limit_hit;
- NearLabel stack_check_done;
Visit(stmt->body());
- __ StackLimitCheck(&stack_limit_hit);
- __ bind(&stack_check_done);
-
// Generate code for going to the next element by incrementing the
// index (smi) stored on top of the stack.
__ bind(loop_statement.continue_target());
__ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));
- __ jmp(&loop);
- // Slow case for the stack limit check.
- StackCheckStub stack_check_stub;
- __ bind(&stack_limit_hit);
- __ CallStub(&stack_check_stub);
- __ jmp(&stack_check_done);
+ EmitStackCheck(stmt);
+ __ jmp(&loop);
// Remove the pointers stored on the stack.
__ bind(loop_statement.break_target());
@@ -884,8 +938,14 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
bool pretenure) {
// Use the fast case closure allocation code that allocates in new
- // space for nested functions that don't need literals cloning.
- if (scope()->is_function_scope() &&
+ // space for nested functions that don't need literals cloning. If
+ // we're running with the --always-opt or the --prepare-always-opt
+ // flag, we need to use the runtime function so that the new function
+ // we are creating here gets a chance to have its code optimized and
+ // doesn't just get a copy of the existing unoptimized code.
+ if (!FLAG_always_opt &&
+ !FLAG_prepare_always_opt &&
+ scope()->is_function_scope() &&
info->num_literals() == 0 &&
!pretenure) {
FastNewClosureStub stub;
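Note: FLAG_always_opt and FLAG_prepare_always_opt are ordinary boolean V8 flags; their definitions in flag-definitions.h look roughly like this (descriptions approximate):

  DEFINE_bool(always_opt, false, "always try to optimize functions")
  DEFINE_bool(prepare_always_opt, false,
              "prepare for turning on always opt")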
@@ -1235,13 +1295,16 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
// Fall through.
case ObjectLiteral::Property::COMPUTED:
if (key->handle()->IsSymbol()) {
- VisitForAccumulatorValue(value);
- __ mov(ecx, Immediate(key->handle()));
- __ mov(edx, Operand(esp, 0));
if (property->emit_store()) {
+ VisitForAccumulatorValue(value);
+ __ mov(ecx, Immediate(key->handle()));
+ __ mov(edx, Operand(esp, 0));
Handle<Code> ic(Isolate::Current()->builtins()->builtin(
Builtins::StoreIC_Initialize));
- EmitCallIC(ic, RelocInfo::CODE_TARGET);
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
+ PrepareForBailoutForId(key->id(), NO_REGISTERS);
+ } else {
+ VisitForEffect(value);
}
break;
}
@@ -1289,6 +1352,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
__ push(Immediate(Smi::FromInt(expr->literal_index())));
__ push(Immediate(expr->constant_elements()));
if (expr->constant_elements()->map() == HEAP->fixed_cow_array_map()) {
+ ASSERT(expr->depth() == 1);
FastCloneShallowArrayStub stub(
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
__ CallStub(&stub);
@@ -1330,6 +1394,8 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
// Update the write barrier for the array store.
__ RecordWrite(ebx, offset, result_register(), ecx);
+
+ PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
}
if (result_saved) {
@@ -1374,17 +1440,30 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
VisitForStackValue(property->obj());
}
break;
- case KEYED_PROPERTY:
+ case KEYED_PROPERTY: {
if (expr->is_compound()) {
- VisitForStackValue(property->obj());
- VisitForAccumulatorValue(property->key());
+ if (property->is_arguments_access()) {
+ VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
+ __ push(EmitSlotSearch(obj_proxy->var()->AsSlot(), ecx));
+ __ mov(eax, Immediate(property->key()->AsLiteral()->handle()));
+ } else {
+ VisitForStackValue(property->obj());
+ VisitForAccumulatorValue(property->key());
+ }
__ mov(edx, Operand(esp, 0));
__ push(eax);
} else {
- VisitForStackValue(property->obj());
- VisitForStackValue(property->key());
+ if (property->is_arguments_access()) {
+ VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
+ __ push(EmitSlotSearch(obj_proxy->var()->AsSlot(), ecx));
+ __ push(Immediate(property->key()->AsLiteral()->handle()));
+ } else {
+ VisitForStackValue(property->obj());
+ VisitForStackValue(property->key());
+ }
}
break;
+ }
}
if (expr->is_compound()) {
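Note: both branches of the new KEYED_PROPERTY code rely on is_arguments_access() meaning that the receiver is a variable proxy and the key is a literal, so both operands can be materialized directly without recursive visits. A hypothetical version of the predicate (the real one lives on Property in ast.h and may check more):

  // Hypothetical sketch: true when a keyed access like arguments[0] can
  // skip VisitForStackValue / VisitForAccumulatorValue entirely.
  bool IsArgumentsAccessFastPath(Property* property) {
    VariableProxy* proxy = property->obj()->AsVariableProxy();
    return proxy != NULL && property->key()->AsLiteral() != NULL;
  }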
@@ -1402,6 +1481,12 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
}
}
+ // For property compound assignments we need another deoptimization
+ // point after the property load.
+ if (property != NULL) {
+ PrepareForBailoutForId(expr->compound_bailout_id(), TOS_REG);
+ }
+
Token::Value op = expr->binary_op();
ConstantOperand constant = ShouldInlineSmiCase(op)
? GetConstantOperand(op, expr->target(), expr->value())
@@ -1427,6 +1512,9 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
} else {
EmitBinaryOp(op, mode);
}
+
+ // Deoptimization point in case the binary operation may have side effects.
+ PrepareForBailout(expr->binary_operation(), TOS_REG);
} else {
VisitForAccumulatorValue(expr->value());
}
@@ -1483,13 +1571,12 @@ void FullCodeGenerator::EmitConstantSmiAdd(Expression* expr,
__ bind(&call_stub);
__ sub(Operand(eax), Immediate(value));
Token::Value op = Token::ADD;
- GenericBinaryOpStub stub(op, mode, NO_SMI_CODE_IN_STUB, TypeInfo::Unknown());
+ TypeRecordingBinaryOpStub stub(op, mode);
if (left_is_constant_smi) {
- __ push(Immediate(value));
- __ push(eax);
+ __ mov(edx, Immediate(value));
} else {
- __ push(eax);
- __ push(Immediate(value));
+ __ mov(edx, eax);
+ __ mov(eax, Immediate(value));
}
__ CallStub(&stub);
__ bind(&done);
@@ -1514,19 +1601,16 @@ void FullCodeGenerator::EmitConstantSmiSub(Expression* expr,
__ j(zero, &done);
__ bind(&call_stub);
- if (left_is_constant_smi) {
- __ push(Immediate(value));
- __ push(ecx);
+ if (left_is_constant_smi) {
+ __ mov(edx, Immediate(value));
+ __ mov(eax, ecx);
} else {
- // Undo the optimistic sub operation.
- __ add(Operand(eax), Immediate(value));
-
- __ push(eax);
- __ push(Immediate(value));
+ __ add(Operand(eax), Immediate(value)); // Undo the subtraction.
+ __ mov(edx, eax);
+ __ mov(eax, Immediate(value));
}
-
Token::Value op = Token::SUB;
- GenericBinaryOpStub stub(op, mode, NO_SMI_CODE_IN_STUB, TypeInfo::Unknown());
+ TypeRecordingBinaryOpStub stub(op, mode);
__ CallStub(&stub);
__ bind(&done);
context()->Plug(eax);
@@ -1544,9 +1628,9 @@ void FullCodeGenerator::EmitConstantSmiShiftOp(Expression* expr,
__ j(zero, &smi_case);
__ bind(&call_stub);
- GenericBinaryOpStub stub(op, mode, NO_SMI_CODE_IN_STUB, TypeInfo::Unknown());
- __ push(eax);
- __ push(Immediate(value));
+ __ mov(edx, eax);
+ __ mov(eax, Immediate(value));
+ TypeRecordingBinaryOpStub stub(op, mode);
__ CallStub(&stub);
__ jmp(&done);
@@ -1603,11 +1687,10 @@ void FullCodeGenerator::EmitConstantSmiBitOp(Expression* expr,
__ test(eax, Immediate(kSmiTagMask));
__ j(zero, &smi_case);
- GenericBinaryOpStub stub(op, mode, NO_SMI_CODE_IN_STUB, TypeInfo::Unknown());
// The order of the arguments does not matter for bit-ops with a
// constant operand.
- __ push(Immediate(value));
- __ push(eax);
+ __ mov(edx, Immediate(value));
+ TypeRecordingBinaryOpStub stub(op, mode);
__ CallStub(&stub);
__ jmp(&done);
@@ -1686,14 +1769,9 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(Expression* expr,
__ j(zero, &smi_case);
__ bind(&stub_call);
- GenericBinaryOpStub stub(op, mode, NO_SMI_CODE_IN_STUB, TypeInfo::Unknown());
- if (stub.ArgsInRegistersSupported()) {
- stub.GenerateCall(masm_, edx, ecx);
- } else {
- __ push(edx);
- __ push(ecx);
- __ CallStub(&stub);
- }
+ __ mov(eax, ecx);
+ TypeRecordingBinaryOpStub stub(op, mode);
+ __ CallStub(&stub);
__ jmp(&done);
__ bind(&smi_case);
@@ -1772,15 +1850,9 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(Expression* expr,
void FullCodeGenerator::EmitBinaryOp(Token::Value op,
OverwriteMode mode) {
- TypeInfo type = TypeInfo::Unknown();
- GenericBinaryOpStub stub(op, mode, NO_GENERIC_BINARY_FLAGS, type);
- if (stub.ArgsInRegistersSupported()) {
- __ pop(edx);
- stub.GenerateCall(masm_, edx, eax);
- } else {
- __ push(result_register());
- __ CallStub(&stub);
- }
+ __ pop(edx);
+ TypeRecordingBinaryOpStub stub(op, mode);
+ __ CallStub(&stub);
context()->Plug(eax);
}
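Note: every TypeRecordingBinaryOpStub call site in this patch uses the same fixed register convention, replacing the old push/push + GenericBinaryOpStub sequence: left operand in edx, right operand in eax, result returned in eax, with the stub recording the operand types it observes as feedback for Crankshaft. The generic call site reduces to:

  __ pop(edx);                               // left operand, pushed earlier
                                             // right operand already in eax
  TypeRecordingBinaryOpStub stub(op, mode);  // records observed operand types
  __ CallStub(&stub);                        // result comes back in eax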
@@ -2000,13 +2072,14 @@ void FullCodeGenerator::VisitProperty(Property* expr) {
if (key->IsPropertyName()) {
VisitForAccumulatorValue(expr->obj());
EmitNamedPropertyLoad(expr);
+ context()->Plug(eax);
} else {
VisitForStackValue(expr->obj());
VisitForAccumulatorValue(expr->key());
__ pop(edx);
EmitKeyedPropertyLoad(expr);
+ context()->Plug(eax);
}
- context()->Plug(eax);
}
@@ -2028,6 +2101,7 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr,
Handle<Code> ic = ISOLATE->stub_cache()->ComputeCallInitialize(arg_count,
in_loop);
EmitCallIC(ic, mode);
+ RecordJSReturnSite(expr);
// Restore context register.
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
context()->Plug(eax);
@@ -2061,6 +2135,7 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
in_loop);
__ mov(ecx, Operand(esp, (arg_count + 1) * kPointerSize)); // Key.
EmitCallIC(ic, mode);
+ RecordJSReturnSite(expr);
// Restore context register.
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
context()->DropAndPlug(1, eax); // Drop the key still on the stack.
@@ -2081,6 +2156,7 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr) {
InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
__ CallStub(&stub);
+ RecordJSReturnSite(expr);
// Restore context register.
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
context()->DropAndPlug(1, eax);
@@ -2088,6 +2164,12 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr) {
void FullCodeGenerator::VisitCall(Call* expr) {
+#ifdef DEBUG
+ // We want to verify that RecordJSReturnSite gets called on all paths
+ // through this function. Avoid early returns.
+ expr->return_is_recorded_ = false;
+#endif
+
Comment cmnt(masm_, "[ Call");
Expression* fun = expr->expression();
Variable* var = fun->AsVariableProxy()->AsVariable();
@@ -2133,6 +2215,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
__ CallStub(&stub);
+ RecordJSReturnSite(expr);
// Restore context register.
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
context()->DropAndPlug(1, eax);
@@ -2234,6 +2317,11 @@ void FullCodeGenerator::VisitCall(Call* expr) {
// Emit function call.
EmitCallWithStub(expr);
}
+
+#ifdef DEBUG
+ // RecordJSReturnSite should have been called.
+ ASSERT(expr->return_is_recorded_);
+#endif
}
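Note: the DEBUG-only check assumes a matching flag on the Call AST node, set by RecordJSReturnSite; something like this in ast.h (sketch):

  #ifdef DEBUG
    // Set by FullCodeGenerator::RecordJSReturnSite and checked at the end
    // of VisitCall so that every call path records a JS return site.
    bool return_is_recorded_;
  #endif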
@@ -2282,6 +2370,7 @@ void FullCodeGenerator::EmitIsSmi(ZoneList<Expression*>* args) {
context()->PrepareTest(&materialize_true, &materialize_false,
&if_true, &if_false, &fall_through);
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
__ test(eax, Immediate(kSmiTagMask));
Split(zero, if_true, if_false, fall_through);
@@ -2301,6 +2390,7 @@ void FullCodeGenerator::EmitIsNonNegativeSmi(ZoneList<Expression*>* args) {
context()->PrepareTest(&materialize_true, &materialize_false,
&if_true, &if_false, &fall_through);
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
__ test(eax, Immediate(kSmiTagMask | 0x80000000));
Split(zero, if_true, if_false, fall_through);
@@ -2333,6 +2423,7 @@ void FullCodeGenerator::EmitIsObject(ZoneList<Expression*>* args) {
__ cmp(ecx, FIRST_JS_OBJECT_TYPE);
__ j(below, if_false);
__ cmp(ecx, LAST_JS_OBJECT_TYPE);
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Split(below_equal, if_true, if_false, fall_through);
context()->Plug(if_true, if_false);
@@ -2354,6 +2445,7 @@ void FullCodeGenerator::EmitIsSpecObject(ZoneList<Expression*>* args) {
__ test(eax, Immediate(kSmiTagMask));
__ j(equal, if_false);
__ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ebx);
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Split(above_equal, if_true, if_false, fall_through);
context()->Plug(if_true, if_false);
@@ -2377,6 +2469,7 @@ void FullCodeGenerator::EmitIsUndetectableObject(ZoneList<Expression*>* args) {
__ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
__ movzx_b(ebx, FieldOperand(ebx, Map::kBitFieldOffset));
__ test(ebx, Immediate(1 << Map::kIsUndetectable));
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Split(not_zero, if_true, if_false, fall_through);
context()->Plug(if_true, if_false);
@@ -2396,9 +2489,9 @@ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
context()->PrepareTest(&materialize_true, &materialize_false,
&if_true, &if_false, &fall_through);
- // Just indicate false, as %_IsStringWrapperSafeForDefaultValueOf() is only
- // used in a few functions in runtime.js which should not normally be hit by
- // this compiler.
+ // TODO(3110205): Implement this.
+ // Unimplemented for now; emit false, which is a safe choice.
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
__ jmp(if_false);
context()->Plug(if_true, if_false);
}
@@ -2419,6 +2512,7 @@ void FullCodeGenerator::EmitIsFunction(ZoneList<Expression*>* args) {
__ test(eax, Immediate(kSmiTagMask));
__ j(zero, if_false);
__ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Split(equal, if_true, if_false, fall_through);
context()->Plug(if_true, if_false);
@@ -2440,6 +2534,7 @@ void FullCodeGenerator::EmitIsArray(ZoneList<Expression*>* args) {
__ test(eax, Immediate(kSmiTagMask));
__ j(equal, if_false);
__ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Split(equal, if_true, if_false, fall_through);
context()->Plug(if_true, if_false);
@@ -2461,6 +2556,7 @@ void FullCodeGenerator::EmitIsRegExp(ZoneList<Expression*>* args) {
__ test(eax, Immediate(kSmiTagMask));
__ j(equal, if_false);
__ CmpObjectType(eax, JS_REGEXP_TYPE, ebx);
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Split(equal, if_true, if_false, fall_through);
context()->Plug(if_true, if_false);
@@ -2492,6 +2588,7 @@ void FullCodeGenerator::EmitIsConstructCall(ZoneList<Expression*>* args) {
__ bind(&check_frame_marker);
__ cmp(Operand(eax, StandardFrameConstants::kMarkerOffset),
Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Split(equal, if_true, if_false, fall_through);
context()->Plug(if_true, if_false);
@@ -2514,6 +2611,7 @@ void FullCodeGenerator::EmitObjectEquals(ZoneList<Expression*>* args) {
__ pop(ebx);
__ cmp(eax, Operand(ebx));
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Split(equal, if_true, if_false, fall_through);
context()->Plug(if_true, if_false);
@@ -2731,7 +2829,9 @@ void FullCodeGenerator::EmitMathPow(ZoneList<Expression*>* args) {
ASSERT(args->length() == 2);
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
- __ CallRuntime(Runtime::kMath_pow, 2);
+
+ MathPowStub stub;
+ __ CallStub(&stub);
context()->Plug(eax);
}
@@ -2973,11 +3073,13 @@ void FullCodeGenerator::EmitCallFunction(ZoneList<Expression*>* args) {
void FullCodeGenerator::EmitRegExpConstructResult(ZoneList<Expression*>* args) {
+ // Load the arguments on the stack and call the stub.
+ RegExpConstructResultStub stub;
ASSERT(args->length() == 3);
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
VisitForStackValue(args->at(2));
- __ CallRuntime(Runtime::kRegExpConstructResult, 3);
+ __ CallStub(&stub);
context()->Plug(eax);
}
@@ -2987,7 +3089,64 @@ void FullCodeGenerator::EmitSwapElements(ZoneList<Expression*>* args) {
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
VisitForStackValue(args->at(2));
+ Label done;
+ Label slow_case;
+ Register object = eax;
+ Register index_1 = ebx;
+ Register index_2 = ecx;
+ Register elements = edi;
+ Register temp = edx;
+ __ mov(object, Operand(esp, 2 * kPointerSize));
+ // Fetch the map and check if array is in fast case.
+ // Check that object doesn't require security checks and
+ // has no indexed interceptor.
+ __ CmpObjectType(object, FIRST_JS_OBJECT_TYPE, temp);
+ __ j(below, &slow_case);
+ __ test_b(FieldOperand(temp, Map::kBitFieldOffset),
+ KeyedLoadIC::kSlowCaseBitFieldMask);
+ __ j(not_zero, &slow_case);
+
+ // Check the object's elements are in fast case and writable.
+ __ mov(elements, FieldOperand(object, JSObject::kElementsOffset));
+ __ cmp(FieldOperand(elements, HeapObject::kMapOffset),
+ Immediate(FACTORY->fixed_array_map()));
+ __ j(not_equal, &slow_case);
+
+ // Check that both indices are smis.
+ __ mov(index_1, Operand(esp, 1 * kPointerSize));
+ __ mov(index_2, Operand(esp, 0));
+ __ mov(temp, index_1);
+ __ or_(temp, Operand(index_2));
+ __ test(temp, Immediate(kSmiTagMask));
+ __ j(not_zero, &slow_case);
+
+ // Bring the addresses of the two elements into index_1 and index_2.
+ __ lea(index_1, CodeGenerator::FixedArrayElementOperand(elements, index_1));
+ __ lea(index_2, CodeGenerator::FixedArrayElementOperand(elements, index_2));
+
+ // Swap elements. Use object and temp as scratch registers.
+ __ mov(object, Operand(index_1, 0));
+ __ mov(temp, Operand(index_2, 0));
+ __ mov(Operand(index_2, 0), object);
+ __ mov(Operand(index_1, 0), temp);
+
+ Label new_space;
+ __ InNewSpace(elements, temp, equal, &new_space);
+
+ __ mov(object, elements);
+ __ RecordWriteHelper(object, index_1, temp);
+ __ RecordWriteHelper(elements, index_2, temp);
+
+ __ bind(&new_space);
+ // We are done. Drop elements from the stack, and return undefined.
+ __ add(Operand(esp), Immediate(3 * kPointerSize));
+ __ mov(eax, FACTORY->undefined_value());
+ __ jmp(&done);
+
+ __ bind(&slow_case);
__ CallRuntime(Runtime::kSwapElements, 3);
+
+ __ bind(&done);
context()->Plug(eax);
}
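Note: the inlined fast path is long, but what it computes is simple. At the C level it is a guarded element swap plus write-barrier bookkeeping (sketch only; the real code operates on raw FixedArray memory and runs only when both indices are smis and the elements are fast-mode and writable):

  void SwapElementsFast(Object** elements, int index_1, int index_2) {
    Object* tmp = elements[index_1];
    elements[index_1] = elements[index_2];
    elements[index_2] = tmp;
    // If 'elements' lives outside new space, both stores must be followed
    // by RecordWrite-style barrier updates, as in the assembly above.
  }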
@@ -3096,6 +3255,7 @@ void FullCodeGenerator::EmitHasCachedArrayIndex(ZoneList<Expression*>* args) {
__ test(FieldOperand(eax, String::kHashFieldOffset),
Immediate(String::kContainsCachedArrayIndexMask));
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Split(zero, if_true, if_false, fall_through);
context()->Plug(if_true, if_false);
@@ -3401,6 +3561,7 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
// Notice that the labels are swapped.
context()->PrepareTest(&materialize_true, &materialize_false,
&if_false, &if_true, &fall_through);
+ if (context()->IsTest()) ForwardBailoutToChild(expr);
VisitForControl(expr->expression(), if_true, if_false, fall_through);
context()->Plug(if_false, if_true); // Labels swapped.
break;
@@ -3517,14 +3678,24 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
__ push(eax);
EmitNamedPropertyLoad(prop);
} else {
- VisitForStackValue(prop->obj());
- VisitForAccumulatorValue(prop->key());
+ if (prop->is_arguments_access()) {
+ VariableProxy* obj_proxy = prop->obj()->AsVariableProxy();
+ __ push(EmitSlotSearch(obj_proxy->var()->AsSlot(), ecx));
+ __ mov(eax, Immediate(prop->key()->AsLiteral()->handle()));
+ } else {
+ VisitForStackValue(prop->obj());
+ VisitForAccumulatorValue(prop->key());
+ }
__ mov(edx, Operand(esp, 0));
__ push(eax);
EmitKeyedPropertyLoad(prop);
}
}
+ // We need a second deoptimization point after loading the value
+ // in case evaluating the property load may have a side effect.
+ PrepareForBailout(expr->increment(), TOS_REG);
+
// Call ToNumber only if operand is not a smi.
NearLabel no_conversion;
if (ShouldInlineSmiCase(expr->op())) {
@@ -3577,12 +3748,16 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
__ add(Operand(eax), Immediate(Smi::FromInt(1)));
}
}
+
+ // Record position before stub call.
+ SetSourcePosition(expr->position());
+
// Call stub for +1/-1.
- GenericBinaryOpStub stub(expr->binary_op(),
- NO_OVERWRITE,
- NO_GENERIC_BINARY_FLAGS,
- TypeInfo::Unknown());
- stub.GenerateCall(masm(), eax, Smi::FromInt(1));
+ __ mov(edx, eax);
+ __ mov(eax, Immediate(Smi::FromInt(1)));
+ TypeRecordingBinaryOpStub stub(expr->binary_op(),
+ NO_OVERWRITE);
+ __ CallStub(&stub);
__ bind(&done);
// Store the value returned in eax.
@@ -3654,6 +3829,7 @@ void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
// Use a regular load, not a contextual load, to avoid a reference
// error.
EmitCallIC(ic, RelocInfo::CODE_TARGET);
+ PrepareForBailout(expr, TOS_REG);
context()->Plug(eax);
} else if (proxy != NULL &&
proxy->var()->AsSlot() != NULL &&
@@ -3669,12 +3845,13 @@ void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
__ push(esi);
__ push(Immediate(proxy->name()));
__ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
+ PrepareForBailout(expr, TOS_REG);
__ bind(&done);
context()->Plug(eax);
} else {
// This expression cannot throw a reference error at the top level.
- Visit(expr);
+ context()->HandleExpression(expr);
}
}
@@ -3699,6 +3876,7 @@ bool FullCodeGenerator::TryLiteralCompare(Token::Value op,
{ AccumulatorValueContext context(this);
VisitForTypeofValue(left_unary->expression());
}
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
if (check->Equals(HEAP->number_symbol())) {
__ test(eax, Immediate(kSmiTagMask));
@@ -3794,6 +3972,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
case Token::IN:
VisitForStackValue(expr->right());
__ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
+ PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
__ cmp(eax, FACTORY->true_value());
Split(equal, if_true, if_false, fall_through);
break;
@@ -3802,6 +3981,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
VisitForStackValue(expr->right());
InstanceofStub stub;
__ CallStub(&stub);
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
__ test(eax, Operand(eax));
// The stub returns 0 for true.
Split(zero, if_true, if_false, fall_through);
@@ -3858,11 +4038,11 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
__ bind(&slow_case);
}
- CompareFlags flags = inline_smi_code
- ? NO_SMI_COMPARE_IN_STUB
- : NO_COMPARE_FLAGS;
- CompareStub stub(cc, strict, flags);
- __ CallStub(&stub);
+ // Record position and call the compare IC.
+ Handle<Code> ic = CompareIC::GetUninitialized(op);
+ SetSourcePosition(expr->position());
+ __ call(ic, RelocInfo::CODE_TARGET);
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
__ test(eax, Operand(eax));
Split(cc, if_true, if_false, fall_through);
}
@@ -3883,6 +4063,8 @@ void FullCodeGenerator::VisitCompareToNull(CompareToNull* expr) {
&if_true, &if_false, &fall_through);
VisitForAccumulatorValue(expr->expression());
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
+
__ cmp(eax, FACTORY->null_value());
if (expr->is_strict()) {
Split(equal, if_true, if_false, fall_through);
@@ -3921,8 +4103,31 @@ Register FullCodeGenerator::context_register() {
void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) {
ASSERT(mode == RelocInfo::CODE_TARGET ||
mode == RelocInfo::CODE_TARGET_CONTEXT);
+ switch (ic->kind()) {
+ case Code::LOAD_IC:
+ __ IncrementCounter(COUNTERS->named_load_full(), 1);
+ break;
+ case Code::KEYED_LOAD_IC:
+ __ IncrementCounter(COUNTERS->keyed_load_full(), 1);
+ break;
+ case Code::STORE_IC:
+ __ IncrementCounter(COUNTERS->named_store_full(), 1);
+ break;
+ case Code::KEYED_STORE_IC:
+ __ IncrementCounter(COUNTERS->keyed_store_full(), 1);
+ break;
+ default:
+ break;
+ }
+
__ call(ic, mode);
+ // Crankshaft doesn't need patching of inlined loads and stores.
+ // When compiling the snapshot we need to produce code that works
+ // with and without Crankshaft.
+ if (V8::UseCrankshaft() && !Serializer::enabled()) {
+ return;
+ }
+
// If we're calling a (keyed) load or store stub, we have to mark
// the call as containing no inlined code so we will not attempt to
// patch it.
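Note: the counter bumps added at the top of EmitCallIC are cheap; MacroAssembler::IncrementCounter only emits code when native code counters are enabled and the counter has a backing cell. Roughly (sketch of the ia32 macro assembler; details may differ):

  void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
    ASSERT(value > 0);
    if (FLAG_native_code_counters && counter->Enabled()) {
      Operand operand = Operand::StaticVariable(ExternalReference(counter));
      if (value == 1) {
        inc(operand);                  // single in-memory increment
      } else {
        add(operand, Immediate(value));
      }
    }
  }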