Index: src/arm/full-codegen-arm.cc
diff --git a/src/arm/full-codegen-arm.cc b/src/arm/full-codegen-arm.cc
index 2eb5ccf974be3810a51f89c244bd661ff3e83ada..813e9492df8be45d49ff95a9d6c77bbac3903338 100644
--- a/src/arm/full-codegen-arm.cc
+++ b/src/arm/full-codegen-arm.cc
@@ -130,9 +130,6 @@ void FullCodeGenerator::Generate() {
   CompilationInfo* info = info_;
   handler_table_ =
       isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
-
-  InitializeFeedbackVector();
-
   profiling_counter_ = isolate()->factory()->NewCell(
       Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
   SetFunctionPosition(function());
@@ -671,7 +668,7 @@ void FullCodeGenerator::DoTest(Expression* condition,
                                Label* if_false,
                                Label* fall_through) {
   Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
-  CallIC(ic, condition->test_id());
+  CallIC(ic, NOT_CONTEXTUAL, condition->test_id());
   __ tst(result_register(), result_register());
   Split(ne, if_true, if_false, fall_through);
 }
@@ -1032,7 +1029,7 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
     // Record position before stub call for type feedback.
     SetSourcePosition(clause->position());
     Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT);
-    CallIC(ic, clause->CompareId());
+    CallIC(ic, NOT_CONTEXTUAL, clause->CompareId());
     patch_site.EmitPatchInfo();

     Label skip;
@@ -1077,7 +1074,6 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {

 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
   Comment cmnt(masm_, "[ ForInStatement");
-  int slot = stmt->ForInFeedbackSlot();
   SetStatementPosition(stmt);

   Label loop, exit;
@@ -1167,13 +1163,13 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
   Label non_proxy;
   __ bind(&fixed_array);

-  Handle<Object> feedback = Handle<Object>(
-      Smi::FromInt(TypeFeedbackInfo::kForInFastCaseMarker),
-      isolate());
-  StoreFeedbackVectorSlot(slot, feedback);
-  __ Move(r1, FeedbackVector());
-  __ mov(r2, Operand(Smi::FromInt(TypeFeedbackInfo::kForInSlowCaseMarker)));
-  __ str(r2, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(slot)));
+  Handle<Cell> cell = isolate()->factory()->NewCell(
+      Handle<Object>(Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker),
+                     isolate()));
+  RecordTypeFeedbackCell(stmt->ForInFeedbackId(), cell);
+  __ Move(r1, cell);
+  __ mov(r2, Operand(Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker)));
+  __ str(r2, FieldMemOperand(r1, Cell::kValueOffset));

   __ mov(r1, Operand(Smi::FromInt(1))); // Smi indicates slow check
   __ ldr(r2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object
@@ -1482,7 +1478,7 @@ void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
   // variables.
   switch (var->location()) {
     case Variable::UNALLOCATED: {
-      Comment cmnt(masm_, "[ Global variable");
+      Comment cmnt(masm_, "Global variable");
       // Use inline caching. Variable name is passed in r2 and the global
       // object (receiver) in r0.
       __ ldr(r0, GlobalObjectOperand());
@@ -1495,8 +1491,9 @@ void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
     case Variable::PARAMETER:
     case Variable::LOCAL:
     case Variable::CONTEXT: {
-      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
-                                               : "[ Stack variable");
+      Comment cmnt(masm_, var->IsContextSlot()
+                              ? "Context variable"
+                              : "Stack variable");
       if (var->binding_needs_init()) {
         // var->scope() may be NULL when the proxy is located in eval code and
         // refers to a potential outside binding. Currently those bindings are
@@ -1559,12 +1556,12 @@ void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
     }

     case Variable::LOOKUP: {
-      Comment cmnt(masm_, "[ Lookup variable");
       Label done, slow;
       // Generate code for loading from variables potentially shadowed
       // by eval-introduced variables.
       EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
       __ bind(&slow);
+      Comment cmnt(masm_, "Lookup variable");
       __ mov(r1, Operand(var->name()));
       __ Push(cp, r1); // Context and name.
       __ CallRuntime(Runtime::kLoadContextSlot, 2);
@@ -1695,7 +1692,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
             VisitForAccumulatorValue(value);
             __ mov(r2, Operand(key->value()));
             __ ldr(r1, MemOperand(sp));
-            CallStoreIC(key->LiteralFeedbackId());
+            CallStoreIC(NOT_CONTEXTUAL, key->LiteralFeedbackId());
             PrepareForBailoutForId(key->id(), NO_REGISTERS);
           } else {
             VisitForEffect(value);
@@ -2097,7 +2094,7 @@ void FullCodeGenerator::VisitYield(Yield* expr) {
       __ ldr(r1, MemOperand(sp, kPointerSize));
       __ ldr(r0, MemOperand(sp, 2 * kPointerSize));
       Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
-      CallIC(ic, TypeFeedbackId::None());
+      CallIC(ic, NOT_CONTEXTUAL, TypeFeedbackId::None());
       __ mov(r1, r0);
       __ str(r1, MemOperand(sp, 2 * kPointerSize));
       CallFunctionStub stub(1, CALL_AS_METHOD);
@@ -2294,7 +2291,7 @@ void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
   SetSourcePosition(prop->position());
   // Call keyed load IC. It has arguments key and receiver in r0 and r1.
   Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
-  CallIC(ic, prop->PropertyFeedbackId());
+  CallIC(ic, NOT_CONTEXTUAL, prop->PropertyFeedbackId());
 }


@@ -2321,7 +2318,8 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,

   __ bind(&stub_call);
   BinaryOpICStub stub(op, mode);
-  CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
+  CallIC(stub.GetCode(isolate()), NOT_CONTEXTUAL,
+         expr->BinaryOperationFeedbackId());
   patch_site.EmitPatchInfo();
   __ jmp(&done);

@@ -2398,7 +2396,8 @@ void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
   __ pop(r1);
   BinaryOpICStub stub(op, mode);
   JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
-  CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
+  CallIC(stub.GetCode(isolate()), NOT_CONTEXTUAL,
+         expr->BinaryOperationFeedbackId());
   patch_site.EmitPatchInfo();
   context()->Plug(r0);
 }
@@ -2436,7 +2435,7 @@ void FullCodeGenerator::EmitAssignment(Expression* expr) {
       __ mov(r1, r0);
       __ pop(r0); // Restore value.
       __ mov(r2, Operand(prop->key()->AsLiteral()->value()));
-      CallStoreIC();
+      CallStoreIC(NOT_CONTEXTUAL);
       break;
     }
     case KEYED_PROPERTY: {
@@ -2456,60 +2455,41 @@ void FullCodeGenerator::EmitAssignment(Expression* expr) {
 }


-void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
-    Variable* var, MemOperand location) {
-  __ str(result_register(), location);
-  if (var->IsContextSlot()) {
-    // RecordWrite may destroy all its register arguments.
-    __ mov(r3, result_register());
-    int offset = Context::SlotOffset(var->index());
-    __ RecordWriteContextSlot(
-        r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
-  }
-}
-
-
-void FullCodeGenerator::EmitCallStoreContextSlot(
-    Handle<String> name, LanguageMode mode) {
-  __ push(r0); // Value.
-  __ mov(r1, Operand(name));
-  __ mov(r0, Operand(Smi::FromInt(mode)));
-  __ Push(cp, r1, r0); // Context, name, strict mode.
-  __ CallRuntime(Runtime::kStoreContextSlot, 4);
-}
-
-
 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
                                                Token::Value op) {
   if (var->IsUnallocated()) {
     // Global var, const, or let.
     __ mov(r2, Operand(var->name()));
     __ ldr(r1, GlobalObjectOperand());
-    CallStoreIC();
-
+    CallStoreIC(CONTEXTUAL);
   } else if (op == Token::INIT_CONST) {
     // Const initializers need a write barrier.
     ASSERT(!var->IsParameter()); // No const parameters.
-    if (var->IsLookupSlot()) {
+    if (var->IsStackLocal()) {
+      __ ldr(r1, StackOperand(var));
+      __ CompareRoot(r1, Heap::kTheHoleValueRootIndex);
+      __ str(result_register(), StackOperand(var), eq);
+    } else {
+      ASSERT(var->IsContextSlot() || var->IsLookupSlot());
+      // Like var declarations, const declarations are hoisted to function
+      // scope. However, unlike var initializers, const initializers are
+      // able to drill a hole to that function context, even from inside a
+      // 'with' context. We thus bypass the normal static scope lookup for
+      // var->IsContextSlot().
       __ push(r0);
       __ mov(r0, Operand(var->name()));
       __ Push(cp, r0); // Context and name.
       __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
-    } else {
-      ASSERT(var->IsStackAllocated() || var->IsContextSlot());
-      Label skip;
-      MemOperand location = VarOperand(var, r1);
-      __ ldr(r2, location);
-      __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
-      __ b(ne, &skip);
-      EmitStoreToStackLocalOrContextSlot(var, location);
-      __ bind(&skip);
     }

   } else if (var->mode() == LET && op != Token::INIT_LET) {
     // Non-initializing assignment to let variable needs a write barrier.
     if (var->IsLookupSlot()) {
-      EmitCallStoreContextSlot(var->name(), language_mode());
+      __ push(r0); // Value.
+      __ mov(r1, Operand(var->name()));
+      __ mov(r0, Operand(Smi::FromInt(language_mode())));
+      __ Push(cp, r1, r0); // Context, name, strict mode.
+      __ CallRuntime(Runtime::kStoreContextSlot, 4);
     } else {
       ASSERT(var->IsStackAllocated() || var->IsContextSlot());
       Label assign;
@@ -2522,16 +2502,20 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
       __ CallRuntime(Runtime::kThrowReferenceError, 1);
       // Perform the assignment.
       __ bind(&assign);
-      EmitStoreToStackLocalOrContextSlot(var, location);
+      __ str(result_register(), location);
+      if (var->IsContextSlot()) {
+        // RecordWrite may destroy all its register arguments.
+        __ mov(r3, result_register());
+        int offset = Context::SlotOffset(var->index());
+        __ RecordWriteContextSlot(
+            r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
+      }
     }

   } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
     // Assignment to var or initializing assignment to let/const
     // in harmony mode.
-    if (var->IsLookupSlot()) {
-      EmitCallStoreContextSlot(var->name(), language_mode());
-    } else {
-      ASSERT((var->IsStackAllocated() || var->IsContextSlot()));
+    if (var->IsStackAllocated() || var->IsContextSlot()) {
       MemOperand location = VarOperand(var, r1);
       if (generate_debug_code_ && op == Token::INIT_LET) {
         // Check for an uninitialized let binding.
@@ -2539,7 +2523,21 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
         __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
         __ Check(eq, kLetBindingReInitialization);
       }
-      EmitStoreToStackLocalOrContextSlot(var, location);
+      // Perform the assignment.
+      __ str(r0, location);
+      if (var->IsContextSlot()) {
+        __ mov(r3, r0);
+        int offset = Context::SlotOffset(var->index());
+        __ RecordWriteContextSlot(
+            r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
+      }
+    } else {
+      ASSERT(var->IsLookupSlot());
+      __ push(r0); // Value.
+      __ mov(r1, Operand(var->name()));
+      __ mov(r0, Operand(Smi::FromInt(language_mode())));
+      __ Push(cp, r1, r0); // Context, name, strict mode.
+      __ CallRuntime(Runtime::kStoreContextSlot, 4);
     }
   }
   // Non-initializing assignments to consts are ignored.
@@ -2557,7 +2555,7 @@ void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
   __ mov(r2, Operand(prop->key()->AsLiteral()->value()));
   __ pop(r1);

-  CallStoreIC(expr->AssignmentFeedbackId());
+  CallStoreIC(NOT_CONTEXTUAL, expr->AssignmentFeedbackId());

   PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
   context()->Plug(r0);
@@ -2574,7 +2572,7 @@ void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
   Handle<Code> ic = is_classic_mode()
       ? isolate()->builtins()->KeyedStoreIC_Initialize()
       : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
-  CallIC(ic, expr->AssignmentFeedbackId());
+  CallIC(ic, NOT_CONTEXTUAL, expr->AssignmentFeedbackId());

   PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
   context()->Plug(r0);
@@ -2601,10 +2599,12 @@ void FullCodeGenerator::VisitProperty(Property* expr) {


 void FullCodeGenerator::CallIC(Handle<Code> code,
+                               ContextualMode mode,
                                TypeFeedbackId ast_id) {
   ic_total_count_++;
   // All calls must have a predictable size in full-codegen code to ensure that
   // the debugger can patch them correctly.
+  ASSERT(mode != CONTEXTUAL || ast_id.IsNone());
   __ Call(code, RelocInfo::CODE_TARGET, ast_id, al,
           NEVER_INLINE_TARGET_ADDRESS);
 }
@@ -2716,15 +2716,15 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr) {
   SetSourcePosition(expr->position());

   Handle<Object> uninitialized =
-      TypeFeedbackInfo::UninitializedSentinel(isolate());
-  StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized);
-  __ Move(r2, FeedbackVector());
-  __ mov(r3, Operand(Smi::FromInt(expr->CallFeedbackSlot())));
+      TypeFeedbackCells::UninitializedSentinel(isolate());
+  Handle<Cell> cell = isolate()->factory()->NewCell(uninitialized);
+  RecordTypeFeedbackCell(expr->CallFeedbackId(), cell);
+  __ mov(r2, Operand(cell));

   // Record call targets in unoptimized code.
   CallFunctionStub stub(arg_count, RECORD_CALL_TARGET);
   __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
-  __ CallStub(&stub);
+  __ CallStub(&stub, expr->CallFeedbackId());
   RecordJSReturnSite(expr);
   // Restore context register.
   __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -2905,10 +2905,10 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {

   // Record call targets in unoptimized code.
   Handle<Object> uninitialized =
-      TypeFeedbackInfo::UninitializedSentinel(isolate());
-  StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized);
-  __ Move(r2, FeedbackVector());
-  __ mov(r3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot())));
+      TypeFeedbackCells::UninitializedSentinel(isolate());
+  Handle<Cell> cell = isolate()->factory()->NewCell(uninitialized);
+  RecordTypeFeedbackCell(expr->CallNewFeedbackId(), cell);
+  __ mov(r2, Operand(cell));

   CallConstructStub stub(RECORD_CALL_TARGET);
   __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
@@ -4411,7 +4411,9 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
   SetSourcePosition(expr->position());

   BinaryOpICStub stub(Token::ADD, NO_OVERWRITE);
-  CallIC(stub.GetCode(isolate()), expr->CountBinOpFeedbackId());
+  CallIC(stub.GetCode(isolate()),
+         NOT_CONTEXTUAL,
+         expr->CountBinOpFeedbackId());
   patch_site.EmitPatchInfo();
   __ bind(&done);

@@ -4440,7 +4442,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
     case NAMED_PROPERTY: {
       __ mov(r2, Operand(prop->key()->AsLiteral()->value()));
       __ pop(r1);
-      CallStoreIC(expr->CountStoreFeedbackId());
+      CallStoreIC(NOT_CONTEXTUAL, expr->CountStoreFeedbackId());
       PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
       if (expr->is_postfix()) {
         if (!context()->IsEffect()) {
@@ -4456,7 +4458,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
       Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
-      CallIC(ic, expr->CountStoreFeedbackId());
+      CallIC(ic, NOT_CONTEXTUAL, expr->CountStoreFeedbackId());
       PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
       if (expr->is_postfix()) {
         if (!context()->IsEffect()) {
@@ -4476,7 +4478,7 @@ void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
   ASSERT(!context()->IsTest());
   VariableProxy* proxy = expr->AsVariableProxy();
   if (proxy != NULL && proxy->var()->IsUnallocated()) {
-    Comment cmnt(masm_, "[ Global variable");
+    Comment cmnt(masm_, "Global variable");
     __ ldr(r0, GlobalObjectOperand());
     __ mov(r2, Operand(proxy->name()));
     // Use a regular load, not a contextual load, to avoid a reference
@@ -4485,7 +4487,6 @@ void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
     PrepareForBailout(expr, TOS_REG);
     context()->Plug(r0);
   } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
-    Comment cmnt(masm_, "[ Lookup slot");
     Label done, slow;

     // Generate code for loading from variables potentially shadowed
@@ -4647,7 +4648,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
       // Record position and call the compare IC.
       SetSourcePosition(expr->position());
       Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
-      CallIC(ic, expr->CompareOperationFeedbackId());
+      CallIC(ic, NOT_CONTEXTUAL, expr->CompareOperationFeedbackId());
       patch_site.EmitPatchInfo();
       PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
       __ cmp(r0, Operand::Zero());
@@ -4682,7 +4683,7 @@ void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
     Split(eq, if_true, if_false, fall_through);
   } else {
     Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
-    CallIC(ic, expr->CompareOperationFeedbackId());
+    CallIC(ic, NOT_CONTEXTUAL, expr->CompareOperationFeedbackId());
     __ cmp(r0, Operand(0));
     Split(ne, if_true, if_false, fall_through);
   }