Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(724)

Side by Side Diff: src/a64/full-codegen-a64.cc

Issue 159933002: A64: Synchronize with r19289. (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/a64
Patch Set: Created 6 years, 10 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/a64/debug-a64.cc ('k') | src/a64/macro-assembler-a64.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2013 the V8 project authors. All rights reserved. 1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 110 matching lines...) Expand 10 before | Expand all | Expand 10 after
121 // - fp: our caller's frame pointer. 121 // - fp: our caller's frame pointer.
122 // - jssp: stack pointer. 122 // - jssp: stack pointer.
123 // - lr: return address. 123 // - lr: return address.
124 // 124 //
125 // The function builds a JS frame. See JavaScriptFrameConstants in 125 // The function builds a JS frame. See JavaScriptFrameConstants in
126 // frames-arm.h for its layout. 126 // frames-arm.h for its layout.
127 void FullCodeGenerator::Generate() { 127 void FullCodeGenerator::Generate() {
128 CompilationInfo* info = info_; 128 CompilationInfo* info = info_;
129 handler_table_ = 129 handler_table_ =
130 isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED); 130 isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
131
132 InitializeFeedbackVector();
133
131 profiling_counter_ = isolate()->factory()->NewCell( 134 profiling_counter_ = isolate()->factory()->NewCell(
132 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate())); 135 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
133 SetFunctionPosition(function()); 136 SetFunctionPosition(function());
134 Comment cmnt(masm_, "[ Function compiled by full code generator"); 137 Comment cmnt(masm_, "[ Function compiled by full code generator");
135 138
136 ProfileEntryHookStub::MaybeCallEntryHook(masm_); 139 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
137 140
138 #ifdef DEBUG 141 #ifdef DEBUG
139 if (strlen(FLAG_stop_at) > 0 && 142 if (strlen(FLAG_stop_at) > 0 &&
140 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) { 143 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
(...skipping 516 matching lines...) Expand 10 before | Expand all | Expand 10 after
657 } 660 }
658 } 661 }
659 } 662 }
660 663
661 664
662 void FullCodeGenerator::DoTest(Expression* condition, 665 void FullCodeGenerator::DoTest(Expression* condition,
663 Label* if_true, 666 Label* if_true,
664 Label* if_false, 667 Label* if_false,
665 Label* fall_through) { 668 Label* fall_through) {
666 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate()); 669 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
667 CallIC(ic, NOT_CONTEXTUAL, condition->test_id()); 670 CallIC(ic, condition->test_id());
668 __ CompareAndSplit(result_register(), 0, ne, if_true, if_false, fall_through); 671 __ CompareAndSplit(result_register(), 0, ne, if_true, if_false, fall_through);
669 } 672 }
670 673
671 674
672 // If (cond), branch to if_true. 675 // If (cond), branch to if_true.
673 // If (!cond), branch to if_false. 676 // If (!cond), branch to if_false.
674 // fall_through is used as an optimization in cases where only one branch 677 // fall_through is used as an optimization in cases where only one branch
675 // instruction is necessary. 678 // instruction is necessary.
676 void FullCodeGenerator::Split(Condition cond, 679 void FullCodeGenerator::Split(Condition cond,
677 Label* if_true, 680 Label* if_true,
(...skipping 347 matching lines...) Expand 10 before | Expand all | Expand 10 after
1025 __ Cmp(x1, x0); 1028 __ Cmp(x1, x0);
1026 __ B(ne, &next_test); 1029 __ B(ne, &next_test);
1027 __ Drop(1); // Switch value is no longer needed. 1030 __ Drop(1); // Switch value is no longer needed.
1028 __ B(clause->body_target()); 1031 __ B(clause->body_target());
1029 __ Bind(&slow_case); 1032 __ Bind(&slow_case);
1030 } 1033 }
1031 1034
1032 // Record position before stub call for type feedback. 1035 // Record position before stub call for type feedback.
1033 SetSourcePosition(clause->position()); 1036 SetSourcePosition(clause->position());
1034 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT); 1037 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT);
1035 CallIC(ic, NOT_CONTEXTUAL, clause->CompareId()); 1038 CallIC(ic, clause->CompareId());
1036 patch_site.EmitPatchInfo(); 1039 patch_site.EmitPatchInfo();
1037 1040
1038 Label skip; 1041 Label skip;
1039 __ B(&skip); 1042 __ B(&skip);
1040 PrepareForBailout(clause, TOS_REG); 1043 PrepareForBailout(clause, TOS_REG);
1041 __ JumpIfNotRoot(x0, Heap::kTrueValueRootIndex, &next_test); 1044 __ JumpIfNotRoot(x0, Heap::kTrueValueRootIndex, &next_test);
1042 __ Drop(1); 1045 __ Drop(1);
1043 __ B(clause->body_target()); 1046 __ B(clause->body_target());
1044 __ Bind(&skip); 1047 __ Bind(&skip);
1045 1048
(...skipping 22 matching lines...) Expand all
1068 } 1071 }
1069 1072
1070 __ Bind(nested_statement.break_label()); 1073 __ Bind(nested_statement.break_label());
1071 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS); 1074 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1072 } 1075 }
1073 1076
1074 1077
1075 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { 1078 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1076 ASM_LOCATION("FullCodeGenerator::VisitForInStatement"); 1079 ASM_LOCATION("FullCodeGenerator::VisitForInStatement");
1077 Comment cmnt(masm_, "[ ForInStatement"); 1080 Comment cmnt(masm_, "[ ForInStatement");
1081 int slot = stmt->ForInFeedbackSlot();
1078 // TODO(all): This visitor probably needs better comments and a revisit. 1082 // TODO(all): This visitor probably needs better comments and a revisit.
1079 SetStatementPosition(stmt); 1083 SetStatementPosition(stmt);
1080 1084
1081 Label loop, exit; 1085 Label loop, exit;
1082 ForIn loop_statement(this, stmt); 1086 ForIn loop_statement(this, stmt);
1083 increment_loop_depth(); 1087 increment_loop_depth();
1084 1088
1085 // Get the object to enumerate over. If the object is null or undefined, skip 1089 // Get the object to enumerate over. If the object is null or undefined, skip
1086 // over the loop. See ECMA-262 version 5, section 12.6.4. 1090 // over the loop. See ECMA-262 version 5, section 12.6.4.
1087 VisitForAccumulatorValue(stmt->enumerable()); 1091 VisitForAccumulatorValue(stmt->enumerable());
(...skipping 63 matching lines...) Expand 10 before | Expand all | Expand 10 after
1151 __ Push(x2, x1, x0); 1155 __ Push(x2, x1, x0);
1152 __ B(&loop); 1156 __ B(&loop);
1153 1157
1154 __ Bind(&no_descriptors); 1158 __ Bind(&no_descriptors);
1155 __ Drop(1); 1159 __ Drop(1);
1156 __ B(&exit); 1160 __ B(&exit);
1157 1161
1158 // We got a fixed array in register x0. Iterate through that. 1162 // We got a fixed array in register x0. Iterate through that.
1159 __ Bind(&fixed_array); 1163 __ Bind(&fixed_array);
1160 1164
1161 Handle<Cell> cell = isolate()->factory()->NewCell( 1165 Handle<Object> feedback = Handle<Object>(
1162 Handle<Object>(Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker), 1166 Smi::FromInt(TypeFeedbackInfo::kForInFastCaseMarker),
1163 isolate())); 1167 isolate());
1164 RecordTypeFeedbackCell(stmt->ForInFeedbackId(), cell); 1168 StoreFeedbackVectorSlot(slot, feedback);
1165 __ LoadObject(x1, cell); 1169 __ LoadObject(x1, FeedbackVector());
1166 __ Mov(x10, Operand(Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker))); 1170 __ Mov(x10, Operand(Smi::FromInt(TypeFeedbackInfo::kForInSlowCaseMarker)));
1167 __ Str(x10, FieldMemOperand(x1, Cell::kValueOffset)); 1171 __ Str(x10, FieldMemOperand(x1, FixedArray::OffsetOfElementAt(slot)));
1168 1172
1169 __ Mov(x1, Operand(Smi::FromInt(1))); // Smi indicates slow check. 1173 __ Mov(x1, Operand(Smi::FromInt(1))); // Smi indicates slow check.
1170 __ Peek(x10, 0); // Get enumerated object. 1174 __ Peek(x10, 0); // Get enumerated object.
1171 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); 1175 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1172 // TODO(all): similar check was done already. Can we avoid it here? 1176 // TODO(all): similar check was done already. Can we avoid it here?
1173 __ CompareObjectType(x10, x11, x12, LAST_JS_PROXY_TYPE); 1177 __ CompareObjectType(x10, x11, x12, LAST_JS_PROXY_TYPE);
1174 ASSERT(Smi::FromInt(0) == 0); 1178 ASSERT(Smi::FromInt(0) == 0);
1175 __ CzeroX(x1, le); // Zero indicates proxy. 1179 __ CzeroX(x1, le); // Zero indicates proxy.
1176 __ Push(x1, x0); // Smi and array 1180 __ Push(x1, x0); // Smi and array
1177 __ Ldr(x1, FieldMemOperand(x0, FixedArray::kLengthOffset)); 1181 __ Ldr(x1, FieldMemOperand(x0, FixedArray::kLengthOffset));
(...skipping 494 matching lines...) Expand 10 before | Expand all | Expand 10 after
1672 UNREACHABLE(); 1676 UNREACHABLE();
1673 case ObjectLiteral::Property::MATERIALIZED_LITERAL: 1677 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1674 ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value())); 1678 ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value()));
1675 // Fall through. 1679 // Fall through.
1676 case ObjectLiteral::Property::COMPUTED: 1680 case ObjectLiteral::Property::COMPUTED:
1677 if (key->value()->IsInternalizedString()) { 1681 if (key->value()->IsInternalizedString()) {
1678 if (property->emit_store()) { 1682 if (property->emit_store()) {
1679 VisitForAccumulatorValue(value); 1683 VisitForAccumulatorValue(value);
1680 __ Mov(x2, Operand(key->value())); 1684 __ Mov(x2, Operand(key->value()));
1681 __ Peek(x1, 0); 1685 __ Peek(x1, 0);
1682 CallStoreIC(NOT_CONTEXTUAL, key->LiteralFeedbackId()); 1686 CallStoreIC(key->LiteralFeedbackId());
1683 PrepareForBailoutForId(key->id(), NO_REGISTERS); 1687 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1684 } else { 1688 } else {
1685 VisitForEffect(value); 1689 VisitForEffect(value);
1686 } 1690 }
1687 break; 1691 break;
1688 } 1692 }
1689 // Duplicate receiver on stack. 1693 // Duplicate receiver on stack.
1690 __ Peek(x0, 0); 1694 __ Peek(x0, 0);
1691 __ Push(x0); 1695 __ Push(x0);
1692 VisitForStackValue(key); 1696 VisitForStackValue(key);
(...skipping 278 matching lines...) Expand 10 before | Expand all | Expand 10 after
1971 __ Mov(x2, Operand(key->value())); 1975 __ Mov(x2, Operand(key->value()));
1972 // Call load IC. It has arguments receiver and property name x0 and x2. 1976 // Call load IC. It has arguments receiver and property name x0 and x2.
1973 CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId()); 1977 CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
1974 } 1978 }
1975 1979
1976 1980
1977 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) { 1981 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
1978 SetSourcePosition(prop->position()); 1982 SetSourcePosition(prop->position());
1979 // Call keyed load IC. It has arguments key and receiver in r0 and r1. 1983 // Call keyed load IC. It has arguments key and receiver in r0 and r1.
1980 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize(); 1984 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
1981 CallIC(ic, NOT_CONTEXTUAL, prop->PropertyFeedbackId()); 1985 CallIC(ic, prop->PropertyFeedbackId());
1982 } 1986 }
1983 1987
1984 1988
1985 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr, 1989 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
1986 Token::Value op, 1990 Token::Value op,
1987 OverwriteMode mode, 1991 OverwriteMode mode,
1988 Expression* left_expr, 1992 Expression* left_expr,
1989 Expression* right_expr) { 1993 Expression* right_expr) {
1990 Label done, both_smis, stub_call; 1994 Label done, both_smis, stub_call;
1991 1995
1992 // Get the arguments. 1996 // Get the arguments.
1993 Register left = x1; 1997 Register left = x1;
1994 Register right = x0; 1998 Register right = x0;
1995 Register result = x0; 1999 Register result = x0;
1996 __ Pop(left); 2000 __ Pop(left);
1997 2001
1998 // Perform combined smi check on both operands. 2002 // Perform combined smi check on both operands.
1999 __ Orr(x10, left, right); 2003 __ Orr(x10, left, right);
2000 JumpPatchSite patch_site(masm_); 2004 JumpPatchSite patch_site(masm_);
2001 patch_site.EmitJumpIfSmi(x10, &both_smis); 2005 patch_site.EmitJumpIfSmi(x10, &both_smis);
2002 2006
2003 __ Bind(&stub_call); 2007 __ Bind(&stub_call);
2004 BinaryOpICStub stub(op, mode); 2008 BinaryOpICStub stub(op, mode);
2005 { 2009 {
2006 Assembler::BlockConstPoolScope scope(masm_); 2010 Assembler::BlockConstPoolScope scope(masm_);
2007 CallIC(stub.GetCode(isolate()), NOT_CONTEXTUAL, 2011 CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
2008 expr->BinaryOperationFeedbackId());
2009 patch_site.EmitPatchInfo(); 2012 patch_site.EmitPatchInfo();
2010 } 2013 }
2011 __ B(&done); 2014 __ B(&done);
2012 2015
2013 __ Bind(&both_smis); 2016 __ Bind(&both_smis);
2014 // Smi case. This code works in the same way as the smi-smi case in the type 2017 // Smi case. This code works in the same way as the smi-smi case in the type
2015 // recording binary operation stub, see 2018 // recording binary operation stub, see
2016 // BinaryOpStub::GenerateSmiSmiOperation for comments. 2019 // BinaryOpStub::GenerateSmiSmiOperation for comments.
2017 // TODO(all): That doesn't exist any more. Where are the comments? 2020 // TODO(all): That doesn't exist any more. Where are the comments?
2018 // 2021 //
(...skipping 65 matching lines...) Expand 10 before | Expand all | Expand 10 after
2084 2087
2085 2088
2086 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, 2089 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
2087 Token::Value op, 2090 Token::Value op,
2088 OverwriteMode mode) { 2091 OverwriteMode mode) {
2089 __ Pop(x1); 2092 __ Pop(x1);
2090 BinaryOpICStub stub(op, mode); 2093 BinaryOpICStub stub(op, mode);
2091 JumpPatchSite patch_site(masm_); // Unbound, signals no inlined smi code. 2094 JumpPatchSite patch_site(masm_); // Unbound, signals no inlined smi code.
2092 { 2095 {
2093 Assembler::BlockConstPoolScope scope(masm_); 2096 Assembler::BlockConstPoolScope scope(masm_);
2094 CallIC(stub.GetCode(isolate()), NOT_CONTEXTUAL, 2097 CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
2095 expr->BinaryOperationFeedbackId());
2096 patch_site.EmitPatchInfo(); 2098 patch_site.EmitPatchInfo();
2097 } 2099 }
2098 context()->Plug(x0); 2100 context()->Plug(x0);
2099 } 2101 }
2100 2102
2101 2103
2102 void FullCodeGenerator::EmitAssignment(Expression* expr) { 2104 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2103 // Invalid left-hand sides are rewritten to have a 'throw 2105 // Invalid left-hand sides are rewritten to have a 'throw
2104 // ReferenceError' on the left-hand side. 2106 // ReferenceError' on the left-hand side.
2105 if (!expr->IsValidLeftHandSide()) { 2107 if (!expr->IsValidLeftHandSide()) {
(...skipping 20 matching lines...) Expand all
2126 break; 2128 break;
2127 } 2129 }
2128 case NAMED_PROPERTY: { 2130 case NAMED_PROPERTY: {
2129 __ Push(x0); // Preserve value. 2131 __ Push(x0); // Preserve value.
2130 VisitForAccumulatorValue(prop->obj()); 2132 VisitForAccumulatorValue(prop->obj());
2131 // TODO(all): We could introduce a VisitForRegValue(reg, expr) to avoid 2133 // TODO(all): We could introduce a VisitForRegValue(reg, expr) to avoid
2132 // this copy. 2134 // this copy.
2133 __ Mov(x1, x0); 2135 __ Mov(x1, x0);
2134 __ Pop(x0); // Restore value. 2136 __ Pop(x0); // Restore value.
2135 __ Mov(x2, Operand(prop->key()->AsLiteral()->value())); 2137 __ Mov(x2, Operand(prop->key()->AsLiteral()->value()));
2136 CallStoreIC(NOT_CONTEXTUAL); 2138 CallStoreIC();
2137 break; 2139 break;
2138 } 2140 }
2139 case KEYED_PROPERTY: { 2141 case KEYED_PROPERTY: {
2140 __ Push(x0); // Preserve value. 2142 __ Push(x0); // Preserve value.
2141 VisitForStackValue(prop->obj()); 2143 VisitForStackValue(prop->obj());
2142 VisitForAccumulatorValue(prop->key()); 2144 VisitForAccumulatorValue(prop->key());
2143 __ Mov(x1, x0); 2145 __ Mov(x1, x0);
2144 __ Pop(x2, x0); 2146 __ Pop(x2, x0);
2145 Handle<Code> ic = is_classic_mode() 2147 Handle<Code> ic = is_classic_mode()
2146 ? isolate()->builtins()->KeyedStoreIC_Initialize() 2148 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2147 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); 2149 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2148 CallIC(ic); 2150 CallIC(ic);
2149 break; 2151 break;
2150 } 2152 }
2151 } 2153 }
2152 context()->Plug(x0); 2154 context()->Plug(x0);
2153 } 2155 }
2154 2156
2155 2157
2156 void FullCodeGenerator::EmitVariableAssignment(Variable* var, 2158 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
2157 Token::Value op) { 2159 Token::Value op) {
2158 ASM_LOCATION("FullCodeGenerator::EmitVariableAssignment"); 2160 ASM_LOCATION("FullCodeGenerator::EmitVariableAssignment");
2159 if (var->IsUnallocated()) { 2161 if (var->IsUnallocated()) {
2160 // Global var, const, or let. 2162 // Global var, const, or let.
2161 __ Mov(x2, Operand(var->name())); 2163 __ Mov(x2, Operand(var->name()));
2162 __ Ldr(x1, GlobalObjectMemOperand()); 2164 __ Ldr(x1, GlobalObjectMemOperand());
2163 CallStoreIC(CONTEXTUAL); 2165 CallStoreIC();
2164 2166
2165 } else if (op == Token::INIT_CONST) { 2167 } else if (op == Token::INIT_CONST) {
2166 // Const initializers need a write barrier. 2168 // Const initializers need a write barrier.
2167 ASSERT(!var->IsParameter()); // No const parameters. 2169 ASSERT(!var->IsParameter()); // No const parameters.
2168 if (var->IsStackLocal()) { 2170 if (var->IsStackLocal()) {
2169 Label skip; 2171 Label skip;
2170 __ Ldr(x1, StackOperand(var)); 2172 __ Ldr(x1, StackOperand(var));
2171 __ JumpIfNotRoot(x1, Heap::kTheHoleValueRootIndex, &skip); 2173 __ JumpIfNotRoot(x1, Heap::kTheHoleValueRootIndex, &skip);
2172 __ Str(result_register(), StackOperand(var)); 2174 __ Str(result_register(), StackOperand(var));
2173 __ Bind(&skip); 2175 __ Bind(&skip);
(...skipping 78 matching lines...) Expand 10 before | Expand all | Expand 10 after
2252 // Assignment to a property, using a named store IC. 2254 // Assignment to a property, using a named store IC.
2253 Property* prop = expr->target()->AsProperty(); 2255 Property* prop = expr->target()->AsProperty();
2254 ASSERT(prop != NULL); 2256 ASSERT(prop != NULL);
2255 ASSERT(prop->key()->AsLiteral() != NULL); 2257 ASSERT(prop->key()->AsLiteral() != NULL);
2256 2258
2257 // Record source code position before IC call. 2259 // Record source code position before IC call.
2258 SetSourcePosition(expr->position()); 2260 SetSourcePosition(expr->position());
2259 __ Mov(x2, Operand(prop->key()->AsLiteral()->value())); 2261 __ Mov(x2, Operand(prop->key()->AsLiteral()->value()));
2260 __ Pop(x1); 2262 __ Pop(x1);
2261 2263
2262 CallStoreIC(NOT_CONTEXTUAL, expr->AssignmentFeedbackId()); 2264 CallStoreIC(expr->AssignmentFeedbackId());
2263 2265
2264 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 2266 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2265 context()->Plug(x0); 2267 context()->Plug(x0);
2266 } 2268 }
2267 2269
2268 2270
2269 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) { 2271 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2270 ASM_LOCATION("FullCodeGenerator::EmitKeyedPropertyAssignment"); 2272 ASM_LOCATION("FullCodeGenerator::EmitKeyedPropertyAssignment");
2271 // Assignment to a property, using a keyed store IC. 2273 // Assignment to a property, using a keyed store IC.
2272 2274
2273 // Record source code position before IC call. 2275 // Record source code position before IC call.
2274 SetSourcePosition(expr->position()); 2276 SetSourcePosition(expr->position());
2275 // TODO(all): Could we pass this in registers rather than on the stack? 2277 // TODO(all): Could we pass this in registers rather than on the stack?
2276 __ Pop(x1, x2); // Key and object holding the property. 2278 __ Pop(x1, x2); // Key and object holding the property.
2277 2279
2278 Handle<Code> ic = is_classic_mode() 2280 Handle<Code> ic = is_classic_mode()
2279 ? isolate()->builtins()->KeyedStoreIC_Initialize() 2281 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2280 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); 2282 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2281 CallIC(ic, NOT_CONTEXTUAL, expr->AssignmentFeedbackId()); 2283 CallIC(ic, expr->AssignmentFeedbackId());
2282 2284
2283 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 2285 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2284 context()->Plug(x0); 2286 context()->Plug(x0);
2285 } 2287 }
2286 2288
2287 2289
2288 void FullCodeGenerator::VisitProperty(Property* expr) { 2290 void FullCodeGenerator::VisitProperty(Property* expr) {
2289 Comment cmnt(masm_, "[ Property"); 2291 Comment cmnt(masm_, "[ Property");
2290 Expression* key = expr->key(); 2292 Expression* key = expr->key();
2291 2293
2292 if (key->IsPropertyName()) { 2294 if (key->IsPropertyName()) {
2293 VisitForAccumulatorValue(expr->obj()); 2295 VisitForAccumulatorValue(expr->obj());
2294 EmitNamedPropertyLoad(expr); 2296 EmitNamedPropertyLoad(expr);
2295 PrepareForBailoutForId(expr->LoadId(), TOS_REG); 2297 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2296 context()->Plug(x0); 2298 context()->Plug(x0);
2297 } else { 2299 } else {
2298 VisitForStackValue(expr->obj()); 2300 VisitForStackValue(expr->obj());
2299 VisitForAccumulatorValue(expr->key()); 2301 VisitForAccumulatorValue(expr->key());
2300 __ Pop(x1); 2302 __ Pop(x1);
2301 EmitKeyedPropertyLoad(expr); 2303 EmitKeyedPropertyLoad(expr);
2302 context()->Plug(x0); 2304 context()->Plug(x0);
2303 } 2305 }
2304 } 2306 }
2305 2307
2306 2308
2307 void FullCodeGenerator::CallIC(Handle<Code> code, 2309 void FullCodeGenerator::CallIC(Handle<Code> code,
2308 ContextualMode mode,
2309 TypeFeedbackId ast_id) { 2310 TypeFeedbackId ast_id) {
2310 ic_total_count_++; 2311 ic_total_count_++;
2311 // All calls must have a predictable size in full-codegen code to ensure that 2312 // All calls must have a predictable size in full-codegen code to ensure that
2312 // the debugger can patch them correctly. 2313 // the debugger can patch them correctly.
2313 ASSERT((mode != CONTEXTUAL) || ast_id.IsNone());
2314 __ Call(code, RelocInfo::CODE_TARGET, ast_id); 2314 __ Call(code, RelocInfo::CODE_TARGET, ast_id);
2315 } 2315 }
2316 2316
2317 2317
2318 // Code common for calls using the IC. 2318 // Code common for calls using the IC.
2319 void FullCodeGenerator::EmitCallWithIC(Call* expr) { 2319 void FullCodeGenerator::EmitCallWithIC(Call* expr) {
2320 ASM_LOCATION("EmitCallWithIC"); 2320 ASM_LOCATION("EmitCallWithIC");
2321 2321
2322 Expression* callee = expr->expression(); 2322 Expression* callee = expr->expression();
2323 ZoneList<Expression*>* args = expr->arguments(); 2323 ZoneList<Expression*>* args = expr->arguments();
(...skipping 90 matching lines...) Expand 10 before | Expand all | Expand 10 after
2414 int arg_count = args->length(); 2414 int arg_count = args->length();
2415 { PreservePositionScope scope(masm()->positions_recorder()); 2415 { PreservePositionScope scope(masm()->positions_recorder());
2416 for (int i = 0; i < arg_count; i++) { 2416 for (int i = 0; i < arg_count; i++) {
2417 VisitForStackValue(args->at(i)); 2417 VisitForStackValue(args->at(i));
2418 } 2418 }
2419 } 2419 }
2420 // Record source position for debugger. 2420 // Record source position for debugger.
2421 SetSourcePosition(expr->position()); 2421 SetSourcePosition(expr->position());
2422 2422
2423 Handle<Object> uninitialized = 2423 Handle<Object> uninitialized =
2424 TypeFeedbackCells::UninitializedSentinel(isolate()); 2424 TypeFeedbackInfo::UninitializedSentinel(isolate());
2425 Handle<Cell> cell = 2425 StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized);
2426 isolate()->factory()->NewCell(uninitialized); 2426 __ LoadObject(x2, FeedbackVector());
2427 RecordTypeFeedbackCell(expr->CallFeedbackId(), cell); 2427 __ Mov(x3, Operand(Smi::FromInt(expr->CallFeedbackSlot())));
2428 __ Mov(x2, Operand(cell));
2429 2428
2430 // Record call targets in unoptimized code. 2429 // Record call targets in unoptimized code.
2431 CallFunctionStub stub(arg_count, RECORD_CALL_TARGET); 2430 CallFunctionStub stub(arg_count, RECORD_CALL_TARGET);
2432 __ Peek(x1, (arg_count + 1) * kXRegSizeInBytes); 2431 __ Peek(x1, (arg_count + 1) * kXRegSizeInBytes);
2433 __ CallStub(&stub, expr->CallFeedbackId()); 2432 __ CallStub(&stub);
2434 RecordJSReturnSite(expr); 2433 RecordJSReturnSite(expr);
2435 // Restore context register. 2434 // Restore context register.
2436 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2435 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2437 context()->DropAndPlug(1, x0); 2436 context()->DropAndPlug(1, x0);
2438 } 2437 }
2439 2438
2440 2439
2441 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) { 2440 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2442 ASM_LOCATION("FullCodeGenerator::EmitResolvePossiblyDirectEval"); 2441 ASM_LOCATION("FullCodeGenerator::EmitResolvePossiblyDirectEval");
2443 // Prepare to push a copy of the first argument or undefined if it doesn't 2442 // Prepare to push a copy of the first argument or undefined if it doesn't
(...skipping 170 matching lines...) Expand 10 before | Expand all | Expand 10 after
2614 // Call the construct call builtin that handles allocation and 2613 // Call the construct call builtin that handles allocation and
2615 // constructor invocation. 2614 // constructor invocation.
2616 SetSourcePosition(expr->position()); 2615 SetSourcePosition(expr->position());
2617 2616
2618 // Load function and argument count into x1 and x0. 2617 // Load function and argument count into x1 and x0.
2619 __ Mov(x0, arg_count); 2618 __ Mov(x0, arg_count);
2620 __ Peek(x1, arg_count * kXRegSizeInBytes); 2619 __ Peek(x1, arg_count * kXRegSizeInBytes);
2621 2620
2622 // Record call targets in unoptimized code. 2621 // Record call targets in unoptimized code.
2623 Handle<Object> uninitialized = 2622 Handle<Object> uninitialized =
2624 TypeFeedbackCells::UninitializedSentinel(isolate()); 2623 TypeFeedbackInfo::UninitializedSentinel(isolate());
2625 Handle<Cell> cell = 2624 StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized);
2626 isolate()->factory()->NewCell(uninitialized); 2625 __ LoadObject(x2, FeedbackVector());
2627 RecordTypeFeedbackCell(expr->CallNewFeedbackId(), cell); 2626 __ Mov(x3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot())));
2628 __ Mov(x2, Operand(cell));
2629 2627
2630 CallConstructStub stub(RECORD_CALL_TARGET); 2628 CallConstructStub stub(RECORD_CALL_TARGET);
2631 __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL); 2629 __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
2632 PrepareForBailoutForId(expr->ReturnId(), TOS_REG); 2630 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2633 context()->Plug(x0); 2631 context()->Plug(x0);
2634 } 2632 }
2635 2633
2636 2634
2637 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) { 2635 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2638 ZoneList<Expression*>* args = expr->arguments(); 2636 ZoneList<Expression*>* args = expr->arguments();
(...skipping 1481 matching lines...) Expand 10 before | Expand all | Expand 10 after
4120 __ Bind(&stub_call); 4118 __ Bind(&stub_call);
4121 __ Mov(x1, x0); 4119 __ Mov(x1, x0);
4122 __ Mov(x0, Operand(Smi::FromInt(count_value))); 4120 __ Mov(x0, Operand(Smi::FromInt(count_value)));
4123 4121
4124 // Record position before stub call. 4122 // Record position before stub call.
4125 SetSourcePosition(expr->position()); 4123 SetSourcePosition(expr->position());
4126 4124
4127 { 4125 {
4128 Assembler::BlockConstPoolScope scope(masm_); 4126 Assembler::BlockConstPoolScope scope(masm_);
4129 BinaryOpICStub stub(Token::ADD, NO_OVERWRITE); 4127 BinaryOpICStub stub(Token::ADD, NO_OVERWRITE);
4130 CallIC(stub.GetCode(isolate()), NOT_CONTEXTUAL, 4128 CallIC(stub.GetCode(isolate()), expr->CountBinOpFeedbackId());
4131 expr->CountBinOpFeedbackId());
4132 patch_site.EmitPatchInfo(); 4129 patch_site.EmitPatchInfo();
4133 } 4130 }
4134 __ Bind(&done); 4131 __ Bind(&done);
4135 4132
4136 // Store the value returned in x0. 4133 // Store the value returned in x0.
4137 switch (assign_type) { 4134 switch (assign_type) {
4138 case VARIABLE: 4135 case VARIABLE:
4139 if (expr->is_postfix()) { 4136 if (expr->is_postfix()) {
4140 { EffectContext context(this); 4137 { EffectContext context(this);
4141 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), 4138 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4142 Token::ASSIGN); 4139 Token::ASSIGN);
4143 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4140 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4144 context.Plug(x0); 4141 context.Plug(x0);
4145 } 4142 }
 4146 // For all contexts except EffectContext, we have the result on 4143 // For all contexts except EffectContext, we have the result on
4147 // top of the stack. 4144 // top of the stack.
4148 if (!context()->IsEffect()) { 4145 if (!context()->IsEffect()) {
4149 context()->PlugTOS(); 4146 context()->PlugTOS();
4150 } 4147 }
4151 } else { 4148 } else {
4152 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), 4149 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4153 Token::ASSIGN); 4150 Token::ASSIGN);
4154 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4151 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4155 context()->Plug(x0); 4152 context()->Plug(x0);
4156 } 4153 }
4157 break; 4154 break;
4158 case NAMED_PROPERTY: { 4155 case NAMED_PROPERTY: {
4159 __ Mov(x2, Operand(prop->key()->AsLiteral()->value())); 4156 __ Mov(x2, Operand(prop->key()->AsLiteral()->value()));
4160 __ Pop(x1); 4157 __ Pop(x1);
4161 CallStoreIC(NOT_CONTEXTUAL, expr->CountStoreFeedbackId()); 4158 CallStoreIC(expr->CountStoreFeedbackId());
4162 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4159 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4163 if (expr->is_postfix()) { 4160 if (expr->is_postfix()) {
4164 if (!context()->IsEffect()) { 4161 if (!context()->IsEffect()) {
4165 context()->PlugTOS(); 4162 context()->PlugTOS();
4166 } 4163 }
4167 } else { 4164 } else {
4168 context()->Plug(x0); 4165 context()->Plug(x0);
4169 } 4166 }
4170 break; 4167 break;
4171 } 4168 }
4172 case KEYED_PROPERTY: { 4169 case KEYED_PROPERTY: {
4173 __ Pop(x1); // Key. 4170 __ Pop(x1); // Key.
4174 __ Pop(x2); // Receiver. 4171 __ Pop(x2); // Receiver.
4175 Handle<Code> ic = is_classic_mode() 4172 Handle<Code> ic = is_classic_mode()
4176 ? isolate()->builtins()->KeyedStoreIC_Initialize() 4173 ? isolate()->builtins()->KeyedStoreIC_Initialize()
4177 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); 4174 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
4178 CallIC(ic, NOT_CONTEXTUAL, expr->CountStoreFeedbackId()); 4175 CallIC(ic, expr->CountStoreFeedbackId());
4179 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4176 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4180 if (expr->is_postfix()) { 4177 if (expr->is_postfix()) {
4181 if (!context()->IsEffect()) { 4178 if (!context()->IsEffect()) {
4182 context()->PlugTOS(); 4179 context()->PlugTOS();
4183 } 4180 }
4184 } else { 4181 } else {
4185 context()->Plug(x0); 4182 context()->Plug(x0);
4186 } 4183 }
4187 break; 4184 break;
4188 } 4185 }
(...skipping 179 matching lines...) Expand 10 before | Expand all | Expand 10 after
4368 Label slow_case; 4365 Label slow_case;
4369 patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case); 4366 patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
4370 __ Cmp(x1, x0); 4367 __ Cmp(x1, x0);
4371 Split(cond, if_true, if_false, NULL); 4368 Split(cond, if_true, if_false, NULL);
4372 __ Bind(&slow_case); 4369 __ Bind(&slow_case);
4373 } 4370 }
4374 4371
4375 // Record position and call the compare IC. 4372 // Record position and call the compare IC.
4376 SetSourcePosition(expr->position()); 4373 SetSourcePosition(expr->position());
4377 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op); 4374 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
4378 CallIC(ic, NOT_CONTEXTUAL, expr->CompareOperationFeedbackId()); 4375 CallIC(ic, expr->CompareOperationFeedbackId());
4379 patch_site.EmitPatchInfo(); 4376 patch_site.EmitPatchInfo();
4380 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 4377 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4381 __ CompareAndSplit(x0, 0, cond, if_true, if_false, fall_through); 4378 __ CompareAndSplit(x0, 0, cond, if_true, if_false, fall_through);
4382 } 4379 }
4383 } 4380 }
4384 4381
4385 // Convert the result of the comparison into one expected for this 4382 // Convert the result of the comparison into one expected for this
4386 // expression's context. 4383 // expression's context.
4387 context()->Plug(if_true, if_false); 4384 context()->Plug(if_true, if_false);
4388 } 4385 }
(...skipping 14 matching lines...) Expand all
4403 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 4400 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4404 4401
4405 if (expr->op() == Token::EQ_STRICT) { 4402 if (expr->op() == Token::EQ_STRICT) {
4406 Heap::RootListIndex nil_value = nil == kNullValue ? 4403 Heap::RootListIndex nil_value = nil == kNullValue ?
4407 Heap::kNullValueRootIndex : 4404 Heap::kNullValueRootIndex :
4408 Heap::kUndefinedValueRootIndex; 4405 Heap::kUndefinedValueRootIndex;
4409 __ CompareRoot(x0, nil_value); 4406 __ CompareRoot(x0, nil_value);
4410 Split(eq, if_true, if_false, fall_through); 4407 Split(eq, if_true, if_false, fall_through);
4411 } else { 4408 } else {
4412 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil); 4409 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
4413 CallIC(ic, NOT_CONTEXTUAL, expr->CompareOperationFeedbackId()); 4410 CallIC(ic, expr->CompareOperationFeedbackId());
4414 __ CompareAndSplit(x0, 0, ne, if_true, if_false, fall_through); 4411 __ CompareAndSplit(x0, 0, ne, if_true, if_false, fall_through);
4415 } 4412 }
4416 4413
4417 context()->Plug(if_true, if_false); 4414 context()->Plug(if_true, if_false);
4418 } 4415 }
4419 4416
4420 4417
4421 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) { 4418 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4422 __ Ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 4419 __ Ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4423 context()->Plug(x0); 4420 context()->Plug(x0);
(...skipping 122 matching lines...) Expand 10 before | Expand all | Expand 10 after
4546 __ Bind(&l_next); 4543 __ Bind(&l_next);
4547 __ LoadRoot(x2, Heap::knext_stringRootIndex); // "next" 4544 __ LoadRoot(x2, Heap::knext_stringRootIndex); // "next"
4548 __ Peek(x3, 1 * kPointerSize); // iter 4545 __ Peek(x3, 1 * kPointerSize); // iter
4549 __ Push(x2, x3, x0); // "next", iter, received 4546 __ Push(x2, x3, x0); // "next", iter, received
4550 4547
4551 // result = receiver[f](arg); 4548 // result = receiver[f](arg);
4552 __ Bind(&l_call); 4549 __ Bind(&l_call);
4553 __ Peek(x1, 1 * kPointerSize); 4550 __ Peek(x1, 1 * kPointerSize);
4554 __ Peek(x0, 2 * kPointerSize); 4551 __ Peek(x0, 2 * kPointerSize);
4555 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize(); 4552 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
4556 CallIC(ic, NOT_CONTEXTUAL, TypeFeedbackId::None()); 4553 CallIC(ic, TypeFeedbackId::None());
4557 __ Mov(x1, x0); 4554 __ Mov(x1, x0);
4558 __ Poke(x1, 2 * kPointerSize); 4555 __ Poke(x1, 2 * kPointerSize);
4559 CallFunctionStub stub(1, CALL_AS_METHOD); 4556 CallFunctionStub stub(1, CALL_AS_METHOD);
4560 __ CallStub(&stub); 4557 __ CallStub(&stub);
4561 4558
4562 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 4559 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4563 __ Drop(1); // The function is still on the stack; drop it. 4560 __ Drop(1); // The function is still on the stack; drop it.
4564 4561
4565 // if (!result.done) goto l_try; 4562 // if (!result.done) goto l_try;
4566 __ Bind(&l_loop); 4563 __ Bind(&l_loop);
(...skipping 443 matching lines...) Expand 10 before | Expand all | Expand 10 after
5010 return previous_; 5007 return previous_;
5011 } 5008 }
5012 5009
5013 5010
5014 #undef __ 5011 #undef __
5015 5012
5016 5013
5017 } } // namespace v8::internal 5014 } } // namespace v8::internal
5018 5015
5019 #endif // V8_TARGET_ARCH_A64 5016 #endif // V8_TARGET_ARCH_A64
OLDNEW
« no previous file with comments | « src/a64/debug-a64.cc ('k') | src/a64/macro-assembler-a64.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698