Chromium Code Reviews

Side by Side Diff: src/codegen-arm.cc

Issue 6527: Move code generation for storing to a reference out of the AST nodes, and... (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: '' Created 12 years, 2 months ago
OLD | NEW
1 // Copyright 2006-2008 the V8 project authors. All rights reserved. 1 // Copyright 2006-2008 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 32 matching lines...)
43 // ----------------------------------------------------------------------------- 43 // -----------------------------------------------------------------------------
44 // Reference support 44 // Reference support
45 45
46 // A reference is a C++ stack-allocated object that keeps an ECMA 46 // A reference is a C++ stack-allocated object that keeps an ECMA
47 // reference on the execution stack while in scope. For variables 47 // reference on the execution stack while in scope. For variables
48 // the reference is empty, indicating that it isn't necessary to 48 // the reference is empty, indicating that it isn't necessary to
49 // store state on the stack for keeping track of references to those. 49 // store state on the stack for keeping track of references to those.
50 // For properties, we keep either one (named) or two (indexed) values 50 // For properties, we keep either one (named) or two (indexed) values
51 // on the execution stack to represent the reference. 51 // on the execution stack to represent the reference.
52 52
53 enum InitState { CONST_INIT, NOT_CONST_INIT };
54
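The two InitState values map onto the two store flavors implemented in Reference::SetValue later in this patch; roughly:
  // NOT_CONST_INIT : ordinary store -- always writes the slot/property.
  // CONST_INIT     : one-time initialization of a const -- for non-LOOKUP slots
  //                  the store is skipped if the slot no longer holds the hole
  //                  value, and for LOOKUP slots Runtime::kInitializeConstContextSlot
  //                  is called so READ_ONLY attributes are ignored.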
53 class Reference BASE_EMBEDDED { 55 class Reference BASE_EMBEDDED {
54 public: 56 public:
55 // The values of the types is important, see size(). 57 // The values of the types is important, see size().
56 enum Type { ILLEGAL = -1, SLOT = 0, NAMED = 1, KEYED = 2 }; 58 enum Type { ILLEGAL = -1, SLOT = 0, NAMED = 1, KEYED = 2 };
57 Reference(ArmCodeGenerator* cgen, Expression* expression); 59 Reference(ArmCodeGenerator* cgen, Expression* expression);
58 ~Reference(); 60 ~Reference();
59 61
60 Expression* expression() const { return expression_; } 62 Expression* expression() const { return expression_; }
61 Type type() const { return type_; } 63 Type type() const { return type_; }
62 void set_type(Type value) { 64 void set_type(Type value) {
63 ASSERT(type_ == ILLEGAL); 65 ASSERT(type_ == ILLEGAL);
64 type_ = value; 66 type_ = value;
65 } 67 }
66 // The size of the reference or -1 if the reference is illegal. 68 // The size of the reference or -1 if the reference is illegal.
67 int size() const { return type_; } 69 int size() const { return type_; }
68 70
69 bool is_illegal() const { return type_ == ILLEGAL; } 71 bool is_illegal() const { return type_ == ILLEGAL; }
70 bool is_slot() const { return type_ == SLOT; } 72 bool is_slot() const { return type_ == SLOT; }
71 bool is_property() const { return type_ == NAMED || type_ == KEYED; } 73 bool is_property() const { return type_ == NAMED || type_ == KEYED; }
72 74
75 void SetValue(InitState init_state);
76
73 private: 77 private:
74 ArmCodeGenerator* cgen_; 78 ArmCodeGenerator* cgen_;
75 Expression* expression_; 79 Expression* expression_;
76 Type type_; 80 Type type_;
77 }; 81 };
78 82
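For orientation, a minimal sketch of how the code generator drives this helper when compiling a store, assuming an Assignment-like node with target() and value() accessors; it simply mirrors the "Reference ...; Load(...); SetValue(NOT_CONST_INIT)" pattern used by the Declaration and Assignment visitors further down, and is not part of the patch itself:
  {
    Reference target(this, node->target());  // constructor puts the reference
                                             // (0, 1 or 2 words) on the stack
    Load(node->value());                     // push the value to store on top
    target.SetValue(NOT_CONST_INIT);         // emit the store; the value stays
                                             // on top of the expression stack
  }                                          // ~Reference() unloads the reference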
79 83
80 // ------------------------------------------------------------------------- 84 // -------------------------------------------------------------------------
81 // Code generation state 85 // Code generation state
82 86
83 // The state is passed down the AST by the code generator. It is passed 87 // The state is passed down the AST by the code generator (and back up, in
84 // implicitly (in a member variable) to the non-static code generator member 88 // the form of the state of the label pair). It is threaded through the
85 // functions, and explicitly (as an argument) to the static member functions 89 // call stack. Constructing a state implicitly pushes it on the owning code
86 // and the AST node member functions. 90 // generator's stack of states, and destroying one implicitly pops it.
87 //
88 // The state is threaded through the call stack. Constructing a state
89 // implicitly pushes it on the owning code generator's stack of states, and
90 // destroying one implicitly pops it.
91 91
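Concretely, the push/pop discipline described above looks like the body of GetValue() below:
  {
    CodeGenState new_state(this, ref);  // constructor: state_ now points here
    Visit(ref->expression());           // visited nodes read the current state
  }                                     // destructor: previous state restored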
92 class CodeGenState BASE_EMBEDDED { 92 class CodeGenState BASE_EMBEDDED {
93 public: 93 public:
94 enum AccessType { 94 enum AccessType {
95 UNDEFINED, 95 UNDEFINED,
96 LOAD, 96 LOAD,
97 LOAD_TYPEOF_EXPR 97 LOAD_TYPEOF_EXPR
98 }; 98 };
99 99
100 // Create an initial code generator state. Destroying the initial state 100 // Create an initial code generator state. Destroying the initial state
(...skipping 82 matching lines...)
183 bool is_referenced() const { return state_->ref() != NULL; } 183 bool is_referenced() const { return state_->ref() != NULL; }
184 Label* true_target() const { return state_->true_target(); } 184 Label* true_target() const { return state_->true_target(); }
185 Label* false_target() const { return state_->false_target(); } 185 Label* false_target() const { return state_->false_target(); }
186 186
187 187
188 // Expressions 188 // Expressions
189 MemOperand GlobalObject() const { 189 MemOperand GlobalObject() const {
190 return ContextOperand(cp, Context::GLOBAL_INDEX); 190 return ContextOperand(cp, Context::GLOBAL_INDEX);
191 } 191 }
192 192
193 static MemOperand ContextOperand(Register context, int index) { 193 MemOperand ContextOperand(Register context, int index) const {
194 return MemOperand(context, Context::SlotOffset(index)); 194 return MemOperand(context, Context::SlotOffset(index));
195 } 195 }
196 196
197 static MemOperand ParameterOperand(const CodeGenerator* cgen, int index) { 197 MemOperand ParameterOperand(int index) const {
198 int num_parameters = cgen->scope()->num_parameters(); 198 int num_parameters = scope()->num_parameters();
199 // index -2 corresponds to the activated closure, -1 corresponds 199 // index -2 corresponds to the activated closure, -1 corresponds
200 // to the receiver 200 // to the receiver
201 ASSERT(-2 <= index && index < num_parameters); 201 ASSERT(-2 <= index && index < num_parameters);
202 int offset = (1 + num_parameters - index) * kPointerSize; 202 int offset = (1 + num_parameters - index) * kPointerSize;
203 return MemOperand(fp, offset); 203 return MemOperand(fp, offset);
204 } 204 }
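A quick check of the offset formula (using kPointerSize == 4 on ARM): with num_parameters == 2,
  //   index  1 (last parameter)    -> (1 + 2 - 1) * 4 = fp +  8
  //   index  0 (first parameter)   -> (1 + 2 - 0) * 4 = fp + 12
  //   index -1 (receiver)          -> (1 + 2 + 1) * 4 = fp + 16
  //   index -2 (activated closure) -> (1 + 2 + 2) * 4 = fp + 20
  // which agrees with kReceiverDisplacement = 2 + num_parameters used in the
  // arguments-object code further down.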
205 205
206 MemOperand ParameterOperand(int index) const {
207 return ParameterOperand(this, index);
208 }
209
210 MemOperand FunctionOperand() const { 206 MemOperand FunctionOperand() const {
211 return MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset); 207 return MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset);
212 } 208 }
213 209
214 static MemOperand SlotOperand(CodeGenerator* cgen, 210 MemOperand SlotOperand(Slot* slot, Register tmp);
215 Slot* slot,
216 Register tmp);
217
218 MemOperand SlotOperand(Slot* slot, Register tmp) {
219 return SlotOperand(this, slot, tmp);
220 }
221 211
222 void LoadCondition(Expression* x, CodeGenState::AccessType access, 212 void LoadCondition(Expression* x, CodeGenState::AccessType access,
223 Label* true_target, Label* false_target, bool force_cc); 213 Label* true_target, Label* false_target, bool force_cc);
224 void Load(Expression* x, 214 void Load(Expression* x,
225 CodeGenState::AccessType access = CodeGenState::LOAD); 215 CodeGenState::AccessType access = CodeGenState::LOAD);
226 void LoadGlobal(); 216 void LoadGlobal();
227 217
228 // Special code for typeof expressions: Unfortunately, we must 218 // Special code for typeof expressions: Unfortunately, we must
229 // be careful when loading the expression in 'typeof' 219 // be careful when loading the expression in 'typeof'
230 // expressions. We are not allowed to throw reference errors for 220 // expressions. We are not allowed to throw reference errors for
231 // non-existing properties of the global object, so we must make it 221 // non-existing properties of the global object, so we must make it
232 // look like an explicit property access, instead of an access 222 // look like an explicit property access, instead of an access
233 // through the context chain. 223 // through the context chain.
234 void LoadTypeofExpression(Expression* x); 224 void LoadTypeofExpression(Expression* x);
235 225
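A concrete instance of the constraint mentioned above:
  // e.g. evaluating (typeof some_undeclared_global) must yield "undefined"
  // rather than throw a ReferenceError, so the load is emitted as a property
  // load from the global object instead of a context-chain lookup.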
236 226
237 // References 227 // References
238 228
239 // Generate code to fetch the value of a reference. The reference is 229 // Generate code to fetch the value of a reference. The reference is
240 // expected to be on top of the expression stack. It is left in place and 230 // expected to be on top of the expression stack. It is left in place and
241 // its value is pushed on top of it. 231 // its value is pushed on top of it.
242 void GetValue(Reference* ref) { 232 void GetValue(Reference* ref) {
243 ASSERT(!has_cc()); 233 ASSERT(!has_cc());
244 ASSERT(!ref->is_illegal()); 234 ASSERT(!ref->is_illegal());
245 CodeGenState new_state(this, ref); 235 CodeGenState new_state(this, ref);
246 Visit(ref->expression()); 236 Visit(ref->expression());
247 } 237 }
248 238
249 // Generate code to store a value in a reference. The stored value is
250 // expected on top of the expression stack, with the reference immediately
251 // below it. The expression stack is left unchanged.
252 void SetValue(Reference* ref) {
253 ASSERT(!has_cc());
254 ASSERT(!ref->is_illegal());
255 ref->expression()->GenerateStoreCode(this, ref, NOT_CONST_INIT);
256 }
257
258 // Generate code to store a value in a reference. The stored value is
259 // expected on top of the expression stack, with the reference immediately
260 // below it. The expression stack is left unchanged.
261 void InitConst(Reference* ref) {
262 ASSERT(!has_cc());
263 ASSERT(!ref->is_illegal());
264 ref->expression()->GenerateStoreCode(this, ref, CONST_INIT);
265 }
266
267 // Generate code to fetch a value from a property of a reference. The 239 // Generate code to fetch a value from a property of a reference. The
268 // reference is expected on top of the expression stack. It is left in 240 // reference is expected on top of the expression stack. It is left in
269 // place and its value is pushed on top of it. 241 // place and its value is pushed on top of it.
270 void GetReferenceProperty(Expression* key); 242 void GetReferenceProperty(Expression* key);
271 243
272 // Generate code to store a value in a property of a reference. The
273 // stored value is expected on top of the expression stack, with the
274 // reference immediately below it. The expression stack is left
275 // unchanged.
276 static void SetReferenceProperty(CodeGenerator* cgen,
277 Reference* ref,
278 Expression* key);
279
280
281 void ToBoolean(Label* true_target, Label* false_target); 244 void ToBoolean(Label* true_target, Label* false_target);
282 245
283 void GenericBinaryOperation(Token::Value op); 246 void GenericBinaryOperation(Token::Value op);
284 void Comparison(Condition cc, bool strict = false); 247 void Comparison(Condition cc, bool strict = false);
285 248
286 void SmiOperation(Token::Value op, Handle<Object> value, bool reversed); 249 void SmiOperation(Token::Value op, Handle<Object> value, bool reversed);
287 250
288 void CallWithArguments(ZoneList<Expression*>* arguments, int position); 251 void CallWithArguments(ZoneList<Expression*>* arguments, int position);
289 252
290 // Declare global variables and functions in the given array of 253 // Declare global variables and functions in the given array of
(...skipping 303 matching lines...)
594 ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT); 557 ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
595 __ ldr(r2, FunctionOperand()); 558 __ ldr(r2, FunctionOperand());
596 // The receiver is below the arguments, the return address, 559 // The receiver is below the arguments, the return address,
597 // and the frame pointer on the stack. 560 // and the frame pointer on the stack.
598 const int kReceiverDisplacement = 2 + scope->num_parameters(); 561 const int kReceiverDisplacement = 2 + scope->num_parameters();
599 __ add(r1, fp, Operand(kReceiverDisplacement * kPointerSize)); 562 __ add(r1, fp, Operand(kReceiverDisplacement * kPointerSize));
600 __ mov(r0, Operand(Smi::FromInt(scope->num_parameters()))); 563 __ mov(r0, Operand(Smi::FromInt(scope->num_parameters())));
601 __ stm(db_w, sp, r0.bit() | r1.bit() | r2.bit()); 564 __ stm(db_w, sp, r0.bit() | r1.bit() | r2.bit());
602 __ CallStub(&stub); 565 __ CallStub(&stub);
603 __ push(r0); 566 __ push(r0);
604 SetValue(&arguments_ref); 567 arguments_ref.SetValue(NOT_CONST_INIT);
605 } 568 }
606 SetValue(&shadow_ref); 569 shadow_ref.SetValue(NOT_CONST_INIT);
607 } 570 }
608 __ pop(r0); // Value is no longer needed. 571 __ pop(r0); // Value is no longer needed.
609 } 572 }
610 573
611 // Generate code to 'execute' declarations and initialize 574 // Generate code to 'execute' declarations and initialize
612 // functions (source elements). In case of an illegal 575 // functions (source elements). In case of an illegal
613 // redeclaration we need to handle that instead of processing the 576 // redeclaration we need to handle that instead of processing the
614 // declarations. 577 // declarations.
615 if (scope->HasIllegalRedeclaration()) { 578 if (scope->HasIllegalRedeclaration()) {
616 Comment cmnt(masm_, "[ illegal redeclarations"); 579 Comment cmnt(masm_, "[ illegal redeclarations");
(...skipping 61 matching lines...)
678 __ add(sp, sp, Operand((scope_->num_parameters() + 1) * kPointerSize)); 641 __ add(sp, sp, Operand((scope_->num_parameters() + 1) * kPointerSize));
679 __ mov(pc, lr); 642 __ mov(pc, lr);
680 643
681 // Code generation state must be reset. 644 // Code generation state must be reset.
682 scope_ = NULL; 645 scope_ = NULL;
683 ASSERT(!has_cc()); 646 ASSERT(!has_cc());
684 ASSERT(state_ == NULL); 647 ASSERT(state_ == NULL);
685 } 648 }
686 649
687 650
651 MemOperand ArmCodeGenerator::SlotOperand(Slot* slot, Register tmp) {
652 // Currently, this assertion will fail if we try to assign to
653 // a constant variable that is constant because it is read-only
654 // (such as the variable referring to a named function expression).
655 // We need to implement assignments to read-only variables.
656 // Ideally, we should do this during AST generation (by converting
657 // such assignments into expression statements); however, in general
658 // we may not be able to make the decision until past AST generation,
659 // that is when the entire program is known.
660 ASSERT(slot != NULL);
661 int index = slot->index();
662 switch (slot->type()) {
663 case Slot::PARAMETER:
664 return ParameterOperand(index);
665
666 case Slot::LOCAL: {
667 ASSERT(0 <= index && index < scope()->num_stack_slots());
668 const int kLocalOffset = JavaScriptFrameConstants::kLocal0Offset;
669 return MemOperand(fp, kLocalOffset - index * kPointerSize);
670 }
671
672 case Slot::CONTEXT: {
673 // Follow the context chain if necessary.
674 ASSERT(!tmp.is(cp)); // do not overwrite context register
675 Register context = cp;
676 int chain_length = scope()->ContextChainLength(slot->var()->scope());
677 for (int i = chain_length; i-- > 0;) {
678 // Load the closure.
679 // (All contexts, even 'with' contexts, have a closure,
680 // and it is the same for all contexts inside a function.
681 // There is no need to go to the function context first.)
682 __ ldr(tmp, ContextOperand(context, Context::CLOSURE_INDEX));
683 // Load the function context (which is the incoming, outer context).
684 __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset));
685 context = tmp;
686 }
687 // We may have a 'with' context now. Get the function context.
 688 // (In fact this mov may never be needed, since the scope analysis
689 // may not permit a direct context access in this case and thus we are
690 // always at a function context. However it is safe to dereference be-
691 // cause the function context of a function context is itself. Before
692 // deleting this mov we should try to create a counter-example first,
693 // though...)
694 __ ldr(tmp, ContextOperand(context, Context::FCONTEXT_INDEX));
695 return ContextOperand(tmp, index);
696 }
697
698 default:
699 UNREACHABLE();
700 return MemOperand(r0, 0);
701 }
702 }
703
704
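To make the Slot::CONTEXT case above concrete, here is the loop unrolled for chain_length == 2 (offsets elided; this is just a reading of the code, not new behaviour):
  // ldr tmp, ContextOperand(cp,  Context::CLOSURE_INDEX)       ; closure of the current context
  // ldr tmp, FieldMemOperand(tmp, JSFunction::kContextOffset)  ; that function's (outer) context
  // ldr tmp, ContextOperand(tmp, Context::CLOSURE_INDEX)       ; one level further out
  // ldr tmp, FieldMemOperand(tmp, JSFunction::kContextOffset)
  // ldr tmp, ContextOperand(tmp, Context::FCONTEXT_INDEX)      ; normalize away any 'with' context
  // ...and the slot itself is then ContextOperand(tmp, slot->index()).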
688 // Loads a value on the stack. If it is a boolean value, the result may have 705 // Loads a value on the stack. If it is a boolean value, the result may have
689 // been (partially) translated into branches, or it may have set the condition 706 // been (partially) translated into branches, or it may have set the condition
690 // code register. If force_cc is set, the value is forced to set the condition 707 // code register. If force_cc is set, the value is forced to set the condition
691 // code register and no value is pushed. If the condition code register was set, 708 // code register and no value is pushed. If the condition code register was set,
692 // has_cc() is true and cc_reg_ contains the condition to test for 'true'. 709 // has_cc() is true and cc_reg_ contains the condition to test for 'true'.
693 void ArmCodeGenerator::LoadCondition(Expression* x, 710 void ArmCodeGenerator::LoadCondition(Expression* x,
694 CodeGenState::AccessType access, 711 CodeGenState::AccessType access,
695 Label* true_target, 712 Label* true_target,
696 Label* false_target, 713 Label* false_target,
697 bool force_cc) { 714 bool force_cc) {
(...skipping 764 matching lines...)
1462 val = new Literal(Factory::the_hole_value()); 1479 val = new Literal(Factory::the_hole_value());
1463 } else { 1480 } else {
1464 val = node->fun(); // NULL if we don't have a function 1481 val = node->fun(); // NULL if we don't have a function
1465 } 1482 }
1466 1483
1467 if (val != NULL) { 1484 if (val != NULL) {
1468 // Set initial value. 1485 // Set initial value.
1469 Reference target(this, node->proxy()); 1486 Reference target(this, node->proxy());
1470 ASSERT(target.is_slot()); 1487 ASSERT(target.is_slot());
1471 Load(val); 1488 Load(val);
1472 SetValue(&target); 1489 target.SetValue(NOT_CONST_INIT);
1473 // Get rid of the assigned value (declarations are statements). It's 1490 // Get rid of the assigned value (declarations are statements). It's
1474 // safe to pop the value lying on top of the reference before unloading 1491 // safe to pop the value lying on top of the reference before unloading
1475 // the reference itself (which preserves the top of stack) because we 1492 // the reference itself (which preserves the top of stack) because we
1476 // know it is a zero-sized reference. 1493 // know it is a zero-sized reference.
1477 __ pop(); 1494 __ pop();
1478 } 1495 }
1479 } 1496 }
1480 1497
1481 1498
1482 void ArmCodeGenerator::VisitExpressionStatement(ExpressionStatement* node) { 1499 void ArmCodeGenerator::VisitExpressionStatement(ExpressionStatement* node) {
(...skipping 472 matching lines...)
1955 { Reference each(this, node->each()); 1972 { Reference each(this, node->each());
1956 if (!each.is_illegal()) { 1973 if (!each.is_illegal()) {
1957 if (each.size() > 0) { 1974 if (each.size() > 0) {
1958 // Reference's size is positive. 1975 // Reference's size is positive.
1959 __ ldr(r0, MemOperand(sp, kPointerSize * each.size())); 1976 __ ldr(r0, MemOperand(sp, kPointerSize * each.size()));
1960 __ push(r0); 1977 __ push(r0);
1961 } 1978 }
1962 // If the reference was to a slot we rely on the convenient property 1979 // If the reference was to a slot we rely on the convenient property
1963 // that it doesn't matter whether a value (eg, r3 pushed above) is 1980 // that it doesn't matter whether a value (eg, r3 pushed above) is
1964 // right on top of or right underneath a zero-sized reference. 1981 // right on top of or right underneath a zero-sized reference.
1965 SetValue(&each); 1982 each.SetValue(NOT_CONST_INIT);
1966 if (each.size() > 0) { 1983 if (each.size() > 0) {
1967 // It's safe to pop the value lying on top of the reference before 1984 // It's safe to pop the value lying on top of the reference before
1968 // unloading the reference itself (which preserves the top of stack, 1985 // unloading the reference itself (which preserves the top of stack,
1969 // ie, now the topmost value of the non-zero sized reference), since 1986 // ie, now the topmost value of the non-zero sized reference), since
1970 // we will discard the top of stack after unloading the reference 1987 // we will discard the top of stack after unloading the reference
1971 // anyway. 1988 // anyway.
1972 __ pop(r0); 1989 __ pop(r0);
1973 } 1990 }
1974 } 1991 }
1975 } 1992 }
(...skipping 24 matching lines...)
2000 2017
2001 // --- Catch block --- 2018 // --- Catch block ---
2002 2019
2003 // Store the caught exception in the catch variable. 2020 // Store the caught exception in the catch variable.
2004 __ push(r0); 2021 __ push(r0);
2005 { Reference ref(this, node->catch_var()); 2022 { Reference ref(this, node->catch_var());
2006 ASSERT(ref.is_slot()); 2023 ASSERT(ref.is_slot());
2007 // Here we make use of the convenient property that it doesn't matter 2024 // Here we make use of the convenient property that it doesn't matter
2008 // whether a value is immediately on top of or underneath a zero-sized 2025 // whether a value is immediately on top of or underneath a zero-sized
2009 // reference. 2026 // reference.
2010 SetValue(&ref); 2027 ref.SetValue(NOT_CONST_INIT);
2011 } 2028 }
2012 2029
2013 // Remove the exception from the stack. 2030 // Remove the exception from the stack.
2014 __ pop(); 2031 __ pop();
2015 2032
2016 VisitStatements(node->catch_block()->statements()); 2033 VisitStatements(node->catch_block()->statements());
2017 __ b(&exit); 2034 __ b(&exit);
2018 2035
2019 2036
2020 // --- Try block --- 2037 // --- Try block ---
(...skipping 536 matching lines...)
2557 (var->mode() == Variable::CONST) && 2574 (var->mode() == Variable::CONST) &&
2558 node->op() != Token::INIT_VAR && node->op() != Token::INIT_CONST) { 2575 node->op() != Token::INIT_VAR && node->op() != Token::INIT_CONST) {
2559 // Assignment ignored - leave the value on the stack. 2576 // Assignment ignored - leave the value on the stack.
2560 2577
2561 } else { 2578 } else {
2562 __ RecordPosition(node->position()); 2579 __ RecordPosition(node->position());
2563 if (node->op() == Token::INIT_CONST) { 2580 if (node->op() == Token::INIT_CONST) {
2564 // Dynamic constant initializations must use the function context 2581 // Dynamic constant initializations must use the function context
2565 // and initialize the actual constant declared. Dynamic variable 2582 // and initialize the actual constant declared. Dynamic variable
2566 // initializations are simply assignments and use SetValue. 2583 // initializations are simply assignments and use SetValue.
2567 InitConst(&target); 2584 target.SetValue(CONST_INIT);
2568 } else { 2585 } else {
2569 SetValue(&target); 2586 target.SetValue(NOT_CONST_INIT);
2570 } 2587 }
2571 } 2588 }
2572 } 2589 }
2573 2590
2574 2591
2575 void ArmCodeGenerator::VisitThrow(Throw* node) { 2592 void ArmCodeGenerator::VisitThrow(Throw* node) {
2576 Comment cmnt(masm_, "[ Throw"); 2593 Comment cmnt(masm_, "[ Throw");
2577 2594
2578 Load(node->exception()); 2595 Load(node->exception());
2579 __ RecordPosition(node->position()); 2596 __ RecordPosition(node->position());
(...skipping 543 matching lines...)
3123 InvokeBuiltinStub stub(InvokeBuiltinStub::Inc, 1); 3140 InvokeBuiltinStub stub(InvokeBuiltinStub::Inc, 1);
3124 __ CallStub(&stub); 3141 __ CallStub(&stub);
3125 } else { 3142 } else {
3126 InvokeBuiltinStub stub(InvokeBuiltinStub::Dec, 1); 3143 InvokeBuiltinStub stub(InvokeBuiltinStub::Dec, 1);
3127 __ CallStub(&stub); 3144 __ CallStub(&stub);
3128 } 3145 }
3129 3146
3130 // Store the new value in the target if not const. 3147 // Store the new value in the target if not const.
3131 __ bind(&exit); 3148 __ bind(&exit);
3132 __ push(r0); 3149 __ push(r0);
3133 if (!is_const) SetValue(&target); 3150 if (!is_const) target.SetValue(NOT_CONST_INIT);
3134 } 3151 }
3135 3152
3136 // Postfix: Discard the new value and use the old. 3153 // Postfix: Discard the new value and use the old.
3137 if (is_postfix) __ pop(r0); 3154 if (is_postfix) __ pop(r0);
3138 } 3155 }
3139 3156
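A note on the postfix path above, hedged since most of the count-operation visitor is elided here:
  // e.g. for "x++" the value of the whole expression is the old value of x,
  // which appears to be saved on the stack in the elided prologue; SetValue
  // writes back the incremented value, and the final "if (is_postfix) __ pop(r0)"
  // discards that new value so the saved old value is what remains on the
  // expression stack.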
3140 3157
3141 void ArmCodeGenerator::VisitBinaryOperation(BinaryOperation* node) { 3158 void ArmCodeGenerator::VisitBinaryOperation(BinaryOperation* node) {
3142 Comment cmnt(masm_, "[ BinaryOperation"); 3159 Comment cmnt(masm_, "[ BinaryOperation");
3143 Token::Value op = node->op(); 3160 Token::Value op = node->op();
(...skipping 349 matching lines...)
3493 // Drop the execution stack down to the frame pointer and restore the caller 3510 // Drop the execution stack down to the frame pointer and restore the caller
3494 // frame pointer and return address. 3511 // frame pointer and return address.
3495 __ mov(sp, fp); 3512 __ mov(sp, fp);
3496 __ ldm(ia_w, sp, fp.bit() | lr.bit()); 3513 __ ldm(ia_w, sp, fp.bit() | lr.bit());
3497 } 3514 }
3498 3515
3499 3516
3500 #undef __ 3517 #undef __
3501 #define __ masm-> 3518 #define __ masm->
3502 3519
3503 MemOperand ArmCodeGenerator::SlotOperand(CodeGenerator* cgen, 3520 void Reference::SetValue(InitState init_state) {
3504 Slot* slot, 3521 ASSERT(!is_illegal());
3505 Register tmp) { 3522 ASSERT(!cgen_->has_cc());
3506 // Currently, this assertion will fail if we try to assign to 3523 MacroAssembler* masm = cgen_->masm();
3507 // a constant variable that is constant because it is read-only 3524 switch (type_) {
3508 // (such as the variable referring to a named function expression). 3525 case SLOT: {
3509 // We need to implement assignments to read-only variables. 3526 Comment cmnt(masm, "[ Store to Slot");
3510 // Ideally, we should do this during AST generation (by converting 3527 Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
3511 // such assignments into expression statements); however, in general 3528 ASSERT(slot != NULL);
3512 // we may not be able to make the decision until past AST generation, 3529 if (slot->type() == Slot::LOOKUP) {
3513 // that is when the entire program is known. 3530 ASSERT(slot->var()->mode() == Variable::DYNAMIC);
3514 ASSERT(slot != NULL);
3515 int index = slot->index();
3516 switch (slot->type()) {
3517 case Slot::PARAMETER:
3518 return ParameterOperand(cgen, index);
3519 3531
3520 case Slot::LOCAL: { 3532 // For now, just do a runtime call.
3521 ASSERT(0 <= index && 3533 __ push(cp);
3522 index < cgen->scope()->num_stack_slots() && 3534 __ mov(r0, Operand(slot->var()->name()));
3523 index >= 0); 3535 __ push(r0);
3524 int local_offset = JavaScriptFrameConstants::kLocal0Offset - 3536
3525 index * kPointerSize; 3537 if (init_state == CONST_INIT) {
3526 return MemOperand(fp, local_offset); 3538 // Same as the case for a normal store, but ignores attribute
3539 // (e.g. READ_ONLY) of context slot so that we can initialize
3540 // const properties (introduced via eval("const foo = (some
3541 // expr);")). Also, uses the current function context instead of
3542 // the top context.
3543 //
3544 // Note that we must declare the foo upon entry of eval(), via a
3545 // context slot declaration, but we cannot initialize it at the
3546 // same time, because the const declaration may be at the end of
3547 // the eval code (sigh...) and the const variable may have been
3548 // used before (where its value is 'undefined'). Thus, we can only
3549 // do the initialization when we actually encounter the expression
3550 // and when the expression operands are defined and valid, and
3551 // thus we need the split into 2 operations: declaration of the
3552 // context slot followed by initialization.
3553 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
3554 } else {
3555 __ CallRuntime(Runtime::kStoreContextSlot, 3);
3556 }
3557 // Storing a variable must keep the (new) value on the expression
3558 // stack. This is necessary for compiling assignment expressions.
3559 __ push(r0);
3560
3561 } else {
3562 ASSERT(slot->var()->mode() != Variable::DYNAMIC);
3563
3564 Label exit;
3565 if (init_state == CONST_INIT) {
3566 ASSERT(slot->var()->mode() == Variable::CONST);
3567 // Only the first const initialization must be executed (the slot
3568 // still contains 'the hole' value). When the assignment is
3569 // executed, the code is identical to a normal store (see below).
3570 Comment cmnt(masm, "[ Init const");
3571 __ ldr(r2, cgen_->SlotOperand(slot, r2));
3572 __ cmp(r2, Operand(Factory::the_hole_value()));
3573 __ b(ne, &exit);
3574 }
3575
3576 // We must execute the store. Storing a variable must keep the
3577 // (new) value on the stack. This is necessary for compiling
3578 // assignment expressions.
3579 //
3580 // Note: We will reach here even with slot->var()->mode() ==
3581 // Variable::CONST because of const declarations which will
3582 // initialize consts to 'the hole' value and by doing so, end up
3583 // calling this code. r2 may be loaded with context; used below in
3584 // RecordWrite.
3585 __ pop(r0);
3586 __ str(r0, cgen_->SlotOperand(slot, r2));
3587 __ push(r0);
3588 if (slot->type() == Slot::CONTEXT) {
3589 // Skip write barrier if the written value is a smi.
3590 __ tst(r0, Operand(kSmiTagMask));
3591 __ b(eq, &exit);
3592 // r2 is loaded with context when calling SlotOperand above.
3593 int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
3594 __ mov(r3, Operand(offset));
3595 __ RecordWrite(r2, r3, r1);
3596 }
3597 // If we definitely did not jump over the assignment, we do not need
3598 // to bind the exit label. Doing so can defeat peephole
3599 // optimization.
3600 if (init_state == CONST_INIT || slot->type() == Slot::CONTEXT) {
3601 __ bind(&exit);
3602 }
3603 }
3604 break;
3527 } 3605 }
3528 3606
3529 case Slot::CONTEXT: { 3607 case NAMED: {
3530 MacroAssembler* masm = cgen->masm(); 3608 Comment cmnt(masm, "[ Store to named Property");
3531 // Follow the context chain if necessary. 3609 Property* property = expression_->AsProperty();
3532 ASSERT(!tmp.is(cp)); // do not overwrite context register 3610 Handle<String> name;
3533 Register context = cp; 3611 if (property == NULL) {
3534 int chain_length = 3612 // Global variable reference treated as named property access.
3535 cgen->scope()->ContextChainLength(slot->var()->scope()); 3613 VariableProxy* proxy = expression_->AsVariableProxy();
3536 for (int i = chain_length; i-- > 0;) { 3614 ASSERT(proxy->AsVariable() != NULL);
3537 // Load the closure. 3615 ASSERT(proxy->AsVariable()->is_global());
3538 // (All contexts, even 'with' contexts, have a closure, 3616 name = proxy->name();
3539 // and it is the same for all contexts inside a function. 3617 } else {
3540 // There is no need to go to the function context first.) 3618 Literal* raw_name = property->key()->AsLiteral();
3541 __ ldr(tmp, ContextOperand(context, Context::CLOSURE_INDEX)); 3619 ASSERT(raw_name != NULL);
3542 // Load the function context (which is the incoming, outer context). 3620 name = Handle<String>(String::cast(*raw_name->handle()));
3543 __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset)); 3621 __ RecordPosition(property->position());
3544 context = tmp;
3545 } 3622 }
3546 // We may have a 'with' context now. Get the function context. 3623
3547 // (In fact this mov may never be the needed, since the scope analysis 3624 // Call the appropriate IC code.
3548 // may not permit a direct context access in this case and thus we are 3625 __ pop(r0); // value
3549 // always at a function context. However it is safe to dereference be- 3626 // Setup the name register.
3550 // cause the function context of a function context is itself. Before 3627 __ mov(r2, Operand(name));
3551 // deleting this mov we should try to create a counter-example first, 3628 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
3552 // though...) 3629 __ Call(ic, RelocInfo::CODE_TARGET);
3553 __ ldr(tmp, ContextOperand(context, Context::FCONTEXT_INDEX)); 3630 __ push(r0);
3554 return ContextOperand(tmp, index); 3631 break;
3632 }
3633
3634 case KEYED: {
3635 Comment cmnt(masm, "[ Store to keyed Property");
3636 Property* property = expression_->AsProperty();
3637 ASSERT(property != NULL);
3638 __ RecordPosition(property->position());
3639 __ pop(r0); // value
3640 SetPropertyStub stub;
3641 __ CallStub(&stub);
3642 __ push(r0);
3643 break;
3555 } 3644 }
3556 3645
3557 default: 3646 default:
3558 UNREACHABLE(); 3647 UNREACHABLE();
3559 return MemOperand(r0, 0);
3560 } 3648 }
3561 } 3649 }
3562 3650
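As a reading aid for the NAMED and KEYED cases above -- my reconstruction from the pops and the Reference comments, not something the patch states explicitly:
  // On entry to SetValue, with the stored value pushed last:
  //   NAMED :  sp[0] = value, sp[1] = receiver
  //   KEYED :  sp[0] = value, sp[1] = key, sp[2] = receiver
  // The value is popped into r0; for NAMED the name goes in r2 and the StoreIC
  // is called, for KEYED the SetPropertyStub is called; in both cases the
  // result in r0 is pushed back so the stored value stays on top of the stack.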
3563
3564 void Property::GenerateStoreCode(CodeGenerator* cgen,
3565 Reference* ref,
3566 InitState init_state) {
3567 MacroAssembler* masm = cgen->masm();
3568 Comment cmnt(masm, "[ Store to Property");
3569 __ RecordPosition(position());
3570 ArmCodeGenerator::SetReferenceProperty(cgen, ref, key());
3571 }
3572
3573
3574 void VariableProxy::GenerateStoreCode(CodeGenerator* cgen,
3575 Reference* ref,
3576 InitState init_state) {
3577 MacroAssembler* masm = cgen->masm();
3578 Comment cmnt(masm, "[ Store to VariableProxy");
3579 Variable* node = var();
3580
3581 Expression* expr = node->rewrite();
3582 if (expr != NULL) {
3583 expr->GenerateStoreCode(cgen, ref, init_state);
3584 } else {
3585 ASSERT(node->is_global());
3586 if (node->AsProperty() != NULL) {
3587 __ RecordPosition(node->AsProperty()->position());
3588 }
3589 Expression* key = new Literal(node->name());
3590 ArmCodeGenerator::SetReferenceProperty(cgen, ref, key);
3591 }
3592 }
3593
3594
3595 void Slot::GenerateStoreCode(CodeGenerator* cgen,
3596 Reference* ref,
3597 InitState init_state) {
3598 MacroAssembler* masm = cgen->masm();
3599 Comment cmnt(masm, "[ Store to Slot");
3600
3601 if (type() == Slot::LOOKUP) {
3602 ASSERT(var()->mode() == Variable::DYNAMIC);
3603
3604 // For now, just do a runtime call.
3605 __ push(cp);
3606 __ mov(r0, Operand(var()->name()));
3607 __ push(r0);
3608
3609 if (init_state == CONST_INIT) {
3610 // Same as the case for a normal store, but ignores attribute
3611 // (e.g. READ_ONLY) of context slot so that we can initialize const
3612 // properties (introduced via eval("const foo = (some expr);")). Also,
3613 // uses the current function context instead of the top context.
3614 //
3615 // Note that we must declare the foo upon entry of eval(), via a
3616 // context slot declaration, but we cannot initialize it at the same
3617 // time, because the const declaration may be at the end of the eval
3618 // code (sigh...) and the const variable may have been used before
3619 // (where its value is 'undefined'). Thus, we can only do the
3620 // initialization when we actually encounter the expression and when
3621 // the expression operands are defined and valid, and thus we need the
3622 // split into 2 operations: declaration of the context slot followed
3623 // by initialization.
3624 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
3625 } else {
3626 __ CallRuntime(Runtime::kStoreContextSlot, 3);
3627 }
3628 // Storing a variable must keep the (new) value on the expression
3629 // stack. This is necessary for compiling assignment expressions.
3630 __ push(r0);
3631
3632 } else {
3633 ASSERT(var()->mode() != Variable::DYNAMIC);
3634
3635 Label exit;
3636 if (init_state == CONST_INIT) {
3637 ASSERT(var()->mode() == Variable::CONST);
3638 // Only the first const initialization must be executed (the slot
3639 // still contains 'the hole' value). When the assignment is executed,
3640 // the code is identical to a normal store (see below).
3641 Comment cmnt(masm, "[ Init const");
3642 __ ldr(r2, ArmCodeGenerator::SlotOperand(cgen, this, r2));
3643 __ cmp(r2, Operand(Factory::the_hole_value()));
3644 __ b(ne, &exit);
3645 }
3646
3647 // We must execute the store.
3648 // r2 may be loaded with context; used below in RecordWrite.
3649 // Storing a variable must keep the (new) value on the stack. This is
3650 // necessary for compiling assignment expressions.
3651 //
3652 // Note: We will reach here even with var()->mode() == Variable::CONST
3653 // because of const declarations which will initialize consts to 'the
3654 // hole' value and by doing so, end up calling this code. r2 may be
3655 // loaded with context; used below in RecordWrite.
3656 __ pop(r0);
3657 __ str(r0, ArmCodeGenerator::SlotOperand(cgen, this, r2));
3658 __ push(r0);
3659
3660 if (type() == Slot::CONTEXT) {
3661 // Skip write barrier if the written value is a smi.
3662 __ tst(r0, Operand(kSmiTagMask));
3663 __ b(eq, &exit);
3664 // r2 is loaded with context when calling SlotOperand above.
3665 int offset = FixedArray::kHeaderSize + index() * kPointerSize;
3666 __ mov(r3, Operand(offset));
3667 __ RecordWrite(r2, r3, r1);
3668 }
3669 // If we definitely did not jump over the assignment, we do not need to
3670 // bind the exit label. Doing so can defeat peephole optimization.
3671 if (init_state == CONST_INIT || type() == Slot::CONTEXT) {
3672 __ bind(&exit);
3673 }
3674 }
3675 }
3676
3677 3651
3678 void GetPropertyStub::Generate(MacroAssembler* masm) { 3652 void GetPropertyStub::Generate(MacroAssembler* masm) {
3679 // sp[0]: key 3653 // sp[0]: key
3680 // sp[1]: receiver 3654 // sp[1]: receiver
3681 Label slow, fast; 3655 Label slow, fast;
3682 // Get the key and receiver object from the stack. 3656 // Get the key and receiver object from the stack.
3683 __ ldm(ia, sp, r0.bit() | r1.bit()); 3657 __ ldm(ia, sp, r0.bit() | r1.bit());
3684 // Check that the key is a smi. 3658 // Check that the key is a smi.
3685 __ tst(r0, Operand(kSmiTagMask)); 3659 __ tst(r0, Operand(kSmiTagMask));
3686 __ b(ne, &slow); 3660 __ b(ne, &slow);
(...skipping 823 matching lines...)
4510 __ add(r3, r2, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize)); 4484 __ add(r3, r2, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
4511 __ add(r3, r3, Operand(StandardFrameConstants::kCallerSPOffset)); 4485 __ add(r3, r3, Operand(StandardFrameConstants::kCallerSPOffset));
4512 __ str(r3, MemOperand(sp, 1 * kPointerSize)); 4486 __ str(r3, MemOperand(sp, 1 * kPointerSize));
4513 4487
4514 // Do the runtime call to allocate the arguments object. 4488 // Do the runtime call to allocate the arguments object.
4515 __ bind(&runtime); 4489 __ bind(&runtime);
4516 __ TailCallRuntime(ExternalReference(Runtime::kNewArgumentsFast), 3); 4490 __ TailCallRuntime(ExternalReference(Runtime::kNewArgumentsFast), 3);
4517 } 4491 }
4518 4492
4519 4493
4520 void ArmCodeGenerator::SetReferenceProperty(CodeGenerator* cgen,
4521 Reference* ref,
4522 Expression* key) {
4523 ASSERT(!ref->is_illegal());
4524 MacroAssembler* masm = cgen->masm();
4525
4526 if (ref->type() == Reference::NAMED) {
4527 // Compute the name of the property.
4528 Literal* literal = key->AsLiteral();
4529 Handle<String> name(String::cast(*literal->handle()));
4530
4531 // Call the appropriate IC code.
4532 __ pop(r0); // value
4533 // Setup the name register.
4534 __ mov(r2, Operand(name));
4535 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
4536 __ Call(ic, RelocInfo::CODE_TARGET);
4537
4538 } else {
4539 // Access keyed property.
4540 ASSERT(ref->type() == Reference::KEYED);
4541
4542 __ pop(r0); // value
4543 SetPropertyStub stub;
4544 __ CallStub(&stub);
4545 }
4546 __ push(r0);
4547 }
4548
4549
4550 void CallFunctionStub::Generate(MacroAssembler* masm) { 4494 void CallFunctionStub::Generate(MacroAssembler* masm) {
4551 Label slow; 4495 Label slow;
4552 // Get the function to call from the stack. 4496 // Get the function to call from the stack.
4553 // function, receiver [, arguments] 4497 // function, receiver [, arguments]
4554 __ ldr(r1, MemOperand(sp, (argc_ + 1) * kPointerSize)); 4498 __ ldr(r1, MemOperand(sp, (argc_ + 1) * kPointerSize));
4555 4499
4556 // Check that the function is really a JavaScript function. 4500 // Check that the function is really a JavaScript function.
4557 // r1: pushed function (to be verified) 4501 // r1: pushed function (to be verified)
4558 __ tst(r1, Operand(kSmiTagMask)); 4502 __ tst(r1, Operand(kSmiTagMask));
4559 __ b(eq, &slow); 4503 __ b(eq, &slow);
(...skipping 28 matching lines...)
4588 bool is_eval) { 4532 bool is_eval) {
4589 Handle<Code> code = ArmCodeGenerator::MakeCode(fun, script, is_eval); 4533 Handle<Code> code = ArmCodeGenerator::MakeCode(fun, script, is_eval);
4590 if (!code.is_null()) { 4534 if (!code.is_null()) {
4591 Counters::total_compiled_code_size.Increment(code->instruction_size()); 4535 Counters::total_compiled_code_size.Increment(code->instruction_size());
4592 } 4536 }
4593 return code; 4537 return code;
4594 } 4538 }
4595 4539
4596 4540
4597 } } // namespace v8::internal 4541 } } // namespace v8::internal