OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 201 matching lines...)
212 // stack frame was an arguments adapter frame. | 212 // stack frame was an arguments adapter frame. |
213 ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT); | 213 ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT); |
214 __ CallStub(&stub); | 214 __ CallStub(&stub); |
215 // Duplicate the value; move-to-slot operation might clobber registers. | 215 // Duplicate the value; move-to-slot operation might clobber registers. |
216 __ mov(r3, r0); | 216 __ mov(r3, r0); |
217 Move(arguments->AsSlot(), r0, r1, r2); | 217 Move(arguments->AsSlot(), r0, r1, r2); |
218 Slot* dot_arguments_slot = scope()->arguments_shadow()->AsSlot(); | 218 Slot* dot_arguments_slot = scope()->arguments_shadow()->AsSlot(); |
219 Move(dot_arguments_slot, r3, r1, r2); | 219 Move(dot_arguments_slot, r3, r1, r2); |
220 } | 220 } |
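
Why the value is duplicated above: a hedged reading, based on the clobber comment in the code, is that Move() into a slot may emit a write barrier that scratches r0-r2, so a second live copy is kept in r3 for the second store. Annotated sketch (the annotations are ours, not the patch author's):

    __ mov(r3, r0);                         // keep a second copy of the arguments object
    Move(arguments->AsSlot(), r0, r1, r2);  // may clobber r0-r2 (e.g. via a write barrier)
    Move(dot_arguments_slot, r3, r1, r2);   // r3 still holds a live copy
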
221 | 221 |
222 { Comment cmnt(masm_, "[ Declarations"); | |
223 // For named function expressions, declare the function name as a | |
224 // constant. | |
225 if (scope()->is_function_scope() && scope()->function() != NULL) { | |
226 EmitDeclaration(scope()->function(), Variable::CONST, NULL); | |
227 } | |
228 // Visit all the explicit declarations unless there is an illegal | |
229 // redeclaration. | |
230 if (scope()->HasIllegalRedeclaration()) { | |
231 scope()->VisitIllegalRedeclaration(this); | |
232 } else { | |
233 VisitDeclarations(scope()->declarations()); | |
234 } | |
235 } | |
236 | |
237 if (FLAG_trace) { | 222 if (FLAG_trace) { |
238 __ CallRuntime(Runtime::kTraceEnter, 0); | 223 __ CallRuntime(Runtime::kTraceEnter, 0); |
239 } | 224 } |
240 | 225 |
241 // Check the stack for overflow or break request. | 226 // Visit the declarations and body unless there is an illegal |
242 { Comment cmnt(masm_, "[ Stack check"); | 227 // redeclaration. |
243 PrepareForBailout(info->function(), NO_REGISTERS); | 228 if (scope()->HasIllegalRedeclaration()) { |
244 Label ok; | 229 Comment cmnt(masm_, "[ Declarations"); |
245 __ LoadRoot(ip, Heap::kStackLimitRootIndex); | 230 scope()->VisitIllegalRedeclaration(this); |
246 __ cmp(sp, Operand(ip)); | 231 |
247 __ b(hs, &ok); | 232 } else { |
248 StackCheckStub stub; | 233 { Comment cmnt(masm_, "[ Declarations"); |
249 __ CallStub(&stub); | 234 // For named function expressions, declare the function name as a |
250 __ bind(&ok); | 235 // constant. |
| 236 if (scope()->is_function_scope() && scope()->function() != NULL) { |
| 237 EmitDeclaration(scope()->function(), Variable::CONST, NULL); |
| 238 } |
| 239 VisitDeclarations(scope()->declarations()); |
| 240 } |
| 241 |
| 242 { Comment cmnt(masm_, "[ Stack check"); |
| 243 PrepareForBailout(info->function(), NO_REGISTERS); |
| 244 Label ok; |
| 245 __ LoadRoot(ip, Heap::kStackLimitRootIndex); |
| 246 __ cmp(sp, Operand(ip)); |
| 247 __ b(hs, &ok); |
| 248 StackCheckStub stub; |
| 249 __ CallStub(&stub); |
| 250 __ bind(&ok); |
| 251 } |
| 252 |
| 253 { Comment cmnt(masm_, "[ Body"); |
| 254 ASSERT(loop_depth() == 0); |
| 255 VisitStatements(function()->body()); |
| 256 ASSERT(loop_depth() == 0); |
| 257 } |
251 } | 258 } |
252 | 259 |
253 { Comment cmnt(masm_, "[ Body"); | 260 // Always emit a 'return undefined' in case control fell off the end of |
254 ASSERT(loop_depth() == 0); | 261 // the body. |
255 VisitStatements(function()->body()); | |
256 ASSERT(loop_depth() == 0); | |
257 } | |
258 | |
259 { Comment cmnt(masm_, "[ return <undefined>;"); | 262 { Comment cmnt(masm_, "[ return <undefined>;"); |
260 // Emit a 'return undefined' in case control fell off the end of the | |
261 // body. | |
262 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); | 263 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); |
263 } | 264 } |
264 EmitReturnSequence(); | 265 EmitReturnSequence(); |
265 | 266 |
266 // Force emit the constant pool, so it doesn't get emitted in the middle | 267 // Force emit the constant pool, so it doesn't get emitted in the middle |
267 // of the stack check table. | 268 // of the stack check table. |
268 masm()->CheckConstPool(true, false); | 269 masm()->CheckConstPool(true, false); |
269 } | 270 } |
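
The reordered prologue reduces to the following control flow (a condensed sketch using the names from this file; the inline comments are our reading of the change):

    if (FLAG_trace) __ CallRuntime(Runtime::kTraceEnter, 0);
    if (scope()->HasIllegalRedeclaration()) {
      // The body can never run, so only the code signalling the
      // redeclaration error is emitted; declarations, the stack
      // check, and the body are all skipped as dead code.
      scope()->VisitIllegalRedeclaration(this);
    } else {
      // declarations, stack check, body -- as before
    }
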
270 | 271 |
271 | 272 |
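
On the forced constant-pool emission: the ARM assembler batches large immediates into pools that it flushes inline between instructions, and flushing explicitly here keeps pool data from landing inside the table that follows. A sketch of the call; the parameter readings are an assumption about the Assembler interface:

    masm()->CheckConstPool(true,    // force_emit: flush any pending pool now
                           false);  // require_jump: no branch around the pool is needed
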
(...skipping 415 matching lines...)
687 } else if (function != NULL) { | 688 } else if (function != NULL) { |
688 VisitForAccumulatorValue(function); | 689 VisitForAccumulatorValue(function); |
689 __ str(result_register(), MemOperand(fp, SlotOffset(slot))); | 690 __ str(result_register(), MemOperand(fp, SlotOffset(slot))); |
690 } | 691 } |
691 break; | 692 break; |
692 | 693 |
693 case Slot::CONTEXT: | 694 case Slot::CONTEXT: |
694 // We bypass the general EmitSlotSearch because we know more about | 695 // We bypass the general EmitSlotSearch because we know more about |
695 // this specific context. | 696 // this specific context. |
696 | 697 |
697 // The variable in the decl always resides in the current context. | 698 // The variable in the decl always resides in the current function |
| 699 // context. |
698 ASSERT_EQ(0, scope()->ContextChainLength(variable->scope())); | 700 ASSERT_EQ(0, scope()->ContextChainLength(variable->scope())); |
699 if (FLAG_debug_code) { | 701 if (FLAG_debug_code) { |
700 // Check if we have the correct context pointer. | 702 // Check that we're not inside a 'with'. |
701 __ ldr(r1, ContextOperand(cp, Context::FCONTEXT_INDEX)); | 703 __ ldr(r1, ContextOperand(cp, Context::FCONTEXT_INDEX)); |
702 __ cmp(r1, cp); | 704 __ cmp(r1, cp); |
703 __ Check(eq, "Unexpected declaration in current context."); | 705 __ Check(eq, "Unexpected declaration in current context."); |
704 } | 706 } |
705 if (mode == Variable::CONST) { | 707 if (mode == Variable::CONST) { |
706 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); | 708 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); |
707 __ str(ip, ContextOperand(cp, slot->index())); | 709 __ str(ip, ContextOperand(cp, slot->index())); |
708 // No write barrier since the_hole_value is in old space. | 710 // No write barrier since the_hole_value is in old space. |
709 } else if (function != NULL) { | 711 } else if (function != NULL) { |
710 VisitForAccumulatorValue(function); | 712 VisitForAccumulatorValue(function); |
(...skipping 319 matching lines...)
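
The hole stores in the declaration code above implement V8's (pre-ES6) const semantics with a sentinel value. A standalone illustration of the idea, not V8 code (std::optional plays the role of the hole):

    #include <optional>

    std::optional<int> slot;     // declaration: slot starts out as the hole
    void InitConst(int v) {
      if (!slot) slot = v;       // first initialization wins; later ones are skipped
    }
    int Read() {
      return slot ? *slot : 0;   // an uninitialized const reads as undefined
    }
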
1030 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) { | 1032 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) { |
1031 Comment cmnt(masm_, "[ VariableProxy"); | 1033 Comment cmnt(masm_, "[ VariableProxy"); |
1032 EmitVariableLoad(expr->var()); | 1034 EmitVariableLoad(expr->var()); |
1033 } | 1035 } |
1034 | 1036 |
1035 | 1037 |
1036 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions( | 1038 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions( |
1037 Slot* slot, | 1039 Slot* slot, |
1038 Label* slow) { | 1040 Label* slow) { |
1039 ASSERT(slot->type() == Slot::CONTEXT); | 1041 ASSERT(slot->type() == Slot::CONTEXT); |
1040 Register current = cp; | 1042 Register context = cp; |
1041 Register next = r3; | 1043 Register next = r3; |
1042 Register temp = r4; | 1044 Register temp = r4; |
1043 | 1045 |
1044 for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) { | 1046 for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) { |
1045 if (s->num_heap_slots() > 0) { | 1047 if (s->num_heap_slots() > 0) { |
1046 if (s->calls_eval()) { | 1048 if (s->calls_eval()) { |
1047 // Check that extension is NULL. | 1049 // Check that extension is NULL. |
1048 __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX)); | 1050 __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX)); |
1049 __ tst(temp, temp); | 1051 __ tst(temp, temp); |
1050 __ b(ne, slow); | 1052 __ b(ne, slow); |
1051 } | 1053 } |
1052 __ ldr(next, ContextOperand(current, Context::CLOSURE_INDEX)); | 1054 __ ldr(next, ContextOperand(context, Context::CLOSURE_INDEX)); |
1053 __ ldr(next, FieldMemOperand(next, JSFunction::kContextOffset)); | 1055 __ ldr(next, FieldMemOperand(next, JSFunction::kContextOffset)); |
1054 // Walk the rest of the chain without clobbering cp. | 1056 // Walk the rest of the chain without clobbering cp. |
1055 current = next; | 1057 context = next; |
1056 } | 1058 } |
1057 } | 1059 } |
1058 // Check that last extension is NULL. | 1060 // Check that last extension is NULL. |
1059 __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX)); | 1061 __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX)); |
1060 __ tst(temp, temp); | 1062 __ tst(temp, temp); |
1061 __ b(ne, slow); | 1063 __ b(ne, slow); |
1062 __ ldr(temp, ContextOperand(current, Context::FCONTEXT_INDEX)); | 1064 |
1063 return ContextOperand(temp, slot->index()); | 1065 // This function is used only for loads, not stores, so it's safe to |
| 1066 // return a cp-based operand (the write barrier cannot be allowed to |
| 1067 // destroy the cp register). |
| 1068 return ContextOperand(context, slot->index()); |
1064 } | 1069 } |
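
In outline, the function above emits the following (the loop runs at compile time, the checks at run time; the phrasing is ours):

    // For each scope between the use and the variable's scope that
    // allocates a heap context:
    //   if the scope calls eval, emit:
    //       if (context->extension != NULL) goto slow;  // eval may shadow the slot
    //   emit:
    //       context = context->closure()->context();    // hop to the enclosing context
    // Then check the final extension and return a context-relative operand.
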
1065 | 1070 |
1066 | 1071 |
1067 void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase( | 1072 void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase( |
1068 Slot* slot, | 1073 Slot* slot, |
1069 TypeofState typeof_state, | 1074 TypeofState typeof_state, |
1070 Label* slow, | 1075 Label* slow, |
1071 Label* done) { | 1076 Label* done) { |
1072 // Generate fast-case code for variables that might be shadowed by | 1077 // Generate fast-case code for variables that might be shadowed by |
1073 // eval-introduced variables. Eval is used a lot without | 1078 // eval-introduced variables. Eval is used a lot without |
(...skipping 923 matching lines...)
1997 // Assignment to a global variable. Use inline caching for the | 2002 // Assignment to a global variable. Use inline caching for the |
1998 // assignment. Right-hand-side value is passed in r0, variable name in | 2003 // assignment. Right-hand-side value is passed in r0, variable name in |
1999 // r2, and the global object in r1. | 2004 // r2, and the global object in r1. |
2000 __ mov(r2, Operand(var->name())); | 2005 __ mov(r2, Operand(var->name())); |
2001 __ ldr(r1, GlobalObjectOperand()); | 2006 __ ldr(r1, GlobalObjectOperand()); |
2002 Handle<Code> ic(Builtins::builtin(is_strict() | 2007 Handle<Code> ic(Builtins::builtin(is_strict() |
2003 ? Builtins::StoreIC_Initialize_Strict | 2008 ? Builtins::StoreIC_Initialize_Strict |
2004 : Builtins::StoreIC_Initialize)); | 2009 : Builtins::StoreIC_Initialize)); |
2005 EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT); | 2010 EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT); |
2006 | 2011 |
2007 } else if (var->mode() != Variable::CONST || op == Token::INIT_CONST) { | 2012 } else if (op == Token::INIT_CONST) { |
2008 // Perform the assignment for non-const variables and for initialization | 2013 // Like var declarations, const declarations are hoisted to function |
2009 // of const variables. Const assignments are simply skipped. | 2014 // scope. However, unlike var initializers, const initializers are able |
2010 Label done; | 2015 // to drill a hole to that function context, even from inside a 'with' |
| 2016 // context. We thus bypass the normal static scope lookup. |
| 2017 Slot* slot = var->AsSlot(); |
| 2018 Label skip; |
| 2019 switch (slot->type()) { |
| 2020 case Slot::PARAMETER: |
| 2021 // No const parameters. |
| 2022 UNREACHABLE(); |
| 2023 break; |
| 2024 case Slot::LOCAL: |
| 2025 // Detect const reinitialization by checking for the hole value. |
| 2026 __ ldr(r1, MemOperand(fp, SlotOffset(slot))); |
| 2027 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); |
| 2028 __ cmp(r1, ip); |
| 2029 __ b(ne, &skip); |
| 2030 __ str(result_register(), MemOperand(fp, SlotOffset(slot))); |
| 2031 break; |
| 2032 case Slot::CONTEXT: { |
| 2033 __ ldr(r1, ContextOperand(cp, Context::FCONTEXT_INDEX)); |
| 2034 __ ldr(r2, ContextOperand(r1, slot->index())); |
| 2035 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); |
| 2036 __ cmp(r2, ip); |
| 2037 __ b(ne, &skip); |
| 2038 __ str(r0, ContextOperand(r1, slot->index())); |
| 2039 int offset = Context::SlotOffset(slot->index()); |
| 2040 __ mov(r3, r0); // Preserve the stored value in r0. |
| 2041 __ RecordWrite(r1, Operand(offset), r3, r2); |
| 2042 break; |
| 2043 } |
| 2044 case Slot::LOOKUP: |
| 2045 __ push(r0); |
| 2046 __ mov(r0, Operand(slot->var()->name())); |
| 2047 __ Push(cp, r0); // Context and name. |
| 2048 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3); |
| 2049 break; |
| 2050 } |
| 2051 __ bind(&skip); |
| 2052 |
| 2053 } else if (var->mode() != Variable::CONST) { |
| 2054 // Perform the assignment for non-const variables. Const assignments |
| 2055 // are simply skipped. |
2011 Slot* slot = var->AsSlot(); | 2056 Slot* slot = var->AsSlot(); |
2012 switch (slot->type()) { | 2057 switch (slot->type()) { |
2013 case Slot::PARAMETER: | 2058 case Slot::PARAMETER: |
2014 case Slot::LOCAL: | 2059 case Slot::LOCAL: |
2015 if (op == Token::INIT_CONST) { | |
2016 // Detect const reinitialization by checking for the hole value. | |
2017 __ ldr(r1, MemOperand(fp, SlotOffset(slot))); | |
2018 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); | |
2019 __ cmp(r1, ip); | |
2020 __ b(ne, &done); | |
2021 } | |
2022 // Perform the assignment. | 2060 // Perform the assignment. |
2023 __ str(result_register(), MemOperand(fp, SlotOffset(slot))); | 2061 __ str(result_register(), MemOperand(fp, SlotOffset(slot))); |
2024 break; | 2062 break; |
2025 | 2063 |
2026 case Slot::CONTEXT: { | 2064 case Slot::CONTEXT: { |
2027 MemOperand target = EmitSlotSearch(slot, r1); | 2065 MemOperand target = EmitSlotSearch(slot, r1); |
2028 if (op == Token::INIT_CONST) { | |
2029 // Detect const reinitialization by checking for the hole value. | |
2030 __ ldr(r2, target); | |
2031 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); | |
2032 __ cmp(r2, ip); | |
2033 __ b(ne, &done); | |
2034 } | |
2035 // Perform the assignment and issue the write barrier. | 2066 // Perform the assignment and issue the write barrier. |
2036 __ str(result_register(), target); | 2067 __ str(result_register(), target); |
2037 // RecordWrite may destroy all its register arguments. | 2068 // RecordWrite may destroy all its register arguments. |
2038 __ mov(r3, result_register()); | 2069 __ mov(r3, result_register()); |
2039 int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize; | 2070 int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize; |
2040 __ RecordWrite(r1, Operand(offset), r2, r3); | 2071 __ RecordWrite(r1, Operand(offset), r2, r3); |
2041 break; | 2072 break; |
2042 } | 2073 } |
2043 | 2074 |
2044 case Slot::LOOKUP: | 2075 case Slot::LOOKUP: |
2045 // Call the runtime for the assignment. The runtime will ignore | 2076 // Call the runtime for the assignment. |
2046 // const reinitialization. | |
2047 __ push(r0); // Value. | 2077 __ push(r0); // Value. |
2048 __ mov(r0, Operand(slot->var()->name())); | 2078 __ mov(r0, Operand(slot->var()->name())); |
2049 __ Push(cp, r0); // Context and name. | 2079 __ Push(cp, r0); // Context and name. |
2050 if (op == Token::INIT_CONST) { | 2080 __ CallRuntime(Runtime::kStoreContextSlot, 3); |
2051 // The runtime will ignore const redeclaration. | |
2052 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3); | |
2053 } else { | |
2054 __ CallRuntime(Runtime::kStoreContextSlot, 3); | |
2055 } | |
2056 break; | 2081 break; |
2057 } | 2082 } |
2058 __ bind(&done); | |
2059 } | 2083 } |
2060 } | 2084 } |
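
Each case of the new INIT_CONST path follows the same pattern: check for the hole, store only if the slot is still uninitialized, and (for context slots) notify the GC. Condensed from the context-slot case above, with annotations that are our reading of the code:

    __ ldr(r1, ContextOperand(cp, Context::FCONTEXT_INDEX));  // function context
    __ ldr(r2, ContextOperand(r1, slot->index()));            // current slot value
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
    __ cmp(r2, ip);
    __ b(ne, &skip);                  // already initialized: const init is a no-op
    __ str(r0, ContextOperand(r1, slot->index()));
    __ mov(r3, r0);                   // RecordWrite may clobber its register
                                      // arguments, so pass a copy and keep
                                      // the stored value live in r0
    __ RecordWrite(r1, Operand(Context::SlotOffset(slot->index())), r3, r2);
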
2061 | 2085 |
2062 | 2086 |
2063 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { | 2087 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { |
2064 // Assignment to a property, using a named store IC. | 2088 // Assignment to a property, using a named store IC. |
2065 Property* prop = expr->target()->AsProperty(); | 2089 Property* prop = expr->target()->AsProperty(); |
2066 ASSERT(prop != NULL); | 2090 ASSERT(prop != NULL); |
2067 ASSERT(prop->key()->AsLiteral() != NULL); | 2091 ASSERT(prop->key()->AsLiteral() != NULL); |
2068 | 2092 |
(...skipping 1956 matching lines...)
4025 __ mov(r1, Operand(r1, ASR, 1)); // Un-smi-tag value. | 4049 __ mov(r1, Operand(r1, ASR, 1)); // Un-smi-tag value. |
4026 __ add(pc, r1, Operand(masm_->CodeObject())); | 4050 __ add(pc, r1, Operand(masm_->CodeObject())); |
4027 } | 4051 } |
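
The two instructions above decode a smi-tagged code offset: a smi stores its integer shifted left by one bit, so ASR #1 recovers the raw offset, and adding the code object's address produces the jump target. As arithmetic (a sketch of our reading, not emitted code):

    // smi encoding: smi = offset << 1 (low bit clear)
    // offset       = smi >> 1                    (the ASR #1 above)
    // jump target  = code object base + offset   (the add into pc)
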
4028 | 4052 |
4029 | 4053 |
4030 #undef __ | 4054 #undef __ |
4031 | 4055 |
4032 } } // namespace v8::internal | 4056 } } // namespace v8::internal |
4033 | 4057 |
4034 #endif // V8_TARGET_ARCH_ARM | 4058 #endif // V8_TARGET_ARCH_ARM |