| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 113 matching lines...) |
| 124 // o sp: stack pointer | 124 // o sp: stack pointer |
| 125 // o lr: return address | 125 // o lr: return address |
| 126 // | 126 // |
| 127 // The function builds a JS frame. Please see JavaScriptFrameConstants in | 127 // The function builds a JS frame. Please see JavaScriptFrameConstants in |
| 128 // frames-arm.h for its layout. | 128 // frames-arm.h for its layout. |
| 129 void FullCodeGenerator::Generate() { | 129 void FullCodeGenerator::Generate() { |
| 130 CompilationInfo* info = info_; | 130 CompilationInfo* info = info_; |
| 131 handler_table_ = | 131 handler_table_ = |
| 132 isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED); | 132 isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED); |
| 133 | 133 |
| 134 InitializeFeedbackVector(); | |
| 135 | |
| 136 profiling_counter_ = isolate()->factory()->NewCell( | 134 profiling_counter_ = isolate()->factory()->NewCell( |
| 137 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate())); | 135 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate())); |
| 138 SetFunctionPosition(function()); | 136 SetFunctionPosition(function()); |
| 139 Comment cmnt(masm_, "[ function compiled by full code generator"); | 137 Comment cmnt(masm_, "[ function compiled by full code generator"); |
| 140 | 138 |
| 141 ProfileEntryHookStub::MaybeCallEntryHook(masm_); | 139 ProfileEntryHookStub::MaybeCallEntryHook(masm_); |
| 142 | 140 |
| 143 #ifdef DEBUG | 141 #ifdef DEBUG |
| 144 if (strlen(FLAG_stop_at) > 0 && | 142 if (strlen(FLAG_stop_at) > 0 && |
| 145 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) { | 143 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) { |
| 146 __ stop("stop-at"); | 144 __ stop("stop-at"); |
| 147 } | 145 } |
| 148 #endif | 146 #endif |
| 149 | 147 |
| 150 // Classic mode functions and builtins need to replace the receiver with the | 148 // Sloppy mode functions and builtins need to replace the receiver with the |
| 151 // global proxy when called as functions (without an explicit receiver | 149 // global proxy when called as functions (without an explicit receiver |
| 152 // object). | 150 // object). |
| 153 if (info->is_classic_mode() && !info->is_native()) { | 151 if (info->strict_mode() == SLOPPY && !info->is_native()) { |
| 154 Label ok; | 152 Label ok; |
| 155 int receiver_offset = info->scope()->num_parameters() * kPointerSize; | 153 int receiver_offset = info->scope()->num_parameters() * kPointerSize; |
| 156 __ ldr(r2, MemOperand(sp, receiver_offset)); | 154 __ ldr(r2, MemOperand(sp, receiver_offset)); |
| 157 __ CompareRoot(r2, Heap::kUndefinedValueRootIndex); | 155 __ CompareRoot(r2, Heap::kUndefinedValueRootIndex); |
| 158 __ b(ne, &ok); | 156 __ b(ne, &ok); |
| 159 | 157 |
| 160 __ ldr(r2, GlobalObjectOperand()); | 158 __ ldr(r2, GlobalObjectOperand()); |
| 161 __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset)); | 159 __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset)); |
| 162 | 160 |
| 163 __ str(r2, MemOperand(sp, receiver_offset)); | 161 __ str(r2, MemOperand(sp, receiver_offset)); |
| 164 | 162 |
| 165 __ bind(&ok); | 163 __ bind(&ok); |
| 166 } | 164 } |
| 167 | 165 |
| 168 // Open a frame scope to indicate that there is a frame on the stack. The | 166 // Open a frame scope to indicate that there is a frame on the stack. The |
| 169 // MANUAL indicates that the scope shouldn't actually generate code to set up | 167 // MANUAL indicates that the scope shouldn't actually generate code to set up |
| 170 // the frame (that is done below). | 168 // the frame (that is done below). |
| 171 FrameScope frame_scope(masm_, StackFrame::MANUAL); | 169 FrameScope frame_scope(masm_, StackFrame::MANUAL); |
| 172 | 170 |
| 173 info->set_prologue_offset(masm_->pc_offset()); | 171 info->set_prologue_offset(masm_->pc_offset()); |
| 174 __ Prologue(BUILD_FUNCTION_FRAME); | 172 __ Prologue(BUILD_FUNCTION_FRAME); |
| 175 info->AddNoFrameRange(0, masm_->pc_offset()); | 173 info->AddNoFrameRange(0, masm_->pc_offset()); |
| 176 __ LoadConstantPoolPointerRegister(); | |
| 177 | 174 |
| 178 { Comment cmnt(masm_, "[ Allocate locals"); | 175 { Comment cmnt(masm_, "[ Allocate locals"); |
| 179 int locals_count = info->scope()->num_stack_slots(); | 176 int locals_count = info->scope()->num_stack_slots(); |
| 180 // Generators allocate locals, if any, in context slots. | 177 // Generators allocate locals, if any, in context slots. |
| 181 ASSERT(!info->function()->is_generator() || locals_count == 0); | 178 ASSERT(!info->function()->is_generator() || locals_count == 0); |
| 182 if (locals_count > 0) { | 179 if (locals_count > 0) { |
| 183 // Emit a loop to initialize stack cells for locals when optimizing for | 180 // Emit a loop to initialize stack cells for locals when optimizing for |
| 184 // size. Otherwise, unroll the loop for maximum performance. | 181 // size. Otherwise, unroll the loop for maximum performance. |
| 185 __ LoadRoot(r9, Heap::kUndefinedValueRootIndex); | 182 __ LoadRoot(r9, Heap::kUndefinedValueRootIndex); |
| 186 if (FLAG_optimize_for_size && locals_count > 4) { | 183 if (FLAG_optimize_for_size && locals_count > 4) { |
| (...skipping 70 matching lines...) |
| 257 __ add(r2, fp, | 254 __ add(r2, fp, |
| 258 Operand(StandardFrameConstants::kCallerSPOffset + offset)); | 255 Operand(StandardFrameConstants::kCallerSPOffset + offset)); |
| 259 __ mov(r1, Operand(Smi::FromInt(num_parameters))); | 256 __ mov(r1, Operand(Smi::FromInt(num_parameters))); |
| 260 __ Push(r3, r2, r1); | 257 __ Push(r3, r2, r1); |
| 261 | 258 |
| 262 // Arguments to ArgumentsAccessStub: | 259 // Arguments to ArgumentsAccessStub: |
| 263 // function, receiver address, parameter count. | 260 // function, receiver address, parameter count. |
| 264 // The stub will rewrite receiver and parameter count if the previous | 261 // The stub will rewrite receiver and parameter count if the previous |
| 265 // stack frame was an arguments adapter frame. | 262 // stack frame was an arguments adapter frame. |
| 266 ArgumentsAccessStub::Type type; | 263 ArgumentsAccessStub::Type type; |
| 267 if (!is_classic_mode()) { | 264 if (strict_mode() == STRICT) { |
| 268 type = ArgumentsAccessStub::NEW_STRICT; | 265 type = ArgumentsAccessStub::NEW_STRICT; |
| 269 } else if (function()->has_duplicate_parameters()) { | 266 } else if (function()->has_duplicate_parameters()) { |
| 270 type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW; | 267 type = ArgumentsAccessStub::NEW_SLOPPY_SLOW; |
| 271 } else { | 268 } else { |
| 272 type = ArgumentsAccessStub::NEW_NON_STRICT_FAST; | 269 type = ArgumentsAccessStub::NEW_SLOPPY_FAST; |
| 273 } | 270 } |
| 274 ArgumentsAccessStub stub(type); | 271 ArgumentsAccessStub stub(type); |
| 275 __ CallStub(&stub); | 272 __ CallStub(&stub); |
| 276 | 273 |
| 277 SetVar(arguments, r0, r1, r2); | 274 SetVar(arguments, r0, r1, r2); |
| 278 } | 275 } |
| 279 | 276 |
| 280 if (FLAG_trace) { | 277 if (FLAG_trace) { |
| 281 __ CallRuntime(Runtime::kTraceEnter, 0); | 278 __ CallRuntime(Runtime::kTraceEnter, 0); |
| 282 } | 279 } |
| 283 | 280 |
| 284 // Visit the declarations and body unless there is an illegal | 281 // Visit the declarations and body unless there is an illegal |
| 285 // redeclaration. | 282 // redeclaration. |
| 286 if (scope()->HasIllegalRedeclaration()) { | 283 if (scope()->HasIllegalRedeclaration()) { |
| 287 Comment cmnt(masm_, "[ Declarations"); | 284 Comment cmnt(masm_, "[ Declarations"); |
| 288 scope()->VisitIllegalRedeclaration(this); | 285 scope()->VisitIllegalRedeclaration(this); |
| 289 | 286 |
| 290 } else { | 287 } else { |
| 291 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS); | 288 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS); |
| 292 { Comment cmnt(masm_, "[ Declarations"); | 289 { Comment cmnt(masm_, "[ Declarations"); |
| 293 // For named function expressions, declare the function name as a | 290 // For named function expressions, declare the function name as a |
| 294 // constant. | 291 // constant. |
| 295 if (scope()->is_function_scope() && scope()->function() != NULL) { | 292 if (scope()->is_function_scope() && scope()->function() != NULL) { |
| 296 VariableDeclaration* function = scope()->function(); | 293 VariableDeclaration* function = scope()->function(); |
| 297 ASSERT(function->proxy()->var()->mode() == CONST || | 294 ASSERT(function->proxy()->var()->mode() == CONST || |
| 298 function->proxy()->var()->mode() == CONST_HARMONY); | 295 function->proxy()->var()->mode() == CONST_LEGACY); |
| 299 ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED); | 296 ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED); |
| 300 VisitVariableDeclaration(function); | 297 VisitVariableDeclaration(function); |
| 301 } | 298 } |
| 302 VisitDeclarations(scope()->declarations()); | 299 VisitDeclarations(scope()->declarations()); |
| 303 } | 300 } |
| 304 | 301 |
| 305 { Comment cmnt(masm_, "[ Stack check"); | 302 { Comment cmnt(masm_, "[ Stack check"); |
| 306 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS); | 303 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS); |
| 307 Label ok; | 304 Label ok; |
| 308 __ LoadRoot(ip, Heap::kStackLimitRootIndex); | 305 __ LoadRoot(ip, Heap::kStackLimitRootIndex); |
| (...skipping 476 matching lines...) |
| 785 | 782 |
| 786 | 783 |
| 787 void FullCodeGenerator::VisitVariableDeclaration( | 784 void FullCodeGenerator::VisitVariableDeclaration( |
| 788 VariableDeclaration* declaration) { | 785 VariableDeclaration* declaration) { |
| 789 // If it was not possible to allocate the variable at compile time, we | 786 // If it was not possible to allocate the variable at compile time, we |
| 790 // need to "declare" it at runtime to make sure it actually exists in the | 787 // need to "declare" it at runtime to make sure it actually exists in the |
| 791 // local context. | 788 // local context. |
| 792 VariableProxy* proxy = declaration->proxy(); | 789 VariableProxy* proxy = declaration->proxy(); |
| 793 VariableMode mode = declaration->mode(); | 790 VariableMode mode = declaration->mode(); |
| 794 Variable* variable = proxy->var(); | 791 Variable* variable = proxy->var(); |
| 795 bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET; | 792 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY; |
| 796 switch (variable->location()) { | 793 switch (variable->location()) { |
| 797 case Variable::UNALLOCATED: | 794 case Variable::UNALLOCATED: |
| 798 globals_->Add(variable->name(), zone()); | 795 globals_->Add(variable->name(), zone()); |
| 799 globals_->Add(variable->binding_needs_init() | 796 globals_->Add(variable->binding_needs_init() |
| 800 ? isolate()->factory()->the_hole_value() | 797 ? isolate()->factory()->the_hole_value() |
| 801 : isolate()->factory()->undefined_value(), | 798 : isolate()->factory()->undefined_value(), |
| 802 zone()); | 799 zone()); |
| 803 break; | 800 break; |
| 804 | 801 |
| 805 case Variable::PARAMETER: | 802 case Variable::PARAMETER: |
| (...skipping 354 matching lines...) |
| 1160 __ jmp(&loop); | 1157 __ jmp(&loop); |
| 1161 | 1158 |
| 1162 __ bind(&no_descriptors); | 1159 __ bind(&no_descriptors); |
| 1163 __ Drop(1); | 1160 __ Drop(1); |
| 1164 __ jmp(&exit); | 1161 __ jmp(&exit); |
| 1165 | 1162 |
| 1166 // We got a fixed array in register r0. Iterate through that. | 1163 // We got a fixed array in register r0. Iterate through that. |
| 1167 Label non_proxy; | 1164 Label non_proxy; |
| 1168 __ bind(&fixed_array); | 1165 __ bind(&fixed_array); |
| 1169 | 1166 |
| 1170 Handle<Object> feedback = Handle<Object>( | |
| 1171 Smi::FromInt(TypeFeedbackInfo::kForInFastCaseMarker), | |
| 1172 isolate()); | |
| 1173 StoreFeedbackVectorSlot(slot, feedback); | |
| 1174 __ Move(r1, FeedbackVector()); | 1167 __ Move(r1, FeedbackVector()); |
| 1175 __ mov(r2, Operand(Smi::FromInt(TypeFeedbackInfo::kForInSlowCaseMarker))); | 1168 __ mov(r2, Operand(TypeFeedbackInfo::MegamorphicSentinel(isolate()))); |
| 1176 __ str(r2, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(slot))); | 1169 __ str(r2, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(slot))); |
| 1177 | 1170 |
| 1178 __ mov(r1, Operand(Smi::FromInt(1))); // Smi indicates slow check | 1171 __ mov(r1, Operand(Smi::FromInt(1))); // Smi indicates slow check |
| 1179 __ ldr(r2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object | 1172 __ ldr(r2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object |
| 1180 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); | 1173 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); |
| 1181 __ CompareObjectType(r2, r3, r3, LAST_JS_PROXY_TYPE); | 1174 __ CompareObjectType(r2, r3, r3, LAST_JS_PROXY_TYPE); |
| 1182 __ b(gt, &non_proxy); | 1175 __ b(gt, &non_proxy); |
| 1183 __ mov(r1, Operand(Smi::FromInt(0))); // Zero indicates proxy | 1176 __ mov(r1, Operand(Smi::FromInt(0))); // Zero indicates proxy |
| 1184 __ bind(&non_proxy); | 1177 __ bind(&non_proxy); |
| 1185 __ Push(r1, r0); // Smi and array | 1178 __ Push(r1, r0); // Smi and array |
| (...skipping 138 matching lines...) |
| 1324 // space for nested functions that don't need literals cloning. If | 1317 // space for nested functions that don't need literals cloning. If |
| 1325 // we're running with the --always-opt or the --prepare-always-opt | 1318 // we're running with the --always-opt or the --prepare-always-opt |
| 1326 // flag, we need to use the runtime function so that the new function | 1319 // flag, we need to use the runtime function so that the new function |
| 1327 // we are creating here gets a chance to have its code optimized and | 1320 // we are creating here gets a chance to have its code optimized and |
| 1328 // doesn't just get a copy of the existing unoptimized code. | 1321 // doesn't just get a copy of the existing unoptimized code. |
| 1329 if (!FLAG_always_opt && | 1322 if (!FLAG_always_opt && |
| 1330 !FLAG_prepare_always_opt && | 1323 !FLAG_prepare_always_opt && |
| 1331 !pretenure && | 1324 !pretenure && |
| 1332 scope()->is_function_scope() && | 1325 scope()->is_function_scope() && |
| 1333 info->num_literals() == 0) { | 1326 info->num_literals() == 0) { |
| 1334 FastNewClosureStub stub(info->language_mode(), info->is_generator()); | 1327 FastNewClosureStub stub(info->strict_mode(), info->is_generator()); |
| 1335 __ mov(r2, Operand(info)); | 1328 __ mov(r2, Operand(info)); |
| 1336 __ CallStub(&stub); | 1329 __ CallStub(&stub); |
| 1337 } else { | 1330 } else { |
| 1338 __ mov(r0, Operand(info)); | 1331 __ mov(r0, Operand(info)); |
| 1339 __ LoadRoot(r1, pretenure ? Heap::kTrueValueRootIndex | 1332 __ LoadRoot(r1, pretenure ? Heap::kTrueValueRootIndex |
| 1340 : Heap::kFalseValueRootIndex); | 1333 : Heap::kFalseValueRootIndex); |
| 1341 __ Push(cp, r0, r1); | 1334 __ Push(cp, r0, r1); |
| 1342 __ CallRuntime(Runtime::kNewClosure, 3); | 1335 __ CallRuntime(Runtime::kNewClosure, 3); |
| 1343 } | 1336 } |
| 1344 context()->Plug(r0); | 1337 context()->Plug(r0); |
| 1345 } | 1338 } |
| 1346 | 1339 |
| 1347 | 1340 |
| 1348 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) { | 1341 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) { |
| 1349 Comment cmnt(masm_, "[ VariableProxy"); | 1342 Comment cmnt(masm_, "[ VariableProxy"); |
| 1350 EmitVariableLoad(expr); | 1343 EmitVariableLoad(expr); |
| 1351 } | 1344 } |
| 1352 | 1345 |
| 1353 | 1346 |
| 1354 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var, | 1347 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var, |
| 1355 TypeofState typeof_state, | 1348 TypeofState typeof_state, |
| 1356 Label* slow) { | 1349 Label* slow) { |
| 1357 Register current = cp; | 1350 Register current = cp; |
| 1358 Register next = r1; | 1351 Register next = r1; |
| 1359 Register temp = r2; | 1352 Register temp = r2; |
| 1360 | 1353 |
| 1361 Scope* s = scope(); | 1354 Scope* s = scope(); |
| 1362 while (s != NULL) { | 1355 while (s != NULL) { |
| 1363 if (s->num_heap_slots() > 0) { | 1356 if (s->num_heap_slots() > 0) { |
| 1364 if (s->calls_non_strict_eval()) { | 1357 if (s->calls_sloppy_eval()) { |
| 1365 // Check that extension is NULL. | 1358 // Check that extension is NULL. |
| 1366 __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX)); | 1359 __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX)); |
| 1367 __ tst(temp, temp); | 1360 __ tst(temp, temp); |
| 1368 __ b(ne, slow); | 1361 __ b(ne, slow); |
| 1369 } | 1362 } |
| 1370 // Load next context in chain. | 1363 // Load next context in chain. |
| 1371 __ ldr(next, ContextOperand(current, Context::PREVIOUS_INDEX)); | 1364 __ ldr(next, ContextOperand(current, Context::PREVIOUS_INDEX)); |
| 1372 // Walk the rest of the chain without clobbering cp. | 1365 // Walk the rest of the chain without clobbering cp. |
| 1373 current = next; | 1366 current = next; |
| 1374 } | 1367 } |
| 1375 // If no outer scope calls eval, we do not need to check more | 1368 // If no outer scope calls eval, we do not need to check more |
| 1376 // context extensions. | 1369 // context extensions. |
| 1377 if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break; | 1370 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break; |
| 1378 s = s->outer_scope(); | 1371 s = s->outer_scope(); |
| 1379 } | 1372 } |
| 1380 | 1373 |
| 1381 if (s->is_eval_scope()) { | 1374 if (s->is_eval_scope()) { |
| 1382 Label loop, fast; | 1375 Label loop, fast; |
| 1383 if (!current.is(next)) { | 1376 if (!current.is(next)) { |
| 1384 __ Move(next, current); | 1377 __ Move(next, current); |
| 1385 } | 1378 } |
| 1386 __ bind(&loop); | 1379 __ bind(&loop); |
| 1387 // Terminate at native context. | 1380 // Terminate at native context. |
| (...skipping 22 matching lines...) |
| 1410 | 1403 |
| 1411 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var, | 1404 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var, |
| 1412 Label* slow) { | 1405 Label* slow) { |
| 1413 ASSERT(var->IsContextSlot()); | 1406 ASSERT(var->IsContextSlot()); |
| 1414 Register context = cp; | 1407 Register context = cp; |
| 1415 Register next = r3; | 1408 Register next = r3; |
| 1416 Register temp = r4; | 1409 Register temp = r4; |
| 1417 | 1410 |
| 1418 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) { | 1411 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) { |
| 1419 if (s->num_heap_slots() > 0) { | 1412 if (s->num_heap_slots() > 0) { |
| 1420 if (s->calls_non_strict_eval()) { | 1413 if (s->calls_sloppy_eval()) { |
| 1421 // Check that extension is NULL. | 1414 // Check that extension is NULL. |
| 1422 __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX)); | 1415 __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX)); |
| 1423 __ tst(temp, temp); | 1416 __ tst(temp, temp); |
| 1424 __ b(ne, slow); | 1417 __ b(ne, slow); |
| 1425 } | 1418 } |
| 1426 __ ldr(next, ContextOperand(context, Context::PREVIOUS_INDEX)); | 1419 __ ldr(next, ContextOperand(context, Context::PREVIOUS_INDEX)); |
| 1427 // Walk the rest of the chain without clobbering cp. | 1420 // Walk the rest of the chain without clobbering cp. |
| 1428 context = next; | 1421 context = next; |
| 1429 } | 1422 } |
| 1430 } | 1423 } |
| (...skipping 17 matching lines...) |
| 1448 // eval-introduced variables. Eval is used a lot without | 1441 // eval-introduced variables. Eval is used a lot without |
| 1449 // introducing variables. In those cases, we do not want to | 1442 // introducing variables. In those cases, we do not want to |
| 1450 // perform a runtime call for all variables in the scope | 1443 // perform a runtime call for all variables in the scope |
| 1451 // containing the eval. | 1444 // containing the eval. |
| 1452 if (var->mode() == DYNAMIC_GLOBAL) { | 1445 if (var->mode() == DYNAMIC_GLOBAL) { |
| 1453 EmitLoadGlobalCheckExtensions(var, typeof_state, slow); | 1446 EmitLoadGlobalCheckExtensions(var, typeof_state, slow); |
| 1454 __ jmp(done); | 1447 __ jmp(done); |
| 1455 } else if (var->mode() == DYNAMIC_LOCAL) { | 1448 } else if (var->mode() == DYNAMIC_LOCAL) { |
| 1456 Variable* local = var->local_if_not_shadowed(); | 1449 Variable* local = var->local_if_not_shadowed(); |
| 1457 __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow)); | 1450 __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow)); |
| 1458 if (local->mode() == LET || | 1451 if (local->mode() == LET || local->mode() == CONST || |
| 1459 local->mode() == CONST || | 1452 local->mode() == CONST_LEGACY) { |
| 1460 local->mode() == CONST_HARMONY) { | |
| 1461 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex); | 1453 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex); |
| 1462 if (local->mode() == CONST) { | 1454 if (local->mode() == CONST_LEGACY) { |
| 1463 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq); | 1455 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq); |
| 1464 } else { // LET || CONST_HARMONY | 1456 } else { // LET || CONST |
| 1465 __ b(ne, done); | 1457 __ b(ne, done); |
| 1466 __ mov(r0, Operand(var->name())); | 1458 __ mov(r0, Operand(var->name())); |
| 1467 __ push(r0); | 1459 __ push(r0); |
| 1468 __ CallRuntime(Runtime::kThrowReferenceError, 1); | 1460 __ CallRuntime(Runtime::kThrowReferenceError, 1); |
| 1469 } | 1461 } |
| 1470 } | 1462 } |
| 1471 __ jmp(done); | 1463 __ jmp(done); |
| 1472 } | 1464 } |
| 1473 } | 1465 } |
| 1474 | 1466 |
| (...skipping 46 matching lines...) |
| 1521 // binding is initialized: | 1513 // binding is initialized: |
| 1522 // function() { f(); let x = 1; function f() { x = 2; } } | 1514 // function() { f(); let x = 1; function f() { x = 2; } } |
| 1523 // | 1515 // |
| 1524 bool skip_init_check; | 1516 bool skip_init_check; |
| 1525 if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) { | 1517 if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) { |
| 1526 skip_init_check = false; | 1518 skip_init_check = false; |
| 1527 } else { | 1519 } else { |
| 1528 // Check that we always have valid source position. | 1520 // Check that we always have valid source position. |
| 1529 ASSERT(var->initializer_position() != RelocInfo::kNoPosition); | 1521 ASSERT(var->initializer_position() != RelocInfo::kNoPosition); |
| 1530 ASSERT(proxy->position() != RelocInfo::kNoPosition); | 1522 ASSERT(proxy->position() != RelocInfo::kNoPosition); |
| 1531 skip_init_check = var->mode() != CONST && | 1523 skip_init_check = var->mode() != CONST_LEGACY && |
| 1532 var->initializer_position() < proxy->position(); | 1524 var->initializer_position() < proxy->position(); |
| 1533 } | 1525 } |
| 1534 | 1526 |
| 1535 if (!skip_init_check) { | 1527 if (!skip_init_check) { |
| 1536 // Let and const need a read barrier. | 1528 // Let and const need a read barrier. |
| 1537 GetVar(r0, var); | 1529 GetVar(r0, var); |
| 1538 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex); | 1530 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex); |
| 1539 if (var->mode() == LET || var->mode() == CONST_HARMONY) { | 1531 if (var->mode() == LET || var->mode() == CONST) { |
| 1540 // Throw a reference error when using an uninitialized let/const | 1532 // Throw a reference error when using an uninitialized let/const |
| 1541 // binding in harmony mode. | 1533 // binding in harmony mode. |
| 1542 Label done; | 1534 Label done; |
| 1543 __ b(ne, &done); | 1535 __ b(ne, &done); |
| 1544 __ mov(r0, Operand(var->name())); | 1536 __ mov(r0, Operand(var->name())); |
| 1545 __ push(r0); | 1537 __ push(r0); |
| 1546 __ CallRuntime(Runtime::kThrowReferenceError, 1); | 1538 __ CallRuntime(Runtime::kThrowReferenceError, 1); |
| 1547 __ bind(&done); | 1539 __ bind(&done); |
| 1548 } else { | 1540 } else { |
| 1549 // Uninitialized const bindings outside of harmony mode are unholed. | 1541 // Uninitialized const bindings outside of harmony mode are unholed. |
| 1550 ASSERT(var->mode() == CONST); | 1542 ASSERT(var->mode() == CONST_LEGACY); |
| 1551 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq); | 1543 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq); |
| 1552 } | 1544 } |
| 1553 context()->Plug(r0); | 1545 context()->Plug(r0); |
| 1554 break; | 1546 break; |
| 1555 } | 1547 } |
| 1556 } | 1548 } |
| 1557 context()->Plug(var); | 1549 context()->Plug(var); |
| 1558 break; | 1550 break; |
| 1559 } | 1551 } |
| 1560 | 1552 |
| (...skipping 84 matching lines...) |
| 1645 __ mov(r2, Operand(Smi::FromInt(expr->literal_index()))); | 1637 __ mov(r2, Operand(Smi::FromInt(expr->literal_index()))); |
| 1646 __ mov(r1, Operand(constant_properties)); | 1638 __ mov(r1, Operand(constant_properties)); |
| 1647 int flags = expr->fast_elements() | 1639 int flags = expr->fast_elements() |
| 1648 ? ObjectLiteral::kFastElements | 1640 ? ObjectLiteral::kFastElements |
| 1649 : ObjectLiteral::kNoFlags; | 1641 : ObjectLiteral::kNoFlags; |
| 1650 flags |= expr->has_function() | 1642 flags |= expr->has_function() |
| 1651 ? ObjectLiteral::kHasFunction | 1643 ? ObjectLiteral::kHasFunction |
| 1652 : ObjectLiteral::kNoFlags; | 1644 : ObjectLiteral::kNoFlags; |
| 1653 __ mov(r0, Operand(Smi::FromInt(flags))); | 1645 __ mov(r0, Operand(Smi::FromInt(flags))); |
| 1654 int properties_count = constant_properties->length() / 2; | 1646 int properties_count = constant_properties->length() / 2; |
| 1655 if ((FLAG_track_double_fields && expr->may_store_doubles()) || | 1647 if (expr->may_store_doubles() || expr->depth() > 1 || Serializer::enabled() || |
| 1656 expr->depth() > 1 || Serializer::enabled() || | |
| 1657 flags != ObjectLiteral::kFastElements || | 1648 flags != ObjectLiteral::kFastElements || |
| 1658 properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) { | 1649 properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) { |
| 1659 __ Push(r3, r2, r1, r0); | 1650 __ Push(r3, r2, r1, r0); |
| 1660 __ CallRuntime(Runtime::kCreateObjectLiteral, 4); | 1651 __ CallRuntime(Runtime::kCreateObjectLiteral, 4); |
| 1661 } else { | 1652 } else { |
| 1662 FastCloneShallowObjectStub stub(properties_count); | 1653 FastCloneShallowObjectStub stub(properties_count); |
| 1663 __ CallStub(&stub); | 1654 __ CallStub(&stub); |
| 1664 } | 1655 } |
| 1665 | 1656 |
| 1666 // If result_saved is true the result is on top of the stack. If | 1657 // If result_saved is true the result is on top of the stack. If |
| (...skipping 771 matching lines...) |
| 2438 __ mov(r2, Operand(prop->key()->AsLiteral()->value())); | 2429 __ mov(r2, Operand(prop->key()->AsLiteral()->value())); |
| 2439 CallStoreIC(); | 2430 CallStoreIC(); |
| 2440 break; | 2431 break; |
| 2441 } | 2432 } |
| 2442 case KEYED_PROPERTY: { | 2433 case KEYED_PROPERTY: { |
| 2443 __ push(r0); // Preserve value. | 2434 __ push(r0); // Preserve value. |
| 2444 VisitForStackValue(prop->obj()); | 2435 VisitForStackValue(prop->obj()); |
| 2445 VisitForAccumulatorValue(prop->key()); | 2436 VisitForAccumulatorValue(prop->key()); |
| 2446 __ mov(r1, r0); | 2437 __ mov(r1, r0); |
| 2447 __ Pop(r0, r2); // r0 = restored value. | 2438 __ Pop(r0, r2); // r0 = restored value. |
| 2448 Handle<Code> ic = is_classic_mode() | 2439 Handle<Code> ic = strict_mode() == SLOPPY |
| 2449 ? isolate()->builtins()->KeyedStoreIC_Initialize() | 2440 ? isolate()->builtins()->KeyedStoreIC_Initialize() |
| 2450 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); | 2441 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); |
| 2451 CallIC(ic); | 2442 CallIC(ic); |
| 2452 break; | 2443 break; |
| 2453 } | 2444 } |
| 2454 } | 2445 } |
| 2455 context()->Plug(r0); | 2446 context()->Plug(r0); |
| 2456 } | 2447 } |
| 2457 | 2448 |
| 2458 | 2449 |
| 2459 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot( | 2450 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot( |
| 2460 Variable* var, MemOperand location) { | 2451 Variable* var, MemOperand location) { |
| 2461 __ str(result_register(), location); | 2452 __ str(result_register(), location); |
| 2462 if (var->IsContextSlot()) { | 2453 if (var->IsContextSlot()) { |
| 2463 // RecordWrite may destroy all its register arguments. | 2454 // RecordWrite may destroy all its register arguments. |
| 2464 __ mov(r3, result_register()); | 2455 __ mov(r3, result_register()); |
| 2465 int offset = Context::SlotOffset(var->index()); | 2456 int offset = Context::SlotOffset(var->index()); |
| 2466 __ RecordWriteContextSlot( | 2457 __ RecordWriteContextSlot( |
| 2467 r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs); | 2458 r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs); |
| 2468 } | 2459 } |
| 2469 } | 2460 } |
| 2470 | 2461 |
| 2471 | 2462 |
| 2472 void FullCodeGenerator::EmitCallStoreContextSlot( | 2463 void FullCodeGenerator::EmitCallStoreContextSlot( |
| 2473 Handle<String> name, LanguageMode mode) { | 2464 Handle<String> name, StrictMode strict_mode) { |
| 2474 __ push(r0); // Value. | 2465 __ push(r0); // Value. |
| 2475 __ mov(r1, Operand(name)); | 2466 __ mov(r1, Operand(name)); |
| 2476 __ mov(r0, Operand(Smi::FromInt(mode))); | 2467 __ mov(r0, Operand(Smi::FromInt(strict_mode))); |
| 2477 __ Push(cp, r1, r0); // Context, name, strict mode. | 2468 __ Push(cp, r1, r0); // Context, name, strict mode. |
| 2478 __ CallRuntime(Runtime::kStoreContextSlot, 4); | 2469 __ CallRuntime(Runtime::kStoreContextSlot, 4); |
| 2479 } | 2470 } |
| 2480 | 2471 |
| 2481 | 2472 |
| 2482 void FullCodeGenerator::EmitVariableAssignment(Variable* var, | 2473 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op) { |
| 2483 Token::Value op) { | |
| 2484 if (var->IsUnallocated()) { | 2474 if (var->IsUnallocated()) { |
| 2485 // Global var, const, or let. | 2475 // Global var, const, or let. |
| 2486 __ mov(r2, Operand(var->name())); | 2476 __ mov(r2, Operand(var->name())); |
| 2487 __ ldr(r1, GlobalObjectOperand()); | 2477 __ ldr(r1, GlobalObjectOperand()); |
| 2488 CallStoreIC(); | 2478 CallStoreIC(); |
| 2489 | 2479 |
| 2490 } else if (op == Token::INIT_CONST) { | 2480 } else if (op == Token::INIT_CONST_LEGACY) { |
| 2491 // Const initializers need a write barrier. | 2481 // Const initializers need a write barrier. |
| 2492 ASSERT(!var->IsParameter()); // No const parameters. | 2482 ASSERT(!var->IsParameter()); // No const parameters. |
| 2493 if (var->IsLookupSlot()) { | 2483 if (var->IsLookupSlot()) { |
| 2494 __ push(r0); | 2484 __ push(r0); |
| 2495 __ mov(r0, Operand(var->name())); | 2485 __ mov(r0, Operand(var->name())); |
| 2496 __ Push(cp, r0); // Context and name. | 2486 __ Push(cp, r0); // Context and name. |
| 2497 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3); | 2487 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3); |
| 2498 } else { | 2488 } else { |
| 2499 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); | 2489 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); |
| 2500 Label skip; | 2490 Label skip; |
| 2501 MemOperand location = VarOperand(var, r1); | 2491 MemOperand location = VarOperand(var, r1); |
| 2502 __ ldr(r2, location); | 2492 __ ldr(r2, location); |
| 2503 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex); | 2493 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex); |
| 2504 __ b(ne, &skip); | 2494 __ b(ne, &skip); |
| 2505 EmitStoreToStackLocalOrContextSlot(var, location); | 2495 EmitStoreToStackLocalOrContextSlot(var, location); |
| 2506 __ bind(&skip); | 2496 __ bind(&skip); |
| 2507 } | 2497 } |
| 2508 | 2498 |
| 2509 } else if (var->mode() == LET && op != Token::INIT_LET) { | 2499 } else if (var->mode() == LET && op != Token::INIT_LET) { |
| 2510 // Non-initializing assignment to let variable needs a write barrier. | 2500 // Non-initializing assignment to let variable needs a write barrier. |
| 2511 if (var->IsLookupSlot()) { | 2501 if (var->IsLookupSlot()) { |
| 2512 EmitCallStoreContextSlot(var->name(), language_mode()); | 2502 EmitCallStoreContextSlot(var->name(), strict_mode()); |
| 2513 } else { | 2503 } else { |
| 2514 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); | 2504 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); |
| 2515 Label assign; | 2505 Label assign; |
| 2516 MemOperand location = VarOperand(var, r1); | 2506 MemOperand location = VarOperand(var, r1); |
| 2517 __ ldr(r3, location); | 2507 __ ldr(r3, location); |
| 2518 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex); | 2508 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex); |
| 2519 __ b(ne, &assign); | 2509 __ b(ne, &assign); |
| 2520 __ mov(r3, Operand(var->name())); | 2510 __ mov(r3, Operand(var->name())); |
| 2521 __ push(r3); | 2511 __ push(r3); |
| 2522 __ CallRuntime(Runtime::kThrowReferenceError, 1); | 2512 __ CallRuntime(Runtime::kThrowReferenceError, 1); |
| 2523 // Perform the assignment. | 2513 // Perform the assignment. |
| 2524 __ bind(&assign); | 2514 __ bind(&assign); |
| 2525 EmitStoreToStackLocalOrContextSlot(var, location); | 2515 EmitStoreToStackLocalOrContextSlot(var, location); |
| 2526 } | 2516 } |
| 2527 | 2517 |
| 2528 } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) { | 2518 } else if (!var->is_const_mode() || op == Token::INIT_CONST) { |
| 2529 // Assignment to var or initializing assignment to let/const | 2519 // Assignment to var or initializing assignment to let/const |
| 2530 // in harmony mode. | 2520 // in harmony mode. |
| 2531 if (var->IsLookupSlot()) { | 2521 if (var->IsLookupSlot()) { |
| 2532 EmitCallStoreContextSlot(var->name(), language_mode()); | 2522 EmitCallStoreContextSlot(var->name(), strict_mode()); |
| 2533 } else { | 2523 } else { |
| 2534 ASSERT((var->IsStackAllocated() || var->IsContextSlot())); | 2524 ASSERT((var->IsStackAllocated() || var->IsContextSlot())); |
| 2535 MemOperand location = VarOperand(var, r1); | 2525 MemOperand location = VarOperand(var, r1); |
| 2536 if (generate_debug_code_ && op == Token::INIT_LET) { | 2526 if (generate_debug_code_ && op == Token::INIT_LET) { |
| 2537 // Check for an uninitialized let binding. | 2527 // Check for an uninitialized let binding. |
| 2538 __ ldr(r2, location); | 2528 __ ldr(r2, location); |
| 2539 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex); | 2529 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex); |
| 2540 __ Check(eq, kLetBindingReInitialization); | 2530 __ Check(eq, kLetBindingReInitialization); |
| 2541 } | 2531 } |
| 2542 EmitStoreToStackLocalOrContextSlot(var, location); | 2532 EmitStoreToStackLocalOrContextSlot(var, location); |
| (...skipping 21 matching lines...) Expand all Loading... |
| 2564 } | 2554 } |
| 2565 | 2555 |
| 2566 | 2556 |
| 2567 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) { | 2557 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) { |
| 2568 // Assignment to a property, using a keyed store IC. | 2558 // Assignment to a property, using a keyed store IC. |
| 2569 | 2559 |
| 2570 // Record source code position before IC call. | 2560 // Record source code position before IC call. |
| 2571 SetSourcePosition(expr->position()); | 2561 SetSourcePosition(expr->position()); |
| 2572 __ Pop(r2, r1); // r1 = key. | 2562 __ Pop(r2, r1); // r1 = key. |
| 2573 | 2563 |
| 2574 Handle<Code> ic = is_classic_mode() | 2564 Handle<Code> ic = strict_mode() == SLOPPY |
| 2575 ? isolate()->builtins()->KeyedStoreIC_Initialize() | 2565 ? isolate()->builtins()->KeyedStoreIC_Initialize() |
| 2576 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); | 2566 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); |
| 2577 CallIC(ic, expr->AssignmentFeedbackId()); | 2567 CallIC(ic, expr->AssignmentFeedbackId()); |
| 2578 | 2568 |
| 2579 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); | 2569 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); |
| 2580 context()->Plug(r0); | 2570 context()->Plug(r0); |
| 2581 } | 2571 } |
| 2582 | 2572 |
| 2583 | 2573 |
| 2584 void FullCodeGenerator::VisitProperty(Property* expr) { | 2574 void FullCodeGenerator::VisitProperty(Property* expr) { |
| (...skipping 32 matching lines...) |
| 2617 int arg_count = args->length(); | 2607 int arg_count = args->length(); |
| 2618 | 2608 |
| 2619 CallFunctionFlags flags; | 2609 CallFunctionFlags flags; |
| 2620 // Get the target function. | 2610 // Get the target function. |
| 2621 if (callee->IsVariableProxy()) { | 2611 if (callee->IsVariableProxy()) { |
| 2622 { StackValueContext context(this); | 2612 { StackValueContext context(this); |
| 2623 EmitVariableLoad(callee->AsVariableProxy()); | 2613 EmitVariableLoad(callee->AsVariableProxy()); |
| 2624 PrepareForBailout(callee, NO_REGISTERS); | 2614 PrepareForBailout(callee, NO_REGISTERS); |
| 2625 } | 2615 } |
| 2626 // Push undefined as receiver. This is patched in the method prologue if it | 2616 // Push undefined as receiver. This is patched in the method prologue if it |
| 2627 // is a classic mode method. | 2617 // is a sloppy mode method. |
| 2628 __ Push(isolate()->factory()->undefined_value()); | 2618 __ Push(isolate()->factory()->undefined_value()); |
| 2629 flags = NO_CALL_FUNCTION_FLAGS; | 2619 flags = NO_CALL_FUNCTION_FLAGS; |
| 2630 } else { | 2620 } else { |
| 2631 // Load the function from the receiver. | 2621 // Load the function from the receiver. |
| 2632 ASSERT(callee->IsProperty()); | 2622 ASSERT(callee->IsProperty()); |
| 2633 __ ldr(r0, MemOperand(sp, 0)); | 2623 __ ldr(r0, MemOperand(sp, 0)); |
| 2634 EmitNamedPropertyLoad(callee->AsProperty()); | 2624 EmitNamedPropertyLoad(callee->AsProperty()); |
| 2635 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); | 2625 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); |
| 2636 // Push the target function under the receiver. | 2626 // Push the target function under the receiver. |
| 2637 __ ldr(ip, MemOperand(sp, 0)); | 2627 __ ldr(ip, MemOperand(sp, 0)); |
| (...skipping 70 matching lines...) |
| 2708 ZoneList<Expression*>* args = expr->arguments(); | 2698 ZoneList<Expression*>* args = expr->arguments(); |
| 2709 int arg_count = args->length(); | 2699 int arg_count = args->length(); |
| 2710 { PreservePositionScope scope(masm()->positions_recorder()); | 2700 { PreservePositionScope scope(masm()->positions_recorder()); |
| 2711 for (int i = 0; i < arg_count; i++) { | 2701 for (int i = 0; i < arg_count; i++) { |
| 2712 VisitForStackValue(args->at(i)); | 2702 VisitForStackValue(args->at(i)); |
| 2713 } | 2703 } |
| 2714 } | 2704 } |
| 2715 // Record source position for debugger. | 2705 // Record source position for debugger. |
| 2716 SetSourcePosition(expr->position()); | 2706 SetSourcePosition(expr->position()); |
| 2717 | 2707 |
| 2718 Handle<Object> uninitialized = | |
| 2719 TypeFeedbackInfo::UninitializedSentinel(isolate()); | |
| 2720 StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized); | |
| 2721 __ Move(r2, FeedbackVector()); | 2708 __ Move(r2, FeedbackVector()); |
| 2722 __ mov(r3, Operand(Smi::FromInt(expr->CallFeedbackSlot()))); | 2709 __ mov(r3, Operand(Smi::FromInt(expr->CallFeedbackSlot()))); |
| 2723 | 2710 |
| 2724 // Record call targets in unoptimized code. | 2711 // Record call targets in unoptimized code. |
| 2725 CallFunctionStub stub(arg_count, RECORD_CALL_TARGET); | 2712 CallFunctionStub stub(arg_count, RECORD_CALL_TARGET); |
| 2726 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize)); | 2713 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize)); |
| 2727 __ CallStub(&stub); | 2714 __ CallStub(&stub); |
| 2728 RecordJSReturnSite(expr); | 2715 RecordJSReturnSite(expr); |
| 2729 // Restore context register. | 2716 // Restore context register. |
| 2730 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 2717 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 2731 context()->DropAndPlug(1, r0); | 2718 context()->DropAndPlug(1, r0); |
| 2732 } | 2719 } |
| 2733 | 2720 |
| 2734 | 2721 |
| 2735 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) { | 2722 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) { |
| 2736 // r4: copy of the first argument or undefined if it doesn't exist. | 2723 // r4: copy of the first argument or undefined if it doesn't exist. |
| 2737 if (arg_count > 0) { | 2724 if (arg_count > 0) { |
| 2738 __ ldr(r4, MemOperand(sp, arg_count * kPointerSize)); | 2725 __ ldr(r4, MemOperand(sp, arg_count * kPointerSize)); |
| 2739 } else { | 2726 } else { |
| 2740 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex); | 2727 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex); |
| 2741 } | 2728 } |
| 2742 | 2729 |
| 2743 // r3: the receiver of the enclosing function. | 2730 // r3: the receiver of the enclosing function. |
| 2744 int receiver_offset = 2 + info_->scope()->num_parameters(); | 2731 int receiver_offset = 2 + info_->scope()->num_parameters(); |
| 2745 __ ldr(r3, MemOperand(fp, receiver_offset * kPointerSize)); | 2732 __ ldr(r3, MemOperand(fp, receiver_offset * kPointerSize)); |
| 2746 | 2733 |
| 2747 // r2: the language mode. | 2734 // r2: strict mode. |
| 2748 __ mov(r2, Operand(Smi::FromInt(language_mode()))); | 2735 __ mov(r2, Operand(Smi::FromInt(strict_mode()))); |
| 2749 | 2736 |
| 2750 // r1: the start position of the scope the call resides in. | 2737 // r1: the start position of the scope the call resides in. |
| 2751 __ mov(r1, Operand(Smi::FromInt(scope()->start_position()))); | 2738 __ mov(r1, Operand(Smi::FromInt(scope()->start_position()))); |
| 2752 | 2739 |
| 2753 // Do the runtime call. | 2740 // Do the runtime call. |
| 2754 __ Push(r4, r3, r2, r1); | 2741 __ Push(r4, r3, r2, r1); |
| 2755 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5); | 2742 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5); |
| 2756 } | 2743 } |
| 2757 | 2744 |
| 2758 | 2745 |
| (...skipping 138 matching lines...) |
| 2897 | 2884 |
| 2898 // Call the construct call builtin that handles allocation and | 2885 // Call the construct call builtin that handles allocation and |
| 2899 // constructor invocation. | 2886 // constructor invocation. |
| 2900 SetSourcePosition(expr->position()); | 2887 SetSourcePosition(expr->position()); |
| 2901 | 2888 |
| 2902 // Load function and argument count into r1 and r0. | 2889 // Load function and argument count into r1 and r0. |
| 2903 __ mov(r0, Operand(arg_count)); | 2890 __ mov(r0, Operand(arg_count)); |
| 2904 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize)); | 2891 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize)); |
| 2905 | 2892 |
| 2906 // Record call targets in unoptimized code. | 2893 // Record call targets in unoptimized code. |
| 2907 Handle<Object> uninitialized = | |
| 2908 TypeFeedbackInfo::UninitializedSentinel(isolate()); | |
| 2909 StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized); | |
| 2910 __ Move(r2, FeedbackVector()); | 2894 __ Move(r2, FeedbackVector()); |
| 2911 __ mov(r3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot()))); | 2895 __ mov(r3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot()))); |
| 2912 | 2896 |
| 2913 CallConstructStub stub(RECORD_CALL_TARGET); | 2897 CallConstructStub stub(RECORD_CALL_TARGET); |
| 2914 __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL); | 2898 __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL); |
| 2915 PrepareForBailoutForId(expr->ReturnId(), TOS_REG); | 2899 PrepareForBailoutForId(expr->ReturnId(), TOS_REG); |
| 2916 context()->Plug(r0); | 2900 context()->Plug(r0); |
| 2917 } | 2901 } |
| 2918 | 2902 |
| 2919 | 2903 |
| (...skipping 1258 matching lines...) |
| 4178 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { | 4162 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { |
| 4179 switch (expr->op()) { | 4163 switch (expr->op()) { |
| 4180 case Token::DELETE: { | 4164 case Token::DELETE: { |
| 4181 Comment cmnt(masm_, "[ UnaryOperation (DELETE)"); | 4165 Comment cmnt(masm_, "[ UnaryOperation (DELETE)"); |
| 4182 Property* property = expr->expression()->AsProperty(); | 4166 Property* property = expr->expression()->AsProperty(); |
| 4183 VariableProxy* proxy = expr->expression()->AsVariableProxy(); | 4167 VariableProxy* proxy = expr->expression()->AsVariableProxy(); |
| 4184 | 4168 |
| 4185 if (property != NULL) { | 4169 if (property != NULL) { |
| 4186 VisitForStackValue(property->obj()); | 4170 VisitForStackValue(property->obj()); |
| 4187 VisitForStackValue(property->key()); | 4171 VisitForStackValue(property->key()); |
| 4188 StrictModeFlag strict_mode_flag = (language_mode() == CLASSIC_MODE) | 4172 __ mov(r1, Operand(Smi::FromInt(strict_mode()))); |
| 4189 ? kNonStrictMode : kStrictMode; | |
| 4190 __ mov(r1, Operand(Smi::FromInt(strict_mode_flag))); | |
| 4191 __ push(r1); | 4173 __ push(r1); |
| 4192 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); | 4174 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); |
| 4193 context()->Plug(r0); | 4175 context()->Plug(r0); |
| 4194 } else if (proxy != NULL) { | 4176 } else if (proxy != NULL) { |
| 4195 Variable* var = proxy->var(); | 4177 Variable* var = proxy->var(); |
| 4196 // Delete of an unqualified identifier is disallowed in strict mode | 4178 // Delete of an unqualified identifier is disallowed in strict mode |
| 4197 // but "delete this" is allowed. | 4179 // but "delete this" is allowed. |
| 4198 ASSERT(language_mode() == CLASSIC_MODE || var->is_this()); | 4180 ASSERT(strict_mode() == SLOPPY || var->is_this()); |
| 4199 if (var->IsUnallocated()) { | 4181 if (var->IsUnallocated()) { |
| 4200 __ ldr(r2, GlobalObjectOperand()); | 4182 __ ldr(r2, GlobalObjectOperand()); |
| 4201 __ mov(r1, Operand(var->name())); | 4183 __ mov(r1, Operand(var->name())); |
| 4202 __ mov(r0, Operand(Smi::FromInt(kNonStrictMode))); | 4184 __ mov(r0, Operand(Smi::FromInt(SLOPPY))); |
| 4203 __ Push(r2, r1, r0); | 4185 __ Push(r2, r1, r0); |
| 4204 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); | 4186 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); |
| 4205 context()->Plug(r0); | 4187 context()->Plug(r0); |
| 4206 } else if (var->IsStackAllocated() || var->IsContextSlot()) { | 4188 } else if (var->IsStackAllocated() || var->IsContextSlot()) { |
| 4207 // Result of deleting non-global, non-dynamic variables is false. | 4189 // Result of deleting non-global, non-dynamic variables is false. |
| 4208 // The subexpression does not have side effects. | 4190 // The subexpression does not have side effects. |
| 4209 context()->Plug(var->is_this()); | 4191 context()->Plug(var->is_this()); |
| 4210 } else { | 4192 } else { |
| 4211 // Non-global variable. Call the runtime to try to delete from the | 4193 // Non-global variable. Call the runtime to try to delete from the |
| 4212 // context where the variable was introduced. | 4194 // context where the variable was introduced. |
| (...skipping 233 matching lines...) |
| 4446 if (!context()->IsEffect()) { | 4428 if (!context()->IsEffect()) { |
| 4447 context()->PlugTOS(); | 4429 context()->PlugTOS(); |
| 4448 } | 4430 } |
| 4449 } else { | 4431 } else { |
| 4450 context()->Plug(r0); | 4432 context()->Plug(r0); |
| 4451 } | 4433 } |
| 4452 break; | 4434 break; |
| 4453 } | 4435 } |
| 4454 case KEYED_PROPERTY: { | 4436 case KEYED_PROPERTY: { |
| 4455 __ Pop(r2, r1); // r1 = key. r2 = receiver. | 4437 __ Pop(r2, r1); // r1 = key. r2 = receiver. |
| 4456 Handle<Code> ic = is_classic_mode() | 4438 Handle<Code> ic = strict_mode() == SLOPPY |
| 4457 ? isolate()->builtins()->KeyedStoreIC_Initialize() | 4439 ? isolate()->builtins()->KeyedStoreIC_Initialize() |
| 4458 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); | 4440 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); |
| 4459 CallIC(ic, expr->CountStoreFeedbackId()); | 4441 CallIC(ic, expr->CountStoreFeedbackId()); |
| 4460 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); | 4442 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); |
| 4461 if (expr->is_postfix()) { | 4443 if (expr->is_postfix()) { |
| 4462 if (!context()->IsEffect()) { | 4444 if (!context()->IsEffect()) { |
| 4463 context()->PlugTOS(); | 4445 context()->PlugTOS(); |
| 4464 } | 4446 } |
| 4465 } else { | 4447 } else { |
| 4466 context()->Plug(r0); | 4448 context()->Plug(r0); |
| (...skipping 451 matching lines...) |
| 4918 ASSERT(Memory::uint32_at(interrupt_address_pointer) == | 4900 ASSERT(Memory::uint32_at(interrupt_address_pointer) == |
| 4919 reinterpret_cast<uint32_t>( | 4901 reinterpret_cast<uint32_t>( |
| 4920 isolate->builtins()->OsrAfterStackCheck()->entry())); | 4902 isolate->builtins()->OsrAfterStackCheck()->entry())); |
| 4921 return OSR_AFTER_STACK_CHECK; | 4903 return OSR_AFTER_STACK_CHECK; |
| 4922 } | 4904 } |
| 4923 | 4905 |
| 4924 | 4906 |
| 4925 } } // namespace v8::internal | 4907 } } // namespace v8::internal |
| 4926 | 4908 |
| 4927 #endif // V8_TARGET_ARCH_ARM | 4909 #endif // V8_TARGET_ARCH_ARM |