| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_ARM | 5 #if V8_TARGET_ARCH_ARM |
| 6 | 6 |
| 7 #include "src/ast/scopes.h" | 7 #include "src/ast/scopes.h" |
| 8 #include "src/code-factory.h" | 8 #include "src/code-factory.h" |
| 9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
| 10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
| (...skipping 189 matching lines...) |
| 200 int num_parameters = info->scope()->num_parameters(); | 200 int num_parameters = info->scope()->num_parameters(); |
| 201 int first_parameter = info->scope()->has_this_declaration() ? -1 : 0; | 201 int first_parameter = info->scope()->has_this_declaration() ? -1 : 0; |
| 202 for (int i = first_parameter; i < num_parameters; i++) { | 202 for (int i = first_parameter; i < num_parameters; i++) { |
| 203 Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i); | 203 Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i); |
| 204 if (var->IsContextSlot()) { | 204 if (var->IsContextSlot()) { |
| 205 int parameter_offset = StandardFrameConstants::kCallerSPOffset + | 205 int parameter_offset = StandardFrameConstants::kCallerSPOffset + |
| 206 (num_parameters - 1 - i) * kPointerSize; | 206 (num_parameters - 1 - i) * kPointerSize; |
| 207 // Load parameter from stack. | 207 // Load parameter from stack. |
| 208 __ ldr(r0, MemOperand(fp, parameter_offset)); | 208 __ ldr(r0, MemOperand(fp, parameter_offset)); |
| 209 // Store it in the context. | 209 // Store it in the context. |
| 210 MemOperand target = ContextMemOperand(cp, var->index()); | 210 MemOperand target = ContextOperand(cp, var->index()); |
| 211 __ str(r0, target); | 211 __ str(r0, target); |
| 212 | 212 |
| 213 // Update the write barrier. | 213 // Update the write barrier. |
| 214 if (need_write_barrier) { | 214 if (need_write_barrier) { |
| 215 __ RecordWriteContextSlot( | 215 __ RecordWriteContextSlot( |
| 216 cp, target.offset(), r0, r3, kLRHasBeenSaved, kDontSaveFPRegs); | 216 cp, target.offset(), r0, r3, kLRHasBeenSaved, kDontSaveFPRegs); |
| 217 } else if (FLAG_debug_code) { | 217 } else if (FLAG_debug_code) { |
| 218 Label done; | 218 Label done; |
| 219 __ JumpIfInNewSpace(cp, r0, &done); | 219 __ JumpIfInNewSpace(cp, r0, &done); |
| 220 __ Abort(kExpectedNewSpaceObject); | 220 __ Abort(kExpectedNewSpaceObject); |
| (...skipping 482 matching lines...) |
| 703 } | 703 } |
| 704 return MemOperand(fp, offset); | 704 return MemOperand(fp, offset); |
| 705 } | 705 } |
| 706 | 706 |
| 707 | 707 |
| 708 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) { | 708 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) { |
| 709 DCHECK(var->IsContextSlot() || var->IsStackAllocated()); | 709 DCHECK(var->IsContextSlot() || var->IsStackAllocated()); |
| 710 if (var->IsContextSlot()) { | 710 if (var->IsContextSlot()) { |
| 711 int context_chain_length = scope()->ContextChainLength(var->scope()); | 711 int context_chain_length = scope()->ContextChainLength(var->scope()); |
| 712 __ LoadContext(scratch, context_chain_length); | 712 __ LoadContext(scratch, context_chain_length); |
| 713 return ContextMemOperand(scratch, var->index()); | 713 return ContextOperand(scratch, var->index()); |
| 714 } else { | 714 } else { |
| 715 return StackOperand(var); | 715 return StackOperand(var); |
| 716 } | 716 } |
| 717 } | 717 } |
| 718 | 718 |
| 719 | 719 |
| 720 void FullCodeGenerator::GetVar(Register dest, Variable* var) { | 720 void FullCodeGenerator::GetVar(Register dest, Variable* var) { |
| 721 // Use destination as scratch. | 721 // Use destination as scratch. |
| 722 MemOperand location = VarOperand(var, dest); | 722 MemOperand location = VarOperand(var, dest); |
| 723 __ ldr(dest, location); | 723 __ ldr(dest, location); |
| (...skipping 85 matching lines...) |
| 809 __ LoadRoot(r0, Heap::kTheHoleValueRootIndex); | 809 __ LoadRoot(r0, Heap::kTheHoleValueRootIndex); |
| 810 __ str(r0, StackOperand(variable)); | 810 __ str(r0, StackOperand(variable)); |
| 811 } | 811 } |
| 812 break; | 812 break; |
| 813 | 813 |
| 814 case VariableLocation::CONTEXT: | 814 case VariableLocation::CONTEXT: |
| 815 if (hole_init) { | 815 if (hole_init) { |
| 816 Comment cmnt(masm_, "[ VariableDeclaration"); | 816 Comment cmnt(masm_, "[ VariableDeclaration"); |
| 817 EmitDebugCheckDeclarationContext(variable); | 817 EmitDebugCheckDeclarationContext(variable); |
| 818 __ LoadRoot(r0, Heap::kTheHoleValueRootIndex); | 818 __ LoadRoot(r0, Heap::kTheHoleValueRootIndex); |
| 819 __ str(r0, ContextMemOperand(cp, variable->index())); | 819 __ str(r0, ContextOperand(cp, variable->index())); |
| 820 // No write barrier since the_hole_value is in old space. | 820 // No write barrier since the_hole_value is in old space. |
| 821 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); | 821 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); |
| 822 } | 822 } |
| 823 break; | 823 break; |
| 824 | 824 |
| 825 case VariableLocation::LOOKUP: { | 825 case VariableLocation::LOOKUP: { |
| 826 Comment cmnt(masm_, "[ VariableDeclaration"); | 826 Comment cmnt(masm_, "[ VariableDeclaration"); |
| 827 __ mov(r2, Operand(variable->name())); | 827 __ mov(r2, Operand(variable->name())); |
| 828 // Declaration nodes are always introduced in one of four modes. | 828 // Declaration nodes are always introduced in one of four modes. |
| 829 DCHECK(IsDeclaredVariableMode(mode)); | 829 DCHECK(IsDeclaredVariableMode(mode)); |
| (...skipping 36 matching lines...) |
| 866 Comment cmnt(masm_, "[ FunctionDeclaration"); | 866 Comment cmnt(masm_, "[ FunctionDeclaration"); |
| 867 VisitForAccumulatorValue(declaration->fun()); | 867 VisitForAccumulatorValue(declaration->fun()); |
| 868 __ str(result_register(), StackOperand(variable)); | 868 __ str(result_register(), StackOperand(variable)); |
| 869 break; | 869 break; |
| 870 } | 870 } |
| 871 | 871 |
| 872 case VariableLocation::CONTEXT: { | 872 case VariableLocation::CONTEXT: { |
| 873 Comment cmnt(masm_, "[ FunctionDeclaration"); | 873 Comment cmnt(masm_, "[ FunctionDeclaration"); |
| 874 EmitDebugCheckDeclarationContext(variable); | 874 EmitDebugCheckDeclarationContext(variable); |
| 875 VisitForAccumulatorValue(declaration->fun()); | 875 VisitForAccumulatorValue(declaration->fun()); |
| 876 __ str(result_register(), ContextMemOperand(cp, variable->index())); | 876 __ str(result_register(), ContextOperand(cp, variable->index())); |
| 877 int offset = Context::SlotOffset(variable->index()); | 877 int offset = Context::SlotOffset(variable->index()); |
| 878 // We know that we have written a function, which is not a smi. | 878 // We know that we have written a function, which is not a smi. |
| 879 __ RecordWriteContextSlot(cp, | 879 __ RecordWriteContextSlot(cp, |
| 880 offset, | 880 offset, |
| 881 result_register(), | 881 result_register(), |
| 882 r2, | 882 r2, |
| 883 kLRHasBeenSaved, | 883 kLRHasBeenSaved, |
| 884 kDontSaveFPRegs, | 884 kDontSaveFPRegs, |
| 885 EMIT_REMEMBERED_SET, | 885 EMIT_REMEMBERED_SET, |
| 886 OMIT_SMI_CHECK); | 886 OMIT_SMI_CHECK); |
| (...skipping 376 matching lines...) |
| 1263 Label* slow) { | 1263 Label* slow) { |
| 1264 Register current = cp; | 1264 Register current = cp; |
| 1265 Register next = r1; | 1265 Register next = r1; |
| 1266 Register temp = r2; | 1266 Register temp = r2; |
| 1267 | 1267 |
| 1268 Scope* s = scope(); | 1268 Scope* s = scope(); |
| 1269 while (s != NULL) { | 1269 while (s != NULL) { |
| 1270 if (s->num_heap_slots() > 0) { | 1270 if (s->num_heap_slots() > 0) { |
| 1271 if (s->calls_sloppy_eval()) { | 1271 if (s->calls_sloppy_eval()) { |
| 1272 // Check that extension is NULL. | 1272 // Check that extension is NULL. |
| 1273 __ ldr(temp, ContextMemOperand(current, Context::EXTENSION_INDEX)); | 1273 __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX)); |
| 1274 __ tst(temp, temp); | 1274 __ tst(temp, temp); |
| 1275 __ b(ne, slow); | 1275 __ b(ne, slow); |
| 1276 } | 1276 } |
| 1277 // Load next context in chain. | 1277 // Load next context in chain. |
| 1278 __ ldr(next, ContextMemOperand(current, Context::PREVIOUS_INDEX)); | 1278 __ ldr(next, ContextOperand(current, Context::PREVIOUS_INDEX)); |
| 1279 // Walk the rest of the chain without clobbering cp. | 1279 // Walk the rest of the chain without clobbering cp. |
| 1280 current = next; | 1280 current = next; |
| 1281 } | 1281 } |
| 1282 // If no outer scope calls eval, we do not need to check more | 1282 // If no outer scope calls eval, we do not need to check more |
| 1283 // context extensions. | 1283 // context extensions. |
| 1284 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break; | 1284 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break; |
| 1285 s = s->outer_scope(); | 1285 s = s->outer_scope(); |
| 1286 } | 1286 } |
| 1287 | 1287 |
| 1288 if (s->is_eval_scope()) { | 1288 if (s->is_eval_scope()) { |
| 1289 Label loop, fast; | 1289 Label loop, fast; |
| 1290 if (!current.is(next)) { | 1290 if (!current.is(next)) { |
| 1291 __ Move(next, current); | 1291 __ Move(next, current); |
| 1292 } | 1292 } |
| 1293 __ bind(&loop); | 1293 __ bind(&loop); |
| 1294 // Terminate at native context. | 1294 // Terminate at native context. |
| 1295 __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset)); | 1295 __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset)); |
| 1296 __ LoadRoot(ip, Heap::kNativeContextMapRootIndex); | 1296 __ LoadRoot(ip, Heap::kNativeContextMapRootIndex); |
| 1297 __ cmp(temp, ip); | 1297 __ cmp(temp, ip); |
| 1298 __ b(eq, &fast); | 1298 __ b(eq, &fast); |
| 1299 // Check that extension is NULL. | 1299 // Check that extension is NULL. |
| 1300 __ ldr(temp, ContextMemOperand(next, Context::EXTENSION_INDEX)); | 1300 __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX)); |
| 1301 __ tst(temp, temp); | 1301 __ tst(temp, temp); |
| 1302 __ b(ne, slow); | 1302 __ b(ne, slow); |
| 1303 // Load next context in chain. | 1303 // Load next context in chain. |
| 1304 __ ldr(next, ContextMemOperand(next, Context::PREVIOUS_INDEX)); | 1304 __ ldr(next, ContextOperand(next, Context::PREVIOUS_INDEX)); |
| 1305 __ b(&loop); | 1305 __ b(&loop); |
| 1306 __ bind(&fast); | 1306 __ bind(&fast); |
| 1307 } | 1307 } |
| 1308 | 1308 |
| 1309 // All extension objects were empty and it is safe to use the normal global | 1309 // All extension objects were empty and it is safe to use the normal global |
| 1310 // load machinery. | 1310 // load machinery. |
| 1311 EmitGlobalVariableLoad(proxy, typeof_mode); | 1311 EmitGlobalVariableLoad(proxy, typeof_mode); |
| 1312 } | 1312 } |
| 1313 | 1313 |
| 1314 | 1314 |
| 1315 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var, | 1315 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var, |
| 1316 Label* slow) { | 1316 Label* slow) { |
| 1317 DCHECK(var->IsContextSlot()); | 1317 DCHECK(var->IsContextSlot()); |
| 1318 Register context = cp; | 1318 Register context = cp; |
| 1319 Register next = r3; | 1319 Register next = r3; |
| 1320 Register temp = r4; | 1320 Register temp = r4; |
| 1321 | 1321 |
| 1322 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) { | 1322 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) { |
| 1323 if (s->num_heap_slots() > 0) { | 1323 if (s->num_heap_slots() > 0) { |
| 1324 if (s->calls_sloppy_eval()) { | 1324 if (s->calls_sloppy_eval()) { |
| 1325 // Check that extension is NULL. | 1325 // Check that extension is NULL. |
| 1326 __ ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX)); | 1326 __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX)); |
| 1327 __ tst(temp, temp); | 1327 __ tst(temp, temp); |
| 1328 __ b(ne, slow); | 1328 __ b(ne, slow); |
| 1329 } | 1329 } |
| 1330 __ ldr(next, ContextMemOperand(context, Context::PREVIOUS_INDEX)); | 1330 __ ldr(next, ContextOperand(context, Context::PREVIOUS_INDEX)); |
| 1331 // Walk the rest of the chain without clobbering cp. | 1331 // Walk the rest of the chain without clobbering cp. |
| 1332 context = next; | 1332 context = next; |
| 1333 } | 1333 } |
| 1334 } | 1334 } |
| 1335 // Check that last extension is NULL. | 1335 // Check that last extension is NULL. |
| 1336 __ ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX)); | 1336 __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX)); |
| 1337 __ tst(temp, temp); | 1337 __ tst(temp, temp); |
| 1338 __ b(ne, slow); | 1338 __ b(ne, slow); |
| 1339 | 1339 |
| 1340 // This function is used only for loads, not stores, so it's safe to | 1340 // This function is used only for loads, not stores, so it's safe to |
| 1341 // return a cp-based operand (the write barrier cannot be allowed to | 1341 // return a cp-based operand (the write barrier cannot be allowed to |
| 1342 // destroy the cp register). | 1342 // destroy the cp register). |
| 1343 return ContextMemOperand(context, var->index()); | 1343 return ContextOperand(context, var->index()); |
| 1344 } | 1344 } |
| 1345 | 1345 |
| 1346 | 1346 |
| 1347 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy, | 1347 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy, |
| 1348 TypeofMode typeof_mode, | 1348 TypeofMode typeof_mode, |
| 1349 Label* slow, Label* done) { | 1349 Label* slow, Label* done) { |
| 1350 // Generate fast-case code for variables that might be shadowed by | 1350 // Generate fast-case code for variables that might be shadowed by |
| 1351 // eval-introduced variables. Eval is used a lot without | 1351 // eval-introduced variables. Eval is used a lot without |
| 1352 // introducing variables. In those cases, we do not want to | 1352 // introducing variables. In those cases, we do not want to |
| 1353 // perform a runtime call for all variables in the scope | 1353 // perform a runtime call for all variables in the scope |
| (...skipping 20 matching lines...) |
| 1374 __ jmp(done); | 1374 __ jmp(done); |
| 1375 } | 1375 } |
| 1376 } | 1376 } |
| 1377 | 1377 |
| 1378 | 1378 |
| 1379 void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy, | 1379 void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy, |
| 1380 TypeofMode typeof_mode) { | 1380 TypeofMode typeof_mode) { |
| 1381 Variable* var = proxy->var(); | 1381 Variable* var = proxy->var(); |
| 1382 DCHECK(var->IsUnallocatedOrGlobalSlot() || | 1382 DCHECK(var->IsUnallocatedOrGlobalSlot() || |
| 1383 (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL)); | 1383 (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL)); |
| 1384 __ LoadGlobalObject(LoadDescriptor::ReceiverRegister()); | 1384 __ ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand()); |
| 1385 __ mov(LoadDescriptor::NameRegister(), Operand(var->name())); | 1385 __ mov(LoadDescriptor::NameRegister(), Operand(var->name())); |
| 1386 __ mov(LoadDescriptor::SlotRegister(), | 1386 __ mov(LoadDescriptor::SlotRegister(), |
| 1387 Operand(SmiFromSlot(proxy->VariableFeedbackSlot()))); | 1387 Operand(SmiFromSlot(proxy->VariableFeedbackSlot()))); |
| 1388 CallLoadIC(typeof_mode); | 1388 CallLoadIC(typeof_mode); |
| 1389 } | 1389 } |
| 1390 | 1390 |
| 1391 | 1391 |
| 1392 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy, | 1392 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy, |
| 1393 TypeofMode typeof_mode) { | 1393 TypeofMode typeof_mode) { |
| 1394 // Record position before possible IC call. | 1394 // Record position before possible IC call. |
| (...skipping 792 matching lines...) |
| 2187 Label allocate, done_allocate; | 2187 Label allocate, done_allocate; |
| 2188 | 2188 |
| 2189 __ Allocate(JSIteratorResult::kSize, r0, r2, r3, &allocate, TAG_OBJECT); | 2189 __ Allocate(JSIteratorResult::kSize, r0, r2, r3, &allocate, TAG_OBJECT); |
| 2190 __ b(&done_allocate); | 2190 __ b(&done_allocate); |
| 2191 | 2191 |
| 2192 __ bind(&allocate); | 2192 __ bind(&allocate); |
| 2193 __ Push(Smi::FromInt(JSIteratorResult::kSize)); | 2193 __ Push(Smi::FromInt(JSIteratorResult::kSize)); |
| 2194 __ CallRuntime(Runtime::kAllocateInNewSpace, 1); | 2194 __ CallRuntime(Runtime::kAllocateInNewSpace, 1); |
| 2195 | 2195 |
| 2196 __ bind(&done_allocate); | 2196 __ bind(&done_allocate); |
| 2197 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r1); | 2197 __ ldr(r1, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); |
| | 2198 __ ldr(r1, FieldMemOperand(r1, JSGlobalObject::kNativeContextOffset)); |
| | 2199 __ ldr(r1, ContextOperand(r1, Context::ITERATOR_RESULT_MAP_INDEX)); |
| 2198 __ pop(r2); | 2200 __ pop(r2); |
| 2199 __ LoadRoot(r3, | 2201 __ LoadRoot(r3, |
| 2200 done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex); | 2202 done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex); |
| 2201 __ LoadRoot(r4, Heap::kEmptyFixedArrayRootIndex); | 2203 __ LoadRoot(r4, Heap::kEmptyFixedArrayRootIndex); |
| 2202 __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset)); | 2204 __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset)); |
| 2203 __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset)); | 2205 __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset)); |
| 2204 __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset)); | 2206 __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset)); |
| 2205 __ str(r2, FieldMemOperand(r0, JSIteratorResult::kValueOffset)); | 2207 __ str(r2, FieldMemOperand(r0, JSIteratorResult::kValueOffset)); |
| 2206 __ str(r3, FieldMemOperand(r0, JSIteratorResult::kDoneOffset)); | 2208 __ str(r3, FieldMemOperand(r0, JSIteratorResult::kDoneOffset)); |
| 2207 } | 2209 } |
| (...skipping 307 matching lines...) |
| 2515 r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs); | 2517 r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs); |
| 2516 } | 2518 } |
| 2517 } | 2519 } |
| 2518 | 2520 |
| 2519 | 2521 |
| 2520 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op, | 2522 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op, |
| 2521 FeedbackVectorSlot slot) { | 2523 FeedbackVectorSlot slot) { |
| 2522 if (var->IsUnallocated()) { | 2524 if (var->IsUnallocated()) { |
| 2523 // Global var, const, or let. | 2525 // Global var, const, or let. |
| 2524 __ mov(StoreDescriptor::NameRegister(), Operand(var->name())); | 2526 __ mov(StoreDescriptor::NameRegister(), Operand(var->name())); |
| 2525 __ LoadGlobalObject(StoreDescriptor::ReceiverRegister()); | 2527 __ ldr(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand()); |
| 2526 EmitLoadStoreICSlot(slot); | 2528 EmitLoadStoreICSlot(slot); |
| 2527 CallStoreIC(); | 2529 CallStoreIC(); |
| 2528 | 2530 |
| 2529 } else if (var->mode() == LET && op != Token::INIT) { | 2531 } else if (var->mode() == LET && op != Token::INIT) { |
| 2530 // Non-initializing assignment to let variable needs a write barrier. | 2532 // Non-initializing assignment to let variable needs a write barrier. |
| 2531 DCHECK(!var->IsLookupSlot()); | 2533 DCHECK(!var->IsLookupSlot()); |
| 2532 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); | 2534 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); |
| 2533 Label assign; | 2535 Label assign; |
| 2534 MemOperand location = VarOperand(var, r1); | 2536 MemOperand location = VarOperand(var, r1); |
| 2535 __ ldr(r3, location); | 2537 __ ldr(r3, location); |
| (...skipping 1573 matching lines...) |
| 4109 | 4111 |
| 4110 void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) { | 4112 void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) { |
| 4111 ZoneList<Expression*>* args = expr->arguments(); | 4113 ZoneList<Expression*>* args = expr->arguments(); |
| 4112 DCHECK_EQ(2, args->length()); | 4114 DCHECK_EQ(2, args->length()); |
| 4113 VisitForStackValue(args->at(0)); | 4115 VisitForStackValue(args->at(0)); |
| 4114 VisitForStackValue(args->at(1)); | 4116 VisitForStackValue(args->at(1)); |
| 4115 | 4117 |
| 4116 Label runtime, done; | 4118 Label runtime, done; |
| 4117 | 4119 |
| 4118 __ Allocate(JSIteratorResult::kSize, r0, r2, r3, &runtime, TAG_OBJECT); | 4120 __ Allocate(JSIteratorResult::kSize, r0, r2, r3, &runtime, TAG_OBJECT); |
| 4119 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r1); | 4121 __ ldr(r1, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); |
| | 4122 __ ldr(r1, FieldMemOperand(r1, JSGlobalObject::kNativeContextOffset)); |
| | 4123 __ ldr(r1, ContextOperand(r1, Context::ITERATOR_RESULT_MAP_INDEX)); |
| 4120 __ pop(r3); | 4124 __ pop(r3); |
| 4121 __ pop(r2); | 4125 __ pop(r2); |
| 4122 __ LoadRoot(r4, Heap::kEmptyFixedArrayRootIndex); | 4126 __ LoadRoot(r4, Heap::kEmptyFixedArrayRootIndex); |
| 4123 __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset)); | 4127 __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset)); |
| 4124 __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset)); | 4128 __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset)); |
| 4125 __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset)); | 4129 __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset)); |
| 4126 __ str(r2, FieldMemOperand(r0, JSIteratorResult::kValueOffset)); | 4130 __ str(r2, FieldMemOperand(r0, JSIteratorResult::kValueOffset)); |
| 4127 __ str(r3, FieldMemOperand(r0, JSIteratorResult::kDoneOffset)); | 4131 __ str(r3, FieldMemOperand(r0, JSIteratorResult::kDoneOffset)); |
| 4128 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize); | 4132 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize); |
| 4129 __ b(&done); | 4133 __ b(&done); |
| 4130 | 4134 |
| 4131 __ bind(&runtime); | 4135 __ bind(&runtime); |
| 4132 __ CallRuntime(Runtime::kCreateIterResultObject, 2); | 4136 __ CallRuntime(Runtime::kCreateIterResultObject, 2); |
| 4133 | 4137 |
| 4134 __ bind(&done); | 4138 __ bind(&done); |
| 4135 context()->Plug(r0); | 4139 context()->Plug(r0); |
| 4136 } | 4140 } |
| 4137 | 4141 |
| 4138 | 4142 |
| 4139 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) { | 4143 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) { |
| 4140 // Push undefined as the receiver. | 4144 // Push undefined as the receiver. |
| 4141 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); | 4145 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); |
| 4142 __ push(r0); | 4146 __ push(r0); |
| 4143 | 4147 |
| 4144 __ LoadNativeContextSlot(expr->context_index(), r0); | 4148 __ ldr(r0, GlobalObjectOperand()); |
| | 4149 __ ldr(r0, FieldMemOperand(r0, JSGlobalObject::kNativeContextOffset)); |
| | 4150 __ ldr(r0, ContextOperand(r0, expr->context_index())); |
| 4145 } | 4151 } |
| 4146 | 4152 |
| 4147 | 4153 |
| 4148 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) { | 4154 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) { |
| 4149 ZoneList<Expression*>* args = expr->arguments(); | 4155 ZoneList<Expression*>* args = expr->arguments(); |
| 4150 int arg_count = args->length(); | 4156 int arg_count = args->length(); |
| 4151 | 4157 |
| 4152 SetCallPosition(expr, arg_count); | 4158 SetCallPosition(expr, arg_count); |
| 4153 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize)); | 4159 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize)); |
| 4154 __ mov(r0, Operand(arg_count)); | 4160 __ mov(r0, Operand(arg_count)); |
| (...skipping 70 matching lines...) |
| 4225 : Runtime::kDeleteProperty_Sloppy, | 4231 : Runtime::kDeleteProperty_Sloppy, |
| 4226 2); | 4232 2); |
| 4227 context()->Plug(r0); | 4233 context()->Plug(r0); |
| 4228 } else if (proxy != NULL) { | 4234 } else if (proxy != NULL) { |
| 4229 Variable* var = proxy->var(); | 4235 Variable* var = proxy->var(); |
| 4230 // Delete of an unqualified identifier is disallowed in strict mode but | 4236 // Delete of an unqualified identifier is disallowed in strict mode but |
| 4231 // "delete this" is allowed. | 4237 // "delete this" is allowed. |
| 4232 bool is_this = var->HasThisName(isolate()); | 4238 bool is_this = var->HasThisName(isolate()); |
| 4233 DCHECK(is_sloppy(language_mode()) || is_this); | 4239 DCHECK(is_sloppy(language_mode()) || is_this); |
| 4234 if (var->IsUnallocatedOrGlobalSlot()) { | 4240 if (var->IsUnallocatedOrGlobalSlot()) { |
| 4235 __ LoadGlobalObject(r2); | 4241 __ ldr(r2, GlobalObjectOperand()); |
| 4236 __ mov(r1, Operand(var->name())); | 4242 __ mov(r1, Operand(var->name())); |
| 4237 __ Push(r2, r1); | 4243 __ Push(r2, r1); |
| 4238 __ CallRuntime(Runtime::kDeleteProperty_Sloppy, 2); | 4244 __ CallRuntime(Runtime::kDeleteProperty_Sloppy, 2); |
| 4239 context()->Plug(r0); | 4245 context()->Plug(r0); |
| 4240 } else if (var->IsStackAllocated() || var->IsContextSlot()) { | 4246 } else if (var->IsStackAllocated() || var->IsContextSlot()) { |
| 4241 // Result of deleting non-global, non-dynamic variables is false. | 4247 // Result of deleting non-global, non-dynamic variables is false. |
| 4242 // The subexpression does not have side effects. | 4248 // The subexpression does not have side effects. |
| 4243 context()->Plug(is_this); | 4249 context()->Plug(is_this); |
| 4244 } else { | 4250 } else { |
| 4245 // Non-global variable. Call the runtime to try to delete from the | 4251 // Non-global variable. Call the runtime to try to delete from the |
| (...skipping 526 matching lines...) |
| 4772 } | 4778 } |
| 4773 | 4779 |
| 4774 | 4780 |
| 4775 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) { | 4781 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) { |
| 4776 DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset); | 4782 DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset); |
| 4777 __ str(value, MemOperand(fp, frame_offset)); | 4783 __ str(value, MemOperand(fp, frame_offset)); |
| 4778 } | 4784 } |
| 4779 | 4785 |
| 4780 | 4786 |
| 4781 void FullCodeGenerator::LoadContextField(Register dst, int context_index) { | 4787 void FullCodeGenerator::LoadContextField(Register dst, int context_index) { |
| 4782 __ ldr(dst, ContextMemOperand(cp, context_index)); | 4788 __ ldr(dst, ContextOperand(cp, context_index)); |
| 4783 } | 4789 } |
| 4784 | 4790 |
| 4785 | 4791 |
| 4786 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() { | 4792 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() { |
| 4787 Scope* closure_scope = scope()->ClosureScope(); | 4793 Scope* closure_scope = scope()->ClosureScope(); |
| 4788 if (closure_scope->is_script_scope() || | 4794 if (closure_scope->is_script_scope() || |
| 4789 closure_scope->is_module_scope()) { | 4795 closure_scope->is_module_scope()) { |
| 4790 // Contexts nested in the native context have a canonical empty function | 4796 // Contexts nested in the native context have a canonical empty function |
| 4791 // as their closure, not the anonymous closure containing the global | 4797 // as their closure, not the anonymous closure containing the global |
| 4792 // code. | 4798 // code. |
| 4793 __ LoadNativeContextSlot(Context::CLOSURE_INDEX, ip); | 4799 __ ldr(ip, GlobalObjectOperand()); |
| | 4800 __ ldr(ip, FieldMemOperand(ip, JSGlobalObject::kNativeContextOffset)); |
| | 4801 __ ldr(ip, ContextOperand(ip, Context::CLOSURE_INDEX)); |
| 4794 } else if (closure_scope->is_eval_scope()) { | 4802 } else if (closure_scope->is_eval_scope()) { |
| 4795 // Contexts created by a call to eval have the same closure as the | 4803 // Contexts created by a call to eval have the same closure as the |
| 4796 // context calling eval, not the anonymous closure containing the eval | 4804 // context calling eval, not the anonymous closure containing the eval |
| 4797 // code. Fetch it from the context. | 4805 // code. Fetch it from the context. |
| 4798 __ ldr(ip, ContextMemOperand(cp, Context::CLOSURE_INDEX)); | 4806 __ ldr(ip, ContextOperand(cp, Context::CLOSURE_INDEX)); |
| 4799 } else { | 4807 } else { |
| 4800 DCHECK(closure_scope->is_function_scope()); | 4808 DCHECK(closure_scope->is_function_scope()); |
| 4801 __ ldr(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 4809 __ ldr(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
| 4802 } | 4810 } |
| 4803 __ push(ip); | 4811 __ push(ip); |
| 4804 } | 4812 } |
| 4805 | 4813 |
| 4806 | 4814 |
| 4807 // ---------------------------------------------------------------------------- | 4815 // ---------------------------------------------------------------------------- |
| 4808 // Non-local control flow support. | 4816 // Non-local control flow support. |
| (...skipping 195 matching lines...) |
| 5004 DCHECK(interrupt_address == | 5012 DCHECK(interrupt_address == |
| 5005 isolate->builtins()->OsrAfterStackCheck()->entry()); | 5013 isolate->builtins()->OsrAfterStackCheck()->entry()); |
| 5006 return OSR_AFTER_STACK_CHECK; | 5014 return OSR_AFTER_STACK_CHECK; |
| 5007 } | 5015 } |
| 5008 | 5016 |
| 5009 | 5017 |
| 5010 } // namespace internal | 5018 } // namespace internal |
| 5011 } // namespace v8 | 5019 } // namespace v8 |
| 5012 | 5020 |
| 5013 #endif // V8_TARGET_ARCH_ARM | 5021 #endif // V8_TARGET_ARCH_ARM |