| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_ARM | 5 #if V8_TARGET_ARCH_ARM |
| 6 | 6 |
| 7 #include "src/codegen.h" | 7 #include "src/codegen.h" |
| 8 #include "src/debug/debug.h" | 8 #include "src/debug/debug.h" |
| 9 #include "src/deoptimizer.h" | 9 #include "src/deoptimizer.h" |
| 10 #include "src/full-codegen/full-codegen.h" | 10 #include "src/full-codegen/full-codegen.h" |
| (...skipping 1237 matching lines...) |
| 1248 __ sub(r0, r0, Operand(1)); | 1248 __ sub(r0, r0, Operand(1)); |
| 1249 __ pop(); | 1249 __ pop(); |
| 1250 } | 1250 } |
| 1251 | 1251 |
| 1252 // 4. Call the callable. | 1252 // 4. Call the callable. |
| 1253 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); | 1253 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); |
| 1254 } | 1254 } |
| 1255 | 1255 |
| 1256 | 1256 |
| 1257 static void Generate_PushAppliedArguments(MacroAssembler* masm, | 1257 static void Generate_PushAppliedArguments(MacroAssembler* masm, |
| | 1258 const int vectorOffset, |
| 1258 const int argumentsOffset, | 1259 const int argumentsOffset, |
| 1259 const int indexOffset, | 1260 const int indexOffset, |
| 1260 const int limitOffset) { | 1261 const int limitOffset) { |
| 1261 Label entry, loop; | 1262 Label entry, loop; |
| 1262 Register receiver = LoadDescriptor::ReceiverRegister(); | 1263 Register receiver = LoadDescriptor::ReceiverRegister(); |
| 1263 Register key = LoadDescriptor::NameRegister(); | 1264 Register key = LoadDescriptor::NameRegister(); |
| 1264 Register slot = LoadDescriptor::SlotRegister(); | 1265 Register slot = LoadDescriptor::SlotRegister(); |
| 1265 Register vector = LoadWithVectorDescriptor::VectorRegister(); | 1266 Register vector = LoadWithVectorDescriptor::VectorRegister(); |
| 1266 | 1267 |
| 1267 __ ldr(key, MemOperand(fp, indexOffset)); | 1268 __ ldr(key, MemOperand(fp, indexOffset)); |
| 1268 __ b(&entry); | 1269 __ b(&entry); |
| 1269 | 1270 |
| 1270 // Load the current argument from the arguments array. | 1271 // Load the current argument from the arguments array. |
| 1271 __ bind(&loop); | 1272 __ bind(&loop); |
| 1272 __ ldr(receiver, MemOperand(fp, argumentsOffset)); | 1273 __ ldr(receiver, MemOperand(fp, argumentsOffset)); |
| 1273 | 1274 |
| 1274 // Use inline caching to speed up access to arguments. | 1275 // Use inline caching to speed up access to arguments. |
| 1275 Code::Kind kinds[] = {Code::KEYED_LOAD_IC}; | 1276 int slot_index = TypeFeedbackVector::PushAppliedArgumentsIndex(); |
| 1276 FeedbackVectorSpec spec(0, 1, kinds); | 1277 __ mov(slot, Operand(Smi::FromInt(slot_index))); |
| 1277 Handle<TypeFeedbackVector> feedback_vector = | 1278 __ ldr(vector, MemOperand(fp, vectorOffset)); |
| 1278 masm->isolate()->factory()->NewTypeFeedbackVector(&spec); | |
| 1279 int index = feedback_vector->GetIndex(FeedbackVectorICSlot(0)); | |
| 1280 __ mov(slot, Operand(Smi::FromInt(index))); | |
| 1281 __ Move(vector, feedback_vector); | |
| 1282 Handle<Code> ic = | 1279 Handle<Code> ic = |
| 1283 KeyedLoadICStub(masm->isolate(), LoadICState(kNoExtraICState)).GetCode(); | 1280 KeyedLoadICStub(masm->isolate(), LoadICState(kNoExtraICState)).GetCode(); |
| 1284 __ Call(ic, RelocInfo::CODE_TARGET); | 1281 __ Call(ic, RelocInfo::CODE_TARGET); |
| 1285 | 1282 |
| 1286 // Push the nth argument. | 1283 // Push the nth argument. |
| 1287 __ push(r0); | 1284 __ push(r0); |
| 1288 | 1285 |
| 1289 __ ldr(key, MemOperand(fp, indexOffset)); | 1286 __ ldr(key, MemOperand(fp, indexOffset)); |
| 1290 __ add(key, key, Operand(1 << kSmiTagSize)); | 1287 __ add(key, key, Operand(1 << kSmiTagSize)); |
| 1291 __ str(key, MemOperand(fp, indexOffset)); | 1288 __ str(key, MemOperand(fp, indexOffset)); |
| (...skipping 14 matching lines...) |
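For readers less used to the ARM macro assembler, here is a minimal standalone C++ sketch (a model, not real V8 API; the feedback-vector/IC machinery is elided) of the loop that Generate_PushAppliedArguments emits: load arguments[index], push it, bump the index stored in its frame slot, and stop once the index reaches the limit. The asm keeps index and limit in frame slots rather than registers, which is why key is re-loaded from fp after each IC call.

    #include <cstdio>
    #include <vector>

    // Standalone model of the push loop above. In the real code, index and
    // limit live in frame slots (kIndexOffset / kLimitOffset); here they are
    // plain locals. The KeyedLoadIC that fetches arguments[index] is modelled
    // as a simple element read.
    static void PushAppliedArguments(const std::vector<int>& arguments,
                                     std::vector<int>* stack) {
      int index = 0;                                          // slot at kIndexOffset
      const int limit = static_cast<int>(arguments.size());  // slot at kLimitOffset
      while (index < limit) {                 // compare at &entry, branch to &loop
        stack->push_back(arguments[index]);   // KeyedLoadIC call + push(r0)
        ++index;                              // add(key, key, Operand(1 << kSmiTagSize))
      }
    }

    int main() {
      std::vector<int> stack;
      PushAppliedArguments({1, 2, 3}, &stack);
      std::printf("pushed %zu arguments\n", stack.size());
      return 0;
    }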
| 1306 // Used by FunctionApply and ReflectApply | 1303 // Used by FunctionApply and ReflectApply |
| 1307 static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) { | 1304 static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) { |
| 1308 const int kFormalParameters = targetIsArgument ? 3 : 2; | 1305 const int kFormalParameters = targetIsArgument ? 3 : 2; |
| 1309 const int kStackSize = kFormalParameters + 1; | 1306 const int kStackSize = kFormalParameters + 1; |
| 1310 | 1307 |
| 1311 { | 1308 { |
| 1312 FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL); | 1309 FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL); |
| 1313 const int kArgumentsOffset = kFPOnStackSize + kPCOnStackSize; | 1310 const int kArgumentsOffset = kFPOnStackSize + kPCOnStackSize; |
| 1314 const int kReceiverOffset = kArgumentsOffset + kPointerSize; | 1311 const int kReceiverOffset = kArgumentsOffset + kPointerSize; |
| 1315 const int kFunctionOffset = kReceiverOffset + kPointerSize; | 1312 const int kFunctionOffset = kReceiverOffset + kPointerSize; |
| | 1313 const int kVectorOffset = |
| | 1314 InternalFrameConstants::kCodeOffset - 1 * kPointerSize; |
| | 1315 |
| | 1316 // Push the vector. |
| | 1317 __ ldr(r1, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); |
| | 1318 __ ldr(r1, FieldMemOperand(r1, SharedFunctionInfo::kFeedbackVectorOffset)); |
| | 1319 __ Push(r1); |
| 1316 | 1320 |
| 1317 __ ldr(r0, MemOperand(fp, kFunctionOffset)); // get the function | 1321 __ ldr(r0, MemOperand(fp, kFunctionOffset)); // get the function |
| 1318 __ ldr(r1, MemOperand(fp, kArgumentsOffset)); // get the args array | 1322 __ ldr(r1, MemOperand(fp, kArgumentsOffset)); // get the args array |
| 1319 __ Push(r0, r1); | 1323 __ Push(r0, r1); |
| 1320 if (targetIsArgument) { | 1324 if (targetIsArgument) { |
| 1321 __ InvokeBuiltin(Context::REFLECT_APPLY_PREPARE_BUILTIN_INDEX, | 1325 __ InvokeBuiltin(Context::REFLECT_APPLY_PREPARE_BUILTIN_INDEX, |
| 1322 CALL_FUNCTION); | 1326 CALL_FUNCTION); |
| 1323 } else { | 1327 } else { |
| 1324 __ InvokeBuiltin(Context::APPLY_PREPARE_BUILTIN_INDEX, CALL_FUNCTION); | 1328 __ InvokeBuiltin(Context::APPLY_PREPARE_BUILTIN_INDEX, CALL_FUNCTION); |
| 1325 } | 1329 } |
| 1326 | 1330 |
| 1327 Generate_CheckStackOverflow(masm, kFunctionOffset, r0, kArgcIsSmiTagged); | 1331 Generate_CheckStackOverflow(masm, kFunctionOffset, r0, kArgcIsSmiTagged); |
| 1328 | 1332 |
| 1329 // Push current limit and index. | 1333 // Push current limit and index. |
| 1330 const int kIndexOffset = | 1334 const int kIndexOffset = kVectorOffset - (2 * kPointerSize); |
| 1331 StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize); | 1335 const int kLimitOffset = kVectorOffset - (1 * kPointerSize); |
| 1332 const int kLimitOffset = | |
| 1333 StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize); | |
| 1334 __ mov(r1, Operand::Zero()); | 1336 __ mov(r1, Operand::Zero()); |
| 1335 __ ldr(r2, MemOperand(fp, kReceiverOffset)); | 1337 __ ldr(r2, MemOperand(fp, kReceiverOffset)); |
| 1336 __ Push(r0, r1, r2); // limit, initial index and receiver. | 1338 __ Push(r0, r1, r2); // limit, initial index and receiver. |
| 1337 | 1339 |
| 1338 // Copy all arguments from the array to the stack. | 1340 // Copy all arguments from the array to the stack. |
| 1339 Generate_PushAppliedArguments(masm, kArgumentsOffset, kIndexOffset, | 1341 Generate_PushAppliedArguments(masm, kVectorOffset, kArgumentsOffset, |
| 1340 kLimitOffset); | 1342 kIndexOffset, kLimitOffset); |
| 1341 | 1343 |
| 1342 // Call the callable. | 1344 // Call the callable. |
| 1343 // TODO(bmeurer): This should be a tail call according to ES6. | 1345 // TODO(bmeurer): This should be a tail call according to ES6. |
| 1344 __ ldr(r1, MemOperand(fp, kFunctionOffset)); | 1346 __ ldr(r1, MemOperand(fp, kFunctionOffset)); |
| 1345 __ Call(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); | 1347 __ Call(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); |
| 1346 | 1348 |
| 1347 // Tear down the internal frame and remove function, receiver and args. | 1349 // Tear down the internal frame and remove function, receiver and args. |
| 1348 } | 1350 } |
| 1349 __ add(sp, sp, Operand(kStackSize * kPointerSize)); | 1351 __ add(sp, sp, Operand(kStackSize * kPointerSize)); |
| 1350 __ Jump(lr); | 1352 __ Jump(lr); |
| 1351 } | 1353 } |
| 1352 | 1354 |
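The new kVectorOffset/kIndexOffset/kLimitOffset constants encode where the pushed feedback vector, the loop limit, and the loop index sit relative to fp inside the internal frame, so Generate_PushAppliedArguments can reload the vector with a single ldr from fp instead of materializing a freshly allocated TypeFeedbackVector as the old code did. Below is a small standalone sketch of that arithmetic; kPointerSize and kCodeOffset are assumed values for illustration only (the real constants come from the frame headers and depend on the constant-pool configuration).

    #include <cstdio>

    int main() {
      // Assumed values, for illustration only (32-bit ARM word size; the real
      // InternalFrameConstants::kCodeOffset is defined in src/frames.h).
      const int kPointerSize = 4;
      const int kCodeOffset = -2 * kPointerSize;  // assumption

      // Mirrors the arithmetic in Generate_ApplyHelper / Generate_ConstructHelper:
      // the vector is pushed just below the code slot, then limit and index are
      // pushed below it (the stack grows towards lower addresses).
      const int kVectorOffset = kCodeOffset - 1 * kPointerSize;
      const int kLimitOffset = kVectorOffset - 1 * kPointerSize;
      const int kIndexOffset = kVectorOffset - 2 * kPointerSize;

      std::printf("vector at fp%+d, limit at fp%+d, index at fp%+d\n",
                  kVectorOffset, kLimitOffset, kIndexOffset);
      return 0;
    }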
| 1353 | 1355 |
| 1354 static void Generate_ConstructHelper(MacroAssembler* masm) { | 1356 static void Generate_ConstructHelper(MacroAssembler* masm) { |
| 1355 const int kFormalParameters = 3; | 1357 const int kFormalParameters = 3; |
| 1356 const int kStackSize = kFormalParameters + 1; | 1358 const int kStackSize = kFormalParameters + 1; |
| 1357 | 1359 |
| 1358 { | 1360 { |
| 1359 FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL); | 1361 FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL); |
| 1360 const int kNewTargetOffset = kFPOnStackSize + kPCOnStackSize; | 1362 const int kNewTargetOffset = kFPOnStackSize + kPCOnStackSize; |
| 1361 const int kArgumentsOffset = kNewTargetOffset + kPointerSize; | 1363 const int kArgumentsOffset = kNewTargetOffset + kPointerSize; |
| 1362 const int kFunctionOffset = kArgumentsOffset + kPointerSize; | 1364 const int kFunctionOffset = kArgumentsOffset + kPointerSize; |
| | 1365 static const int kVectorOffset = |
| | 1366 InternalFrameConstants::kCodeOffset - 1 * kPointerSize; |
| | 1367 |
| | 1368 // Push the vector. |
| | 1369 __ ldr(r1, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); |
| | 1370 __ ldr(r1, FieldMemOperand(r1, SharedFunctionInfo::kFeedbackVectorOffset)); |
| | 1371 __ Push(r1); |
| 1363 | 1372 |
| 1364 // If newTarget is not supplied, set it to constructor | 1373 // If newTarget is not supplied, set it to constructor |
| 1365 Label validate_arguments; | 1374 Label validate_arguments; |
| 1366 __ ldr(r0, MemOperand(fp, kNewTargetOffset)); | 1375 __ ldr(r0, MemOperand(fp, kNewTargetOffset)); |
| 1367 __ CompareRoot(r0, Heap::kUndefinedValueRootIndex); | 1376 __ CompareRoot(r0, Heap::kUndefinedValueRootIndex); |
| 1368 __ b(ne, &validate_arguments); | 1377 __ b(ne, &validate_arguments); |
| 1369 __ ldr(r0, MemOperand(fp, kFunctionOffset)); | 1378 __ ldr(r0, MemOperand(fp, kFunctionOffset)); |
| 1370 __ str(r0, MemOperand(fp, kNewTargetOffset)); | 1379 __ str(r0, MemOperand(fp, kNewTargetOffset)); |
| 1371 | 1380 |
| 1372 // Validate arguments | 1381 // Validate arguments |
| 1373 __ bind(&validate_arguments); | 1382 __ bind(&validate_arguments); |
| 1374 __ ldr(r0, MemOperand(fp, kFunctionOffset)); // get the function | 1383 __ ldr(r0, MemOperand(fp, kFunctionOffset)); // get the function |
| 1375 __ push(r0); | 1384 __ push(r0); |
| 1376 __ ldr(r0, MemOperand(fp, kArgumentsOffset)); // get the args array | 1385 __ ldr(r0, MemOperand(fp, kArgumentsOffset)); // get the args array |
| 1377 __ push(r0); | 1386 __ push(r0); |
| 1378 __ ldr(r0, MemOperand(fp, kNewTargetOffset)); // get the new.target | 1387 __ ldr(r0, MemOperand(fp, kNewTargetOffset)); // get the new.target |
| 1379 __ push(r0); | 1388 __ push(r0); |
| 1380 __ InvokeBuiltin(Context::REFLECT_CONSTRUCT_PREPARE_BUILTIN_INDEX, | 1389 __ InvokeBuiltin(Context::REFLECT_CONSTRUCT_PREPARE_BUILTIN_INDEX, |
| 1381 CALL_FUNCTION); | 1390 CALL_FUNCTION); |
| 1382 | 1391 |
| 1383 Generate_CheckStackOverflow(masm, kFunctionOffset, r0, kArgcIsSmiTagged); | 1392 Generate_CheckStackOverflow(masm, kFunctionOffset, r0, kArgcIsSmiTagged); |
| 1384 | 1393 |
| 1385 // Push current limit and index. | 1394 // Push current limit and index. |
| 1386 const int kIndexOffset = | 1395 const int kIndexOffset = kVectorOffset - (2 * kPointerSize); |
| 1387 StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize); | 1396 const int kLimitOffset = kVectorOffset - (1 * kPointerSize); |
| 1388 const int kLimitOffset = | |
| 1389 StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize); | |
| 1390 __ push(r0); // limit | 1397 __ push(r0); // limit |
| 1391 __ mov(r1, Operand::Zero()); // initial index | 1398 __ mov(r1, Operand::Zero()); // initial index |
| 1392 __ push(r1); | 1399 __ push(r1); |
| 1393 // Push the constructor function as callee. | 1400 // Push the constructor function as callee. |
| 1394 __ ldr(r0, MemOperand(fp, kFunctionOffset)); | 1401 __ ldr(r0, MemOperand(fp, kFunctionOffset)); |
| 1395 __ push(r0); | 1402 __ push(r0); |
| 1396 | 1403 |
| 1397 // Copy all arguments from the array to the stack. | 1404 // Copy all arguments from the array to the stack. |
| 1398 Generate_PushAppliedArguments( | 1405 Generate_PushAppliedArguments(masm, kVectorOffset, kArgumentsOffset, |
| 1399 masm, kArgumentsOffset, kIndexOffset, kLimitOffset); | 1406 kIndexOffset, kLimitOffset); |
| 1400 | 1407 |
| 1401 // Use undefined feedback vector | 1408 // Use undefined feedback vector |
| 1402 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); | 1409 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); |
| 1403 __ ldr(r1, MemOperand(fp, kFunctionOffset)); | 1410 __ ldr(r1, MemOperand(fp, kFunctionOffset)); |
| 1404 __ ldr(r4, MemOperand(fp, kNewTargetOffset)); | 1411 __ ldr(r4, MemOperand(fp, kNewTargetOffset)); |
| 1405 | 1412 |
| 1406 // Call the function. | 1413 // Call the function. |
| 1407 CallConstructStub stub(masm->isolate(), SUPER_CONSTRUCTOR_CALL); | 1414 CallConstructStub stub(masm->isolate(), SUPER_CONSTRUCTOR_CALL); |
| 1408 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL); | 1415 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL); |
| 1409 | 1416 |
| (...skipping 372 matching lines...) |
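The new.target handling at the top of Generate_ConstructHelper simply substitutes the constructor itself when no explicit new.target was supplied, before validating the arguments. A hedged standalone illustration of that defaulting follows; the Value struct and DefaultNewTarget helper are stand-ins for exposition, not V8 types.

    #include <cassert>

    // Illustrative stand-in for a tagged value; not a real V8 type.
    struct Value {
      bool is_undefined;
      int id;
    };

    // Mirrors the load/compare/store sequence before &validate_arguments:
    // if new.target is undefined, fall back to the constructor function.
    static Value DefaultNewTarget(Value constructor, Value new_target) {
      return new_target.is_undefined ? constructor : new_target;
    }

    int main() {
      const Value ctor{false, 1};
      const Value undef{true, 0};
      const Value explicit_target{false, 2};
      assert(DefaultNewTarget(ctor, undef).id == 1);            // defaulted to ctor
      assert(DefaultNewTarget(ctor, explicit_target).id == 2);  // kept as given
      return 0;
    }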
| 1782 } | 1789 } |
| 1783 } | 1790 } |
| 1784 | 1791 |
| 1785 | 1792 |
| 1786 #undef __ | 1793 #undef __ |
| 1787 | 1794 |
| 1788 } // namespace internal | 1795 } // namespace internal |
| 1789 } // namespace v8 | 1796 } // namespace v8 |
| 1790 | 1797 |
| 1791 #endif // V8_TARGET_ARCH_ARM | 1798 #endif // V8_TARGET_ARCH_ARM |