| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_PPC | 5 #if V8_TARGET_ARCH_PPC |
| 6 | 6 |
| 7 #include "src/codegen.h" | 7 #include "src/codegen.h" |
| 8 #include "src/debug/debug.h" | 8 #include "src/debug/debug.h" |
| 9 #include "src/deoptimizer.h" | 9 #include "src/deoptimizer.h" |
| 10 #include "src/full-codegen/full-codegen.h" | 10 #include "src/full-codegen/full-codegen.h" |
| (...skipping 1246 matching lines...) |
| 1257 __ subi(r3, r3, Operand(1)); | 1257 __ subi(r3, r3, Operand(1)); |
| 1258 __ pop(); | 1258 __ pop(); |
| 1259 } | 1259 } |
| 1260 | 1260 |
| 1261 // 4. Call the callable. | 1261 // 4. Call the callable. |
| 1262 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); | 1262 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); |
| 1263 } | 1263 } |
| 1264 | 1264 |
| 1265 | 1265 |
| 1266 static void Generate_PushAppliedArguments(MacroAssembler* masm, | 1266 static void Generate_PushAppliedArguments(MacroAssembler* masm, |
| | 1267 const int vectorOffset, |
| 1267 const int argumentsOffset, | 1268 const int argumentsOffset, |
| 1268 const int indexOffset, | 1269 const int indexOffset, |
| 1269 const int limitOffset) { | 1270 const int limitOffset) { |
| 1270 Register receiver = LoadDescriptor::ReceiverRegister(); | 1271 Register receiver = LoadDescriptor::ReceiverRegister(); |
| 1271 Register key = LoadDescriptor::NameRegister(); | 1272 Register key = LoadDescriptor::NameRegister(); |
| 1272 Register slot = LoadDescriptor::SlotRegister(); | 1273 Register slot = LoadDescriptor::SlotRegister(); |
| 1273 Register vector = LoadWithVectorDescriptor::VectorRegister(); | 1274 Register vector = LoadWithVectorDescriptor::VectorRegister(); |
| 1274 | 1275 |
| 1275 // Copy all arguments from the array to the stack. | 1276 // Copy all arguments from the array to the stack. |
| 1276 Label entry, loop; | 1277 Label entry, loop; |
| 1277 __ LoadP(key, MemOperand(fp, indexOffset)); | 1278 __ LoadP(key, MemOperand(fp, indexOffset)); |
| 1278 __ b(&entry); | 1279 __ b(&entry); |
| 1279 __ bind(&loop); | 1280 __ bind(&loop); |
| 1280 __ LoadP(receiver, MemOperand(fp, argumentsOffset)); | 1281 __ LoadP(receiver, MemOperand(fp, argumentsOffset)); |
| 1281 | 1282 |
| 1282 // Use inline caching to speed up access to arguments. | 1283 // Use inline caching to speed up access to arguments. |
| 1283 Code::Kind kinds[] = {Code::KEYED_LOAD_IC}; | 1284 int slot_index = TypeFeedbackVector::PushAppliedArgumentsIndex(); |
| 1284 FeedbackVectorSpec spec(0, 1, kinds); | 1285 __ LoadSmiLiteral(slot, Smi::FromInt(slot_index)); |
| 1285 Handle<TypeFeedbackVector> feedback_vector = | 1286 __ LoadP(vector, MemOperand(fp, vectorOffset)); |
| 1286 masm->isolate()->factory()->NewTypeFeedbackVector(&spec); | |
| 1287 int index = feedback_vector->GetIndex(FeedbackVectorICSlot(0)); | |
| 1288 __ LoadSmiLiteral(slot, Smi::FromInt(index)); | |
| 1289 __ Move(vector, feedback_vector); | |
| 1290 Handle<Code> ic = | 1287 Handle<Code> ic = |
| 1291 KeyedLoadICStub(masm->isolate(), LoadICState(kNoExtraICState)).GetCode(); | 1288 KeyedLoadICStub(masm->isolate(), LoadICState(kNoExtraICState)).GetCode(); |
| 1292 __ Call(ic, RelocInfo::CODE_TARGET); | 1289 __ Call(ic, RelocInfo::CODE_TARGET); |
| 1293 | 1290 |
| 1294 // Push the nth argument. | 1291 // Push the nth argument. |
| 1295 __ push(r3); | 1292 __ push(r3); |
| 1296 | 1293 |
| 1297 // Update the index on the stack and in register key. | 1294 // Update the index on the stack and in register key. |
| 1298 __ LoadP(key, MemOperand(fp, indexOffset)); | 1295 __ LoadP(key, MemOperand(fp, indexOffset)); |
| 1299 __ AddSmiLiteral(key, key, Smi::FromInt(1), r0); | 1296 __ AddSmiLiteral(key, key, Smi::FromInt(1), r0); |
| (...skipping 14 matching lines...) |
| 1314 // Used by FunctionApply and ReflectApply | 1311 // Used by FunctionApply and ReflectApply |
| 1315 static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) { | 1312 static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) { |
| 1316 const int kFormalParameters = targetIsArgument ? 3 : 2; | 1313 const int kFormalParameters = targetIsArgument ? 3 : 2; |
| 1317 const int kStackSize = kFormalParameters + 1; | 1314 const int kStackSize = kFormalParameters + 1; |
| 1318 | 1315 |
| 1319 { | 1316 { |
| 1320 FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL); | 1317 FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL); |
| 1321 const int kArgumentsOffset = kFPOnStackSize + kPCOnStackSize; | 1318 const int kArgumentsOffset = kFPOnStackSize + kPCOnStackSize; |
| 1322 const int kReceiverOffset = kArgumentsOffset + kPointerSize; | 1319 const int kReceiverOffset = kArgumentsOffset + kPointerSize; |
| 1323 const int kFunctionOffset = kReceiverOffset + kPointerSize; | 1320 const int kFunctionOffset = kReceiverOffset + kPointerSize; |
| | 1321 const int kVectorOffset = |
| | 1322 InternalFrameConstants::kCodeOffset - 1 * kPointerSize; |
| | 1323 |
| | 1324 // Push the vector. |
| | 1325 __ LoadP(r4, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); |
| | 1326 __ LoadP(r4, |
| | 1327 FieldMemOperand(r4, SharedFunctionInfo::kFeedbackVectorOffset)); |
| | 1328 __ push(r4); |
| 1324 | 1329 |
| 1325 __ LoadP(r3, MemOperand(fp, kFunctionOffset)); // get the function | 1330 __ LoadP(r3, MemOperand(fp, kFunctionOffset)); // get the function |
| 1326 __ LoadP(r4, MemOperand(fp, kArgumentsOffset)); // get the args array | 1331 __ LoadP(r4, MemOperand(fp, kArgumentsOffset)); // get the args array |
| 1327 __ Push(r3, r4); | 1332 __ Push(r3, r4); |
| 1328 if (targetIsArgument) { | 1333 if (targetIsArgument) { |
| 1329 __ InvokeBuiltin(Context::REFLECT_APPLY_PREPARE_BUILTIN_INDEX, | 1334 __ InvokeBuiltin(Context::REFLECT_APPLY_PREPARE_BUILTIN_INDEX, |
| 1330 CALL_FUNCTION); | 1335 CALL_FUNCTION); |
| 1331 } else { | 1336 } else { |
| 1332 __ InvokeBuiltin(Context::APPLY_PREPARE_BUILTIN_INDEX, CALL_FUNCTION); | 1337 __ InvokeBuiltin(Context::APPLY_PREPARE_BUILTIN_INDEX, CALL_FUNCTION); |
| 1333 } | 1338 } |
| 1334 | 1339 |
| 1335 Generate_CheckStackOverflow(masm, kFunctionOffset, r3, kArgcIsSmiTagged); | 1340 Generate_CheckStackOverflow(masm, kFunctionOffset, r3, kArgcIsSmiTagged); |
| 1336 | 1341 |
| 1337 // Push current limit and index. | 1342 // Push current limit and index. |
| 1338 const int kIndexOffset = | 1343 const int kIndexOffset = kVectorOffset - (2 * kPointerSize); |
| 1339 StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize); | 1344 const int kLimitOffset = kVectorOffset - (1 * kPointerSize); |
| 1340 const int kLimitOffset = | |
| 1341 StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize); | |
| 1342 __ li(r4, Operand::Zero()); | 1345 __ li(r4, Operand::Zero()); |
| 1343 __ LoadP(r5, MemOperand(fp, kReceiverOffset)); | 1346 __ LoadP(r5, MemOperand(fp, kReceiverOffset)); |
| 1344 __ Push(r3, r4, r5); // limit, initial index and receiver. | 1347 __ Push(r3, r4, r5); // limit, initial index and receiver. |
| 1345 | 1348 |
| 1346 // Copy all arguments from the array to the stack. | 1349 // Copy all arguments from the array to the stack. |
| 1347 Generate_PushAppliedArguments(masm, kArgumentsOffset, kIndexOffset, | 1350 Generate_PushAppliedArguments(masm, kVectorOffset, kArgumentsOffset, |
| 1348 kLimitOffset); | 1351 kIndexOffset, kLimitOffset); |
| 1349 | 1352 |
| 1350 // Call the callable. | 1353 // Call the callable. |
| 1351 // TODO(bmeurer): This should be a tail call according to ES6. | 1354 // TODO(bmeurer): This should be a tail call according to ES6. |
| 1352 __ LoadP(r4, MemOperand(fp, kFunctionOffset)); | 1355 __ LoadP(r4, MemOperand(fp, kFunctionOffset)); |
| 1353 __ Call(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); | 1356 __ Call(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); |
| 1354 | 1357 |
| 1355 // Tear down the internal frame and remove function, receiver and args. | 1358 // Tear down the internal frame and remove function, receiver and args. |
| 1356 } | 1359 } |
| 1357 __ addi(sp, sp, Operand(kStackSize * kPointerSize)); | 1360 __ addi(sp, sp, Operand(kStackSize * kPointerSize)); |
| 1358 __ blr(); | 1361 __ blr(); |
| 1359 } | 1362 } |
| 1360 | 1363 |
| 1361 | 1364 |
| 1362 static void Generate_ConstructHelper(MacroAssembler* masm) { | 1365 static void Generate_ConstructHelper(MacroAssembler* masm) { |
| 1363 const int kFormalParameters = 3; | 1366 const int kFormalParameters = 3; |
| 1364 const int kStackSize = kFormalParameters + 1; | 1367 const int kStackSize = kFormalParameters + 1; |
| 1365 | 1368 |
| 1366 { | 1369 { |
| 1367 FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL); | 1370 FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL); |
| 1368 const int kNewTargetOffset = kFPOnStackSize + kPCOnStackSize; | 1371 const int kNewTargetOffset = kFPOnStackSize + kPCOnStackSize; |
| 1369 const int kArgumentsOffset = kNewTargetOffset + kPointerSize; | 1372 const int kArgumentsOffset = kNewTargetOffset + kPointerSize; |
| 1370 const int kFunctionOffset = kArgumentsOffset + kPointerSize; | 1373 const int kFunctionOffset = kArgumentsOffset + kPointerSize; |
| | 1374 static const int kVectorOffset = |
| | 1375 InternalFrameConstants::kCodeOffset - 1 * kPointerSize; |
| | 1376 |
| | 1377 // Push the vector. |
| | 1378 __ LoadP(r4, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); |
| | 1379 __ LoadP(r4, |
| | 1380 FieldMemOperand(r4, SharedFunctionInfo::kFeedbackVectorOffset)); |
| | 1381 __ push(r4); |
| 1371 | 1382 |
| 1372 // If newTarget is not supplied, set it to constructor | 1383 // If newTarget is not supplied, set it to constructor |
| 1373 Label validate_arguments; | 1384 Label validate_arguments; |
| 1374 __ LoadP(r3, MemOperand(fp, kNewTargetOffset)); | 1385 __ LoadP(r3, MemOperand(fp, kNewTargetOffset)); |
| 1375 __ CompareRoot(r3, Heap::kUndefinedValueRootIndex); | 1386 __ CompareRoot(r3, Heap::kUndefinedValueRootIndex); |
| 1376 __ bne(&validate_arguments); | 1387 __ bne(&validate_arguments); |
| 1377 __ LoadP(r3, MemOperand(fp, kFunctionOffset)); | 1388 __ LoadP(r3, MemOperand(fp, kFunctionOffset)); |
| 1378 __ StoreP(r3, MemOperand(fp, kNewTargetOffset)); | 1389 __ StoreP(r3, MemOperand(fp, kNewTargetOffset)); |
| 1379 | 1390 |
| 1380 // Validate arguments | 1391 // Validate arguments |
| 1381 __ bind(&validate_arguments); | 1392 __ bind(&validate_arguments); |
| 1382 __ LoadP(r3, MemOperand(fp, kFunctionOffset)); // get the function | 1393 __ LoadP(r3, MemOperand(fp, kFunctionOffset)); // get the function |
| 1383 __ push(r3); | 1394 __ push(r3); |
| 1384 __ LoadP(r3, MemOperand(fp, kArgumentsOffset)); // get the args array | 1395 __ LoadP(r3, MemOperand(fp, kArgumentsOffset)); // get the args array |
| 1385 __ push(r3); | 1396 __ push(r3); |
| 1386 __ LoadP(r3, MemOperand(fp, kNewTargetOffset)); // get the new.target | 1397 __ LoadP(r3, MemOperand(fp, kNewTargetOffset)); // get the new.target |
| 1387 __ push(r3); | 1398 __ push(r3); |
| 1388 __ InvokeBuiltin(Context::REFLECT_CONSTRUCT_PREPARE_BUILTIN_INDEX, | 1399 __ InvokeBuiltin(Context::REFLECT_CONSTRUCT_PREPARE_BUILTIN_INDEX, |
| 1389 CALL_FUNCTION); | 1400 CALL_FUNCTION); |
| 1390 | 1401 |
| 1391 Generate_CheckStackOverflow(masm, kFunctionOffset, r3, kArgcIsSmiTagged); | 1402 Generate_CheckStackOverflow(masm, kFunctionOffset, r3, kArgcIsSmiTagged); |
| 1392 | 1403 |
| 1393 // Push current limit and index. | 1404 // Push current limit and index. |
| 1394 const int kIndexOffset = | 1405 const int kIndexOffset = kVectorOffset - (2 * kPointerSize); |
| 1395 StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize); | 1406 const int kLimitOffset = kVectorOffset - (1 * kPointerSize); |
| 1396 const int kLimitOffset = | |
| 1397 StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize); | |
| 1398 __ li(r4, Operand::Zero()); | 1407 __ li(r4, Operand::Zero()); |
| 1399 __ Push(r3, r4); // limit and initial index. | 1408 __ Push(r3, r4); // limit and initial index. |
| 1400 // Push the constructor function as callee | 1409 // Push the constructor function as callee |
| 1401 __ LoadP(r3, MemOperand(fp, kFunctionOffset)); | 1410 __ LoadP(r3, MemOperand(fp, kFunctionOffset)); |
| 1402 __ push(r3); | 1411 __ push(r3); |
| 1403 | 1412 |
| 1404 // Copy all arguments from the array to the stack. | 1413 // Copy all arguments from the array to the stack. |
| 1405 Generate_PushAppliedArguments(masm, kArgumentsOffset, kIndexOffset, | 1414 Generate_PushAppliedArguments(masm, kVectorOffset, kArgumentsOffset, |
| 1406 kLimitOffset); | 1415 kIndexOffset, kLimitOffset); |
| 1407 | 1416 |
| 1408 // Use undefined feedback vector | 1417 // Use undefined feedback vector |
| 1409 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex); | 1418 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex); |
| 1410 __ LoadP(r4, MemOperand(fp, kFunctionOffset)); | 1419 __ LoadP(r4, MemOperand(fp, kFunctionOffset)); |
| 1411 __ LoadP(r7, MemOperand(fp, kNewTargetOffset)); | 1420 __ LoadP(r7, MemOperand(fp, kNewTargetOffset)); |
| 1412 | 1421 |
| 1413 // Call the function. | 1422 // Call the function. |
| 1414 CallConstructStub stub(masm->isolate(), SUPER_CONSTRUCTOR_CALL); | 1423 CallConstructStub stub(masm->isolate(), SUPER_CONSTRUCTOR_CALL); |
| 1415 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL); | 1424 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL); |
| 1416 | 1425 |
| (...skipping 393 matching lines...) |
| 1810 __ bkpt(0); | 1819 __ bkpt(0); |
| 1811 } | 1820 } |
| 1812 } | 1821 } |
| 1813 | 1822 |
| 1814 | 1823 |
| 1815 #undef __ | 1824 #undef __ |
| 1816 } // namespace internal | 1825 } // namespace internal |
| 1817 } // namespace v8 | 1826 } // namespace v8 |
| 1818 | 1827 |
| 1819 #endif // V8_TARGET_ARCH_PPC | 1828 #endif // V8_TARGET_ARCH_PPC |
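
Note (not part of the CL): a minimal standalone sketch of how the new fp-relative slots relate to each other. Both helpers now push the callee's feedback vector right after the internal frame is set up, so the vector, limit, and index live at fixed slots below InternalFrameConstants::kCodeOffset. The kPointerSize and kCodeOffset values below are placeholder assumptions; in V8 they come from the target port's frame constants.

    // Sketch only: mirrors the offset arithmetic introduced in this change.
    // kPointerSize and kCodeOffset are hypothetical stand-ins.
    #include <cstdio>

    int main() {
      const int kPointerSize = 8;                 // assumption: 64-bit PPC
      const int kCodeOffset = -2 * kPointerSize;  // stand-in for InternalFrameConstants::kCodeOffset
      const int kVectorOffset = kCodeOffset - 1 * kPointerSize;   // feedback vector, pushed first
      const int kLimitOffset = kVectorOffset - 1 * kPointerSize;  // limit, pushed next
      const int kIndexOffset = kVectorOffset - 2 * kPointerSize;  // running index, pushed after the limit
      std::printf("vector %d, limit %d, index %d (bytes relative to fp)\n",
                  kVectorOffset, kLimitOffset, kIndexOffset);
      return 0;
    }

With this layout, Generate_PushAppliedArguments reloads the vector from MemOperand(fp, vectorOffset) on each loop iteration and uses TypeFeedbackVector::PushAppliedArgumentsIndex() as the slot id, instead of allocating a fresh TypeFeedbackVector at code-generation time as the old code did.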