OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_PPC | 5 #if V8_TARGET_ARCH_PPC |
6 | 6 |
7 #include "src/codegen.h" | 7 #include "src/codegen.h" |
8 #include "src/debug/debug.h" | 8 #include "src/debug/debug.h" |
9 #include "src/deoptimizer.h" | 9 #include "src/deoptimizer.h" |
10 #include "src/full-codegen/full-codegen.h" | 10 #include "src/full-codegen/full-codegen.h" |
(...skipping 1327 matching lines...)
1338 // (which is a copy of the last argument). | 1338 // (which is a copy of the last argument). |
1339 __ subi(r3, r3, Operand(1)); | 1339 __ subi(r3, r3, Operand(1)); |
1340 __ pop(); | 1340 __ pop(); |
1341 } | 1341 } |
1342 | 1342 |
1343 // 4. Call the callable. | 1343 // 4. Call the callable. |
1344 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); | 1344 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); |
1345 } | 1345 } |
1346 | 1346 |
1347 | 1347 |
1348 static void Generate_PushAppliedArguments(MacroAssembler* masm, | |
1349 const int vectorOffset, | |
1350 const int argumentsOffset, | |
1351 const int indexOffset, | |
1352 const int limitOffset) { | |
1353 Register receiver = LoadDescriptor::ReceiverRegister(); | |
1354 Register key = LoadDescriptor::NameRegister(); | |
1355 Register slot = LoadDescriptor::SlotRegister(); | |
1356 Register vector = LoadWithVectorDescriptor::VectorRegister(); | |
1357 | |
1358 // Copy all arguments from the array to the stack. | |
1359 Label entry, loop; | |
1360 __ LoadP(key, MemOperand(fp, indexOffset)); | |
1361 __ b(&entry); | |
1362 __ bind(&loop); | |
1363 __ LoadP(receiver, MemOperand(fp, argumentsOffset)); | |
1364 | |
1365 // Use inline caching to speed up access to arguments. | |
1366 int slot_index = TypeFeedbackVector::PushAppliedArgumentsIndex(); | |
1367 __ LoadSmiLiteral(slot, Smi::FromInt(slot_index)); | |
1368 __ LoadP(vector, MemOperand(fp, vectorOffset)); | |
1369 Handle<Code> ic = | |
1370 KeyedLoadICStub(masm->isolate(), LoadICState(kNoExtraICState)).GetCode(); | |
1371 __ Call(ic, RelocInfo::CODE_TARGET); | |
1372 | |
1373 // Push the nth argument. | |
1374 __ push(r3); | |
1375 | |
1376 // Update the index on the stack and in register key. | |
1377 __ LoadP(key, MemOperand(fp, indexOffset)); | |
1378 __ AddSmiLiteral(key, key, Smi::FromInt(1), r0); | |
1379 __ StoreP(key, MemOperand(fp, indexOffset)); | |
1380 | |
1381 // Test if the copy loop has finished copying all the elements from the | |
1382 // arguments object. | |
1383 __ bind(&entry); | |
1384 __ LoadP(r0, MemOperand(fp, limitOffset)); | |
1385 __ cmp(key, r0); | |
1386 __ bne(&loop); | |
1387 | |
1388 // On exit, the pushed arguments count is in r3, untagged | |
1389 __ SmiUntag(r3, key); | |
1390 } | |
1391 | |
1392 | |
1393 // Used by FunctionApply and ReflectApply | |
1394 static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) { | |
1395 const int kFormalParameters = targetIsArgument ? 3 : 2; | |
1396 const int kStackSize = kFormalParameters + 1; | |
1397 | |
1398 { | |
1399 FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL); | |
1400 const int kArgumentsOffset = kFPOnStackSize + kPCOnStackSize; | |
1401 const int kReceiverOffset = kArgumentsOffset + kPointerSize; | |
1402 const int kFunctionOffset = kReceiverOffset + kPointerSize; | |
1403 const int kVectorOffset = | |
1404 InternalFrameConstants::kCodeOffset - 1 * kPointerSize; | |
1405 | |
1406 // Push the vector. | |
1407 __ LoadP(r4, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); | |
1408 __ LoadP(r4, | |
1409 FieldMemOperand(r4, SharedFunctionInfo::kFeedbackVectorOffset)); | |
1410 __ push(r4); | |
1411 | |
1412 __ LoadP(r3, MemOperand(fp, kFunctionOffset)); // get the function | |
1413 __ LoadP(r4, MemOperand(fp, kArgumentsOffset)); // get the args array | |
1414 __ Push(r3, r4); | |
1415 if (targetIsArgument) { | |
1416 __ InvokeBuiltin(Context::REFLECT_APPLY_PREPARE_BUILTIN_INDEX, | |
1417 CALL_FUNCTION); | |
1418 } else { | |
1419 __ InvokeBuiltin(Context::APPLY_PREPARE_BUILTIN_INDEX, CALL_FUNCTION); | |
1420 } | |
1421 | |
1422 Generate_CheckStackOverflow(masm, r3, kArgcIsSmiTagged); | |
1423 | |
1424 // Push current limit and index. | |
1425 const int kIndexOffset = kVectorOffset - (2 * kPointerSize); | |
1426 const int kLimitOffset = kVectorOffset - (1 * kPointerSize); | |
1427 __ li(r4, Operand::Zero()); | |
1428 __ LoadP(r5, MemOperand(fp, kReceiverOffset)); | |
1429 __ Push(r3, r4, r5); // limit, initial index and receiver. | |
1430 | |
1431 // Copy all arguments from the array to the stack. | |
1432 Generate_PushAppliedArguments(masm, kVectorOffset, kArgumentsOffset, | |
1433 kIndexOffset, kLimitOffset); | |
1434 | |
1435 // Call the callable. | |
1436 // TODO(bmeurer): This should be a tail call according to ES6. | |
1437 __ LoadP(r4, MemOperand(fp, kFunctionOffset)); | |
1438 __ Call(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); | |
1439 | |
1440 // Tear down the internal frame and remove function, receiver and args. | |
1441 } | |
1442 __ addi(sp, sp, Operand(kStackSize * kPointerSize)); | |
1443 __ blr(); | |
1444 } | |
1445 | |
1446 | |
1447 static void Generate_ConstructHelper(MacroAssembler* masm) { | |
1448 const int kFormalParameters = 3; | |
1449 const int kStackSize = kFormalParameters + 1; | |
1450 | |
1451 { | |
1452 FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL); | |
1453 const int kNewTargetOffset = kFPOnStackSize + kPCOnStackSize; | |
1454 const int kArgumentsOffset = kNewTargetOffset + kPointerSize; | |
1455 const int kFunctionOffset = kArgumentsOffset + kPointerSize; | |
1456 static const int kVectorOffset = | |
1457 InternalFrameConstants::kCodeOffset - 1 * kPointerSize; | |
1458 | |
1459 // Push the vector. | |
1460 __ LoadP(r4, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); | |
1461 __ LoadP(r4, | |
1462 FieldMemOperand(r4, SharedFunctionInfo::kFeedbackVectorOffset)); | |
1463 __ push(r4); | |
1464 | |
1465 // If newTarget is not supplied, set it to constructor | |
1466 Label validate_arguments; | |
1467 __ LoadP(r3, MemOperand(fp, kNewTargetOffset)); | |
1468 __ CompareRoot(r3, Heap::kUndefinedValueRootIndex); | |
1469 __ bne(&validate_arguments); | |
1470 __ LoadP(r3, MemOperand(fp, kFunctionOffset)); | |
1471 __ StoreP(r3, MemOperand(fp, kNewTargetOffset)); | |
1472 | |
1473 // Validate arguments | |
1474 __ bind(&validate_arguments); | |
1475 __ LoadP(r3, MemOperand(fp, kFunctionOffset)); // get the function | |
1476 __ push(r3); | |
1477 __ LoadP(r3, MemOperand(fp, kArgumentsOffset)); // get the args array | |
1478 __ push(r3); | |
1479 __ LoadP(r3, MemOperand(fp, kNewTargetOffset)); // get the new.target | |
1480 __ push(r3); | |
1481 __ InvokeBuiltin(Context::REFLECT_CONSTRUCT_PREPARE_BUILTIN_INDEX, | |
1482 CALL_FUNCTION); | |
1483 | |
1484 Generate_CheckStackOverflow(masm, r3, kArgcIsSmiTagged); | |
1485 | |
1486 // Push current limit and index. | |
1487 const int kIndexOffset = kVectorOffset - (2 * kPointerSize); | |
1488 const int kLimitOffset = kVectorOffset - (1 * kPointerSize); | |
1489 __ li(r4, Operand::Zero()); | |
1490 __ Push(r3, r4); // limit and initial index. | |
1491 // Push the constructor function as callee | |
1492 __ LoadP(r3, MemOperand(fp, kFunctionOffset)); | |
1493 __ push(r3); | |
1494 | |
1495 // Copy all arguments from the array to the stack. | |
1496 Generate_PushAppliedArguments(masm, kVectorOffset, kArgumentsOffset, | |
1497 kIndexOffset, kLimitOffset); | |
1498 | |
1499 // Use undefined feedback vector | |
1500 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex); | |
1501 __ LoadP(r4, MemOperand(fp, kFunctionOffset)); | |
1502 __ LoadP(r6, MemOperand(fp, kNewTargetOffset)); | |
1503 | |
1504 // Call the function. | |
1505 __ Call(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); | |
1506 | |
1507 // Leave internal frame. | |
1508 } | |
1509 __ addi(sp, sp, Operand(kStackSize * kPointerSize)); | |
1510 __ blr(); | |
1511 } | |
1512 | |
1513 | |
1514 void Builtins::Generate_FunctionApply(MacroAssembler* masm) { | 1348 void Builtins::Generate_FunctionApply(MacroAssembler* masm) { |
1515 Generate_ApplyHelper(masm, false); | 1349 // ----------- S t a t e ------------- |
| 1350 // -- r3 : argc |
| 1351 // -- sp[0] : argArray |
| 1352 // -- sp[4] : thisArg |
| 1353 // -- sp[8] : receiver |
| 1354 // ----------------------------------- |
| 1355 |
| 1356 // 1. Load receiver into r4, argArray into r3 (if present), remove all |
| 1357 // arguments from the stack (including the receiver), and push thisArg (if |
| 1358 // present) instead. |
| 1359 { |
| 1360 Label skip; |
| 1361 Register arg_size = r5; |
| 1362 Register new_sp = r6; |
| 1363 Register scratch = r7; |
| 1364 __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2)); |
| 1365 __ add(new_sp, sp, arg_size); |
| 1366 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex); |
| 1367 __ mr(scratch, r3); |
| 1368 __ LoadP(r4, MemOperand(new_sp, 0)); // receiver |
| 1369 __ cmpi(arg_size, Operand(kPointerSize)); |
| 1370 __ blt(&skip); |
| 1371 __ LoadP(scratch, MemOperand(new_sp, 1 * -kPointerSize)); // thisArg |
| 1372 __ beq(&skip); |
| 1373 __ LoadP(r3, MemOperand(new_sp, 2 * -kPointerSize)); // argArray |
| 1374 __ bind(&skip); |
| 1375 __ mr(sp, new_sp); |
| 1376 __ StoreP(scratch, MemOperand(sp, 0)); |
| 1377 } |
| 1378 |
| 1379 // ----------- S t a t e ------------- |
| 1380 // -- r3 : argArray |
| 1381 // -- r4 : receiver |
| 1382 // -- sp[0] : thisArg |
| 1383 // ----------------------------------- |
| 1384 |
| 1385 // 2. Make sure the receiver is actually callable. |
| 1386 Label receiver_not_callable; |
| 1387 __ JumpIfSmi(r4, &receiver_not_callable); |
| 1388 __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset)); |
| 1389 __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset)); |
| 1390 __ TestBit(r7, Map::kIsCallable, r0); |
| 1391 __ beq(&receiver_not_callable, cr0); |
| 1392 |
| 1393 // 3. Tail call with no arguments if argArray is null or undefined. |
| 1394 Label no_arguments; |
| 1395 __ JumpIfRoot(r3, Heap::kNullValueRootIndex, &no_arguments); |
| 1396 __ JumpIfRoot(r3, Heap::kUndefinedValueRootIndex, &no_arguments); |
| 1397 |
| 1398 // 4a. Apply the receiver to the given argArray (passing undefined for |
| 1399 // new.target). |
| 1400 __ LoadRoot(r6, Heap::kUndefinedValueRootIndex); |
| 1401 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); |
| 1402 |
| 1403 // 4b. The argArray is either null or undefined, so we tail call without any |
| 1404 // arguments to the receiver. |
| 1405 __ bind(&no_arguments); |
| 1406 { |
| 1407 __ li(r3, Operand::Zero()); |
| 1408 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); |
| 1409 } |
| 1410 |
| 1411 // 4c. The receiver is not callable, throw an appropriate TypeError. |
| 1412 __ bind(&receiver_not_callable); |
| 1413 { |
| 1414 __ StoreP(r4, MemOperand(sp, 0)); |
| 1415 __ TailCallRuntime(Runtime::kThrowApplyNonFunction, 1, 1); |
| 1416 } |
1516 } | 1417 } |
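Note (illustrative, not part of this patch): the old Generate_ApplyHelper path is replaced above by a direct implementation of Function.prototype.apply. The following is a loose JS-level sketch in TypeScript of the behaviour the PPC stub encodes; functionPrototypeApply and its parameter names are made up for illustration, and typeof is only an approximation of the Map::kIsCallable bit test.

// Loose sketch of the fast path implemented by Generate_FunctionApply above.
function functionPrototypeApply(this: any, thisArg?: any, argArray?: any) {
  const receiver = this;                      // loaded into r4 from the stack
  if (typeof receiver !== "function") {       // stands in for the kIsCallable map-bit check
    throw new TypeError("Function.prototype.apply called on non-callable");
  }
  if (argArray === null || argArray === undefined) {
    return receiver.call(thisArg);            // 4b: tail call with no arguments
  }
  // 4a: spread the array-like argArray (done by Builtins::Apply in the stub).
  return receiver.call(thisArg, ...Array.from(argArray as ArrayLike<any>));
}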
1517 | 1418 |
1518 | 1419 |
1519 void Builtins::Generate_ReflectApply(MacroAssembler* masm) { | 1420 void Builtins::Generate_ReflectApply(MacroAssembler* masm) { |
1520 Generate_ApplyHelper(masm, true); | 1421 // ----------- S t a t e ------------- |
| 1422 // -- r3 : argc |
| 1423 // -- sp[0] : argumentsList |
| 1424 // -- sp[4] : thisArgument |
| 1425 // -- sp[8] : target |
| 1426 // -- sp[12] : receiver |
| 1427 // ----------------------------------- |
| 1428 |
| 1429 // 1. Load target into r4 (if present), argumentsList into r3 (if present), |
| 1430 // remove all arguments from the stack (including the receiver), and push |
| 1431 // thisArgument (if present) instead. |
| 1432 { |
| 1433 Label skip; |
| 1434 Register arg_size = r5; |
| 1435 Register new_sp = r6; |
| 1436 Register scratch = r7; |
| 1437 __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2)); |
| 1438 __ add(new_sp, sp, arg_size); |
| 1439 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex); |
| 1440 __ mr(scratch, r4); |
| 1441 __ mr(r3, r4); |
| 1442 __ cmpi(arg_size, Operand(kPointerSize)); |
| 1443 __ blt(&skip); |
| 1444 __ LoadP(r4, MemOperand(new_sp, 1 * -kPointerSize)); // target |
| 1445 __ beq(&skip); |
| 1446 __ LoadP(scratch, MemOperand(new_sp, 2 * -kPointerSize)); // thisArgument |
| 1447 __ cmpi(arg_size, Operand(2 * kPointerSize)); |
| 1448 __ beq(&skip); |
| 1449 __ LoadP(r3, MemOperand(new_sp, 3 * -kPointerSize)); // argumentsList |
| 1450 __ bind(&skip); |
| 1451 __ mr(sp, new_sp); |
| 1452 __ StoreP(scratch, MemOperand(sp, 0)); |
| 1453 } |
| 1454 |
| 1455 // ----------- S t a t e ------------- |
| 1456 // -- r3 : argumentsList |
| 1457 // -- r4 : target |
| 1458 // -- sp[0] : thisArgument |
| 1459 // ----------------------------------- |
| 1460 |
| 1461 // 2. Make sure the target is actually callable. |
| 1462 Label target_not_callable; |
| 1463 __ JumpIfSmi(r4, &target_not_callable); |
| 1464 __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset)); |
| 1465 __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset)); |
| 1466 __ TestBit(r7, Map::kIsCallable, r0); |
| 1467 __ beq(&target_not_callable, cr0); |
| 1468 |
| 1469 // 3a. Apply the target to the given argumentsList (passing undefined for |
| 1470 // new.target). |
| 1471 __ LoadRoot(r6, Heap::kUndefinedValueRootIndex); |
| 1472 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); |
| 1473 |
| 1474 // 3b. The target is not callable, throw an appropriate TypeError. |
| 1475 __ bind(&target_not_callable); |
| 1476 { |
| 1477 __ StoreP(r4, MemOperand(sp, 0)); |
| 1478 __ TailCallRuntime(Runtime::kThrowApplyNonFunction, 1, 1); |
| 1479 } |
1521 } | 1480 } |
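Note (illustrative only): Generate_ReflectApply above shuffles target/thisArgument/argumentsList out of the stack slots and then defers to Builtins::Apply. A rough TypeScript sketch of the same semantics, with register references in comments; reflectApply is a hypothetical name and typeof again approximates the callability check.

// Loose sketch of Reflect.apply as wired up by Generate_ReflectApply above.
function reflectApply(target: any, thisArgument: any, argumentsList: any) {
  if (typeof target !== "function") {         // stands in for the kIsCallable bit test on r4
    throw new TypeError("Reflect.apply target is not callable");
  }
  // Builtins::Apply materializes argumentsList and calls with undefined as new.target (r6).
  return target.apply(thisArgument, Array.from(argumentsList));
}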
1522 | 1481 |
1523 | 1482 |
1524 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) { | 1483 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) { |
1525 Generate_ConstructHelper(masm); | 1484 // ----------- S t a t e ------------- |
1526 } | 1485 // -- r3 : argc |
1527 | 1486 // -- sp[0] : new.target (optional) |
1528 | 1487 // -- sp[4] : argumentsList |
| 1488 // -- sp[8] : target |
| 1489 // -- sp[12] : receiver |
| 1490 // ----------------------------------- |
| 1491 |
| 1492 // 1. Load target into r4 (if present), argumentsList into r3 (if present), |
| 1493 // new.target into r6 (if present, otherwise use target), remove all |
| 1494 // arguments from the stack (including the receiver), and push thisArgument |
| 1495 // (if present) instead. |
| 1496 { |
| 1497 Label skip; |
| 1498 Register arg_size = r5; |
| 1499 Register new_sp = r7; |
| 1500 __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2)); |
| 1501 __ add(new_sp, sp, arg_size); |
| 1502 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex); |
| 1503 __ mr(r3, r4); |
| 1504 __ mr(r6, r4); |
| 1505 __ StoreP(r4, MemOperand(new_sp, 0)); // receiver (undefined) |
| 1506 __ cmpi(arg_size, Operand(kPointerSize)); |
| 1507 __ blt(&skip); |
| 1508 __ LoadP(r4, MemOperand(new_sp, 1 * -kPointerSize)); // target |
| 1509 __ mr(r6, r4); // new.target defaults to target |
| 1510 __ beq(&skip); |
| 1511 __ LoadP(r3, MemOperand(new_sp, 2 * -kPointerSize)); // argumentsList |
| 1512 __ cmpi(arg_size, Operand(2 * kPointerSize)); |
| 1513 __ beq(&skip); |
| 1514 __ LoadP(r6, MemOperand(new_sp, 3 * -kPointerSize)); // new.target |
| 1515 __ bind(&skip); |
| 1516 __ mr(sp, new_sp); |
| 1517 } |
| 1518 |
| 1519 // ----------- S t a t e ------------- |
| 1520 // -- r3 : argumentsList |
| 1521 // -- r6 : new.target |
| 1522 // -- r4 : target |
| 1523 // -- sp[0] : receiver (undefined) |
| 1524 // ----------------------------------- |
| 1525 |
| 1526 // 2. Make sure the target is actually a constructor. |
| 1527 Label target_not_constructor; |
| 1528 __ JumpIfSmi(r4, &target_not_constructor); |
| 1529 __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset)); |
| 1530 __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset)); |
| 1531 __ TestBit(r7, Map::kIsConstructor, r0); |
| 1532 __ beq(&target_not_constructor, cr0); |
| 1533 |
| 1534 // 3. Make sure the new.target is actually a constructor. |
| 1535 Label new_target_not_constructor; |
| 1536 __ JumpIfSmi(r6, &new_target_not_constructor); |
| 1537 __ LoadP(r7, FieldMemOperand(r6, HeapObject::kMapOffset)); |
| 1538 __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset)); |
| 1539 __ TestBit(r7, Map::kIsConstructor, r0); |
| 1540 __ beq(&new_target_not_constructor, cr0); |
| 1541 |
| 1542 // 4a. Construct the target with the given new.target and argumentsList. |
| 1543 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); |
| 1544 |
| 1545 // 4b. The target is not a constructor, throw an appropriate TypeError. |
| 1546 __ bind(&target_not_constructor); |
| 1547 { |
| 1548 __ StoreP(r4, MemOperand(sp, 0)); |
| 1549 __ TailCallRuntime(Runtime::kThrowCalledNonCallable, 1, 1); |
| 1550 } |
| 1551 |
| 1552 // 4c. The new.target is not a constructor, throw an appropriate TypeError. |
| 1553 __ bind(&new_target_not_constructor); |
| 1554 { |
| 1555 __ StoreP(r6, MemOperand(sp, 0)); |
| 1556 __ TailCallRuntime(Runtime::kThrowCalledNonCallable, 1, 1); |
| 1557 } |
| 1558 } |
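Note (illustrative only): a minimal TypeScript sketch of the Reflect.construct semantics the stub above implements. reflectConstruct is a hypothetical name; typeof only approximates the Map::kIsConstructor bit tests on r4 and r6, and the final line simply stands in for the tail call into Builtins::Apply/Construct.

// Loose sketch of Generate_ReflectConstruct above.
function reflectConstruct(target: any, argumentsList: any, newTarget: any = target) {
  if (typeof target !== "function") {         // approximation of the constructor check on r4
    throw new TypeError("Reflect.construct target is not a constructor");
  }
  if (typeof newTarget !== "function") {      // approximation of the constructor check on r6
    throw new TypeError("Reflect.construct new.target is not a constructor");
  }
  // The stub tail-calls Builtins::Apply, which ends in Builtins::Construct.
  return Reflect.construct(target, Array.from(argumentsList), newTarget);
}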
| 1559 |
| 1560 |
1529 static void ArgumentAdaptorStackCheck(MacroAssembler* masm, | 1561 static void ArgumentAdaptorStackCheck(MacroAssembler* masm, |
1530 Label* stack_overflow) { | 1562 Label* stack_overflow) { |
1531 // ----------- S t a t e ------------- | 1563 // ----------- S t a t e ------------- |
1532 // -- r3 : actual number of arguments | 1564 // -- r3 : actual number of arguments |
1533 // -- r4 : function (passed through to callee) | 1565 // -- r4 : function (passed through to callee) |
1534 // -- r5 : expected number of arguments | 1566 // -- r5 : expected number of arguments |
1535 // -- r6 : new target (passed through to callee) | 1567 // -- r6 : new target (passed through to callee) |
1536 // ----------------------------------- | 1568 // ----------------------------------- |
1537 // Check the stack for overflow. We are not trying to catch | 1569 // Check the stack for overflow. We are not trying to catch |
1538 // interruptions (e.g. debug break and preemption) here, so the "real stack | 1570 // interruptions (e.g. debug break and preemption) here, so the "real stack |
(...skipping 33 matching lines...)
1572 __ LoadP(r4, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp + | 1604 __ LoadP(r4, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp + |
1573 kPointerSize))); | 1605 kPointerSize))); |
1574 int stack_adjustment = kPointerSize; // adjust for receiver | 1606 int stack_adjustment = kPointerSize; // adjust for receiver |
1575 __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR, stack_adjustment); | 1607 __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR, stack_adjustment); |
1576 __ SmiToPtrArrayOffset(r0, r4); | 1608 __ SmiToPtrArrayOffset(r0, r4); |
1577 __ add(sp, sp, r0); | 1609 __ add(sp, sp, r0); |
1578 } | 1610 } |
1579 | 1611 |
1580 | 1612 |
1581 // static | 1613 // static |
| 1614 void Builtins::Generate_Apply(MacroAssembler* masm) { |
| 1615 // ----------- S t a t e ------------- |
| 1616 // -- r3 : argumentsList |
| 1617 // -- r4 : target |
| 1618 // -- r6 : new.target (checked to be constructor or undefined) |
| 1619 // -- sp[0] : thisArgument |
| 1620 // ----------------------------------- |
| 1621 |
| 1622 // Create the list of arguments from the array-like argumentsList. |
| 1623 { |
| 1624 Label create_arguments, create_array, create_runtime, done_create; |
| 1625 __ JumpIfSmi(r3, &create_runtime); |
| 1626 |
| 1627 // Load the map of argumentsList into r5. |
| 1628 __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset)); |
| 1629 |
| 1630 // Load native context into r7. |
| 1631 __ LoadP(r7, NativeContextMemOperand()); |
| 1632 |
| 1633 // Check if argumentsList is an (unmodified) arguments object. |
| 1634 __ LoadP(ip, ContextMemOperand(r7, Context::SLOPPY_ARGUMENTS_MAP_INDEX)); |
| 1635 __ cmp(ip, r5); |
| 1636 __ beq(&create_arguments); |
| 1637 __ LoadP(ip, ContextMemOperand(r7, Context::STRICT_ARGUMENTS_MAP_INDEX)); |
| 1638 __ cmp(ip, r5); |
| 1639 __ beq(&create_arguments); |
| 1640 |
| 1641 // Check if argumentsList is a fast JSArray. |
| 1642 __ CompareInstanceType(r5, ip, JS_ARRAY_TYPE); |
| 1643 __ beq(&create_array); |
| 1644 |
| 1645 // Ask the runtime to create the list (actually a FixedArray). |
| 1646 __ bind(&create_runtime); |
| 1647 { |
| 1648 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
| 1649 __ Push(r4, r6, r3); |
| 1650 __ CallRuntime(Runtime::kCreateListFromArrayLike, 1); |
| 1651 __ Pop(r4, r6); |
| 1652 __ LoadP(r5, FieldMemOperand(r3, FixedArray::kLengthOffset)); |
| 1653 __ SmiUntag(r5); |
| 1654 } |
| 1655 __ b(&done_create); |
| 1656 |
| 1657 // Try to create the list from an arguments object. |
| 1658 __ bind(&create_arguments); |
| 1659 __ LoadP(r5, FieldMemOperand( |
| 1660 r3, JSObject::kHeaderSize + |
| 1661 Heap::kArgumentsLengthIndex * kPointerSize)); |
| 1662 __ LoadP(r7, FieldMemOperand(r3, JSObject::kElementsOffset)); |
| 1663 __ LoadP(ip, FieldMemOperand(r7, FixedArray::kLengthOffset)); |
| 1664 __ cmp(r5, ip); |
| 1665 __ bne(&create_runtime); |
| 1666 __ SmiUntag(r5); |
| 1667 __ mr(r3, r7); |
| 1668 __ b(&done_create); |
| 1669 |
| 1670 // Try to create the list from a JSArray object. |
| 1671 __ bind(&create_array); |
| 1672 __ lbz(r5, FieldMemOperand(r5, Map::kBitField2Offset)); |
| 1673 __ DecodeField<Map::ElementsKindBits>(r5); |
| 1674 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0); |
| 1675 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1); |
| 1676 STATIC_ASSERT(FAST_ELEMENTS == 2); |
| 1677 __ cmpi(r5, Operand(FAST_ELEMENTS)); |
| 1678 __ bgt(&create_runtime); |
| 1679 __ cmpi(r5, Operand(FAST_HOLEY_SMI_ELEMENTS)); |
| 1680 __ beq(&create_runtime); |
| 1681 __ LoadP(r5, FieldMemOperand(r3, JSArray::kLengthOffset)); |
| 1682 __ LoadP(r3, FieldMemOperand(r3, JSArray::kElementsOffset)); |
| 1683 __ SmiUntag(r5); |
| 1684 |
| 1685 __ bind(&done_create); |
| 1686 } |
| 1687 |
| 1688 // Check for stack overflow. |
| 1689 { |
| 1690 // Check the stack for overflow. We are not trying to catch interruptions |
| 1691 // (i.e. debug break and preemption) here, so check the "real stack limit". |
| 1692 Label done; |
| 1693 __ LoadRoot(ip, Heap::kRealStackLimitRootIndex); |
| 1694 // Make ip the space we have left. The stack might already be overflowed |
| 1695 // here which will cause ip to become negative. |
| 1696 __ sub(ip, sp, ip); |
| 1697 // Check if the arguments will overflow the stack. |
| 1698 __ ShiftLeftImm(r0, r5, Operand(kPointerSizeLog2)); |
| 1699 __ cmp(ip, r0); // Signed comparison. |
| 1700 __ bgt(&done); |
| 1701 __ TailCallRuntime(Runtime::kThrowStackOverflow, 1, 1); |
| 1702 __ bind(&done); |
| 1703 } |
| 1704 |
| 1705 // ----------- S t a t e ------------- |
| 1706 // -- r4 : target |
| 1707 // -- r3 : args (a FixedArray built from argumentsList) |
| 1708 // -- r5 : len (number of elements to push from args) |
| 1709 // -- r6 : new.target (checked to be constructor or undefined) |
| 1710 // -- sp[0] : thisArgument |
| 1711 // ----------------------------------- |
| 1712 |
| 1713 // Push arguments onto the stack (thisArgument is already on the stack). |
| 1714 { |
| 1715 Label loop, no_args; |
| 1716 __ cmpi(r5, Operand::Zero()); |
| 1717 __ beq(&no_args); |
| 1718 __ addi(r3, r3, |
| 1719 Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize)); |
| 1720 __ mtctr(r5); |
| 1721 __ bind(&loop); |
| 1722 __ LoadPU(r0, MemOperand(r3, kPointerSize)); |
| 1723 __ push(r0); |
| 1724 __ bdnz(&loop); |
| 1725 __ bind(&no_args); |
| 1726 __ mr(r3, r5); |
| 1727 } |
| 1728 |
| 1729 // Dispatch to Call or Construct depending on whether new.target is undefined. |
| 1730 { |
| 1731 __ CompareRoot(r6, Heap::kUndefinedValueRootIndex); |
| 1732 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET, eq); |
| 1733 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); |
| 1734 } |
| 1735 } |
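Note (illustrative only): Builtins::Generate_Apply above is the shared tail for both apply and construct paths. A hedged TypeScript sketch of its high-level flow, under the assumption that Array.from stands in for the stub's fast paths and the %CreateListFromArrayLike fallback; applyBuiltin is an invented name.

// Loose sketch of the flow in Generate_Apply above.
function applyBuiltin(target: any, argumentsList: any, newTarget: any, thisArgument: any) {
  // Fast paths in the stub: unmodified sloppy/strict arguments objects and
  // fast-mode JSArrays; anything else goes through CreateListFromArrayLike.
  const args: any[] = Array.from(argumentsList);
  // (The stub also checks the pushed args against the real stack limit.)
  if (newTarget === undefined) {
    return target.apply(thisArgument, args);           // dispatch to Builtins::Call
  }
  return Reflect.construct(target, args, newTarget);   // dispatch to Builtins::Construct
}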
| 1736 |
| 1737 |
| 1738 // static |
1582 void Builtins::Generate_CallFunction(MacroAssembler* masm, | 1739 void Builtins::Generate_CallFunction(MacroAssembler* masm, |
1583 ConvertReceiverMode mode) { | 1740 ConvertReceiverMode mode) { |
1584 // ----------- S t a t e ------------- | 1741 // ----------- S t a t e ------------- |
1585 // -- r3 : the number of arguments (not including the receiver) | 1742 // -- r3 : the number of arguments (not including the receiver) |
1586 // -- r4 : the function to call (checked to be a JSFunction) | 1743 // -- r4 : the function to call (checked to be a JSFunction) |
1587 // ----------------------------------- | 1744 // ----------------------------------- |
1588 __ AssertFunction(r4); | 1745 __ AssertFunction(r4); |
1589 | 1746 |
1590 // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList) | 1747 // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList) |
1591 // Check that the function is not a "classConstructor". | 1748 // Check that the function is not a "classConstructor". |
(...skipping 389 matching lines...)
1981 __ bkpt(0); | 2138 __ bkpt(0); |
1982 } | 2139 } |
1983 } | 2140 } |
1984 | 2141 |
1985 | 2142 |
1986 #undef __ | 2143 #undef __ |
1987 } // namespace internal | 2144 } // namespace internal |
1988 } // namespace v8 | 2145 } // namespace v8 |
1989 | 2146 |
1990 #endif // V8_TARGET_ARCH_PPC | 2147 #endif // V8_TARGET_ARCH_PPC |