OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_MIPS | 5 #if V8_TARGET_ARCH_MIPS |
6 | 6 |
7 #include "src/codegen.h" | 7 #include "src/codegen.h" |
8 #include "src/debug/debug.h" | 8 #include "src/debug/debug.h" |
9 #include "src/deoptimizer.h" | 9 #include "src/deoptimizer.h" |
10 #include "src/full-codegen/full-codegen.h" | 10 #include "src/full-codegen/full-codegen.h" |
(...skipping 616 matching lines...) |
627 | 627 |
628 // Clobbers a2; preserves all other registers. | 628 // Clobbers a2; preserves all other registers. |
629 static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc, | 629 static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc, |
630 IsTagged argc_is_tagged) { | 630 IsTagged argc_is_tagged) { |
631 // Check the stack for overflow. We are not trying to catch | 631 // Check the stack for overflow. We are not trying to catch |
632 // interruptions (e.g. debug break and preemption) here, so the "real stack | 632 // interruptions (e.g. debug break and preemption) here, so the "real stack |
633 // limit" is checked. | 633 // limit" is checked. |
634 Label okay; | 634 Label okay; |
635 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex); | 635 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex); |
636 // Make a2 the space we have left. The stack might already be overflowed | 636 // Make a2 the space we have left. The stack might already be overflowed |
637 // here which will cause r2 to become negative. | 637 // here which will cause a2 to become negative. |
638 __ Subu(a2, sp, a2); | 638 __ Subu(a2, sp, a2); |
639 // Check if the arguments will overflow the stack. | 639 // Check if the arguments will overflow the stack. |
640 if (argc_is_tagged == kArgcIsSmiTagged) { | 640 if (argc_is_tagged == kArgcIsSmiTagged) { |
641 __ sll(t3, argc, kPointerSizeLog2 - kSmiTagSize); | 641 __ sll(t3, argc, kPointerSizeLog2 - kSmiTagSize); |
642 } else { | 642 } else { |
643 DCHECK(argc_is_tagged == kArgcIsUntaggedInt); | 643 DCHECK(argc_is_tagged == kArgcIsUntaggedInt); |
644 __ sll(t3, argc, kPointerSizeLog2); | 644 __ sll(t3, argc, kPointerSizeLog2); |
645 } | 645 } |
646 // Signed comparison. | 646 // Signed comparison. |
647 __ Branch(&okay, gt, a2, Operand(t3)); | 647 __ Branch(&okay, gt, a2, Operand(t3)); |
(...skipping 681 matching lines...) |
1329 // (which is a copy of the last argument). | 1329 // (which is a copy of the last argument). |
1330 __ Subu(a0, a0, Operand(1)); | 1330 __ Subu(a0, a0, Operand(1)); |
1331 __ Pop(); | 1331 __ Pop(); |
1332 } | 1332 } |
1333 | 1333 |
1334 // 4. Call the callable. | 1334 // 4. Call the callable. |
1335 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); | 1335 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); |
1336 } | 1336 } |
1337 | 1337 |
1338 | 1338 |
1339 static void Generate_PushAppliedArguments(MacroAssembler* masm, | 1339 void Builtins::Generate_FunctionApply(MacroAssembler* masm) { |
1340 const int vectorOffset, | 1340 // ----------- S t a t e ------------- |
1341 const int argumentsOffset, | 1341 // -- a0 : argc |
1342 const int indexOffset, | 1342 // -- sp[0] : argArray |
1343 const int limitOffset) { | 1343 // -- sp[4] : thisArg |
1344 Label entry, loop; | 1344 // -- sp[8] : receiver |
1345 Register receiver = LoadDescriptor::ReceiverRegister(); | 1345 // ----------------------------------- |
1346 Register key = LoadDescriptor::NameRegister(); | 1346 |
1347 Register slot = LoadDescriptor::SlotRegister(); | 1347 // 1. Load receiver into a1, argArray into a0 (if present), remove all |
1348 Register vector = LoadWithVectorDescriptor::VectorRegister(); | 1348 // arguments from the stack (including the receiver), and push thisArg (if |
1349 | 1349 // present) instead. |
1350 __ lw(key, MemOperand(fp, indexOffset)); | 1350 { |
1351 __ Branch(&entry); | 1351 Label no_arg; |
1352 | 1352 Register scratch = t0; |
1353 // Load the current argument from the arguments array. | |
1354 __ bind(&loop); | |
1355 __ lw(receiver, MemOperand(fp, argumentsOffset)); | |
1356 | |
1357 // Use inline caching to speed up access to arguments. | |
1358 int slot_index = TypeFeedbackVector::PushAppliedArgumentsIndex(); | |
1359 __ li(slot, Operand(Smi::FromInt(slot_index))); | |
1360 __ lw(vector, MemOperand(fp, vectorOffset)); | |
1361 Handle<Code> ic = | |
1362 KeyedLoadICStub(masm->isolate(), LoadICState(kNoExtraICState)).GetCode(); | |
1363 __ Call(ic, RelocInfo::CODE_TARGET); | |
1364 | |
1365 __ push(v0); | |
1366 | |
1367 // Use inline caching to access the arguments. | |
1368 __ lw(key, MemOperand(fp, indexOffset)); | |
1369 __ Addu(key, key, Operand(1 << kSmiTagSize)); | |
1370 __ sw(key, MemOperand(fp, indexOffset)); | |
1371 | |
1372 // Test if the copy loop has finished copying all the elements from the | |
1373 // arguments object. | |
1374 __ bind(&entry); | |
1375 __ lw(a1, MemOperand(fp, limitOffset)); | |
1376 __ Branch(&loop, ne, key, Operand(a1)); | |
1377 | |
1378 // On exit, the pushed arguments count is in a0, untagged | |
1379 __ mov(a0, key); | |
1380 __ SmiUntag(a0); | |
1381 } | |
1382 | |
1383 | |
1384 // Used by FunctionApply and ReflectApply | |
1385 static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) { | |
1386 const int kFormalParameters = targetIsArgument ? 3 : 2; | |
1387 const int kStackSize = kFormalParameters + 1; | |
1388 | |
1389 { | |
1390 FrameScope frame_scope(masm, StackFrame::INTERNAL); | |
1391 const int kArgumentsOffset = kFPOnStackSize + kPCOnStackSize; | |
1392 const int kReceiverOffset = kArgumentsOffset + kPointerSize; | |
1393 const int kFunctionOffset = kReceiverOffset + kPointerSize; | |
1394 const int kVectorOffset = | |
1395 InternalFrameConstants::kCodeOffset - 1 * kPointerSize; | |
1396 | |
1397 // Push the vector. | |
1398 __ lw(a1, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); | |
1399 __ lw(a1, FieldMemOperand(a1, SharedFunctionInfo::kFeedbackVectorOffset)); | |
1400 __ Push(a1); | |
1401 | |
1402 __ lw(a0, MemOperand(fp, kFunctionOffset)); // Get the function. | |
1403 __ lw(a1, MemOperand(fp, kArgumentsOffset)); // Get the args array. | |
1404 __ Push(a0, a1); | |
1405 // Returns (in v0) number of arguments to copy to stack as Smi. | |
1406 if (targetIsArgument) { | |
1407 __ InvokeBuiltin(Context::REFLECT_APPLY_PREPARE_BUILTIN_INDEX, | |
1408 CALL_FUNCTION); | |
1409 } else { | |
1410 __ InvokeBuiltin(Context::APPLY_PREPARE_BUILTIN_INDEX, CALL_FUNCTION); | |
1411 } | |
1412 | |
1413 // Returns the result in v0. | |
1414 Generate_CheckStackOverflow(masm, v0, kArgcIsSmiTagged); | |
1415 | |
1416 // Push current limit and index. | |
1417 const int kIndexOffset = kVectorOffset - (2 * kPointerSize); | |
1418 const int kLimitOffset = kVectorOffset - (1 * kPointerSize); | |
1419 __ mov(a1, zero_reg); | |
1420 __ lw(a2, MemOperand(fp, kReceiverOffset)); | |
1421 __ Push(v0, a1, a2); // limit, initial index and receiver. | |
1422 | |
1423 // Copy all arguments from the array to the stack. | |
1424 Generate_PushAppliedArguments(masm, kVectorOffset, kArgumentsOffset, | |
1425 kIndexOffset, kLimitOffset); | |
1426 | |
1427 // Call the callable. | |
1428 // TODO(bmeurer): This should be a tail call according to ES6. | |
1429 __ lw(a1, MemOperand(fp, kFunctionOffset)); | |
1430 __ Call(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); | |
1431 | |
1432 // Tear down the internal frame and remove function, receiver and args. | |
1433 } | |
1434 | |
1435 __ Ret(USE_DELAY_SLOT); | |
1436 __ Addu(sp, sp, Operand(kStackSize * kPointerSize)); // In delay slot. | |
1437 } | |
1438 | |
1439 | |
1440 static void Generate_ConstructHelper(MacroAssembler* masm) { | |
1441 const int kFormalParameters = 3; | |
1442 const int kStackSize = kFormalParameters + 1; | |
1443 | |
1444 { | |
1445 FrameScope frame_scope(masm, StackFrame::INTERNAL); | |
1446 const int kNewTargetOffset = kFPOnStackSize + kPCOnStackSize; | |
1447 const int kArgumentsOffset = kNewTargetOffset + kPointerSize; | |
1448 const int kFunctionOffset = kArgumentsOffset + kPointerSize; | |
1449 const int kVectorOffset = | |
1450 InternalFrameConstants::kCodeOffset - 1 * kPointerSize; | |
1451 | |
1452 // Push the vector. | |
1453 __ lw(a1, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); | |
1454 __ lw(a1, FieldMemOperand(a1, SharedFunctionInfo::kFeedbackVectorOffset)); | |
1455 __ Push(a1); | |
1456 | |
1457 // If newTarget is not supplied, set it to constructor | |
1458 Label validate_arguments; | |
1459 __ lw(a0, MemOperand(fp, kNewTargetOffset)); | |
1460 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); | |
1461 __ Branch(&validate_arguments, ne, a0, Operand(at)); | |
1462 __ lw(a0, MemOperand(fp, kFunctionOffset)); | |
1463 __ sw(a0, MemOperand(fp, kNewTargetOffset)); | |
1464 | |
1465 // Validate arguments | |
1466 __ bind(&validate_arguments); | |
1467 __ lw(a0, MemOperand(fp, kFunctionOffset)); // get the function | |
1468 __ push(a0); | |
1469 __ lw(a0, MemOperand(fp, kArgumentsOffset)); // get the args array | |
1470 __ push(a0); | |
1471 __ lw(a0, MemOperand(fp, kNewTargetOffset)); // get the new.target | |
1472 __ push(a0); | |
1473 // Returns argument count in v0. | |
1474 __ InvokeBuiltin(Context::REFLECT_CONSTRUCT_PREPARE_BUILTIN_INDEX, | |
1475 CALL_FUNCTION); | |
1476 | |
1477 // Returns result in v0. | |
1478 Generate_CheckStackOverflow(masm, v0, kArgcIsSmiTagged); | |
1479 | |
1480 // Push current limit and index. | |
1481 const int kIndexOffset = kVectorOffset - (2 * kPointerSize); | |
1482 const int kLimitOffset = kVectorOffset - (1 * kPointerSize); | |
1483 __ push(v0); // limit | |
1484 __ mov(a1, zero_reg); // initial index | |
1485 __ push(a1); | |
1486 // Push the constructor function as callee. | |
1487 __ lw(a0, MemOperand(fp, kFunctionOffset)); | |
1488 __ push(a0); | |
1489 | |
1490 // Copy all arguments from the array to the stack. | |
1491 Generate_PushAppliedArguments(masm, kVectorOffset, kArgumentsOffset, | |
1492 kIndexOffset, kLimitOffset); | |
1493 | |
1494 // Use undefined feedback vector | |
1495 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); | 1353 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); |
1496 __ lw(a1, MemOperand(fp, kFunctionOffset)); | 1354 __ mov(a3, a2); |
1497 __ lw(a3, MemOperand(fp, kNewTargetOffset)); | 1355 __ sll(scratch, a0, kPointerSizeLog2); |
1498 | 1356 __ Addu(a0, sp, Operand(scratch)); |
1499 // Call the function. | 1357 __ lw(a1, MemOperand(a0)); // receiver |
1500 __ Call(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); | 1358 __ Subu(a0, a0, Operand(kPointerSize)); |
1501 | 1359 __ Branch(&no_arg, lt, a0, Operand(sp)); |
1502 // Leave internal frame. | 1360 __ lw(a2, MemOperand(a0)); // thisArg |
1503 } | 1361 __ Subu(a0, a0, Operand(kPointerSize)); |
1504 __ jr(ra); | 1362 __ Branch(&no_arg, lt, a0, Operand(sp)); |
1505 __ Addu(sp, sp, Operand(kStackSize * kPointerSize)); // In delay slot. | 1363 __ lw(a3, MemOperand(a0)); // argArray |
1506 } | 1364 __ bind(&no_arg); |
1507 | 1365 __ Addu(sp, sp, Operand(scratch)); |
1508 | 1366 __ sw(a2, MemOperand(sp)); |
1509 void Builtins::Generate_FunctionApply(MacroAssembler* masm) { | 1367 __ mov(a0, a3); |
1510 Generate_ApplyHelper(masm, false); | 1368 } |
| 1369 |
| 1370 // ----------- S t a t e ------------- |
| 1371 // -- a0 : argArray |
| 1372 // -- a1 : receiver |
| 1373 // -- sp[0] : thisArg |
| 1374 // ----------------------------------- |
| 1375 |
| 1376 // 2. Make sure the receiver is actually callable. |
| 1377 Label receiver_not_callable; |
| 1378 __ JumpIfSmi(a1, &receiver_not_callable); |
| 1379 __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset)); |
| 1380 __ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset)); |
| 1381 __ And(t0, t0, Operand(1 << Map::kIsCallable)); |
| 1382 __ Branch(&receiver_not_callable, eq, t0, Operand(zero_reg)); |
| 1383 |
| 1384 // 3. Tail call with no arguments if argArray is null or undefined. |
| 1385 Label no_arguments; |
| 1386 __ JumpIfRoot(a0, Heap::kNullValueRootIndex, &no_arguments); |
| 1387 __ JumpIfRoot(a0, Heap::kUndefinedValueRootIndex, &no_arguments); |
| 1388 |
| 1389 // 4a. Apply the receiver to the given argArray (passing undefined for |
| 1390 // new.target). |
| 1391 __ LoadRoot(a3, Heap::kUndefinedValueRootIndex); |
| 1392 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); |
| 1393 |
| 1394 // 4b. The argArray is either null or undefined, so we tail call without any |
| 1395 // arguments to the receiver. |
| 1396 __ bind(&no_arguments); |
| 1397 { |
| 1398 __ mov(a0, zero_reg); |
| 1399 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); |
| 1400 } |
| 1401 |
| 1402 // 4c. The receiver is not callable, throw an appropriate TypeError. |
| 1403 __ bind(&receiver_not_callable); |
| 1404 { |
| 1405 __ sw(a1, MemOperand(sp)); |
| 1406 __ TailCallRuntime(Runtime::kThrowApplyNonFunction, 1, 1); |
| 1407 } |
1511 } | 1408 } |
1512 | 1409 |
1513 | 1410 |
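The no_arg shuffle in step 1 of Generate_FunctionApply is easiest to follow as ordinary code. The C++ sketch below is illustrative only (the helper name and the vector-as-stack model are invented, not part of this CL): it walks down from the receiver slot, defaults thisArg and argArray to undefined when argc is too small, then drops all arguments and leaves thisArg as the sole stack operand.

#include <cstdint>
#include <vector>

// Sketch of Generate_FunctionApply step 1 (hypothetical helper, not V8
// code). stack[0] models sp[0]; the receiver lives at stack[argc].
struct ApplyOperands {
  uintptr_t receiver;   // -> a1
  uintptr_t this_arg;   // -> a2, later stored at sp[0]
  uintptr_t arg_array;  // -> a3, later moved to a0
};

ApplyOperands ShuffleApplyStack(std::vector<uintptr_t>* stack, int argc,
                                uintptr_t undefined) {
  ApplyOperands ops = {undefined, undefined, undefined};
  int slot = argc;                 // index of the receiver slot
  ops.receiver = (*stack)[slot--];
  if (slot >= 0) ops.this_arg = (*stack)[slot--];  // thisArg, if present
  if (slot >= 0) ops.arg_array = (*stack)[slot];   // argArray, if present
  // Drop all arguments (Addu(sp, sp, scratch)) and overwrite the old
  // receiver slot with thisArg (sw(a2, MemOperand(sp))).
  stack->erase(stack->begin(), stack->begin() + argc);
  (*stack)[0] = ops.this_arg;
  return ops;
}

With argc == 0 both guarded loads are skipped, matching the lt-against-sp branches, so Function.prototype.apply() invoked with no arguments proceeds with undefined for both operands.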
1514 void Builtins::Generate_ReflectApply(MacroAssembler* masm) { | 1411 void Builtins::Generate_ReflectApply(MacroAssembler* masm) { |
1515 Generate_ApplyHelper(masm, true); | 1412 // ----------- S t a t e ------------- |
| 1413 // -- a0 : argc |
| 1414 // -- sp[0] : argumentsList |
| 1415 // -- sp[4] : thisArgument |
| 1416 // -- sp[8] : target |
| 1417 // -- sp[12] : receiver |
| 1418 // ----------------------------------- |
| 1419 |
| 1420 // 1. Load target into a1 (if present), argumentsList into a0 (if present), |
| 1421 // remove all arguments from the stack (including the receiver), and push |
| 1422 // thisArgument (if present) instead. |
| 1423 { |
| 1424 Label no_arg; |
| 1425 Register scratch = t0; |
| 1426 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex); |
| 1427 __ mov(a2, a1); |
| 1428 __ mov(a3, a1); |
| 1429 __ sll(scratch, a0, kPointerSizeLog2); |
| 1430 __ mov(a0, scratch); |
| 1431 __ Subu(a0, a0, Operand(kPointerSize)); |
| 1432 __ Branch(&no_arg, lt, a0, Operand(zero_reg)); |
| 1433 __ Addu(a0, sp, Operand(a0)); |
| 1434 __ lw(a1, MemOperand(a0)); // target |
| 1435 __ Subu(a0, a0, Operand(kPointerSize)); |
| 1436 __ Branch(&no_arg, lt, a0, Operand(sp)); |
| 1437 __ lw(a2, MemOperand(a0)); // thisArgument |
| 1438 __ Subu(a0, a0, Operand(kPointerSize)); |
| 1439 __ Branch(&no_arg, lt, a0, Operand(sp)); |
| 1440 __ lw(a3, MemOperand(a0)); // argumentsList |
| 1441 __ bind(&no_arg); |
| 1442 __ Addu(sp, sp, Operand(scratch)); |
| 1443 __ sw(a2, MemOperand(sp)); |
| 1444 __ mov(a0, a3); |
| 1445 } |
| 1446 |
| 1447 // ----------- S t a t e ------------- |
| 1448 // -- a0 : argumentsList |
| 1449 // -- a1 : target |
| 1450 // -- sp[0] : thisArgument |
| 1451 // ----------------------------------- |
| 1452 |
| 1453 // 2. Make sure the target is actually callable. |
| 1454 Label target_not_callable; |
| 1455 __ JumpIfSmi(a1, &target_not_callable); |
| 1456 __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset)); |
| 1457 __ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset)); |
| 1458 __ And(t0, t0, Operand(1 << Map::kIsCallable)); |
| 1459 __ Branch(&target_not_callable, eq, t0, Operand(zero_reg)); |
| 1460 |
| 1461 // 3a. Apply the target to the given argumentsList (passing undefined for |
| 1462 // new.target). |
| 1463 __ LoadRoot(a3, Heap::kUndefinedValueRootIndex); |
| 1464 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); |
| 1465 |
| 1466 // 3b. The target is not callable, throw an appropriate TypeError. |
| 1467 __ bind(&target_not_callable); |
| 1468 { |
| 1469 __ sw(a1, MemOperand(sp)); |
| 1470 __ TailCallRuntime(Runtime::kThrowApplyNonFunction, 1, 1); |
| 1471 } |
1516 } | 1472 } |
1517 | 1473 |
1518 | 1474 |
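The callable check in step 2 above (and the analogous constructor checks in Generate_ReflectConstruct below) is a single bit test on the receiver map's bit field. A minimal stand-alone model, with an assumed bit position rather than V8's real Map layout:

#include <cstdint>

// Assumed bit position for illustration; the real value is Map::kIsCallable.
constexpr int kIsCallableBit = 4;

// Mirrors: And(t0, t0, Operand(1 << Map::kIsCallable));
//          Branch(&target_not_callable, eq, t0, Operand(zero_reg));
bool IsCallable(uint8_t map_bit_field) {
  return (map_bit_field & (1 << kIsCallableBit)) != 0;
}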
1519 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) { | 1475 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) { |
1520 Generate_ConstructHelper(masm); | 1476 // ----------- S t a t e ------------- |
1521 } | 1477 // -- a0 : argc |
1522 | 1478 // -- sp[0] : new.target (optional) |
1523 | 1479 // -- sp[4] : argumentsList |
| 1480 // -- sp[8] : target |
| 1481 // -- sp[12] : receiver |
| 1482 // ----------------------------------- |
| 1483 |
| 1484 // 1. Load target into a1 (if present), argumentsList into a0 (if present), |
| 1485 // new.target into a3 (if present, otherwise use target), remove all |
| 1486 // arguments from the stack (including the receiver), and push thisArgument |
| 1487 // (if present) instead. |
| 1488 { |
| 1489 Label no_arg; |
| 1490 Register scratch = t0; |
| 1491 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex); |
| 1492 __ mov(a2, a1); |
| 1493 __ sll(scratch, a0, kPointerSizeLog2); |
| 1494 __ Addu(a0, sp, Operand(scratch)); |
| 1495 __ sw(a2, MemOperand(a0)); // receiver |
| 1496 __ Subu(a0, a0, Operand(kPointerSize)); |
| 1497 __ Branch(&no_arg, lt, a0, Operand(sp)); |
| 1498 __ lw(a1, MemOperand(a0)); // target |
| 1499 __ mov(a3, a1); // new.target defaults to target |
| 1500 __ Subu(a0, a0, Operand(kPointerSize)); |
| 1501 __ Branch(&no_arg, lt, a0, Operand(sp)); |
| 1502 __ lw(a2, MemOperand(a0)); // argumentsList |
| 1503 __ Subu(a0, a0, Operand(kPointerSize)); |
| 1504 __ Branch(&no_arg, lt, a0, Operand(sp)); |
| 1505 __ lw(a3, MemOperand(a0)); // new.target |
| 1506 __ bind(&no_arg); |
| 1507 __ Addu(sp, sp, Operand(scratch)); |
| 1508 __ mov(a0, a2); |
| 1509 } |
| 1510 |
| 1511 // ----------- S t a t e ------------- |
| 1512 // -- a0 : argumentsList |
| 1513 // -- a3 : new.target |
| 1514 // -- a1 : target |
| 1515 // -- sp[0] : receiver (undefined) |
| 1516 // ----------------------------------- |
| 1517 |
| 1518 // 2. Make sure the target is actually a constructor. |
| 1519 Label target_not_constructor; |
| 1520 __ JumpIfSmi(a1, &target_not_constructor); |
| 1521 __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset)); |
| 1522 __ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset)); |
| 1523 __ And(t0, t0, Operand(1 << Map::kIsConstructor)); |
| 1524 __ Branch(&target_not_constructor, eq, t0, Operand(zero_reg)); |
| 1525 |
| 1526 // 3. Make sure the new.target is actually a constructor. |
| 1527 Label new_target_not_constructor; |
| 1528 __ JumpIfSmi(a3, &new_target_not_constructor); |
| 1529 __ lw(t0, FieldMemOperand(a3, HeapObject::kMapOffset)); |
| 1530 __ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset)); |
| 1531 __ And(t0, t0, Operand(1 << Map::kIsConstructor)); |
| 1532 __ Branch(&new_target_not_constructor, eq, t0, Operand(zero_reg)); |
| 1533 |
| 1534 // 4a. Construct the target with the given new.target and argumentsList. |
| 1535 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); |
| 1536 |
| 1537 // 4b. The target is not a constructor, throw an appropriate TypeError. |
| 1538 __ bind(&target_not_constructor); |
| 1539 { |
| 1540 __ sw(a1, MemOperand(sp)); |
| 1541 __ TailCallRuntime(Runtime::kThrowCalledNonCallable, 1, 1); |
| 1542 } |
| 1543 |
| 1544 // 4c. The new.target is not a constructor, throw an appropriate TypeError. |
| 1545 __ bind(&new_target_not_constructor); |
| 1546 { |
| 1547 __ sw(a3, MemOperand(sp)); |
| 1548 __ TailCallRuntime(Runtime::kThrowCalledNonCallable, 1, 1); |
| 1549 } |
| 1550 } |
| 1551 |
| 1552 |
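Step 1's new.target handling differs from the two builtins above: after the optional target load, a3 is seeded with target itself (mov(a3, a1)), so a missing third argument makes new.target default to target, as Reflect.construct specifies. A spec-level C++ sketch, with invented names and arguments given in call order (staleness of a3 in the zero-argument case is harmless because the step-2 check throws first):

#include <cstdint>

// Hypothetical model of reading Reflect.construct's operands. args[0] is
// the first real argument (the receiver has already been skipped).
struct ConstructOperands {
  uintptr_t target;
  uintptr_t arguments_list;
  uintptr_t new_target;
};

ConstructOperands ReadConstructOperands(const uintptr_t* args, int argc,
                                        uintptr_t undefined) {
  ConstructOperands ops = {undefined, undefined, undefined};
  if (argc >= 1) {
    ops.target = args[0];
    ops.new_target = ops.target;  // new.target defaults to target
  }
  if (argc >= 2) ops.arguments_list = args[1];
  if (argc >= 3) ops.new_target = args[2];
  return ops;
}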
1524 static void ArgumentAdaptorStackCheck(MacroAssembler* masm, | 1553 static void ArgumentAdaptorStackCheck(MacroAssembler* masm, |
1525 Label* stack_overflow) { | 1554 Label* stack_overflow) { |
1526 // ----------- S t a t e ------------- | 1555 // ----------- S t a t e ------------- |
1527 // -- a0 : actual number of arguments | 1556 // -- a0 : actual number of arguments |
1528 // -- a1 : function (passed through to callee) | 1557 // -- a1 : function (passed through to callee) |
1529 // -- a2 : expected number of arguments | 1558 // -- a2 : expected number of arguments |
1530 // -- a3 : new target (passed through to callee) | 1559 // -- a3 : new target (passed through to callee) |
1531 // ----------------------------------- | 1560 // ----------------------------------- |
1532 // Check the stack for overflow. We are not trying to catch | 1561 // Check the stack for overflow. We are not trying to catch |
1533 // interruptions (e.g. debug break and preemption) here, so the "real stack | 1562 // interruptions (e.g. debug break and preemption) here, so the "real stack |
(...skipping 29 matching lines...) |
1563 __ mov(sp, fp); | 1592 __ mov(sp, fp); |
1564 __ MultiPop(fp.bit() | ra.bit()); | 1593 __ MultiPop(fp.bit() | ra.bit()); |
1565 __ sll(t0, a1, kPointerSizeLog2 - kSmiTagSize); | 1594 __ sll(t0, a1, kPointerSizeLog2 - kSmiTagSize); |
1566 __ Addu(sp, sp, t0); | 1595 __ Addu(sp, sp, t0); |
1567 // Adjust for the receiver. | 1596 // Adjust for the receiver. |
1568 __ Addu(sp, sp, Operand(kPointerSize)); | 1597 __ Addu(sp, sp, Operand(kPointerSize)); |
1569 } | 1598 } |
1570 | 1599 |
1571 | 1600 |
1572 // static | 1601 // static |
| 1602 void Builtins::Generate_Apply(MacroAssembler* masm) { |
| 1603 // ----------- S t a t e ------------- |
| 1604 // -- a0 : argumentsList |
| 1605 // -- a1 : target |
| 1606 // -- a3 : new.target (checked to be constructor or undefined) |
| 1607 // -- sp[0] : thisArgument |
| 1608 // ----------------------------------- |
| 1609 |
| 1610 // Create the list of arguments from the array-like argumentsList. |
| 1611 { |
| 1612 Label create_arguments, create_array, create_runtime, done_create; |
| 1613 __ JumpIfSmi(a0, &create_runtime); |
| 1614 |
| 1615 // Load the map of argumentsList into a2. |
| 1616 __ lw(a2, FieldMemOperand(a0, HeapObject::kMapOffset)); |
| 1617 |
| 1618 // Load native context into t0. |
| 1619 __ lw(t0, NativeContextMemOperand()); |
| 1620 |
| 1621 // Check if argumentsList is an (unmodified) arguments object. |
| 1622 __ lw(at, ContextMemOperand(t0, Context::SLOPPY_ARGUMENTS_MAP_INDEX)); |
| 1623 __ Branch(&create_arguments, eq, a2, Operand(at)); |
| 1624 __ lw(at, ContextMemOperand(t0, Context::STRICT_ARGUMENTS_MAP_INDEX)); |
| 1625 __ Branch(&create_arguments, eq, a2, Operand(at)); |
| 1626 |
| 1627 // Check if argumentsList is a fast JSArray. |
| 1628 __ lw(v0, FieldMemOperand(a2, HeapObject::kMapOffset)); |
| 1629 __ lbu(v0, FieldMemOperand(v0, Map::kInstanceTypeOffset)); |
| 1630 __ Branch(&create_array, eq, v0, Operand(JS_ARRAY_TYPE)); |
| 1631 |
| 1632 // Ask the runtime to create the list (actually a FixedArray). |
| 1633 __ bind(&create_runtime); |
| 1634 { |
| 1635 FrameScope scope(masm, StackFrame::INTERNAL); |
| 1636 __ Push(a1, a3, a0); |
| 1637 __ CallRuntime(Runtime::kCreateListFromArrayLike, 1); |
| 1638 __ mov(a0, v0); |
| 1639 __ Pop(a1, a3); |
| 1640 __ lw(a2, FieldMemOperand(v0, FixedArray::kLengthOffset)); |
| 1641 __ SmiUntag(a2); |
| 1642 } |
| 1643 __ Branch(&done_create); |
| 1644 |
| 1645 // Try to create the list from an arguments object. |
| 1646 __ bind(&create_arguments); |
| 1647 __ lw(a2, |
| 1648 FieldMemOperand(a0, JSObject::kHeaderSize + |
| 1649 Heap::kArgumentsLengthIndex * kPointerSize)); |
| 1650 __ lw(t0, FieldMemOperand(a0, JSObject::kElementsOffset)); |
| 1651 __ lw(at, FieldMemOperand(t0, FixedArray::kLengthOffset)); |
| 1652 __ Branch(&create_runtime, ne, a2, Operand(at)); |
| 1653 __ SmiUntag(a2); |
| 1654 __ mov(a0, t0); |
| 1655 __ Branch(&done_create); |
| 1656 |
| 1657 // Try to create the list from a JSArray object. |
| 1658 __ bind(&create_array); |
| 1659 __ lw(a2, FieldMemOperand(a2, Map::kBitField2Offset)); |
| 1660 __ DecodeField<Map::ElementsKindBits>(a2); |
| 1661 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0); |
| 1662 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1); |
| 1663 STATIC_ASSERT(FAST_ELEMENTS == 2); |
| 1664 __ Branch(&create_runtime, hi, a2, Operand(FAST_ELEMENTS)); |
| 1665 __ Branch(&create_runtime, eq, a2, Operand(FAST_HOLEY_SMI_ELEMENTS)); |
| 1666 __ lw(a2, FieldMemOperand(a0, JSArray::kLengthOffset)); |
| 1667 __ lw(a0, FieldMemOperand(a0, JSArray::kElementsOffset)); |
| 1668 __ SmiUntag(a2); |
| 1669 |
| 1670 __ bind(&done_create); |
| 1671 } |
| 1672 |
| 1673 // Check for stack overflow. |
| 1674 { |
| 1675 // Check the stack for overflow. We are not trying to catch interruptions |
| 1676 // (i.e. debug break and preemption) here, so check the "real stack limit". |
| 1677 Label done; |
| 1678 __ LoadRoot(t0, Heap::kRealStackLimitRootIndex); |
| 1679 // Make t0 the space we have left. The stack might already be overflowed |
| 1680 // here which will cause t0 to become negative. |
| 1681 __ Subu(t0, sp, t0); |
| 1682 // Check if the arguments will overflow the stack. |
| 1683 __ sll(at, a2, kPointerSizeLog2); |
| 1684 __ Branch(&done, gt, t0, Operand(at)); // Signed comparison. |
| 1685 __ TailCallRuntime(Runtime::kThrowStackOverflow, 1, 1); |
| 1686 __ bind(&done); |
| 1687 } |
| 1688 |
| 1689 // ----------- S t a t e ------------- |
| 1690 // -- a1 : target |
| 1691 // -- a0 : args (a FixedArray built from argumentsList) |
| 1692 // -- a2 : len (number of elements to push from args) |
| 1693 // -- a3 : new.target (checked to be constructor or undefined) |
| 1694 // -- sp[0] : thisArgument |
| 1695 // ----------------------------------- |
| 1696 |
| 1697 // Push arguments onto the stack (thisArgument is already on the stack). |
| 1698 { |
| 1699 __ mov(t0, zero_reg); |
| 1700 Label done, loop; |
| 1701 __ bind(&loop); |
| 1702 __ Branch(&done, eq, t0, Operand(a2)); |
| 1703 __ sll(at, t0, kPointerSizeLog2); |
| 1704 __ Addu(at, a0, at); |
| 1705 __ lw(at, FieldMemOperand(at, FixedArray::kHeaderSize)); |
| 1706 __ Push(at); |
| 1707 __ Addu(t0, t0, Operand(1)); |
| 1708 __ Branch(&loop); |
| 1709 __ bind(&done); |
| 1710 __ Move(a0, t0); |
| 1711 } |
| 1712 |
| 1713 // Dispatch to Call or Construct depending on whether new.target is undefined. |
| 1714 { |
| 1715 Label construct; |
| 1716 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); |
| 1717 __ Branch(&construct, ne, a3, Operand(at)); |
| 1718 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); |
| 1719 __ bind(&construct); |
| 1720 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); |
| 1721 } |
| 1722 } |
| 1723 |
| 1724 |
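The create_arguments/create_array/create_runtime split in Generate_Apply reduces to a three-way classification of argumentsList. A hedged summary in plain C++ (enum values and helper invented for illustration; it also omits the length-vs-elements consistency check that the arguments-object path performs). Only the two packed fast kinds take the inline JSArray path, exactly as the FAST_ELEMENTS / FAST_HOLEY_SMI_ELEMENTS branches encode:

enum class ElementsKind {
  kFastSmi = 0,       // FAST_SMI_ELEMENTS
  kFastHoleySmi = 1,  // FAST_HOLEY_SMI_ELEMENTS
  kFastObject = 2,    // FAST_ELEMENTS
  kOther              // everything else (holey object, dictionary, ...)
};

enum class ListSource { kArgumentsObject, kFastArray, kRuntime };

// Invented helper summarizing the dispatch; not part of the CL.
ListSource ClassifyArgumentsList(bool is_unmodified_arguments,
                                 bool is_js_array, ElementsKind kind) {
  if (is_unmodified_arguments) return ListSource::kArgumentsObject;
  if (is_js_array && (kind == ElementsKind::kFastSmi ||
                      kind == ElementsKind::kFastObject)) {
    return ListSource::kFastArray;  // packed smi/object elements only
  }
  return ListSource::kRuntime;  // CreateListFromArrayLike handles the rest
}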
| 1725 // static |
1573 void Builtins::Generate_CallFunction(MacroAssembler* masm, | 1726 void Builtins::Generate_CallFunction(MacroAssembler* masm, |
1574 ConvertReceiverMode mode) { | 1727 ConvertReceiverMode mode) { |
1575 // ----------- S t a t e ------------- | 1728 // ----------- S t a t e ------------- |
1576 // -- a0 : the number of arguments (not including the receiver) | 1729 // -- a0 : the number of arguments (not including the receiver) |
1577 // -- a1 : the function to call (checked to be a JSFunction) | 1730 // -- a1 : the function to call (checked to be a JSFunction) |
1578 // ----------------------------------- | 1731 // ----------------------------------- |
1579 __ AssertFunction(a1); | 1732 __ AssertFunction(a1); |
1580 | 1733 |
1581 // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList) | 1734 // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList) |
1582 // Check that the function is not a "classConstructor". | 1735 // Check that the function is not a "classConstructor". |
(...skipping 390 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1973 } | 2126 } |
1974 } | 2127 } |
1975 | 2128 |
1976 | 2129 |
1977 #undef __ | 2130 #undef __ |
1978 | 2131 |
1979 } // namespace internal | 2132 } // namespace internal |
1980 } // namespace v8 | 2133 } // namespace v8 |
1981 | 2134 |
1982 #endif // V8_TARGET_ARCH_MIPS | 2135 #endif // V8_TARGET_ARCH_MIPS |