Chromium Code Reviews

Unified Diff: src/mips/builtins-mips.cc

Issue 1533803002: Revert of [es6] Correct Function.prototype.apply, Reflect.construct and Reflect.apply. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #if V8_TARGET_ARCH_MIPS

 #include "src/codegen.h"
 #include "src/debug/debug.h"
 #include "src/deoptimizer.h"
 #include "src/full-codegen/full-codegen.h"
(...skipping 616 matching lines...)

 // Clobbers a2; preserves all other registers.
 static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
                                         IsTagged argc_is_tagged) {
   // Check the stack for overflow. We are not trying to catch
   // interruptions (e.g. debug break and preemption) here, so the "real stack
   // limit" is checked.
   Label okay;
   __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
   // Make a2 the space we have left. The stack might already be overflowed
-  // here which will cause a2 to become negative.
+  // here which will cause r2 to become negative.
   __ Subu(a2, sp, a2);
   // Check if the arguments will overflow the stack.
   if (argc_is_tagged == kArgcIsSmiTagged) {
     __ sll(t3, argc, kPointerSizeLog2 - kSmiTagSize);
   } else {
     DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
     __ sll(t3, argc, kPointerSizeLog2);
   }
   // Signed comparison.
   __ Branch(&okay, gt, a2, Operand(t3));
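
Note: a minimal sketch of the arithmetic behind this check, in plain C++ (real_stack_limit stands in for the root value loaded above; illustrative only, not V8 API):

    // space_left can wrap below zero if the stack is already overflowed,
    // which is why the generated code uses a *signed* gt branch.
    intptr_t space_left = static_cast<intptr_t>(sp - real_stack_limit);
    intptr_t needed = argc_is_smi_tagged
                          ? argc << (kPointerSizeLog2 - kSmiTagSize)
                          : argc << kPointerSizeLog2;
    bool okay = space_left > needed;  // otherwise: fall through and throw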
(...skipping 683 matching lines...)
     // (which is a copy of the last argument).
     __ Subu(a0, a0, Operand(1));
     __ Pop();
   }

   // 4. Call the callable.
   __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
 }


+static void Generate_PushAppliedArguments(MacroAssembler* masm,
+                                          const int vectorOffset,
+                                          const int argumentsOffset,
+                                          const int indexOffset,
+                                          const int limitOffset) {
+  Label entry, loop;
+  Register receiver = LoadDescriptor::ReceiverRegister();
+  Register key = LoadDescriptor::NameRegister();
+  Register slot = LoadDescriptor::SlotRegister();
+  Register vector = LoadWithVectorDescriptor::VectorRegister();
+
+  __ lw(key, MemOperand(fp, indexOffset));
+  __ Branch(&entry);
+
+  // Load the current argument from the arguments array.
+  __ bind(&loop);
+  __ lw(receiver, MemOperand(fp, argumentsOffset));
+
+  // Use inline caching to speed up access to arguments.
+  int slot_index = TypeFeedbackVector::PushAppliedArgumentsIndex();
+  __ li(slot, Operand(Smi::FromInt(slot_index)));
+  __ lw(vector, MemOperand(fp, vectorOffset));
+  Handle<Code> ic =
+      KeyedLoadICStub(masm->isolate(), LoadICState(kNoExtraICState)).GetCode();
+  __ Call(ic, RelocInfo::CODE_TARGET);
+
+  __ push(v0);
+
+  // Use inline caching to access the arguments.
+  __ lw(key, MemOperand(fp, indexOffset));
+  __ Addu(key, key, Operand(1 << kSmiTagSize));
+  __ sw(key, MemOperand(fp, indexOffset));
+
+  // Test if the copy loop has finished copying all the elements from the
+  // arguments object.
+  __ bind(&entry);
+  __ lw(a1, MemOperand(fp, limitOffset));
+  __ Branch(&loop, ne, key, Operand(a1));
+
+  // On exit, the pushed arguments count is in a0, untagged
+  __ mov(a0, key);
+  __ SmiUntag(a0);
+}
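
Note: the loop keeps the index as a smi in a frame slot and re-loads it around the IC call, since the call may clobber registers; bumping by 1 << kSmiTagSize adds smi 1. Roughly, as pseudo-C++ (frame[] and KeyedLoad are illustrative stand-ins, not V8 API):

    // Sketch of the copy loop above.
    Smi index = frame[indexOffset];
    while (index != frame[limitOffset]) {
      // KeyedLoadIC through the PushAppliedArguments feedback slot:
      Object arg = KeyedLoad(frame[argumentsOffset], index);
      Push(arg);
      index = Smi::FromInt(Smi::ToInt(index) + 1);
      frame[indexOffset] = index;  // persist across the next IC call
    }
    a0 = Smi::ToInt(index);  // untagged count of pushed arguments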
+
+
+// Used by FunctionApply and ReflectApply
+static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) {
+  const int kFormalParameters = targetIsArgument ? 3 : 2;
+  const int kStackSize = kFormalParameters + 1;
+
+  {
+    FrameScope frame_scope(masm, StackFrame::INTERNAL);
+    const int kArgumentsOffset = kFPOnStackSize + kPCOnStackSize;
+    const int kReceiverOffset = kArgumentsOffset + kPointerSize;
+    const int kFunctionOffset = kReceiverOffset + kPointerSize;
+    const int kVectorOffset =
+        InternalFrameConstants::kCodeOffset - 1 * kPointerSize;
+
+    // Push the vector.
+    __ lw(a1, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
+    __ lw(a1, FieldMemOperand(a1, SharedFunctionInfo::kFeedbackVectorOffset));
+    __ Push(a1);
+
+    __ lw(a0, MemOperand(fp, kFunctionOffset));   // Get the function.
+    __ lw(a1, MemOperand(fp, kArgumentsOffset));  // Get the args array.
+    __ Push(a0, a1);
+    // Returns (in v0) number of arguments to copy to stack as Smi.
+    if (targetIsArgument) {
+      __ InvokeBuiltin(Context::REFLECT_APPLY_PREPARE_BUILTIN_INDEX,
+                       CALL_FUNCTION);
+    } else {
+      __ InvokeBuiltin(Context::APPLY_PREPARE_BUILTIN_INDEX, CALL_FUNCTION);
+    }
+
+    // Returns the result in v0.
+    Generate_CheckStackOverflow(masm, v0, kArgcIsSmiTagged);
+
+    // Push current limit and index.
+    const int kIndexOffset = kVectorOffset - (2 * kPointerSize);
+    const int kLimitOffset = kVectorOffset - (1 * kPointerSize);
+    __ mov(a1, zero_reg);
+    __ lw(a2, MemOperand(fp, kReceiverOffset));
+    __ Push(v0, a1, a2);  // limit, initial index and receiver.
+
+    // Copy all arguments from the array to the stack.
+    Generate_PushAppliedArguments(masm, kVectorOffset, kArgumentsOffset,
+                                  kIndexOffset, kLimitOffset);
+
+    // Call the callable.
+    // TODO(bmeurer): This should be a tail call according to ES6.
+    __ lw(a1, MemOperand(fp, kFunctionOffset));
+    __ Call(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
+
+    // Tear down the internal frame and remove function, receiver and args.
+  }
+
+  __ Ret(USE_DELAY_SLOT);
+  __ Addu(sp, sp, Operand(kStackSize * kPointerSize));  // In delay slot.
+}
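
Note: the k*Offset constants address the caller-pushed words relative to fp inside the INTERNAL frame. Assuming MIPS32 slot sizes (kPointerSize == kFPOnStackSize == kPCOnStackSize == 4 — an assumption of this sketch, not stated in the diff), they work out to:

    const int kArgumentsOffset = 4 + 4;                 // fp +  8: args array
    const int kReceiverOffset  = kArgumentsOffset + 4;  // fp + 12: receiver
    const int kFunctionOffset  = kReceiverOffset + 4;   // fp + 16: function
    // kVectorOffset sits below fp, one slot under the frame's code slot.

The vector, index, and limit slots live below fp, which is why Generate_PushAppliedArguments reloads them from the frame on every iteration.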
+
+
+static void Generate_ConstructHelper(MacroAssembler* masm) {
+  const int kFormalParameters = 3;
+  const int kStackSize = kFormalParameters + 1;
+
+  {
+    FrameScope frame_scope(masm, StackFrame::INTERNAL);
+    const int kNewTargetOffset = kFPOnStackSize + kPCOnStackSize;
+    const int kArgumentsOffset = kNewTargetOffset + kPointerSize;
+    const int kFunctionOffset = kArgumentsOffset + kPointerSize;
+    const int kVectorOffset =
+        InternalFrameConstants::kCodeOffset - 1 * kPointerSize;
+
+    // Push the vector.
+    __ lw(a1, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
+    __ lw(a1, FieldMemOperand(a1, SharedFunctionInfo::kFeedbackVectorOffset));
+    __ Push(a1);
+
+    // If newTarget is not supplied, set it to constructor
+    Label validate_arguments;
+    __ lw(a0, MemOperand(fp, kNewTargetOffset));
+    __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
+    __ Branch(&validate_arguments, ne, a0, Operand(at));
+    __ lw(a0, MemOperand(fp, kFunctionOffset));
+    __ sw(a0, MemOperand(fp, kNewTargetOffset));
+
+    // Validate arguments
+    __ bind(&validate_arguments);
+    __ lw(a0, MemOperand(fp, kFunctionOffset));   // get the function
+    __ push(a0);
+    __ lw(a0, MemOperand(fp, kArgumentsOffset));  // get the args array
+    __ push(a0);
+    __ lw(a0, MemOperand(fp, kNewTargetOffset));  // get the new.target
+    __ push(a0);
+    // Returns argument count in v0.
+    __ InvokeBuiltin(Context::REFLECT_CONSTRUCT_PREPARE_BUILTIN_INDEX,
+                     CALL_FUNCTION);
+
+    // Returns result in v0.
+    Generate_CheckStackOverflow(masm, v0, kArgcIsSmiTagged);
+
+    // Push current limit and index.
+    const int kIndexOffset = kVectorOffset - (2 * kPointerSize);
+    const int kLimitOffset = kVectorOffset - (1 * kPointerSize);
+    __ push(v0);           // limit
+    __ mov(a1, zero_reg);  // initial index
+    __ push(a1);
+    // Push the constructor function as callee.
+    __ lw(a0, MemOperand(fp, kFunctionOffset));
+    __ push(a0);
+
+    // Copy all arguments from the array to the stack.
+    Generate_PushAppliedArguments(masm, kVectorOffset, kArgumentsOffset,
+                                  kIndexOffset, kLimitOffset);
+
+    // Use undefined feedback vector
+    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
+    __ lw(a1, MemOperand(fp, kFunctionOffset));
+    __ lw(a3, MemOperand(fp, kNewTargetOffset));
+
+    // Call the function.
+    __ Call(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
+
+    // Leave internal frame.
+  }
+  __ jr(ra);
+  __ Addu(sp, sp, Operand(kStackSize * kPointerSize));  // In delay slot.
+}
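
Note: the branch over validate_arguments implements the Reflect.construct rule that new.target defaults to target; as pseudo-C++ (frame[] illustrative):

    Object new_target = frame[kNewTargetOffset];
    if (new_target == undefined_value) {
      // new.target was not supplied: default it to the constructor itself.
      frame[kNewTargetOffset] = frame[kFunctionOffset];
    }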
+
+
 void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
-  // ----------- S t a t e -------------
-  //  -- a0    : argc
-  //  -- sp[0] : argArray
-  //  -- sp[4] : thisArg
-  //  -- sp[8] : receiver
-  // -----------------------------------
-
-  // 1. Load receiver into a1, argArray into a0 (if present), remove all
-  // arguments from the stack (including the receiver), and push thisArg (if
-  // present) instead.
-  {
-    Label no_arg;
-    Register scratch = t0;
-    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
-    __ mov(a3, a2);
-    __ sll(scratch, a0, kPointerSizeLog2);
-    __ Addu(a0, sp, Operand(scratch));
-    __ lw(a1, MemOperand(a0));  // receiver
-    __ Subu(a0, a0, Operand(kPointerSize));
-    __ Branch(&no_arg, lt, a0, Operand(sp));
-    __ lw(a2, MemOperand(a0));  // thisArg
-    __ Subu(a0, a0, Operand(kPointerSize));
-    __ Branch(&no_arg, lt, a0, Operand(sp));
-    __ lw(a3, MemOperand(a0));  // argArray
-    __ bind(&no_arg);
-    __ Addu(sp, sp, Operand(scratch));
-    __ sw(a2, MemOperand(sp));
-    __ mov(a0, a3);
-  }
-
-  // ----------- S t a t e -------------
-  //  -- a0    : argArray
-  //  -- a1    : receiver
-  //  -- sp[0] : thisArg
-  // -----------------------------------
-
-  // 2. Make sure the receiver is actually callable.
-  Label receiver_not_callable;
-  __ JumpIfSmi(a1, &receiver_not_callable);
-  __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
-  __ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset));
-  __ And(t0, t0, Operand(1 << Map::kIsCallable));
-  __ Branch(&receiver_not_callable, eq, t0, Operand(zero_reg));
-
-  // 3. Tail call with no arguments if argArray is null or undefined.
-  Label no_arguments;
-  __ JumpIfRoot(a0, Heap::kNullValueRootIndex, &no_arguments);
-  __ JumpIfRoot(a0, Heap::kUndefinedValueRootIndex, &no_arguments);
-
-  // 4a. Apply the receiver to the given argArray (passing undefined for
-  // new.target).
-  __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
-  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
-
-  // 4b. The argArray is either null or undefined, so we tail call without any
-  // arguments to the receiver.
-  __ bind(&no_arguments);
-  {
-    __ mov(a0, zero_reg);
-    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
-  }
-
-  // 4c. The receiver is not callable, throw an appropriate TypeError.
-  __ bind(&receiver_not_callable);
-  {
-    __ sw(a1, MemOperand(sp));
-    __ TailCallRuntime(Runtime::kThrowApplyNonFunction, 1, 1);
-  }
+  Generate_ApplyHelper(masm, false);
 }
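
Note: the removed body above inlined the receiver/thisArg/argArray defaulting instead of going through APPLY_PREPARE. As pseudo-C++ (stack[] is an illustrative view of the expression stack, with stack[argc] being the receiver slot):

    Object receiver  = stack[argc];
    Object this_arg  = argc >= 1 ? stack[argc - 1] : undefined_value;
    Object arg_array = argc >= 2 ? stack[argc - 2] : undefined_value;
    sp += argc * kPointerSize;   // drop the arguments...
    stack[0] = this_arg;         // ...and overwrite the receiver slot
    a1 = receiver;               // callable for the tail call
    a0 = arg_array;              // argArray for the Apply builtin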


 void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
-  // ----------- S t a t e -------------
-  //  -- a0     : argc
-  //  -- sp[0]  : argumentsList
-  //  -- sp[4]  : thisArgument
-  //  -- sp[8]  : target
-  //  -- sp[12] : receiver
-  // -----------------------------------
-
-  // 1. Load target into a1 (if present), argumentsList into a0 (if present),
-  // remove all arguments from the stack (including the receiver), and push
-  // thisArgument (if present) instead.
-  {
-    Label no_arg;
-    Register scratch = t0;
-    __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
-    __ mov(a2, a1);
-    __ mov(a3, a1);
-    __ sll(scratch, a0, kPointerSizeLog2);
-    __ mov(a0, scratch);
-    __ Subu(a0, a0, Operand(kPointerSize));
-    __ Branch(&no_arg, lt, a0, Operand(zero_reg));
-    __ Addu(a0, sp, Operand(a0));
-    __ lw(a1, MemOperand(a0));  // target
-    __ Subu(a0, a0, Operand(kPointerSize));
-    __ Branch(&no_arg, lt, a0, Operand(sp));
-    __ lw(a2, MemOperand(a0));  // thisArgument
-    __ Subu(a0, a0, Operand(kPointerSize));
-    __ Branch(&no_arg, lt, a0, Operand(sp));
-    __ lw(a3, MemOperand(a0));  // argumentsList
-    __ bind(&no_arg);
-    __ Addu(sp, sp, Operand(scratch));
-    __ sw(a2, MemOperand(sp));
-    __ mov(a0, a3);
-  }
-
-  // ----------- S t a t e -------------
-  //  -- a0    : argumentsList
-  //  -- a1    : target
-  //  -- sp[0] : thisArgument
-  // -----------------------------------
-
-  // 2. Make sure the target is actually callable.
-  Label target_not_callable;
-  __ JumpIfSmi(a1, &target_not_callable);
-  __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
-  __ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset));
-  __ And(t0, t0, Operand(1 << Map::kIsCallable));
-  __ Branch(&target_not_callable, eq, t0, Operand(zero_reg));
-
-  // 3a. Apply the target to the given argumentsList (passing undefined for
-  // new.target).
-  __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
-  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
-
-  // 3b. The target is not callable, throw an appropriate TypeError.
-  __ bind(&target_not_callable);
-  {
-    __ sw(a1, MemOperand(sp));
-    __ TailCallRuntime(Runtime::kThrowApplyNonFunction, 1, 1);
-  }
+  Generate_ApplyHelper(masm, true);
 }
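
Note: the callability test in both removed builtins reads a single bit out of the object's map rather than calling into the runtime; as pseudo-C++ (accessor names illustrative, not V8 API):

    bool IsCallableObject(Object o) {
      if (IsSmi(o)) return false;            // the JumpIfSmi path
      uint8_t bits = MapOf(o)->bit_field();  // Map::kBitFieldOffset byte
      return (bits & (1 << Map::kIsCallable)) != 0;
    }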


 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
-  // ----------- S t a t e -------------
-  //  -- a0     : argc
-  //  -- sp[0]  : new.target (optional)
-  //  -- sp[4]  : argumentsList
-  //  -- sp[8]  : target
-  //  -- sp[12] : receiver
-  // -----------------------------------
-
-  // 1. Load target into a1 (if present), argumentsList into a0 (if present),
-  // new.target into a3 (if present, otherwise use target), remove all
-  // arguments from the stack (including the receiver), and push thisArgument
-  // (if present) instead.
-  {
-    Label no_arg;
-    Register scratch = t0;
-    __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
-    __ mov(a2, a1);
-    __ sll(scratch, a0, kPointerSizeLog2);
-    __ Addu(a0, sp, Operand(scratch));
-    __ sw(a2, MemOperand(a0));  // receiver
-    __ Subu(a0, a0, Operand(kPointerSize));
-    __ Branch(&no_arg, lt, a0, Operand(sp));
-    __ lw(a1, MemOperand(a0));  // target
-    __ mov(a3, a1);             // new.target defaults to target
-    __ Subu(a0, a0, Operand(kPointerSize));
-    __ Branch(&no_arg, lt, a0, Operand(sp));
-    __ lw(a2, MemOperand(a0));  // argumentsList
-    __ Subu(a0, a0, Operand(kPointerSize));
-    __ Branch(&no_arg, lt, a0, Operand(sp));
-    __ lw(a3, MemOperand(a0));  // new.target
-    __ bind(&no_arg);
-    __ Addu(sp, sp, Operand(scratch));
-    __ mov(a0, a2);
-  }
-
-  // ----------- S t a t e -------------
-  //  -- a0    : argumentsList
-  //  -- a3    : new.target
-  //  -- a1    : target
-  //  -- sp[0] : receiver (undefined)
-  // -----------------------------------
-
-  // 2. Make sure the target is actually a constructor.
-  Label target_not_constructor;
-  __ JumpIfSmi(a1, &target_not_constructor);
-  __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
-  __ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset));
-  __ And(t0, t0, Operand(1 << Map::kIsConstructor));
-  __ Branch(&target_not_constructor, eq, t0, Operand(zero_reg));
-
-  // 3. Make sure the new.target is actually a constructor.
-  Label new_target_not_constructor;
-  __ JumpIfSmi(a3, &new_target_not_constructor);
-  __ lw(t0, FieldMemOperand(a3, HeapObject::kMapOffset));
-  __ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset));
-  __ And(t0, t0, Operand(1 << Map::kIsConstructor));
-  __ Branch(&new_target_not_constructor, eq, t0, Operand(zero_reg));
-
-  // 4a. Construct the target with the given new.target and argumentsList.
-  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
-
-  // 4b. The target is not a constructor, throw an appropriate TypeError.
-  __ bind(&target_not_constructor);
-  {
-    __ sw(a1, MemOperand(sp));
-    __ TailCallRuntime(Runtime::kThrowCalledNonCallable, 1, 1);
-  }
-
-  // 4c. The new.target is not a constructor, throw an appropriate TypeError.
-  __ bind(&new_target_not_constructor);
-  {
-    __ sw(a3, MemOperand(sp));
-    __ TailCallRuntime(Runtime::kThrowCalledNonCallable, 1, 1);
-  }
+  Generate_ConstructHelper(masm);
 }
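
Note: Reflect.construct validates target and new.target with the same map-bit pattern as the callability check, only with Map::kIsConstructor; the removed flow reduces to (helper names illustrative):

    if (!IsConstructorObject(target))     return ThrowCalledNonCallable(target);
    if (!IsConstructorObject(new_target)) return ThrowCalledNonCallable(new_target);
    // Tail-call Apply with a0 = argumentsList, a1 = target, a3 = new.target.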


 static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
                                       Label* stack_overflow) {
   // ----------- S t a t e -------------
   //  -- a0 : actual number of arguments
   //  -- a1 : function (passed through to callee)
   //  -- a2 : expected number of arguments
   //  -- a3 : new target (passed through to callee)
(...skipping 31 matching lines...)
                                 kPointerSize)));
   __ mov(sp, fp);
   __ MultiPop(fp.bit() | ra.bit());
   __ sll(t0, a1, kPointerSizeLog2 - kSmiTagSize);
   __ Addu(sp, sp, t0);
   // Adjust for the receiver.
   __ Addu(sp, sp, Operand(kPointerSize));
 }


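
Note: this epilogue (apparently the adaptor-frame teardown) restores sp from fp, pops the saved fp/ra pair, then drops the smi-tagged argument count held in a1 plus one slot for the receiver; as arithmetic (SmiUntagValue illustrative):

    sp = fp;
    Pop(fp, ra);                              // MultiPop(fp.bit() | ra.bit())
    sp += (SmiUntagValue(a1) * kPointerSize)  // sll by kPointerSizeLog2 - kSmiTagSize
          + kPointerSize;                     // adjust for the receiver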
-// static
-void Builtins::Generate_Apply(MacroAssembler* masm) {
-  // ----------- S t a t e -------------
-  //  -- a0    : argumentsList
-  //  -- a1    : target
-  //  -- a3    : new.target (checked to be constructor or undefined)
-  //  -- sp[0] : thisArgument
-  // -----------------------------------
-
-  // Create the list of arguments from the array-like argumentsList.
-  {
-    Label create_arguments, create_array, create_runtime, done_create;
-    __ JumpIfSmi(a0, &create_runtime);
-
-    // Load the map of argumentsList into a2.
-    __ lw(a2, FieldMemOperand(a0, HeapObject::kMapOffset));
-
-    // Load native context into t0.
-    __ lw(t0, NativeContextMemOperand());
-
-    // Check if argumentsList is an (unmodified) arguments object.
-    __ lw(at, ContextMemOperand(t0, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
-    __ Branch(&create_arguments, eq, a2, Operand(at));
-    __ lw(at, ContextMemOperand(t0, Context::STRICT_ARGUMENTS_MAP_INDEX));
-    __ Branch(&create_arguments, eq, a2, Operand(at));
-
-    // Check if argumentsList is a fast JSArray.
-    __ lw(v0, FieldMemOperand(a2, HeapObject::kMapOffset));
-    __ lbu(v0, FieldMemOperand(v0, Map::kInstanceTypeOffset));
-    __ Branch(&create_array, eq, v0, Operand(JS_ARRAY_TYPE));
-
-    // Ask the runtime to create the list (actually a FixedArray).
-    __ bind(&create_runtime);
-    {
-      FrameScope scope(masm, StackFrame::INTERNAL);
-      __ Push(a1, a3, a0);
-      __ CallRuntime(Runtime::kCreateListFromArrayLike, 1);
-      __ mov(a0, v0);
-      __ Pop(a1, a3);
-      __ lw(a2, FieldMemOperand(v0, FixedArray::kLengthOffset));
-      __ SmiUntag(a2);
-    }
-    __ Branch(&done_create);
-
-    // Try to create the list from an arguments object.
-    __ bind(&create_arguments);
-    __ lw(a2,
-          FieldMemOperand(a0, JSObject::kHeaderSize +
-                                  Heap::kArgumentsLengthIndex * kPointerSize));
-    __ lw(t0, FieldMemOperand(a0, JSObject::kElementsOffset));
-    __ lw(at, FieldMemOperand(t0, FixedArray::kLengthOffset));
-    __ Branch(&create_runtime, ne, a2, Operand(at));
-    __ SmiUntag(a2);
-    __ mov(a0, t0);
-    __ Branch(&done_create);
-
-    // Try to create the list from a JSArray object.
-    __ bind(&create_array);
-    __ lw(a2, FieldMemOperand(a2, Map::kBitField2Offset));
-    __ DecodeField<Map::ElementsKindBits>(a2);
-    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
-    STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
-    STATIC_ASSERT(FAST_ELEMENTS == 2);
-    __ Branch(&create_runtime, hi, a2, Operand(FAST_ELEMENTS));
-    __ Branch(&create_runtime, eq, a2, Operand(FAST_HOLEY_SMI_ELEMENTS));
-    __ lw(a2, FieldMemOperand(a0, JSArray::kLengthOffset));
-    __ lw(a0, FieldMemOperand(a0, JSArray::kElementsOffset));
-    __ SmiUntag(a2);
-
-    __ bind(&done_create);
-  }
-
-  // Check for stack overflow.
-  {
-    // Check the stack for overflow. We are not trying to catch interruptions
-    // (i.e. debug break and preemption) here, so check the "real stack limit".
-    Label done;
-    __ LoadRoot(t0, Heap::kRealStackLimitRootIndex);
-    // Make t0 the space we have left. The stack might already be overflowed
-    // here which will cause t0 to become negative.
-    __ Subu(t0, sp, t0);
-    // Check if the arguments will overflow the stack.
-    __ sll(at, a2, kPointerSizeLog2);
-    __ Branch(&done, gt, t0, Operand(at));  // Signed comparison.
-    __ TailCallRuntime(Runtime::kThrowStackOverflow, 1, 1);
-    __ bind(&done);
-  }
-
-  // ----------- S t a t e -------------
-  //  -- a1    : target
-  //  -- a0    : args (a FixedArray built from argumentsList)
-  //  -- a2    : len (number of elements to push from args)
-  //  -- a3    : new.target (checked to be constructor or undefined)
-  //  -- sp[0] : thisArgument
-  // -----------------------------------
-
-  // Push arguments onto the stack (thisArgument is already on the stack).
-  {
-    __ mov(t0, zero_reg);
-    Label done, loop;
-    __ bind(&loop);
-    __ Branch(&done, eq, t0, Operand(a2));
-    __ sll(at, t0, kPointerSizeLog2);
-    __ Addu(at, a0, at);
-    __ lw(at, FieldMemOperand(at, FixedArray::kHeaderSize));
-    __ Push(at);
-    __ Addu(t0, t0, Operand(1));
-    __ Branch(&loop);
-    __ bind(&done);
-    __ Move(a0, t0);
-  }
-
-  // Dispatch to Call or Construct depending on whether new.target is
-  // undefined.
-  {
-    Label construct;
-    __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
-    __ Branch(&construct, ne, a3, Operand(at));
-    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
-    __ bind(&construct);
-    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
-  }
-}
-
-
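
Note: taken together, the removed Generate_Apply expanded argumentsList onto the stack entirely in generated code; its control flow reduces to (pseudo-C++, helper names illustrative):

    FixedArray args = CreateListFromArrayLike(arguments_list);  // fast paths
                                                                // or runtime
    CheckRealStackLimit(args.length());     // the signed limit check above
    for (int i = 0; i < args.length(); ++i) {
      Push(args.get(i));                    // thisArgument is already pushed
    }
    a0 = args.length();
    if (new_target == undefined_value) TailCall(Builtins::Call());
    else                               TailCall(Builtins::Construct());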
 // static
 void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                      ConvertReceiverMode mode) {
   // ----------- S t a t e -------------
   //  -- a0 : the number of arguments (not including the receiver)
   //  -- a1 : the function to call (checked to be a JSFunction)
   // -----------------------------------
   __ AssertFunction(a1);

   // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
(...skipping 391 matching lines...)
   }
 }


 #undef __

 }  // namespace internal
 }  // namespace v8

 #endif  // V8_TARGET_ARCH_MIPS