Chromium Code Reviews

Side by Side Diff: src/mips64/builtins-mips64.cc

Issue 1526253002: MIPS: [es6] Correct Function.prototype.apply, Reflect.construct and Reflect.apply. (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: minor fixes | Created 5 years ago
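
For context, here is a short JavaScript sketch (illustrative only, not part of the patch; the names sum and Point are made up) of the ES6 semantics the builtins below implement: Function.prototype.apply and Reflect.apply call a target with an explicit receiver and an array-like argument list, and Reflect.construct performs [[Construct]] with an optional new.target that defaults to the target. A non-callable target, or a new.target that is not a constructor, throws a TypeError.

  function sum(a, b) { return a + b; }
  sum.apply(null, [1, 2]);                 // 3: receiver is null, arguments come from the array
  sum.apply(null);                         // NaN: a null/undefined argArray means "call with no arguments"
  Reflect.apply(sum, null, [3, 4]);        // 7: same call, but target/thisArgument/argumentsList are explicit
  Reflect.apply({}, null, []);             // TypeError: target is not callable

  class Point { constructor(x, y) { this.x = x; this.y = y; } }
  Reflect.construct(Point, [1, 2]);        // equivalent to new Point(1, 2)
  Reflect.construct(Point, [3, 4], Array); // same arguments, but new.target is Array instead of Point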
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #if V8_TARGET_ARCH_MIPS64
6
7 #include "src/codegen.h"
8 #include "src/debug/debug.h"
9 #include "src/deoptimizer.h"
10 #include "src/full-codegen/full-codegen.h"
(...skipping 1309 matching lines...)
1320 // (which is a copy of the last argument).
1321 __ Dsubu(a0, a0, Operand(1));
1322 __ Pop();
1323 }
1324
1325 // 4. Call the callable.
1326 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1327 }
1328
1329
(NEW, lines 1330-1343:)
1330 void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
1331 // ----------- S t a t e -------------
1332 // -- a0 : argc
1333 // -- sp[0] : argArray
1334 // -- sp[4] : thisArg
1335 // -- sp[8] : receiver
1336 // -----------------------------------
1337
1338 // 1. Load receiver into a1, argArray into a0 (if present), remove all
1339 // arguments from the stack (including the receiver), and push thisArg (if
1340 // present) instead.
1341 {
1342 Label no_arg;
1343 Register scratch = a4;

(OLD, from line 1330:)
1330 static void Generate_PushAppliedArguments(MacroAssembler* masm,
1331 const int vectorOffset,
1332 const int argumentsOffset,
1333 const int indexOffset,
1334 const int limitOffset) {
1335 Label entry, loop;
1336 Register receiver = LoadDescriptor::ReceiverRegister();
1337 Register key = LoadDescriptor::NameRegister();
1338 Register slot = LoadDescriptor::SlotRegister();
1339 Register vector = LoadWithVectorDescriptor::VectorRegister();
1340
1341 __ ld(key, MemOperand(fp, indexOffset));
1342 __ Branch(&entry);
1343
1344 // Load the current argument from the arguments array.
1345 __ bind(&loop);
1346 __ ld(receiver, MemOperand(fp, argumentsOffset));
1347
1348 // Use inline caching to speed up access to arguments.
1349 int slot_index = TypeFeedbackVector::PushAppliedArgumentsIndex();
1350 __ li(slot, Operand(Smi::FromInt(slot_index)));
1351 __ ld(vector, MemOperand(fp, vectorOffset));
1352 Handle<Code> ic =
1353 KeyedLoadICStub(masm->isolate(), LoadICState(kNoExtraICState)).GetCode();
1354 __ Call(ic, RelocInfo::CODE_TARGET);
1355
1356 __ push(v0);
1357
1358 // Use inline caching to access the arguments.
1359 __ ld(key, MemOperand(fp, indexOffset));
1360 __ Daddu(key, key, Operand(Smi::FromInt(1)));
1361 __ sd(key, MemOperand(fp, indexOffset));
1362
1363 // Test if the copy loop has finished copying all the elements from the
1364 // arguments object.
1365 __ bind(&entry);
1366 __ ld(a1, MemOperand(fp, limitOffset));
1367 __ Branch(&loop, ne, key, Operand(a1));
1368
1369 // On exit, the pushed arguments count is in a0, untagged
1370 __ mov(a0, key);
1371 __ SmiUntag(a0);
1372 }
1373
1374
1375 // Used by FunctionApply and ReflectApply
1376 static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) {
1377 const int kFormalParameters = targetIsArgument ? 3 : 2;
1378 const int kStackSize = kFormalParameters + 1;
1379
1380 {
1381 FrameScope frame_scope(masm, StackFrame::INTERNAL);
1382 const int kArgumentsOffset = kFPOnStackSize + kPCOnStackSize;
1383 const int kReceiverOffset = kArgumentsOffset + kPointerSize;
1384 const int kFunctionOffset = kReceiverOffset + kPointerSize;
1385 const int kVectorOffset =
1386 InternalFrameConstants::kCodeOffset - 1 * kPointerSize;
1387
1388 // Push the vector.
1389 __ ld(a1, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
1390 __ ld(a1, FieldMemOperand(a1, SharedFunctionInfo::kFeedbackVectorOffset));
1391 __ Push(a1);
1392
1393 __ ld(a0, MemOperand(fp, kFunctionOffset)); // Get the function.
1394 __ ld(a1, MemOperand(fp, kArgumentsOffset)); // Get the args array.
1395 __ Push(a0, a1);
1396
1397 // Returns (in v0) number of arguments to copy to stack as Smi.
1398 if (targetIsArgument) {
1399 __ InvokeBuiltin(Context::REFLECT_APPLY_PREPARE_BUILTIN_INDEX,
1400 CALL_FUNCTION);
1401 } else {
1402 __ InvokeBuiltin(Context::APPLY_PREPARE_BUILTIN_INDEX, CALL_FUNCTION);
1403 }
1404
1405 // Returns the result in v0.
1406 Generate_CheckStackOverflow(masm, v0, kArgcIsSmiTagged);
1407
1408 // Push current limit and index.
1409 const int kIndexOffset = kVectorOffset - (2 * kPointerSize);
1410 const int kLimitOffset = kVectorOffset - (1 * kPointerSize);
1411 __ mov(a1, zero_reg);
1412 __ ld(a2, MemOperand(fp, kReceiverOffset));
1413 __ Push(v0, a1, a2); // limit, initial index and receiver.
1414
1415 // Copy all arguments from the array to the stack.
1416 Generate_PushAppliedArguments(masm, kVectorOffset, kArgumentsOffset,
1417 kIndexOffset, kLimitOffset);
1418
1419 // Call the callable.
1420 // TODO(bmeurer): This should be a tail call according to ES6.
1421 __ ld(a1, MemOperand(fp, kFunctionOffset));
1422 __ Call(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1423
1424 // Tear down the internal frame and remove function, receiver and args.
1425 }
1426
1427 __ Ret(USE_DELAY_SLOT);
1428 __ Daddu(sp, sp, Operand(kStackSize * kPointerSize)); // In delay slot.
1429 }
1430
1431
1432 static void Generate_ConstructHelper(MacroAssembler* masm) {
1433 const int kFormalParameters = 3;
1434 const int kStackSize = kFormalParameters + 1;
1435
1436 {
1437 FrameScope frame_scope(masm, StackFrame::INTERNAL);
1438 const int kNewTargetOffset = kFPOnStackSize + kPCOnStackSize;
1439 const int kArgumentsOffset = kNewTargetOffset + kPointerSize;
1440 const int kFunctionOffset = kArgumentsOffset + kPointerSize;
1441 const int kVectorOffset =
1442 InternalFrameConstants::kCodeOffset - 1 * kPointerSize;
1443
1444 // Push the vector.
1445 __ ld(a1, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
1446 __ ld(a1, FieldMemOperand(a1, SharedFunctionInfo::kFeedbackVectorOffset));
1447 __ Push(a1);
1448
1449 // If newTarget is not supplied, set it to constructor
1450 Label validate_arguments;
1451 __ ld(a0, MemOperand(fp, kNewTargetOffset));
1452 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1453 __ Branch(&validate_arguments, ne, a0, Operand(at));
1454 __ ld(a0, MemOperand(fp, kFunctionOffset));
1455 __ sd(a0, MemOperand(fp, kNewTargetOffset));
1456
1457 // Validate arguments
1458 __ bind(&validate_arguments);
1459 __ ld(a0, MemOperand(fp, kFunctionOffset)); // get the function
1460 __ push(a0);
1461 __ ld(a0, MemOperand(fp, kArgumentsOffset)); // get the args array
1462 __ push(a0);
1463 __ ld(a0, MemOperand(fp, kNewTargetOffset)); // get the new.target
1464 __ push(a0);
1465 // Returns argument count in v0.
1466 __ InvokeBuiltin(Context::REFLECT_CONSTRUCT_PREPARE_BUILTIN_INDEX,
1467 CALL_FUNCTION);
1468
1469 // Returns result in v0.
1470 Generate_CheckStackOverflow(masm, v0, kArgcIsSmiTagged);
1471
1472 // Push current limit and index.
1473 const int kIndexOffset = kVectorOffset - (2 * kPointerSize);
1474 const int kLimitOffset = kVectorOffset - (1 * kPointerSize);
1475 __ push(v0); // limit
1476 __ mov(a1, zero_reg); // initial index
1477 __ push(a1);
1478 // Push the constructor function as callee.
1479 __ ld(a0, MemOperand(fp, kFunctionOffset));
1480 __ push(a0);
1481
1482 // Copy all arguments from the array to the stack.
1483 Generate_PushAppliedArguments(masm, kVectorOffset, kArgumentsOffset,
1484 kIndexOffset, kLimitOffset);
1485
1486 // Use undefined feedback vector
1487 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
1488 __ ld(a1, MemOperand(fp, kFunctionOffset));
1489 __ ld(a3, MemOperand(fp, kNewTargetOffset));
1490
1491 // Call the function.
1492 __ Call(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
1493
1494 // Leave internal frame.
1495 }
1496 __ jr(ra);
1497 __ Daddu(sp, sp, Operand(kStackSize * kPointerSize)); // In delay slot.
1498 }
1499
1500
1501 void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
1502 Generate_ApplyHelper(masm, false);

(NEW, from line 1344:)
1344 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
1345 __ mov(a3, a2);
1346 __ dsll(scratch, a0, kPointerSizeLog2);
1347 __ Daddu(a0, sp, Operand(scratch));
1348 __ ld(a1, MemOperand(a0)); // receiver
1349 __ Dsubu(a0, a0, Operand(kPointerSize));
1350 __ Branch(&no_arg, lt, a0, Operand(sp));
1351 __ ld(a2, MemOperand(a0)); // thisArg
1352 __ Dsubu(a0, a0, Operand(kPointerSize));
1353 __ Branch(&no_arg, lt, a0, Operand(sp));
1354 __ ld(a3, MemOperand(a0)); // argArray
1355 __ bind(&no_arg);
1356 __ Daddu(sp, sp, Operand(scratch));
1357 __ sd(a2, MemOperand(sp));
1358 __ mov(a0, a3);
1359 }
1360
1361 // ----------- S t a t e -------------
1362 // -- a0 : argArray
1363 // -- a1 : receiver
1364 // -- sp[0] : thisArg
1365 // -----------------------------------
1366
1367 // 2. Make sure the receiver is actually callable.
1368 Label receiver_not_callable;
1369 __ JumpIfSmi(a1, &receiver_not_callable);
1370 __ ld(a4, FieldMemOperand(a1, HeapObject::kMapOffset));
1371 __ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset));
1372 __ And(a4, a4, Operand(1 << Map::kIsCallable));
1373 __ Branch(&receiver_not_callable, eq, a4, Operand(zero_reg));
1374
1375 // 3. Tail call with no arguments if argArray is null or undefined.
1376 Label no_arguments;
1377 __ JumpIfRoot(a0, Heap::kNullValueRootIndex, &no_arguments);
1378 __ JumpIfRoot(a0, Heap::kUndefinedValueRootIndex, &no_arguments);
1379
1380 // 4a. Apply the receiver to the given argArray (passing undefined for
1381 // new.target).
1382 __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
1383 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
1384
1385 // 4b. The argArray is either null or undefined, so we tail call without any
1386 // arguments to the receiver.
1387 __ bind(&no_arguments);
1388 {
1389 __ mov(a0, zero_reg);
1390 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1391 }
1392
1393 // 4c. The receiver is not callable, throw an appropriate TypeError.
1394 __ bind(&receiver_not_callable);
1395 {
1396 __ sd(a1, MemOperand(sp));
1397 __ TailCallRuntime(Runtime::kThrowApplyNonFunction, 1, 1);
1398 }
(OLD, lines 1503-1507:)
1503 }
1504
1505
1506 void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
1507 Generate_ApplyHelper(masm, true);

(NEW, from line 1399:)
1399 }
1400
1401
1402 void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
1403 // ----------- S t a t e -------------
1404 // -- a0 : argc
1405 // -- sp[0] : argumentsList
1406 // -- sp[4] : thisArgument
1407 // -- sp[8] : target
1408 // -- sp[12] : receiver
1409 // -----------------------------------
1410
1411 // 1. Load target into a1 (if present), argumentsList into a0 (if present),
1412 // remove all arguments from the stack (including the receiver), and push
1413 // thisArgument (if present) instead.
1414 {
1415 Label no_arg;
1416 Register scratch = a4;
1417 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
1418 __ mov(a2, a1);
1419 __ mov(a3, a1);
1420 __ dsll(scratch, a0, kPointerSizeLog2);
1421 __ mov(a0, scratch);
1422 __ Dsubu(a0, a0, Operand(kPointerSize));
1423 __ Branch(&no_arg, lt, a0, Operand(zero_reg));
1424 __ Daddu(a0, sp, Operand(a0));
1425 __ ld(a1, MemOperand(a0)); // target
1426 __ Dsubu(a0, a0, Operand(kPointerSize));
1427 __ Branch(&no_arg, lt, a0, Operand(sp));
1428 __ ld(a2, MemOperand(a0)); // thisArgument
1429 __ Dsubu(a0, a0, Operand(kPointerSize));
1430 __ Branch(&no_arg, lt, a0, Operand(sp));
1431 __ ld(a3, MemOperand(a0)); // argumentsList
1432 __ bind(&no_arg);
1433 __ Daddu(sp, sp, Operand(scratch));
1434 __ sd(a2, MemOperand(sp));
1435 __ mov(a0, a3);
1436 }
1437
1438 // ----------- S t a t e -------------
1439 // -- a0 : argumentsList
1440 // -- a1 : target
1441 // -- sp[0] : thisArgument
1442 // -----------------------------------
1443
1444 // 2. Make sure the target is actually callable.
1445 Label target_not_callable;
1446 __ JumpIfSmi(a1, &target_not_callable);
1447 __ ld(a4, FieldMemOperand(a1, HeapObject::kMapOffset));
1448 __ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset));
1449 __ And(a4, a4, Operand(1 << Map::kIsCallable));
1450 __ Branch(&target_not_callable, eq, a4, Operand(zero_reg));
1451
1452 // 3a. Apply the target to the given argumentsList (passing undefined for
1453 // new.target).
1454 __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
1455 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
1456
1457 // 3b. The target is not callable, throw an appropriate TypeError.
1458 __ bind(&target_not_callable);
1459 {
1460 __ sd(a1, MemOperand(sp));
1461 __ TailCallRuntime(Runtime::kThrowApplyNonFunction, 1, 1);
1462 }
(OLD, lines 1508-1515:)
1508 }
1509
1510
1511 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
1512 Generate_ConstructHelper(masm);
1513 }
1514
1515

(NEW, from line 1463:)
1463 }
1464
1465
1466 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
1467 // ----------- S t a t e -------------
1468 // -- a0 : argc
1469 // -- sp[0] : new.target (optional)
1470 // -- sp[4] : argumentsList
1471 // -- sp[8] : target
1472 // -- sp[12] : receiver
1473 // -----------------------------------
1474
1475 // 1. Load target into a1 (if present), argumentsList into a0 (if present),
1476 // new.target into a3 (if present, otherwise use target), remove all
1477 // arguments from the stack (including the receiver), and push thisArgument
1478 // (if present) instead.
1479 {
1480 Label no_arg;
1481 Register scratch = a4;
1482 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
1483 __ mov(a2, a1);
1484 __ dsll(scratch, a0, kPointerSizeLog2);
1485 __ Daddu(a0, sp, Operand(scratch));
1486 __ sd(a2, MemOperand(a0)); // receiver
1487 __ Dsubu(a0, a0, Operand(kPointerSize));
1488 __ Branch(&no_arg, lt, a0, Operand(sp));
1489 __ ld(a1, MemOperand(a0)); // target
1490 __ mov(a3, a1); // new.target defaults to target
1491 __ Dsubu(a0, a0, Operand(kPointerSize));
1492 __ Branch(&no_arg, lt, a0, Operand(sp));
1493 __ ld(a2, MemOperand(a0)); // argumentsList
1494 __ Dsubu(a0, a0, Operand(kPointerSize));
1495 __ Branch(&no_arg, lt, a0, Operand(sp));
1496 __ ld(a3, MemOperand(a0)); // new.target
1497 __ bind(&no_arg);
1498 __ Daddu(sp, sp, Operand(scratch));
1499 __ mov(a0, a2);
1500 }
1501
1502 // ----------- S t a t e -------------
1503 // -- a0 : argumentsList
1504 // -- a3 : new.target
1505 // -- a1 : target
1506 // -- sp[0] : receiver (undefined)
1507 // -----------------------------------
1508
1509 // 2. Make sure the target is actually a constructor.
1510 Label target_not_constructor;
1511 __ JumpIfSmi(a1, &target_not_constructor);
1512 __ ld(a4, FieldMemOperand(a1, HeapObject::kMapOffset));
1513 __ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset));
1514 __ And(a4, a4, Operand(1 << Map::kIsConstructor));
1515 __ Branch(&target_not_constructor, eq, a4, Operand(zero_reg));
1516
1517 // 3. Make sure the new.target is actually a constructor.
1518 Label new_target_not_constructor;
1519 __ JumpIfSmi(a3, &new_target_not_constructor);
1520 __ ld(a4, FieldMemOperand(a3, HeapObject::kMapOffset));
1521 __ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset));
1522 __ And(a4, a4, Operand(1 << Map::kIsConstructor));
1523 __ Branch(&new_target_not_constructor, eq, a4, Operand(zero_reg));
1524
1525 // 4a. Construct the target with the given new.target and argumentsList.
1526 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
1527
1528 // 4b. The target is not a constructor, throw an appropriate TypeError.
1529 __ bind(&target_not_constructor);
1530 {
1531 __ sd(a1, MemOperand(sp));
1532 __ TailCallRuntime(Runtime::kThrowCalledNonCallable, 1, 1);
1533 }
1534
1535 // 4c. The new.target is not a constructor, throw an appropriate TypeError.
1536 __ bind(&new_target_not_constructor);
1537 {
1538 __ sd(a3, MemOperand(sp));
1539 __ TailCallRuntime(Runtime::kThrowCalledNonCallable, 1, 1);
1540 }
1541 }
1542
1543
(unchanged, OLD 1516-1525 = NEW 1544-1553:)
1516 static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
1517 Label* stack_overflow) {
1518 // ----------- S t a t e -------------
1519 // -- a0 : actual number of arguments
1520 // -- a1 : function (passed through to callee)
1521 // -- a2 : expected number of arguments
1522 // -- a3 : new target (passed through to callee)
1523 // -----------------------------------
1524 // Check the stack for overflow. We are not trying to catch
1525 // interruptions (e.g. debug break and preemption) here, so the "real stack
(...skipping 30 matching lines...)
(unchanged, OLD 1556-1565 = NEW 1584-1593:)
1556 __ mov(sp, fp);
1557 __ MultiPop(fp.bit() | ra.bit());
1558 __ SmiScale(a4, a1, kPointerSizeLog2);
1559 __ Daddu(sp, sp, a4);
1560 // Adjust for the receiver.
1561 __ Daddu(sp, sp, Operand(kPointerSize));
1562 }
1563
1564
1565 // static

(NEW, lines 1594-1717:)
1594 void Builtins::Generate_Apply(MacroAssembler* masm) {
1595 // ----------- S t a t e -------------
1596 // -- a0 : argumentsList
1597 // -- a1 : target
1598 // -- a3 : new.target (checked to be constructor or undefined)
1599 // -- sp[0] : thisArgument
1600 // -----------------------------------
1601
1602 // Create the list of arguments from the array-like argumentsList.
1603 {
1604 Label create_arguments, create_array, create_runtime, done_create;
1605 __ JumpIfSmi(a0, &create_runtime);
1606
1607 // Load the map of argumentsList into a2.
1608 __ ld(a2, FieldMemOperand(a0, HeapObject::kMapOffset));
1609
1610 // Load native context into a4.
1611 __ ld(a4, NativeContextMemOperand());
1612
1613 // Check if argumentsList is an (unmodified) arguments object.
1614 __ ld(at, ContextMemOperand(a4, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
1615 __ Branch(&create_arguments, eq, a2, Operand(at));
1616 __ ld(at, ContextMemOperand(a4, Context::STRICT_ARGUMENTS_MAP_INDEX));
1617 __ Branch(&create_arguments, eq, a2, Operand(at));
1618
1619 // Check if argumentsList is a fast JSArray.
1620 __ ld(v0, FieldMemOperand(a2, HeapObject::kMapOffset));
1621 __ lbu(v0, FieldMemOperand(v0, Map::kInstanceTypeOffset));
1622 __ Branch(&create_array, eq, v0, Operand(JS_ARRAY_TYPE));
1623
1624 // Ask the runtime to create the list (actually a FixedArray).
1625 __ bind(&create_runtime);
1626 {
1627 FrameScope scope(masm, StackFrame::INTERNAL);
1628 __ Push(a1, a3, a0);
1629 __ CallRuntime(Runtime::kCreateListFromArrayLike, 1);
1630 __ mov(a0, v0);
1631 __ Pop(a1, a3);
1632 __ ld(a2, FieldMemOperand(v0, FixedArray::kLengthOffset));
1633 __ SmiUntag(a2);
1634 }
1635 __ Branch(&done_create);
1636
1637 // Try to create the list from an arguments object.
1638 __ bind(&create_arguments);
1639 __ ld(a2,
1640 FieldMemOperand(a0, JSObject::kHeaderSize +
1641 Heap::kArgumentsLengthIndex * kPointerSize));
1642 __ ld(a4, FieldMemOperand(a0, JSObject::kElementsOffset));
1643 __ ld(at, FieldMemOperand(a4, FixedArray::kLengthOffset));
1644 __ Branch(&create_runtime, ne, a2, Operand(at));
1645 __ SmiUntag(a2);
1646 __ mov(a0, a4);
1647 __ Branch(&done_create);
1648
1649 // Try to create the list from a JSArray object.
1650 __ bind(&create_array);
1651 __ ld(a2, FieldMemOperand(a2, Map::kBitField2Offset));
1652 __ DecodeField<Map::ElementsKindBits>(a2);
1653 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
1654 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
1655 STATIC_ASSERT(FAST_ELEMENTS == 2);
1656 __ Branch(&create_runtime, hi, a2, Operand(FAST_ELEMENTS));
1657 __ Branch(&create_runtime, eq, a2, Operand(FAST_HOLEY_SMI_ELEMENTS));
1658 __ ld(a2, FieldMemOperand(a0, JSArray::kLengthOffset));
1659 __ ld(a0, FieldMemOperand(a0, JSArray::kElementsOffset));
1660 __ SmiUntag(a2);
1661
1662 __ bind(&done_create);
1663 }
1664
1665 // Check for stack overflow.
1666 {
1667 // Check the stack for overflow. We are not trying to catch interruptions
1668 // (i.e. debug break and preemption) here, so check the "real stack limit".
1669 Label done;
1670 __ LoadRoot(a4, Heap::kRealStackLimitRootIndex);
1671 // Make a4 the space we have left. The stack might already be overflowed
1672 // here which will cause ip to become negative.
1673 __ Dsubu(a4, sp, a4);
1674 // Check if the arguments will overflow the stack.
1675 __ dsll(at, a2, kPointerSizeLog2);
1676 __ Branch(&done, gt, a4, Operand(at)); // Signed comparison.
1677 __ TailCallRuntime(Runtime::kThrowStackOverflow, 1, 1);
1678 __ bind(&done);
1679 }
1680
1681 // ----------- S t a t e -------------
1682 // -- a1 : target
1683 // -- a0 : args (a FixedArray built from argumentsList)
1684 // -- a2 : len (number of elements to push from args)
1685 // -- a3 : new.target (checked to be constructor or undefined)
1686 // -- sp[0] : thisArgument
1687 // -----------------------------------
1688
1689 // Push arguments onto the stack (thisArgument is already on the stack).
1690 {
1691 __ mov(a4, zero_reg);
1692 Label done, loop;
1693 __ bind(&loop);
1694 __ Branch(&done, eq, a4, Operand(a2));
1695 __ dsll(at, a4, kPointerSizeLog2);
1696 __ Daddu(at, a0, at);
1697 __ ld(at, FieldMemOperand(at, FixedArray::kHeaderSize));
1698 __ Push(at);
1699 __ Daddu(a4, a4, Operand(1));
1700 __ Branch(&loop);
1701 __ bind(&done);
1702 __ Move(a0, a4);
1703 }
1704
1705 // Dispatch to Call or Construct depending on whether new.target is undefined.
1706 {
1707 Label construct;
1708 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1709 __ Branch(&construct, ne, a3, Operand(at));
1710 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1711 __ bind(&construct);
1712 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
1713 }
1714 }
1715
1716
1717 // static
(unchanged, OLD 1566-1575 = NEW 1718-1727:)
1566 void Builtins::Generate_CallFunction(MacroAssembler* masm,
1567 ConvertReceiverMode mode) {
1568 // ----------- S t a t e -------------
1569 // -- a0 : the number of arguments (not including the receiver)
1570 // -- a1 : the function to call (checked to be a JSFunction)
1571 // -----------------------------------
1572 __ AssertFunction(a1);
1573
1574 // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
1575 // Check that function is not a "classConstructor".
(...skipping 388 matching lines...)
(unchanged, OLD 1964-1973 = NEW 2116-2125:)
1964 }
1965 }
1966
1967
1968 #undef __
1969
1970 } // namespace internal
1971 } // namespace v8
1972
1973 #endif // V8_TARGET_ARCH_MIPS64
