Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(7)

Side by Side Diff: src/mips/builtins-mips.cc

Issue 1328963004: Revert of [builtins] Unify the various versions of [[Call]] with a Call builtin. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 3 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « src/ia32/macro-assembler-ia32.cc ('k') | src/mips/code-stubs-mips.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #if V8_TARGET_ARCH_MIPS 5 #if V8_TARGET_ARCH_MIPS
6 6
7 #include "src/codegen.h" 7 #include "src/codegen.h"
8 #include "src/debug/debug.h" 8 #include "src/debug/debug.h"
9 #include "src/deoptimizer.h" 9 #include "src/deoptimizer.h"
10 #include "src/full-codegen/full-codegen.h" 10 #include "src/full-codegen/full-codegen.h"
(...skipping 1234 matching lines...) Expand 10 before | Expand all | Expand 10 after
1245 __ CallRuntime(Runtime::kStackGuard, 0); 1245 __ CallRuntime(Runtime::kStackGuard, 0);
1246 } 1246 }
1247 __ Jump(masm->isolate()->builtins()->OnStackReplacement(), 1247 __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
1248 RelocInfo::CODE_TARGET); 1248 RelocInfo::CODE_TARGET);
1249 1249
1250 __ bind(&ok); 1250 __ bind(&ok);
1251 __ Ret(); 1251 __ Ret();
1252 } 1252 }
1253 1253
1254 1254
1255 // static
1256 void Builtins::Generate_FunctionCall(MacroAssembler* masm) { 1255 void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
1257 // 1. Make sure we have at least one argument. 1256 // 1. Make sure we have at least one argument.
1258 // a0: actual number of arguments 1257 // a0: actual number of arguments
1259 { 1258 { Label done;
1260 Label done;
1261 __ Branch(&done, ne, a0, Operand(zero_reg)); 1259 __ Branch(&done, ne, a0, Operand(zero_reg));
1262 __ PushRoot(Heap::kUndefinedValueRootIndex); 1260 __ LoadRoot(t2, Heap::kUndefinedValueRootIndex);
1261 __ push(t2);
1263 __ Addu(a0, a0, Operand(1)); 1262 __ Addu(a0, a0, Operand(1));
1264 __ bind(&done); 1263 __ bind(&done);
1265 } 1264 }
1266 1265
1267 // 2. Get the function to call (passed as receiver) from the stack. 1266 // 2. Get the function to call (passed as receiver) from the stack, check
1267 // if it is a function.
1268 // a0: actual number of arguments 1268 // a0: actual number of arguments
1269 Label slow, non_function;
1269 __ sll(at, a0, kPointerSizeLog2); 1270 __ sll(at, a0, kPointerSizeLog2);
1270 __ addu(at, sp, at); 1271 __ addu(at, sp, at);
1271 __ lw(a1, MemOperand(at)); 1272 __ lw(a1, MemOperand(at));
1273 __ JumpIfSmi(a1, &non_function);
1274 __ GetObjectType(a1, a2, a2);
1275 __ Branch(&slow, ne, a2, Operand(JS_FUNCTION_TYPE));
1272 1276
1273 // 3. Shift arguments and return address one slot down on the stack 1277 // 3a. Patch the first argument if necessary when calling a function.
1278 // a0: actual number of arguments
1279 // a1: function
1280 Label shift_arguments;
1281 __ li(t0, Operand(0, RelocInfo::NONE32)); // Indicate regular JS_FUNCTION.
1282 { Label convert_to_object, use_global_proxy, patch_receiver;
1283 // Change context eagerly in case we need the global receiver.
1284 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
1285
1286 // Do not transform the receiver for strict mode functions.
1287 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
1288 __ lw(a3, FieldMemOperand(a2, SharedFunctionInfo::kCompilerHintsOffset));
1289 __ And(t3, a3, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
1290 kSmiTagSize)));
1291 __ Branch(&shift_arguments, ne, t3, Operand(zero_reg));
1292
 1293 // Do not transform the receiver for native (Compiler hints already in a3).
1294 __ And(t3, a3, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
1295 __ Branch(&shift_arguments, ne, t3, Operand(zero_reg));
1296
1297 // Compute the receiver in sloppy mode.
1298 // Load first argument in a2. a2 = -kPointerSize(sp + n_args << 2).
1299 __ sll(at, a0, kPointerSizeLog2);
1300 __ addu(a2, sp, at);
1301 __ lw(a2, MemOperand(a2, -kPointerSize));
1302 // a0: actual number of arguments
1303 // a1: function
1304 // a2: first argument
1305 __ JumpIfSmi(a2, &convert_to_object, t2);
1306
1307 __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
1308 __ Branch(&use_global_proxy, eq, a2, Operand(a3));
1309 __ LoadRoot(a3, Heap::kNullValueRootIndex);
1310 __ Branch(&use_global_proxy, eq, a2, Operand(a3));
1311
1312 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
1313 __ GetObjectType(a2, a3, a3);
1314 __ Branch(&shift_arguments, ge, a3, Operand(FIRST_SPEC_OBJECT_TYPE));
1315
1316 __ bind(&convert_to_object);
1317 // Enter an internal frame in order to preserve argument count.
1318 {
1319 FrameScope scope(masm, StackFrame::INTERNAL);
1320 __ sll(a0, a0, kSmiTagSize); // Smi tagged.
1321 __ push(a0);
1322 __ mov(a0, a2);
1323 ToObjectStub stub(masm->isolate());
1324 __ CallStub(&stub);
1325 __ mov(a2, v0);
1326
1327 __ pop(a0);
1328 __ sra(a0, a0, kSmiTagSize); // Un-tag.
1329 // Leave internal frame.
1330 }
1331
1332 // Restore the function to a1, and the flag to t0.
1333 __ sll(at, a0, kPointerSizeLog2);
1334 __ addu(at, sp, at);
1335 __ lw(a1, MemOperand(at));
1336 __ Branch(USE_DELAY_SLOT, &patch_receiver);
1337 __ li(t0, Operand(0, RelocInfo::NONE32)); // In delay slot.
1338
1339 __ bind(&use_global_proxy);
1340 __ lw(a2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
1341 __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalProxyOffset));
1342
1343 __ bind(&patch_receiver);
1344 __ sll(at, a0, kPointerSizeLog2);
1345 __ addu(a3, sp, at);
1346 __ sw(a2, MemOperand(a3, -kPointerSize));
1347
1348 __ Branch(&shift_arguments);
1349 }
1350
1351 // 3b. Check for function proxy.
1352 __ bind(&slow);
1353 __ li(t0, Operand(1, RelocInfo::NONE32)); // Indicate function proxy.
1354 __ Branch(&shift_arguments, eq, a2, Operand(JS_FUNCTION_PROXY_TYPE));
1355
1356 __ bind(&non_function);
1357 __ li(t0, Operand(2, RelocInfo::NONE32)); // Indicate non-function.
1358
1359 // 3c. Patch the first argument when calling a non-function. The
1360 // CALL_NON_FUNCTION builtin expects the non-function callee as
1361 // receiver, so overwrite the first argument which will ultimately
1362 // become the receiver.
1363 // a0: actual number of arguments
1364 // a1: function
1365 // t0: call type (0: JS function, 1: function proxy, 2: non-function)
1366 __ sll(at, a0, kPointerSizeLog2);
1367 __ addu(a2, sp, at);
1368 __ sw(a1, MemOperand(a2, -kPointerSize));
1369
1370 // 4. Shift arguments and return address one slot down on the stack
1274 // (overwriting the original receiver). Adjust argument count to make 1371 // (overwriting the original receiver). Adjust argument count to make
1275 // the original first argument the new receiver. 1372 // the original first argument the new receiver.
1276 // a0: actual number of arguments 1373 // a0: actual number of arguments
1277 // a1: function 1374 // a1: function
1278 { 1375 // t0: call type (0: JS function, 1: function proxy, 2: non-function)
1279 Label loop; 1376 __ bind(&shift_arguments);
1377 { Label loop;
1280 // Calculate the copy start address (destination). Copy end address is sp. 1378 // Calculate the copy start address (destination). Copy end address is sp.
1281 __ sll(at, a0, kPointerSizeLog2); 1379 __ sll(at, a0, kPointerSizeLog2);
1282 __ addu(a2, sp, at); 1380 __ addu(a2, sp, at);
1283 1381
1284 __ bind(&loop); 1382 __ bind(&loop);
1285 __ lw(at, MemOperand(a2, -kPointerSize)); 1383 __ lw(at, MemOperand(a2, -kPointerSize));
1286 __ sw(at, MemOperand(a2)); 1384 __ sw(at, MemOperand(a2));
1287 __ Subu(a2, a2, Operand(kPointerSize)); 1385 __ Subu(a2, a2, Operand(kPointerSize));
1288 __ Branch(&loop, ne, a2, Operand(sp)); 1386 __ Branch(&loop, ne, a2, Operand(sp));
1289 // Adjust the actual number of arguments and remove the top element 1387 // Adjust the actual number of arguments and remove the top element
1290 // (which is a copy of the last argument). 1388 // (which is a copy of the last argument).
1291 __ Subu(a0, a0, Operand(1)); 1389 __ Subu(a0, a0, Operand(1));
1292 __ Pop(); 1390 __ Pop();
1293 } 1391 }
1294 1392
1295 // 4. Call the callable. 1393 // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
1296 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); 1394 // or a function proxy via CALL_FUNCTION_PROXY.
1395 // a0: actual number of arguments
1396 // a1: function
1397 // t0: call type (0: JS function, 1: function proxy, 2: non-function)
1398 { Label function, non_proxy;
1399 __ Branch(&function, eq, t0, Operand(zero_reg));
1400 // Expected number of arguments is 0 for CALL_NON_FUNCTION.
1401 __ mov(a2, zero_reg);
1402 __ Branch(&non_proxy, ne, t0, Operand(1));
1403
1404 __ push(a1); // Re-add proxy object as additional argument.
1405 __ Addu(a0, a0, Operand(1));
1406 __ GetBuiltinFunction(a1, Context::CALL_FUNCTION_PROXY_BUILTIN_INDEX);
1407 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1408 RelocInfo::CODE_TARGET);
1409
1410 __ bind(&non_proxy);
1411 __ GetBuiltinFunction(a1, Context::CALL_NON_FUNCTION_BUILTIN_INDEX);
1412 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1413 RelocInfo::CODE_TARGET);
1414 __ bind(&function);
1415 }
1416
1417 // 5b. Get the code to call from the function and check that the number of
1418 // expected arguments matches what we're providing. If so, jump
 1419 // (tail-call) to the code in register a3 without checking arguments.
1420 // a0: actual number of arguments
1421 // a1: function
1422 __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
1423 __ lw(a2,
1424 FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
1425 __ sra(a2, a2, kSmiTagSize);
1426 // Check formal and actual parameter counts.
1427 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1428 RelocInfo::CODE_TARGET, ne, a2, Operand(a0));
1429
1430 __ lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
1431 ParameterCount expected(0);
1432 __ InvokeCode(a3, expected, expected, JUMP_FUNCTION, NullCallWrapper());
1297 } 1433 }
1298 1434
1299 1435
1300 static void Generate_PushAppliedArguments(MacroAssembler* masm, 1436 static void Generate_PushAppliedArguments(MacroAssembler* masm,
1301 const int argumentsOffset, 1437 const int argumentsOffset,
1302 const int indexOffset, 1438 const int indexOffset,
1303 const int limitOffset) { 1439 const int limitOffset) {
1304 Label entry, loop; 1440 Label entry, loop;
1305 Register receiver = LoadDescriptor::ReceiverRegister(); 1441 Register receiver = LoadDescriptor::ReceiverRegister();
1306 Register key = LoadDescriptor::NameRegister(); 1442 Register key = LoadDescriptor::NameRegister();
(...skipping 43 matching lines...) Expand 10 before | Expand all | Expand 10 after
1350 const int kFormalParameters = targetIsArgument ? 3 : 2; 1486 const int kFormalParameters = targetIsArgument ? 3 : 2;
1351 const int kStackSize = kFormalParameters + 1; 1487 const int kStackSize = kFormalParameters + 1;
1352 1488
1353 { 1489 {
1354 FrameScope frame_scope(masm, StackFrame::INTERNAL); 1490 FrameScope frame_scope(masm, StackFrame::INTERNAL);
1355 const int kArgumentsOffset = kFPOnStackSize + kPCOnStackSize; 1491 const int kArgumentsOffset = kFPOnStackSize + kPCOnStackSize;
1356 const int kReceiverOffset = kArgumentsOffset + kPointerSize; 1492 const int kReceiverOffset = kArgumentsOffset + kPointerSize;
1357 const int kFunctionOffset = kReceiverOffset + kPointerSize; 1493 const int kFunctionOffset = kReceiverOffset + kPointerSize;
1358 1494
1359 __ lw(a0, MemOperand(fp, kFunctionOffset)); // Get the function. 1495 __ lw(a0, MemOperand(fp, kFunctionOffset)); // Get the function.
1360 __ lw(a1, MemOperand(fp, kArgumentsOffset)); // Get the args array. 1496 __ push(a0);
1361 __ Push(a0, a1); 1497 __ lw(a0, MemOperand(fp, kArgumentsOffset)); // Get the args array.
1498 __ push(a0);
1362 // Returns (in v0) number of arguments to copy to stack as Smi. 1499 // Returns (in v0) number of arguments to copy to stack as Smi.
1363 if (targetIsArgument) { 1500 if (targetIsArgument) {
1364 __ InvokeBuiltin(Context::REFLECT_APPLY_PREPARE_BUILTIN_INDEX, 1501 __ InvokeBuiltin(Context::REFLECT_APPLY_PREPARE_BUILTIN_INDEX,
1365 CALL_FUNCTION); 1502 CALL_FUNCTION);
1366 } else { 1503 } else {
1367 __ InvokeBuiltin(Context::APPLY_PREPARE_BUILTIN_INDEX, CALL_FUNCTION); 1504 __ InvokeBuiltin(Context::APPLY_PREPARE_BUILTIN_INDEX, CALL_FUNCTION);
1368 } 1505 }
1369 1506
1370 // Returns the result in v0. 1507 // Returns the result in v0.
1371 Generate_CheckStackOverflow(masm, kFunctionOffset, v0, kArgcIsSmiTagged); 1508 Generate_CheckStackOverflow(masm, kFunctionOffset, v0, kArgcIsSmiTagged);
1372 1509
1373 // Push current limit and index. 1510 // Push current limit and index.
1374 const int kIndexOffset = 1511 const int kIndexOffset =
1375 StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize); 1512 StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize);
1376 const int kLimitOffset = 1513 const int kLimitOffset =
1377 StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize); 1514 StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize);
1378 __ mov(a1, zero_reg); 1515 __ mov(a1, zero_reg);
1379 __ lw(a2, MemOperand(fp, kReceiverOffset)); 1516 __ Push(v0, a1); // Limit and initial index.
1380 __ Push(v0, a1, a2); // limit, initial index and receiver. 1517
1518 // Get the receiver.
1519 __ lw(a0, MemOperand(fp, kReceiverOffset));
1520
1521 // Check that the function is a JS function (otherwise it must be a proxy).
1522 Label push_receiver;
1523 __ lw(a1, MemOperand(fp, kFunctionOffset));
1524 __ GetObjectType(a1, a2, a2);
1525 __ Branch(&push_receiver, ne, a2, Operand(JS_FUNCTION_TYPE));
1526
1527 // Change context eagerly to get the right global object if necessary.
1528 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
1529 // Load the shared function info while the function is still in a1.
1530 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
1531
1532 // Compute the receiver.
1533 // Do not transform the receiver for strict mode functions.
1534 Label call_to_object, use_global_proxy;
1535 __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCompilerHintsOffset));
1536 __ And(t3, a2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
1537 kSmiTagSize)));
1538 __ Branch(&push_receiver, ne, t3, Operand(zero_reg));
1539
 1540 // Do not transform the receiver for native (Compiler hints already in a2).
1541 __ And(t3, a2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
1542 __ Branch(&push_receiver, ne, t3, Operand(zero_reg));
1543
1544 // Compute the receiver in sloppy mode.
1545 __ JumpIfSmi(a0, &call_to_object);
1546 __ LoadRoot(a1, Heap::kNullValueRootIndex);
1547 __ Branch(&use_global_proxy, eq, a0, Operand(a1));
1548 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
1549 __ Branch(&use_global_proxy, eq, a0, Operand(a2));
1550
1551 // Check if the receiver is already a JavaScript object.
1552 // a0: receiver
1553 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
1554 __ GetObjectType(a0, a1, a1);
1555 __ Branch(&push_receiver, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
1556
1557 // Convert the receiver to a regular object.
1558 // a0: receiver
1559 __ bind(&call_to_object);
1560 ToObjectStub stub(masm->isolate());
1561 __ CallStub(&stub);
1562 __ mov(a0, v0); // Put object in a0 to match other paths to push_receiver.
1563 __ Branch(&push_receiver);
1564
1565 __ bind(&use_global_proxy);
1566 __ lw(a0, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
1567 __ lw(a0, FieldMemOperand(a0, GlobalObject::kGlobalProxyOffset));
1568
1569 // Push the receiver.
1570 // a0: receiver
1571 __ bind(&push_receiver);
1572 __ push(a0);
1381 1573
1382 // Copy all arguments from the array to the stack. 1574 // Copy all arguments from the array to the stack.
1383 Generate_PushAppliedArguments(masm, kArgumentsOffset, kIndexOffset, 1575 Generate_PushAppliedArguments(
1384 kLimitOffset); 1576 masm, kArgumentsOffset, kIndexOffset, kLimitOffset);
1385 1577
1386 // Call the callable. 1578 // Call the function.
1387 // TODO(bmeurer): This should be a tail call according to ES6. 1579 Label call_proxy;
1580 ParameterCount actual(a0);
1388 __ lw(a1, MemOperand(fp, kFunctionOffset)); 1581 __ lw(a1, MemOperand(fp, kFunctionOffset));
1389 __ Call(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); 1582 __ GetObjectType(a1, a2, a2);
1583 __ Branch(&call_proxy, ne, a2, Operand(JS_FUNCTION_TYPE));
1390 1584
1585 __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper());
1586
1587 frame_scope.GenerateLeaveFrame();
1588 __ Ret(USE_DELAY_SLOT);
1589 __ Addu(sp, sp, Operand(kStackSize * kPointerSize)); // In delay slot.
1590
1591 // Call the function proxy.
1592 __ bind(&call_proxy);
1593 __ push(a1); // Add function proxy as last argument.
1594 __ Addu(a0, a0, Operand(1));
1595 __ li(a2, Operand(0, RelocInfo::NONE32));
1596 __ GetBuiltinFunction(a1, Context::CALL_FUNCTION_PROXY_BUILTIN_INDEX);
1597 __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1598 RelocInfo::CODE_TARGET);
1391 // Tear down the internal frame and remove function, receiver and args. 1599 // Tear down the internal frame and remove function, receiver and args.
1392 } 1600 }
1393 1601
1394 __ Ret(USE_DELAY_SLOT); 1602 __ Ret(USE_DELAY_SLOT);
1395 __ Addu(sp, sp, Operand(kStackSize * kPointerSize)); // In delay slot. 1603 __ Addu(sp, sp, Operand(kStackSize * kPointerSize)); // In delay slot.
1396 } 1604 }
1397 1605
1398 1606
1399 static void Generate_ConstructHelper(MacroAssembler* masm) { 1607 static void Generate_ConstructHelper(MacroAssembler* masm) {
1400 const int kFormalParameters = 3; 1608 const int kFormalParameters = 3;
(...skipping 115 matching lines...) Expand 10 before | Expand all | Expand 10 after
1516 kPointerSize))); 1724 kPointerSize)));
1517 __ mov(sp, fp); 1725 __ mov(sp, fp);
1518 __ MultiPop(fp.bit() | ra.bit()); 1726 __ MultiPop(fp.bit() | ra.bit());
1519 __ sll(t0, a1, kPointerSizeLog2 - kSmiTagSize); 1727 __ sll(t0, a1, kPointerSizeLog2 - kSmiTagSize);
1520 __ Addu(sp, sp, t0); 1728 __ Addu(sp, sp, t0);
1521 // Adjust for the receiver. 1729 // Adjust for the receiver.
1522 __ Addu(sp, sp, Operand(kPointerSize)); 1730 __ Addu(sp, sp, Operand(kPointerSize));
1523 } 1731 }
1524 1732
1525 1733
1526 // static
1527 void Builtins::Generate_CallFunction(MacroAssembler* masm) {
1528 // ----------- S t a t e -------------
1529 // -- a0 : the number of arguments (not including the receiver)
1530 // -- a1 : the function to call (checked to be a JSFunction)
1531 // -----------------------------------
1532
1533 Label convert, convert_global_proxy, convert_to_object, done_convert;
1534 __ AssertFunction(a1);
1535 // TODO(bmeurer): Throw a TypeError if function's [[FunctionKind]] internal
1536 // slot is "classConstructor".
1537 // Enter the context of the function; ToObject has to run in the function
1538 // context, and we also need to take the global proxy from the function
1539 // context in case of conversion.
1540 // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
1541 STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset ==
1542 SharedFunctionInfo::kStrictModeByteOffset);
1543 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
1544 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
1545 // We need to convert the receiver for non-native sloppy mode functions.
1546 __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kNativeByteOffset));
1547 __ And(at, a3, Operand((1 << SharedFunctionInfo::kNativeBitWithinByte) |
1548 (1 << SharedFunctionInfo::kStrictModeBitWithinByte)));
1549 __ Branch(&done_convert, ne, at, Operand(zero_reg));
1550 {
1551 __ sll(at, a0, kPointerSizeLog2);
1552 __ addu(at, sp, at);
1553 __ lw(a3, MemOperand(at));
1554
1555 // ----------- S t a t e -------------
1556 // -- a0 : the number of arguments (not including the receiver)
1557 // -- a1 : the function to call (checked to be a JSFunction)
1558 // -- a2 : the shared function info.
1559 // -- a3 : the receiver
1560 // -- cp : the function context.
1561 // -----------------------------------
1562
1563 Label convert_receiver;
1564 __ JumpIfSmi(a3, &convert_to_object);
1565 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
1566 __ GetObjectType(a3, t0, t0);
1567 __ Branch(&done_convert, hs, t0, Operand(FIRST_JS_RECEIVER_TYPE));
1568 __ JumpIfRoot(a3, Heap::kUndefinedValueRootIndex, &convert_global_proxy);
1569 __ JumpIfNotRoot(a3, Heap::kNullValueRootIndex, &convert_to_object);
1570 __ bind(&convert_global_proxy);
1571 {
1572 // Patch receiver to global proxy.
1573 __ LoadGlobalProxy(a3);
1574 }
1575 __ Branch(&convert_receiver);
1576 __ bind(&convert_to_object);
1577 {
1578 // Convert receiver using ToObject.
1579 // TODO(bmeurer): Inline the allocation here to avoid building the frame
1580 // in the fast case? (fall back to AllocateInNewSpace?)
1581 FrameScope scope(masm, StackFrame::INTERNAL);
1582 __ sll(a0, a0, kSmiTagSize); // Smi tagged.
1583 __ Push(a0, a1);
1584 __ mov(a0, a3);
1585 ToObjectStub stub(masm->isolate());
1586 __ CallStub(&stub);
1587 __ mov(a3, v0);
1588 __ Pop(a0, a1);
1589 __ sra(a0, a0, kSmiTagSize); // Un-tag.
1590 }
1591 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
1592 __ bind(&convert_receiver);
1593 __ sll(at, a0, kPointerSizeLog2);
1594 __ addu(at, sp, at);
1595 __ sw(a3, MemOperand(at));
1596 }
1597 __ bind(&done_convert);
1598
1599 // ----------- S t a t e -------------
1600 // -- a0 : the number of arguments (not including the receiver)
1601 // -- a1 : the function to call (checked to be a JSFunction)
1602 // -- a2 : the shared function info.
1603 // -- cp : the function context.
1604 // -----------------------------------
1605
1606 __ lw(a2,
1607 FieldMemOperand(a2, SharedFunctionInfo::kFormalParameterCountOffset));
1608 __ sra(a2, a2, kSmiTagSize); // Un-tag.
1609 __ lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
1610 ParameterCount actual(a0);
1611 ParameterCount expected(a2);
1612 __ InvokeCode(a3, expected, actual, JUMP_FUNCTION, NullCallWrapper());
1613 }
1614
1615
1616 // static
1617 void Builtins::Generate_Call(MacroAssembler* masm) {
1618 // ----------- S t a t e -------------
1619 // -- a0 : the number of arguments (not including the receiver)
1620 // -- a1 : the target to call (can be any Object).
1621 // -----------------------------------
1622
1623 Label non_smi, non_function;
1624 __ JumpIfSmi(a1, &non_function);
1625 __ bind(&non_smi);
1626 __ GetObjectType(a1, a2, a2);
1627 __ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET,
1628 eq, a2, Operand(JS_FUNCTION_TYPE));
1629 __ Branch(&non_function, ne, a2, Operand(JS_FUNCTION_PROXY_TYPE));
1630
1631
1632 // 1. Call to function proxy.
1633 // TODO(neis): This doesn't match the ES6 spec for [[Call]] on proxies.
1634 __ lw(a1, FieldMemOperand(a1, JSFunctionProxy::kCallTrapOffset));
1635 __ AssertNotSmi(a1);
1636 __ Branch(&non_smi);
1637
1638 // 2. Call to something else, which might have a [[Call]] internal method (if
1639 // not we raise an exception).
1640 __ bind(&non_function);
1641 // TODO(bmeurer): I wonder why we prefer to have slow API calls? This could
1642 // be awesome instead; i.e. a trivial improvement would be to call into the
1643 // runtime and just deal with the API function there instead of returning a
1644 // delegate from a runtime call that just jumps back to the runtime once
1645 // called. Or, bonus points, call directly into the C API function here, as
1646 // we do in some Crankshaft fast cases.
1647 // Overwrite the original receiver with the (original) target.
1648 __ sll(at, a0, kPointerSizeLog2);
1649 __ addu(at, sp, at);
1650 __ sw(a1, MemOperand(at));
1651 {
1652 // Determine the delegate for the target (if any).
1653 FrameScope scope(masm, StackFrame::INTERNAL);
1654 __ sll(a0, a0, kSmiTagSize); // Smi tagged.
1655 __ Push(a0, a1);
1656 __ CallRuntime(Runtime::kGetFunctionDelegate, 1);
1657 __ mov(a1, v0);
1658 __ Pop(a0);
1659 __ sra(a0, a0, kSmiTagSize); // Un-tag.
1660 }
1661 // The delegate is always a regular function.
1662 __ AssertFunction(a1);
1663 __ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET);
1664 }
1665
1666
1667 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { 1734 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
1668 // State setup as expected by MacroAssembler::InvokePrologue. 1735 // State setup as expected by MacroAssembler::InvokePrologue.
1669 // ----------- S t a t e ------------- 1736 // ----------- S t a t e -------------
1670 // -- a0: actual arguments count 1737 // -- a0: actual arguments count
1671 // -- a1: function (passed through to callee) 1738 // -- a1: function (passed through to callee)
1672 // -- a2: expected arguments count 1739 // -- a2: expected arguments count
1673 // ----------------------------------- 1740 // -----------------------------------
1674 1741
1675 Label stack_overflow; 1742 Label stack_overflow;
1676 ArgumentAdaptorStackCheck(masm, &stack_overflow); 1743 ArgumentAdaptorStackCheck(masm, &stack_overflow);
(...skipping 139 matching lines...) Expand 10 before | Expand all | Expand 10 after
1816 } 1883 }
1817 } 1884 }
1818 1885
1819 1886
1820 #undef __ 1887 #undef __
1821 1888
1822 } // namespace internal 1889 } // namespace internal
1823 } // namespace v8 1890 } // namespace v8
1824 1891
1825 #endif // V8_TARGET_ARCH_MIPS 1892 #endif // V8_TARGET_ARCH_MIPS
OLDNEW
« no previous file with comments | « src/ia32/macro-assembler-ia32.cc ('k') | src/mips/code-stubs-mips.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698