Chromium Code Reviews

Unified Diff: src/mips64/code-stubs-mips64.cc

Issue 735033002: MIPS64: Prepare additional code for turbofan landing. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 6 years, 1 month ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
 #include "src/v8.h"
 
 #if V8_TARGET_ARCH_MIPS64
 
 #include "src/bootstrapper.h"
 #include "src/code-stubs.h"
(...skipping 1015 matching lines...)
 
 
 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
   CEntryStub stub(isolate, 1, kDontSaveFPRegs);
   stub.GetCode();
 }
 
 
 void CEntryStub::Generate(MacroAssembler* masm) {
   // Called from JavaScript; parameters are on stack as if calling JS function
-  // s0: number of arguments including receiver
-  // s1: size of arguments excluding receiver
-  // s2: pointer to builtin function
+  // a0: number of arguments including receiver
+  // a1: pointer to builtin function
   // fp: frame pointer (restored after C call)
   // sp: stack pointer (restored as callee's sp after C call)
   // cp: current context (C callee-saved)
 
   ProfileEntryHookStub::MaybeCallEntryHook(masm);
 
-  // NOTE: s0-s2 hold the arguments of this function instead of a0-a2.
-  // The reason for this is that these arguments would need to be saved anyway
-  // so it's faster to set them up directly.
-  // See MacroAssembler::PrepareCEntryArgs and PrepareCEntryFunction.
-
   // Compute the argv pointer in a callee-saved register.
+  __ dsll(s1, a0, kPointerSizeLog2);
   __ Daddu(s1, sp, s1);
+  __ Dsubu(s1, s1, kPointerSize);
 
   // Enter the exit frame that transitions from JavaScript to C++.
   FrameScope scope(masm, StackFrame::MANUAL);
   __ EnterExitFrame(save_doubles());
 
   // s0: number of arguments including receiver (C callee-saved)
   // s1: pointer to first argument (C callee-saved)
   // s2: pointer to builtin function (C callee-saved)
 
   // Prepare arguments for C routine.
   // a0 = argc
-  __ mov(a0, s0);
+  __ mov(s0, a0);
+  __ mov(s2, a1);
   // a1 = argv (set in the delay slot after find_ra below).
 
   // We are calling compiled C/C++ code. a0 and a1 hold our two arguments. We
   // also need to reserve the 4 argument slots on the stack.
 
   __ AssertStackIsAligned();
 
   __ li(a2, Operand(ExternalReference::isolate_address(isolate())));
 
   // To let the GC traverse the return address of the exit frames, we need to
(...skipping 302 matching lines...)
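Note on the CEntryStub hunk above: the stub now receives its arguments in the regular argument registers (a0 = argc, a1 = pointer to the builtin function) and copies them into the callee-saved registers s0/s2 itself, instead of requiring callers to pre-load s0-s2. The added dsll/Daddu/Dsubu sequence derives the argv pointer from argc, so s1 ends up as sp + argc * kPointerSize - kPointerSize, which the stub's own comments describe as the pointer to the first argument. A minimal C++ sketch of that address arithmetic, assuming an illustrative sp value and variable names that are not part of the patch:

#include <cstdint>
#include <cstdio>

int main() {
  const uint64_t kPointerSize = 8;      // 64-bit slots on MIPS64
  const uint64_t kPointerSizeLog2 = 3;  // shift amount used by dsll
  uint64_t sp = 0x7fff0000;             // hypothetical stack pointer value
  uint64_t argc = 3;                    // number of arguments incl. receiver

  // Mirrors the new instruction sequence:
  //   dsll  s1, a0, kPointerSizeLog2   ->  argc * kPointerSize
  //   Daddu s1, sp, s1                 ->  sp + argc * kPointerSize
  //   Dsubu s1, s1, kPointerSize       ->  back off one slot
  uint64_t argv = sp + (argc << kPointerSizeLog2) - kPointerSize;

  // Prints 16 for argc == 3: argv addresses the slot two pointers above sp.
  printf("argv is %llu bytes above sp\n",
         static_cast<unsigned long long>(argv - sp));
  return 0;
}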
 // Expected input (depending on whether args are in registers or on the stack):
 // * object: a0 or at sp + 1 * kPointerSize.
 // * function: a1 or at sp.
 //
 // An inlined call site may have been generated before calling this stub.
 // In this case the offset to the inline site to patch is passed on the stack,
 // in the safepoint slot for register a4.
 void InstanceofStub::Generate(MacroAssembler* masm) {
   // Call site inlining and patching implies arguments in registers.
   DCHECK(HasArgsInRegisters() || !HasCallSiteInlineCheck());
-  // ReturnTrueFalse is only implemented for inlined call sites.
-  DCHECK(!ReturnTrueFalseObject() || HasCallSiteInlineCheck());
 
   // Fixed register usage throughout the stub:
   const Register object = a0;  // Object (lhs).
   Register map = a3;  // Map of the object.
   const Register function = a1;  // Function (rhs).
   const Register prototype = a4;  // Prototype of the function.
   const Register inline_site = t1;
   const Register scratch = a2;
 
   const int32_t kDeltaToLoadBoolResult = 7 * Assembler::kInstrSize;
 
   Label slow, loop, is_instance, is_not_instance, not_js_object;
 
   if (!HasArgsInRegisters()) {
     __ ld(object, MemOperand(sp, 1 * kPointerSize));
     __ ld(function, MemOperand(sp, 0));
   }
 
   // Check that the left hand is a JS object and load map.
   __ JumpIfSmi(object, &not_js_object);
   __ IsObjectJSObjectType(object, map, scratch, &not_js_object);
 
   // If there is a call site cache don't look in the global cache, but do the
   // real lookup and update the call site cache.
-  if (!HasCallSiteInlineCheck()) {
+  if (!HasCallSiteInlineCheck() && !ReturnTrueFalseObject()) {
     Label miss;
     __ LoadRoot(at, Heap::kInstanceofCacheFunctionRootIndex);
     __ Branch(&miss, ne, function, Operand(at));
     __ LoadRoot(at, Heap::kInstanceofCacheMapRootIndex);
     __ Branch(&miss, ne, map, Operand(at));
     __ LoadRoot(v0, Heap::kInstanceofCacheAnswerRootIndex);
     __ DropAndRet(HasArgsInRegisters() ? 0 : 2);
 
     __ bind(&miss);
   }
(...skipping 38 matching lines...)
   __ Branch(&is_not_instance, eq, scratch, Operand(scratch2));
   __ ld(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset));
   __ ld(scratch, FieldMemOperand(scratch, Map::kPrototypeOffset));
   __ Branch(&loop);
 
   __ bind(&is_instance);
   DCHECK(Smi::FromInt(0) == 0);
   if (!HasCallSiteInlineCheck()) {
     __ mov(v0, zero_reg);
     __ StoreRoot(v0, Heap::kInstanceofCacheAnswerRootIndex);
+    if (ReturnTrueFalseObject()) {
+      __ LoadRoot(v0, Heap::kTrueValueRootIndex);
+    }
   } else {
     // Patch the call site to return true.
     __ LoadRoot(v0, Heap::kTrueValueRootIndex);
     __ Daddu(inline_site, inline_site, Operand(kDeltaToLoadBoolResult));
     // Get the boolean result location in scratch and patch it.
     __ PatchRelocatedValue(inline_site, scratch, v0);
 
     if (!ReturnTrueFalseObject()) {
       DCHECK_EQ(Smi::FromInt(0), 0);
       __ mov(v0, zero_reg);
     }
   }
   __ DropAndRet(HasArgsInRegisters() ? 0 : 2);
 
   __ bind(&is_not_instance);
   if (!HasCallSiteInlineCheck()) {
     __ li(v0, Operand(Smi::FromInt(1)));
     __ StoreRoot(v0, Heap::kInstanceofCacheAnswerRootIndex);
+    if (ReturnTrueFalseObject()) {
+      __ LoadRoot(v0, Heap::kFalseValueRootIndex);
+    }
   } else {
     // Patch the call site to return false.
     __ LoadRoot(v0, Heap::kFalseValueRootIndex);
     __ Daddu(inline_site, inline_site, Operand(kDeltaToLoadBoolResult));
     // Get the boolean result location in scratch and patch it.
     __ PatchRelocatedValue(inline_site, scratch, v0);
 
     if (!ReturnTrueFalseObject()) {
       __ li(v0, Operand(Smi::FromInt(1)));
     }
   }
 
   __ DropAndRet(HasArgsInRegisters() ? 0 : 2);
 
   Label object_not_null, object_not_null_or_smi;
   __ bind(&not_js_object);
   // Before null, smi and string value checks, check that the rhs is a function
   // as for a non-function rhs an exception needs to be thrown.
   __ JumpIfSmi(function, &slow);
   __ GetObjectType(function, scratch2, scratch);
   __ Branch(&slow, ne, scratch, Operand(JS_FUNCTION_TYPE));
 
   // Null is not instance of anything.
   __ Branch(&object_not_null, ne, object,
             Operand(isolate()->factory()->null_value()));
-  __ li(v0, Operand(Smi::FromInt(1)));
+  if (ReturnTrueFalseObject()) {
+    __ LoadRoot(v0, Heap::kFalseValueRootIndex);
+  } else {
+    __ li(v0, Operand(Smi::FromInt(1)));
+  }
   __ DropAndRet(HasArgsInRegisters() ? 0 : 2);
 
   __ bind(&object_not_null);
   // Smi values are not instances of anything.
   __ JumpIfNotSmi(object, &object_not_null_or_smi);
-  __ li(v0, Operand(Smi::FromInt(1)));
+  if (ReturnTrueFalseObject()) {
+    __ LoadRoot(v0, Heap::kFalseValueRootIndex);
+  } else {
+    __ li(v0, Operand(Smi::FromInt(1)));
+  }
   __ DropAndRet(HasArgsInRegisters() ? 0 : 2);
 
   __ bind(&object_not_null_or_smi);
   // String values are not instances of anything.
   __ IsObjectJSStringType(object, scratch, &slow);
-  __ li(v0, Operand(Smi::FromInt(1)));
+  if (ReturnTrueFalseObject()) {
+    __ LoadRoot(v0, Heap::kFalseValueRootIndex);
+  } else {
+    __ li(v0, Operand(Smi::FromInt(1)));
+  }
   __ DropAndRet(HasArgsInRegisters() ? 0 : 2);
 
   // Slow-case. Tail call builtin.
   __ bind(&slow);
   if (!ReturnTrueFalseObject()) {
     if (HasArgsInRegisters()) {
       __ Push(a0, a1);
     }
     __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
   } else {
(...skipping 3421 matching lines...)
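Note on the InstanceofStub hunk above: the DCHECK that restricted ReturnTrueFalseObject() to inlined call sites is removed, the global instanceof cache is now skipped when ReturnTrueFalseObject() is set, and each exit path gains a branch that loads the true/false heap roots into v0 instead of the default Smi encoding (where Smi 0 means "is an instance" and Smi 1 means "is not"). A small C++ sketch of the two result conventions as read from this diff; the enum and helper names are hypothetical, for illustration only, and are not V8 APIs:

#include <cstdio>

enum class InstanceofOutcome { kIsInstance, kIsNotInstance };

// Hypothetical helper: describes what the stub leaves in v0 for a given outcome.
const char* StubResult(InstanceofOutcome outcome, bool return_true_false_object) {
  if (return_true_false_object) {
    // New paths added by this patch: load the true/false heap roots.
    return outcome == InstanceofOutcome::kIsInstance ? "true value" : "false value";
  }
  // Default convention kept by the patch: Smi 0 = instance, Smi 1 = not an instance.
  return outcome == InstanceofOutcome::kIsInstance ? "Smi(0)" : "Smi(1)";
}

int main() {
  printf("%s\n", StubResult(InstanceofOutcome::kIsInstance, true));     // true value
  printf("%s\n", StubResult(InstanceofOutcome::kIsNotInstance, false)); // Smi(1)
  return 0;
}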
                               MemOperand(fp, 6 * kPointerSize),
                               NULL);
 }
 
 
 #undef __
 
 } }  // namespace v8::internal
 
 #endif  // V8_TARGET_ARCH_MIPS64