OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_MIPS64 | 5 #if V8_TARGET_ARCH_MIPS64 |
6 | 6 |
7 #include "src/bootstrapper.h" | 7 #include "src/bootstrapper.h" |
8 #include "src/code-stubs.h" | 8 #include "src/code-stubs.h" |
9 #include "src/codegen.h" | 9 #include "src/codegen.h" |
10 #include "src/ic/handler-compiler.h" | 10 #include "src/ic/handler-compiler.h" |
(...skipping 1395 matching lines...) |
1406 | 1406 |
1407 StubRuntimeCallHelper call_helper; | 1407 StubRuntimeCallHelper call_helper; |
1408 char_at_generator.GenerateSlow(masm, PART_OF_IC_HANDLER, call_helper); | 1408 char_at_generator.GenerateSlow(masm, PART_OF_IC_HANDLER, call_helper); |
1409 | 1409 |
1410 __ bind(&miss); | 1410 __ bind(&miss); |
1411 PropertyAccessCompiler::TailCallBuiltin( | 1411 PropertyAccessCompiler::TailCallBuiltin( |
1412 masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC)); | 1412 masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC)); |
1413 } | 1413 } |
1414 | 1414 |
1415 | 1415 |
1416 // Uses registers a0 to a4. | 1416 void InstanceOfStub::Generate(MacroAssembler* masm) { |
1417 // Expected input (depending on whether args are in registers or on the stack): | 1417 Register const object = a1; // Object (lhs). |
1418 // * object: a0 or at sp + 1 * kPointerSize. | 1418 Register const function = a0; // Function (rhs). |
1419 // * function: a1 or at sp. | 1419 Register const object_map = a2; // Map of {object}. |
1420 // | 1420 Register const function_map = a3; // Map of {function}. |
1421 // An inlined call site may have been generated before calling this stub. | 1421 Register const function_prototype = a4; // Prototype of {function}. |
1422 // In this case the offset to the inline site to patch is passed on the stack, | 1422 Register const scratch = a5; |
1423 // in the safepoint slot for register a4. | |
1424 void InstanceofStub::Generate(MacroAssembler* masm) { | |
1425 // Call site inlining and patching implies arguments in registers. | |
1426 DCHECK(HasArgsInRegisters() || !HasCallSiteInlineCheck()); | |
1427 | 1423 |
1428 // Fixed register usage throughout the stub: | 1424 DCHECK(object.is(InstanceOfDescriptor::LeftRegister())); |
1429 const Register object = a0; // Object (lhs). | 1425 DCHECK(function.is(InstanceOfDescriptor::RightRegister())); |
1430 Register map = a3; // Map of the object. | |
1431 const Register function = a1; // Function (rhs). | |
1432 const Register prototype = a4; // Prototype of the function. | |
1433 const Register inline_site = t1; | |
1434 const Register scratch = a2; | |
1435 | 1426 |
1436 const int32_t kDeltaToLoadBoolResult = 7 * Assembler::kInstrSize; | 1427 // Check if {object} is a smi. |
| 1428 Label object_is_smi; |
| 1429 __ JumpIfSmi(object, &object_is_smi); |
1437 | 1430 |
1438 Label slow, loop, is_instance, is_not_instance, not_js_object; | 1431 // Lookup the {function} and the {object} map in the global instanceof cache. |
| 1432 // Note: This is safe because we clear the global instanceof cache whenever |
| 1433 // we change the prototype of any object. |
| 1434 Label fast_case, slow_case; |
| 1435 __ ld(object_map, FieldMemOperand(object, HeapObject::kMapOffset)); |
| 1436 __ LoadRoot(at, Heap::kInstanceofCacheFunctionRootIndex); |
| 1437 __ Branch(&fast_case, ne, function, Operand(at)); |
| 1438 __ LoadRoot(at, Heap::kInstanceofCacheMapRootIndex); |
| 1439 __ Branch(&fast_case, ne, object_map, Operand(at)); |
| 1440 __ Ret(USE_DELAY_SLOT); |
| 1441 __ LoadRoot(v0, Heap::kInstanceofCacheAnswerRootIndex); // In delay slot. |
1439 | 1442 |
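The probe above hits a single global instanceof cache keyed on the (function, object map) pair; as the new comment notes, this stays sound because V8 clears the cache whenever any object's prototype changes. A minimal C++ sketch of the idea, with hypothetical type and field names rather than V8's real layout:

    // Sketch only: hypothetical stand-ins for V8's internal types.
    struct Map {};
    struct JSFunction {};
    struct Object {};

    // Single-entry cache keyed on (function, object map).
    struct InstanceofCache {
      const JSFunction* function = nullptr;
      const Map* map = nullptr;
      const Object* answer = nullptr;  // Cached true/false object.
    };

    // Mirrors the two LoadRoot/Branch pairs above: both keys must
    // match, otherwise fall through to the fast_case recomputation.
    const Object* Probe(const InstanceofCache& cache,
                        const JSFunction* function, const Map* object_map) {
      if (cache.function == function && cache.map == object_map) {
        return cache.answer;
      }
      return nullptr;  // Miss: take the fast_case path.
    }

On a hit the stub returns with the cached answer loaded in the branch delay slot of the Ret, one instruction shorter than loading v0 first and returning afterwards.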
1440 if (!HasArgsInRegisters()) { | 1443 // If {object} is a smi we can safely return false if {function} is a JS |
1441 __ ld(object, MemOperand(sp, 1 * kPointerSize)); | 1444 // function, otherwise we have to miss to the runtime and throw an exception. |
1442 __ ld(function, MemOperand(sp, 0)); | 1445 __ bind(&object_is_smi); |
1443 } | 1446 __ JumpIfSmi(function, &slow_case); |
| 1447 __ GetObjectType(function, function_map, scratch); |
| 1448 __ Branch(&slow_case, ne, scratch, Operand(JS_FUNCTION_TYPE)); |
| 1449 __ Ret(USE_DELAY_SLOT); |
| 1450 __ LoadRoot(v0, Heap::kFalseValueRootIndex); // In delay slot. |
1444 | 1451 |
1445 // Check that the left hand is a JS object and load map. | 1452 // Fast-case: The {function} must be a valid JSFunction. |
1446 __ JumpIfSmi(object, ¬_js_object); | 1453 __ bind(&fast_case); |
1447 __ IsObjectJSObjectType(object, map, scratch, ¬_js_object); | 1454 __ JumpIfSmi(function, &slow_case); |
| 1455 __ GetObjectType(function, function_map, scratch); |
| 1456 __ Branch(&slow_case, ne, scratch, Operand(JS_FUNCTION_TYPE)); |
1448 | 1457 |
1449 // If there is a call site cache don't look in the global cache, but do the | 1458 // Ensure that {function} has an instance prototype. |
1450 // real lookup and update the call site cache. | 1459 __ lbu(scratch, FieldMemOperand(function_map, Map::kBitFieldOffset)); |
1451 if (!HasCallSiteInlineCheck() && !ReturnTrueFalseObject()) { | 1460 __ And(at, scratch, Operand(1 << Map::kHasNonInstancePrototype)); |
1452 Label miss; | 1461 __ Branch(&slow_case, ne, at, Operand(zero_reg)); |
1453 __ LoadRoot(at, Heap::kInstanceofCacheFunctionRootIndex); | |
1454 __ Branch(&miss, ne, function, Operand(at)); | |
1455 __ LoadRoot(at, Heap::kInstanceofCacheMapRootIndex); | |
1456 __ Branch(&miss, ne, map, Operand(at)); | |
1457 __ LoadRoot(v0, Heap::kInstanceofCacheAnswerRootIndex); | |
1458 __ DropAndRet(HasArgsInRegisters() ? 0 : 2); | |
1459 | 1462 |
1460 __ bind(&miss); | 1463 // Ensure that {function} is not bound. |
1461 } | 1464 Register const shared_info = scratch; |
| 1465 __ ld(shared_info, |
| 1466 FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset)); |
| 1467 __ lbu(scratch, |
| 1468 FieldMemOperand(shared_info, SharedFunctionInfo::kBoundByteOffset)); |
| 1469 __ And(at, scratch, Operand(1 << SharedFunctionInfo::kBoundBitWithinByte)); |
| 1470 __ Branch(&slow_case, ne, at, Operand(zero_reg)); |
1462 | 1471 |
1463 // Get the prototype of the function. | 1472 // Get the "prototype" (or initial map) of the {function}. |
1464 __ TryGetFunctionPrototype(function, prototype, scratch, &slow, true); | 1473 __ ld(function_prototype, |
| 1474 FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); |
| 1475 __ AssertNotSmi(function_prototype); |
1465 | 1476 |
1466 // Check that the function prototype is a JS object. | 1477 // Resolve the prototype if the {function} has an initial map. Afterwards the |
1467 __ JumpIfSmi(prototype, &slow); | 1478 // {function_prototype} will be either the JSReceiver prototype object or the |
1468 __ IsObjectJSObjectType(prototype, scratch, scratch, &slow); | 1479 // hole value, which means that no instances of the {function} were created so |
| 1480 // far and hence we should return false. |
| 1481 Label function_prototype_valid; |
| 1482 __ GetObjectType(function_prototype, scratch, scratch); |
| 1483 __ Branch(&function_prototype_valid, ne, scratch, Operand(MAP_TYPE)); |
| 1484 __ ld(function_prototype, |
| 1485 FieldMemOperand(function_prototype, Map::kPrototypeOffset)); |
| 1486 __ bind(&function_prototype_valid); |
| 1487 __ AssertNotSmi(function_prototype); |
1469 | 1488 |
1470 // Update the global instanceof or call site inlined cache with the current | 1489 // Update the global instanceof cache with the current {object} map and |
1471 // map and function. The cached answer will be set when it is known below. | 1490 // {function}. The cached answer will be set when it is known below. |
1472 if (!HasCallSiteInlineCheck()) { | 1491 __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex); |
1473 __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex); | 1492 __ StoreRoot(object_map, Heap::kInstanceofCacheMapRootIndex); |
1474 __ StoreRoot(map, Heap::kInstanceofCacheMapRootIndex); | |
1475 } else { | |
1476 DCHECK(HasArgsInRegisters()); | |
1477 // Patch the (relocated) inlined map check. | |
1478 | 1493 |
1479 // The offset was stored in a4 safepoint slot. | 1494 // Loop through the prototype chain looking for the {function} prototype. |
1480 // (See LCodeGen::DoDeferredLInstanceOfKnownGlobal). | 1495 // Assume true, and change to false if not found. |
1481 __ LoadFromSafepointRegisterSlot(scratch, a4); | 1496 Register const object_prototype = object_map; |
1482 __ Dsubu(inline_site, ra, scratch); | 1497 Register const null = scratch; |
1483 // Get the map location in scratch and patch it. | 1498 Label done, loop; |
1484 __ GetRelocatedValue(inline_site, scratch, v1); // v1 used as scratch. | 1499 __ LoadRoot(v0, Heap::kTrueValueRootIndex); |
1485 __ sd(map, FieldMemOperand(scratch, Cell::kValueOffset)); | 1500 __ LoadRoot(null, Heap::kNullValueRootIndex); |
| 1501 __ bind(&loop); |
| 1502 __ ld(object_prototype, FieldMemOperand(object_map, Map::kPrototypeOffset)); |
| 1503 __ Branch(&done, eq, object_prototype, Operand(function_prototype)); |
| 1504 __ Branch(USE_DELAY_SLOT, &loop, ne, object_prototype, Operand(null)); |
| 1505 __ ld(object_map, FieldMemOperand(object_prototype, HeapObject::kMapOffset)); |
| 1506 __ LoadRoot(v0, Heap::kFalseValueRootIndex); |
| 1507 __ bind(&done); |
| 1508 __ Ret(USE_DELAY_SLOT); |
| 1509 __ StoreRoot(v0, Heap::kInstanceofCacheAnswerRootIndex); // In delay slot. |
1486 | 1510 |
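The loop just above is the core of the new stub: walk the lhs prototype chain until it reaches either the function prototype (answer true) or null (answer false), then store the answer into the cache in the delay slot of the final Ret. The same walk as a compilable C++ sketch, using an illustrative node type with a direct prototype link (the stub itself hops object -> map -> prototype on each step):

    // Sketch only: nullptr models JS null.
    struct Node {
      const Node* prototype;
    };

    // Mirrors the &loop body: compare against the function prototype,
    // then follow the prototype link until the chain ends.
    bool HasInPrototypeChain(const Node* object,
                             const Node* function_prototype) {
      for (const Node* p = object->prototype; p != nullptr;
           p = p->prototype) {
        if (p == function_prototype) return true;
      }
      return false;
    }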
1487 __ mov(t0, map); | 1511 // Slow-case: Call the runtime function. |
1488 // |scratch| points at the beginning of the cell. Calculate the | 1512 __ bind(&slow_case); |
1489 // field containing the map. | 1513 __ Push(object, function); |
1490 __ Daddu(function, scratch, Operand(Cell::kValueOffset - 1)); | 1514 __ TailCallRuntime(Runtime::kInstanceOf, 2, 1); |
1491 __ RecordWriteField(scratch, Cell::kValueOffset, t0, function, | |
1492 kRAHasNotBeenSaved, kDontSaveFPRegs, | |
1493 OMIT_REMEMBERED_SET, OMIT_SMI_CHECK); | |
1494 } | |
1495 | |
1496 // Register mapping: a3 is object map and a4 is function prototype. | |
1497 // Get prototype of object into a2. | |
1498 __ ld(scratch, FieldMemOperand(map, Map::kPrototypeOffset)); | |
1499 | |
1500 // We don't need map any more. Use it as a scratch register. | |
1501 Register scratch2 = map; | |
1502 map = no_reg; | |
1503 | |
1504 // Loop through the prototype chain looking for the function prototype. | |
1505 __ LoadRoot(scratch2, Heap::kNullValueRootIndex); | |
1506 __ bind(&loop); | |
1507 __ Branch(&is_instance, eq, scratch, Operand(prototype)); | |
1508 __ Branch(&is_not_instance, eq, scratch, Operand(scratch2)); | |
1509 __ ld(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset)); | |
1510 __ ld(scratch, FieldMemOperand(scratch, Map::kPrototypeOffset)); | |
1511 __ Branch(&loop); | |
1512 | |
1513 __ bind(&is_instance); | |
1514 DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0)); | |
1515 if (!HasCallSiteInlineCheck()) { | |
1516 __ mov(v0, zero_reg); | |
1517 __ StoreRoot(v0, Heap::kInstanceofCacheAnswerRootIndex); | |
1518 if (ReturnTrueFalseObject()) { | |
1519 __ LoadRoot(v0, Heap::kTrueValueRootIndex); | |
1520 } | |
1521 } else { | |
1522 // Patch the call site to return true. | |
1523 __ LoadRoot(v0, Heap::kTrueValueRootIndex); | |
1524 __ Daddu(inline_site, inline_site, Operand(kDeltaToLoadBoolResult)); | |
1525 // Get the boolean result location in scratch and patch it. | |
1526 __ PatchRelocatedValue(inline_site, scratch, v0); | |
1527 | |
1528 if (!ReturnTrueFalseObject()) { | |
1529 __ mov(v0, zero_reg); | |
1530 } | |
1531 } | |
1532 __ DropAndRet(HasArgsInRegisters() ? 0 : 2); | |
1533 | |
1534 __ bind(&is_not_instance); | |
1535 if (!HasCallSiteInlineCheck()) { | |
1536 __ li(v0, Operand(Smi::FromInt(1))); | |
1537 __ StoreRoot(v0, Heap::kInstanceofCacheAnswerRootIndex); | |
1538 if (ReturnTrueFalseObject()) { | |
1539 __ LoadRoot(v0, Heap::kFalseValueRootIndex); | |
1540 } | |
1541 } else { | |
1542 // Patch the call site to return false. | |
1543 __ LoadRoot(v0, Heap::kFalseValueRootIndex); | |
1544 __ Daddu(inline_site, inline_site, Operand(kDeltaToLoadBoolResult)); | |
1545 // Get the boolean result location in scratch and patch it. | |
1546 __ PatchRelocatedValue(inline_site, scratch, v0); | |
1547 | |
1548 if (!ReturnTrueFalseObject()) { | |
1549 __ li(v0, Operand(Smi::FromInt(1))); | |
1550 } | |
1551 } | |
1552 | |
1553 __ DropAndRet(HasArgsInRegisters() ? 0 : 2); | |
1554 | |
1555 Label object_not_null, object_not_null_or_smi; | |
1556 __ bind(¬_js_object); | |
1557 // Before null, smi and string value checks, check that the rhs is a function | |
1558 // as for a non-function rhs an exception needs to be thrown. | |
1559 __ JumpIfSmi(function, &slow); | |
1560 __ GetObjectType(function, scratch2, scratch); | |
1561 __ Branch(&slow, ne, scratch, Operand(JS_FUNCTION_TYPE)); | |
1562 | |
1563 // Null is not instance of anything. | |
1564 __ Branch(&object_not_null, ne, object, | |
1565 Operand(isolate()->factory()->null_value())); | |
1566 if (ReturnTrueFalseObject()) { | |
1567 __ LoadRoot(v0, Heap::kFalseValueRootIndex); | |
1568 } else { | |
1569 __ li(v0, Operand(Smi::FromInt(1))); | |
1570 } | |
1571 __ DropAndRet(HasArgsInRegisters() ? 0 : 2); | |
1572 | |
1573 __ bind(&object_not_null); | |
1574 // Smi values are not instances of anything. | |
1575 __ JumpIfNotSmi(object, &object_not_null_or_smi); | |
1576 if (ReturnTrueFalseObject()) { | |
1577 __ LoadRoot(v0, Heap::kFalseValueRootIndex); | |
1578 } else { | |
1579 __ li(v0, Operand(Smi::FromInt(1))); | |
1580 } | |
1581 __ DropAndRet(HasArgsInRegisters() ? 0 : 2); | |
1582 | |
1583 __ bind(&object_not_null_or_smi); | |
1584 // String values are not instances of anything. | |
1585 __ IsObjectJSStringType(object, scratch, &slow); | |
1586 if (ReturnTrueFalseObject()) { | |
1587 __ LoadRoot(v0, Heap::kFalseValueRootIndex); | |
1588 } else { | |
1589 __ li(v0, Operand(Smi::FromInt(1))); | |
1590 } | |
1591 __ DropAndRet(HasArgsInRegisters() ? 0 : 2); | |
1592 | |
1593 // Slow-case. Tail call builtin. | |
1594 __ bind(&slow); | |
1595 if (!ReturnTrueFalseObject()) { | |
1596 if (HasArgsInRegisters()) { | |
1597 __ Push(a0, a1); | |
1598 } | |
1599 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION); | |
1600 } else { | |
1601 { | |
1602 FrameScope scope(masm, StackFrame::INTERNAL); | |
1603 __ Push(a0, a1); | |
1604 __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION); | |
1605 } | |
1606 __ mov(a0, v0); | |
1607 __ LoadRoot(v0, Heap::kTrueValueRootIndex); | |
1608 __ DropAndRet(HasArgsInRegisters() ? 0 : 2, eq, a0, Operand(zero_reg)); | |
1609 __ LoadRoot(v0, Heap::kFalseValueRootIndex); | |
1610 __ DropAndRet(HasArgsInRegisters() ? 0 : 2); | |
1611 } | |
1612 } | 1515 } |
1613 | 1516 |
1614 | 1517 |
1615 void FunctionPrototypeStub::Generate(MacroAssembler* masm) { | 1518 void FunctionPrototypeStub::Generate(MacroAssembler* masm) { |
1616 Label miss; | 1519 Label miss; |
1617 Register receiver = LoadDescriptor::ReceiverRegister(); | 1520 Register receiver = LoadDescriptor::ReceiverRegister(); |
1618 // Ensure that the vector and slot registers won't be clobbered before | 1521 // Ensure that the vector and slot registers won't be clobbered before |
1619 // calling the miss handler. | 1522 // calling the miss handler. |
1620 DCHECK(!AreAliased(a4, a5, LoadWithVectorDescriptor::VectorRegister(), | 1523 DCHECK(!AreAliased(a4, a5, LoadWithVectorDescriptor::VectorRegister(), |
1621 LoadWithVectorDescriptor::SlotRegister())); | 1524 LoadWithVectorDescriptor::SlotRegister())); |
(...skipping 4132 matching lines...) |
5754 MemOperand(fp, 6 * kPointerSize), NULL); | 5657 MemOperand(fp, 6 * kPointerSize), NULL); |
5755 } | 5658 } |
5756 | 5659 |
5757 | 5660 |
5758 #undef __ | 5661 #undef __ |
5759 | 5662 |
5760 } // namespace internal | 5663 } // namespace internal |
5761 } // namespace v8 | 5664 } // namespace v8 |
5762 | 5665 |
5763 #endif // V8_TARGET_ARCH_MIPS64 | 5666 #endif // V8_TARGET_ARCH_MIPS64 |