OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_ARM64 | 5 #if V8_TARGET_ARCH_ARM64 |
6 | 6 |
7 #include "src/arm64/frames-arm64.h" | 7 #include "src/arm64/frames-arm64.h" |
8 #include "src/bootstrapper.h" | 8 #include "src/bootstrapper.h" |
9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 1487 matching lines...)
1498 | 1498 |
1499 StubRuntimeCallHelper call_helper; | 1499 StubRuntimeCallHelper call_helper; |
1500 char_at_generator.GenerateSlow(masm, PART_OF_IC_HANDLER, call_helper); | 1500 char_at_generator.GenerateSlow(masm, PART_OF_IC_HANDLER, call_helper); |
1501 | 1501 |
1502 __ Bind(&miss); | 1502 __ Bind(&miss); |
1503 PropertyAccessCompiler::TailCallBuiltin( | 1503 PropertyAccessCompiler::TailCallBuiltin( |
1504 masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC)); | 1504 masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC)); |
1505 } | 1505 } |
1506 | 1506 |
1507 | 1507 |
1508 void InstanceofStub::Generate(MacroAssembler* masm) { | 1508 void InstanceOfStub::Generate(MacroAssembler* masm) { |
1509 // Stack on entry: | 1509 Register const object = x1; // Object (lhs). |
1510 // jssp[0]: function. | 1510 Register const function = x0; // Function (rhs). |
1511 // jssp[8]: object. | 1511 Register const object_map = x2; // Map of {object}. |
1512 // | 1512 Register const function_map = x3; // Map of {function}. |
1513 // Returns result in x0. Zero indicates instanceof, smi 1 indicates not | 1513 Register const function_prototype = x4; // Prototype of {function}. |
1514 // instanceof. | 1514 Register const scratch = x5; |
1515 | 1515 |
1516 Register result = x0; | 1516 DCHECK(object.is(InstanceOfDescriptor::LeftRegister())); |
1517 Register function = right(); | 1517 DCHECK(function.is(InstanceOfDescriptor::RightRegister())); |
1518 Register object = left(); | |
1519 Register scratch1 = x6; | |
1520 Register scratch2 = x7; | |
1521 Register res_true = x8; | |
1522 Register res_false = x9; | |
1523 // Only used if there was an inline map check site. (See | |
1524 // LCodeGen::DoInstanceOfKnownGlobal().) | |
1525 Register map_check_site = x4; | |
1526 // Delta for the instructions generated between the inline map check and the | |
1527 // instruction setting the result. | |
1528 const int32_t kDeltaToLoadBoolResult = 4 * kInstructionSize; | |
1529 | 1518 |
1530 Label not_js_object, slow; | 1519 // Check if {object} is a smi. |
| 1520 Label object_is_smi; |
| 1521 __ JumpIfSmi(object, &object_is_smi); |
1531 | 1522 |
1532 if (!HasArgsInRegisters()) { | 1523 // Lookup the {function} and the {object} map in the global instanceof cache. |
1533 __ Pop(function, object); | 1524 Label fast_case, slow_case; |
1534 } | 1525 __ Ldr(object_map, FieldMemOperand(object, HeapObject::kMapOffset)); |
1535 | 1526 __ JumpIfNotRoot(function, Heap::kInstanceofCacheFunctionRootIndex, |
1536 if (ReturnTrueFalseObject()) { | 1527 &fast_case); |
1537 __ LoadTrueFalseRoots(res_true, res_false); | 1528 __ JumpIfNotRoot(object_map, Heap::kInstanceofCacheMapRootIndex, &fast_case); |
1538 } else { | 1529 __ LoadRoot(x0, Heap::kInstanceofCacheAnswerRootIndex); |
1539 // This is counter-intuitive, but correct. | |
1540 __ Mov(res_true, Smi::FromInt(0)); | |
1541 __ Mov(res_false, Smi::FromInt(1)); | |
1542 } | |
1543 | |
1544 // Check that the left hand side is a JS object and load its map as a side | |
1545 // effect. | |
1546 Register map = x12; | |
1547 __ JumpIfSmi(object, ¬_js_object); | |
1548 __ IsObjectJSObjectType(object, map, scratch2, ¬_js_object); | |
1549 | |
1550 // If there is a call site cache, don't look in the global cache, but do the | |
1551 // real lookup and update the call site cache. | |
1552 if (!HasCallSiteInlineCheck() && !ReturnTrueFalseObject()) { | |
1553 Label miss; | |
1554 __ JumpIfNotRoot(function, Heap::kInstanceofCacheFunctionRootIndex, &miss); | |
1555 __ JumpIfNotRoot(map, Heap::kInstanceofCacheMapRootIndex, &miss); | |
1556 __ LoadRoot(result, Heap::kInstanceofCacheAnswerRootIndex); | |
1557 __ Ret(); | |
1558 __ Bind(&miss); | |
1559 } | |
1560 | |
1561 // Get the prototype of the function. | |
1562 Register prototype = x13; | |
1563 __ TryGetFunctionPrototype(function, prototype, scratch2, &slow, | |
1564 MacroAssembler::kMissOnBoundFunction); | |
1565 | |
1566 // Check that the function prototype is a JS object. | |
1567 __ JumpIfSmi(prototype, &slow); | |
1568 __ IsObjectJSObjectType(prototype, scratch1, scratch2, &slow); | |
1569 | |
1570 // Update the global instanceof or call site inlined cache with the current | |
1571 // map and function. The cached answer will be set when it is known below. | |
1572 if (HasCallSiteInlineCheck()) { | |
1573 // Patch the (relocated) inlined map check. | |
1574 __ GetRelocatedValueLocation(map_check_site, scratch1); | |
1575 // We have a cell, so need another level of dereferencing. | |
1576 __ Ldr(scratch1, MemOperand(scratch1)); | |
1577 __ Str(map, FieldMemOperand(scratch1, Cell::kValueOffset)); | |
1578 | |
1579 __ Mov(x14, map); | |
1580 // |scratch1| points at the beginning of the cell. Calculate the | |
1581 // field containing the map. | |
1582 __ Add(function, scratch1, Operand(Cell::kValueOffset - 1)); | |
1583 __ RecordWriteField(scratch1, Cell::kValueOffset, x14, function, | |
1584 kLRHasNotBeenSaved, kDontSaveFPRegs, | |
1585 OMIT_REMEMBERED_SET, OMIT_SMI_CHECK); | |
1586 } else { | |
1587 __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex); | |
1588 __ StoreRoot(map, Heap::kInstanceofCacheMapRootIndex); | |
1589 } | |
1590 | |
1591 Label return_true, return_result; | |
1592 Register smi_value = scratch1; | |
1593 { | |
1594 // Loop through the prototype chain looking for the function prototype. | |
1595 Register chain_map = x1; | |
1596 Register chain_prototype = x14; | |
1597 Register null_value = x15; | |
1598 Label loop; | |
1599 __ Ldr(chain_prototype, FieldMemOperand(map, Map::kPrototypeOffset)); | |
1600 __ LoadRoot(null_value, Heap::kNullValueRootIndex); | |
1601 // Speculatively set a result. | |
1602 __ Mov(result, res_false); | |
1603 if (!HasCallSiteInlineCheck() && ReturnTrueFalseObject()) { | |
1604 // Value to store in the cache cannot be an object. | |
1605 __ Mov(smi_value, Smi::FromInt(1)); | |
1606 } | |
1607 | |
1608 __ Bind(&loop); | |
1609 | |
1610 // If the chain prototype is the object prototype, return true. | |
1611 __ Cmp(chain_prototype, prototype); | |
1612 __ B(eq, &return_true); | |
1613 | |
1614 // If the chain prototype is null, we've reached the end of the chain, so | |
1615 // return false. | |
1616 __ Cmp(chain_prototype, null_value); | |
1617 __ B(eq, &return_result); | |
1618 | |
1619 // Otherwise, load the next prototype in the chain, and loop. | |
1620 __ Ldr(chain_map, FieldMemOperand(chain_prototype, HeapObject::kMapOffset)); | |
1621 __ Ldr(chain_prototype, FieldMemOperand(chain_map, Map::kPrototypeOffset)); | |
1622 __ B(&loop); | |
1623 } | |
1624 | |
1625 // Return sequence when no arguments are on the stack. | |
1626 // We cannot fall through to here. | |
1627 __ Bind(&return_true); | |
1628 __ Mov(result, res_true); | |
1629 if (!HasCallSiteInlineCheck() && ReturnTrueFalseObject()) { | |
1630 // Value to store in the cache cannot be an object. | |
1631 __ Mov(smi_value, Smi::FromInt(0)); | |
1632 } | |
1633 __ Bind(&return_result); | |
1634 if (HasCallSiteInlineCheck()) { | |
1635 DCHECK(ReturnTrueFalseObject()); | |
1636 __ Add(map_check_site, map_check_site, kDeltaToLoadBoolResult); | |
1637 __ GetRelocatedValueLocation(map_check_site, scratch2); | |
1638 __ Str(result, MemOperand(scratch2)); | |
1639 } else { | |
1640 Register cached_value = ReturnTrueFalseObject() ? smi_value : result; | |
1641 __ StoreRoot(cached_value, Heap::kInstanceofCacheAnswerRootIndex); | |
1642 } | |
1643 __ Ret(); | 1530 __ Ret(); |
1644 | 1531 |
1645 Label object_not_null, object_not_null_or_smi; | 1532 // If {object} is a smi we can safely return false if {function} is a JS |
1646 | 1533 // function, otherwise we have to miss to the runtime and throw an exception. |
1647 __ Bind(¬_js_object); | 1534 __ Bind(&object_is_smi); |
1648 Register object_type = x14; | 1535 __ JumpIfSmi(function, &slow_case); |
1649 // x0 result result return register (uninit) | 1536 __ JumpIfNotObjectType(function, function_map, scratch, JS_FUNCTION_TYPE, |
1650 // x10 function pointer to function | 1537 &slow_case); |
1651 // x11 object pointer to object | 1538 __ LoadRoot(x0, Heap::kFalseValueRootIndex); |
1652 // x14 object_type type of object (uninit) | |
1653 | |
1654 // Before null, smi and string checks, check that the rhs is a function. | |
1655 // For a non-function rhs, an exception must be thrown. | |
1656 __ JumpIfSmi(function, &slow); | |
1657 __ JumpIfNotObjectType( | |
1658 function, scratch1, object_type, JS_FUNCTION_TYPE, &slow); | |
1659 | |
1660 __ Mov(result, res_false); | |
1661 | |
1662 // Null is not instance of anything. | |
1663 __ Cmp(object, Operand(isolate()->factory()->null_value())); | |
1664 __ B(ne, &object_not_null); | |
1665 __ Ret(); | 1539 __ Ret(); |
1666 | 1540 |
1667 __ Bind(&object_not_null); | 1541 // Fast-case: The {function} must be a valid JSFunction. |
1668 // Smi values are not instances of anything. | 1542 __ Bind(&fast_case); |
1669 __ JumpIfNotSmi(object, &object_not_null_or_smi); | 1543 __ JumpIfSmi(function, &slow_case); |
| 1544 __ JumpIfNotObjectType(function, function_map, scratch, JS_FUNCTION_TYPE, |
| 1545 &slow_case); |
| 1546 |
| 1547 // Ensure that {function} has an instance prototype. |
| 1548 __ Ldrb(scratch, FieldMemOperand(function_map, Map::kBitFieldOffset)); |
| 1549 __ Tbnz(scratch, Map::kHasNonInstancePrototype, &slow_case); |
| 1550 |
| 1551 // Ensure that {function} is not bound. |
| 1552 Register const shared_info = scratch; |
| 1553 Register const scratch_w = scratch.W(); |
| 1554 __ Ldr(shared_info, |
| 1555 FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset)); |
| 1556 // On 64-bit platforms, compiler hints field is not a smi. See definition of |
| 1557 // kCompilerHintsOffset in src/objects.h. |
| 1558 __ Ldr(scratch_w, FieldMemOperand(shared_info, |
| 1559 SharedFunctionInfo::kCompilerHintsOffset)); |
| 1560 __ Tbnz(scratch_w, SharedFunctionInfo::kBoundFunction, &slow_case); |
| 1561 |
| 1562 // Get the "prototype" (or initial map) of the {function}. |
| 1563 __ Ldr(function_prototype, |
| 1564 FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); |
| 1565 __ AssertNotSmi(function_prototype); |
| 1566 |
| 1567 // Resolve the prototype if the {function} has an initial map. Afterwards the |
| 1568 // {function_prototype} will be either the JSReceiver prototype object or the |
| 1569 // hole value, which means that no instances of the {function} were created so |
| 1570 // far and hence we should return false. |
| 1571 Label function_prototype_valid; |
| 1572 __ JumpIfNotObjectType(function_prototype, scratch, scratch, MAP_TYPE, |
| 1573 &function_prototype_valid); |
| 1574 __ Ldr(function_prototype, |
| 1575 FieldMemOperand(function_prototype, Map::kPrototypeOffset)); |
| 1576 __ Bind(&function_prototype_valid); |
| 1577 __ AssertNotSmi(function_prototype); |
| 1578 |
| 1579 // Update the global instanceof cache with the current {object} map and |
| 1580 // {function}. The cached answer will be set when it is known below. |
| 1581 __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex); |
| 1582 __ StoreRoot(object_map, Heap::kInstanceofCacheMapRootIndex); |
| 1583 |
| 1584 // Loop through the prototype chain looking for the {function} prototype. |
| 1585 // Assume true, and change to false if not found. |
| 1586 Register const object_prototype = object_map; |
| 1587 Register const null = scratch; |
| 1588 Label done, loop; |
| 1589 __ LoadRoot(x0, Heap::kTrueValueRootIndex); |
| 1590 __ LoadRoot(null, Heap::kNullValueRootIndex); |
| 1591 __ Bind(&loop); |
| 1592 __ Ldr(object_prototype, FieldMemOperand(object_map, Map::kPrototypeOffset)); |
| 1593 __ Cmp(object_prototype, function_prototype); |
| 1594 __ B(eq, &done); |
| 1595 __ Cmp(object_prototype, null); |
| 1596 __ Ldr(object_map, FieldMemOperand(object_prototype, HeapObject::kMapOffset)); |
| 1597 __ B(ne, &loop); |
| 1598 __ LoadRoot(x0, Heap::kFalseValueRootIndex); |
| 1599 __ Bind(&done); |
| 1600 __ StoreRoot(x0, Heap::kInstanceofCacheAnswerRootIndex); |
1670 __ Ret(); | 1601 __ Ret(); |
1671 | 1602 |
1672 __ Bind(&object_not_null_or_smi); | 1603 // Slow-case: Call the runtime function. |
1673 // String values are not instances of anything. | 1604 __ bind(&slow_case); |
1674 __ IsObjectJSStringType(object, scratch2, &slow); | 1605 __ Push(object, function); |
1675 __ Ret(); | 1606 __ TailCallRuntime(Runtime::kInstanceOf, 2, 1); |
1676 | |
1677 // Slow-case. Tail call builtin. | |
1678 __ Bind(&slow); | |
1679 { | |
1680 FrameScope scope(masm, StackFrame::INTERNAL); | |
1681 // Arguments have either been passed into registers or have been previously | |
1682 // popped. We need to push them before calling builtin. | |
1683 __ Push(object, function); | |
1684 __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION); | |
1685 } | |
1686 if (ReturnTrueFalseObject()) { | |
1687 // Reload true/false because they were clobbered in the builtin call. | |
1688 __ LoadTrueFalseRoots(res_true, res_false); | |
1689 __ Cmp(result, 0); | |
1690 __ Csel(result, res_true, res_false, eq); | |
1691 } | |
1692 __ Ret(); | |
1693 } | 1607 } |
1694 | 1608 |
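Note for reviewers: the new InstanceOfStub fast path above is easier to follow if you read it as a cached prototype-chain walk. Below is a minimal C++ sketch of what it computes; the types and names (HeapObject, Map, InstanceofCache, InstanceOfFastPath) are illustrative placeholders, not V8's real object layout, and the sketch deliberately ignores the smi, non-instance-prototype, bound-function, and slow-case paths. The comments point back at the corresponding root loads/stores and at the loop between the `loop` and `done` labels in the stub.

// Hypothetical sketch -- simplified types, not V8's real heap layout.
struct Map;
struct HeapObject {
  Map* map;  // every heap object points at its map
};
struct Map {
  HeapObject* prototype;  // [[Prototype]] lives on the map; nullptr stands in for null
};

// Models the three roots the stub reads/writes:
// kInstanceofCacheFunctionRootIndex, ...MapRootIndex, ...AnswerRootIndex.
struct InstanceofCache {
  const HeapObject* function = nullptr;  // last constructor checked
  const Map* object_map = nullptr;       // last receiver map checked
  bool answer = false;                   // cached result for that pair
};

// Mirrors the stub's fast path: hit the global instanceof cache if possible,
// otherwise walk the receiver's prototype chain looking for the constructor's
// "prototype" and refill the cache.
bool InstanceOfFastPath(InstanceofCache* cache, HeapObject* object,
                        HeapObject* function, HeapObject* function_prototype) {
  if (cache->function == function && cache->object_map == object->map) {
    return cache->answer;  // LoadRoot(x0, kInstanceofCacheAnswerRootIndex)
  }
  cache->function = function;       // StoreRoot(function, ...FunctionRootIndex)
  cache->object_map = object->map;  // StoreRoot(object_map, ...MapRootIndex)

  bool result = false;
  for (Map* object_map = object->map;;) {       // loop between `loop` and `done`
    HeapObject* object_prototype = object_map->prototype;
    if (object_prototype == function_prototype) { result = true; break; }
    if (object_prototype == nullptr) break;     // reached null: not an instance
    object_map = object_prototype->map;         // step down the chain
  }
  cache->answer = result;  // StoreRoot(x0, ...AnswerRootIndex)
  return result;
}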
1695 | 1609 |
1696 void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) { | 1610 void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) { |
1697 Register arg_count = ArgumentsAccessReadDescriptor::parameter_count(); | 1611 Register arg_count = ArgumentsAccessReadDescriptor::parameter_count(); |
1698 Register key = ArgumentsAccessReadDescriptor::index(); | 1612 Register key = ArgumentsAccessReadDescriptor::index(); |
1699 DCHECK(arg_count.is(x0)); | 1613 DCHECK(arg_count.is(x0)); |
1700 DCHECK(key.is(x1)); | 1614 DCHECK(key.is(x1)); |
1701 | 1615 |
1702 // The displacement is the offset of the last parameter (if any) relative | 1616 // The displacement is the offset of the last parameter (if any) relative |
(...skipping 4289 matching lines...)
5992 MemOperand(fp, 6 * kPointerSize), NULL); | 5906 MemOperand(fp, 6 * kPointerSize), NULL); |
5993 } | 5907 } |
5994 | 5908 |
5995 | 5909 |
5996 #undef __ | 5910 #undef __ |
5997 | 5911 |
5998 } // namespace internal | 5912 } // namespace internal |
5999 } // namespace v8 | 5913 } // namespace v8 |
6000 | 5914 |
6001 #endif // V8_TARGET_ARCH_ARM64 | 5915 #endif // V8_TARGET_ARCH_ARM64 |