| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 1235 matching lines...) | (...skipping 1235 matching lines...) |
| 1246 void MacroAssembler::EnumLengthUntagged(Register dst, Register map) { | 1246 void MacroAssembler::EnumLengthUntagged(Register dst, Register map) { |
| 1247 STATIC_ASSERT(Map::EnumLengthBits::kShift == 0); | 1247 STATIC_ASSERT(Map::EnumLengthBits::kShift == 0); |
| 1248 Ldrsw(dst, UntagSmiFieldMemOperand(map, Map::kBitField3Offset)); | 1248 Ldrsw(dst, UntagSmiFieldMemOperand(map, Map::kBitField3Offset)); |
| 1249 And(dst, dst, Map::EnumLengthBits::kMask); | 1249 And(dst, dst, Map::EnumLengthBits::kMask); |
| 1250 } | 1250 } |
| 1251 | 1251 |
| 1252 | 1252 |
| 1253 void MacroAssembler::EnumLengthSmi(Register dst, Register map) { | 1253 void MacroAssembler::EnumLengthSmi(Register dst, Register map) { |
| 1254 STATIC_ASSERT(Map::EnumLengthBits::kShift == 0); | 1254 STATIC_ASSERT(Map::EnumLengthBits::kShift == 0); |
| 1255 Ldr(dst, FieldMemOperand(map, Map::kBitField3Offset)); | 1255 Ldr(dst, FieldMemOperand(map, Map::kBitField3Offset)); |
| 1256 And(dst, dst, Operand(Smi::FromInt(Map::EnumLengthBits::kMask))); | 1256 And(dst, dst, Smi::FromInt(Map::EnumLengthBits::kMask)); |
| 1257 } | 1257 } |
| 1258 | 1258 |
| 1259 | 1259 |
| 1260 void MacroAssembler::CheckEnumCache(Register object, | 1260 void MacroAssembler::CheckEnumCache(Register object, |
| 1261 Register null_value, | 1261 Register null_value, |
| 1262 Register scratch0, | 1262 Register scratch0, |
| 1263 Register scratch1, | 1263 Register scratch1, |
| 1264 Register scratch2, | 1264 Register scratch2, |
| 1265 Register scratch3, | 1265 Register scratch3, |
| 1266 Label* call_runtime) { | 1266 Label* call_runtime) { |
| (...skipping 52 matching lines...) | (...skipping 52 matching lines...) |
| 1319 Register scratch1, | 1319 Register scratch1, |
| 1320 Register scratch2, | 1320 Register scratch2, |
| 1321 Label* no_memento_found) { | 1321 Label* no_memento_found) { |
| 1322 ExternalReference new_space_start = | 1322 ExternalReference new_space_start = |
| 1323 ExternalReference::new_space_start(isolate()); | 1323 ExternalReference::new_space_start(isolate()); |
| 1324 ExternalReference new_space_allocation_top = | 1324 ExternalReference new_space_allocation_top = |
| 1325 ExternalReference::new_space_allocation_top_address(isolate()); | 1325 ExternalReference::new_space_allocation_top_address(isolate()); |
| 1326 | 1326 |
| 1327 Add(scratch1, receiver, | 1327 Add(scratch1, receiver, |
| 1328 JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag); | 1328 JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag); |
| 1329 Cmp(scratch1, Operand(new_space_start)); | 1329 Cmp(scratch1, new_space_start); |
| 1330 B(lt, no_memento_found); | 1330 B(lt, no_memento_found); |
| 1331 | 1331 |
| 1332 Mov(scratch2, Operand(new_space_allocation_top)); | 1332 Mov(scratch2, new_space_allocation_top); |
| 1333 Ldr(scratch2, MemOperand(scratch2)); | 1333 Ldr(scratch2, MemOperand(scratch2)); |
| 1334 Cmp(scratch1, scratch2); | 1334 Cmp(scratch1, scratch2); |
| 1335 B(gt, no_memento_found); | 1335 B(gt, no_memento_found); |
| 1336 | 1336 |
| 1337 Ldr(scratch1, MemOperand(scratch1, -AllocationMemento::kSize)); | 1337 Ldr(scratch1, MemOperand(scratch1, -AllocationMemento::kSize)); |
| 1338 Cmp(scratch1, | 1338 Cmp(scratch1, |
| 1339 Operand(isolate()->factory()->allocation_memento_map())); | 1339 Operand(isolate()->factory()->allocation_memento_map())); |
| 1340 } | 1340 } |
| 1341 | 1341 |
| 1342 | 1342 |
| (...skipping 17 matching lines...) | (...skipping 17 matching lines...) |
| 1360 Br(scratch1); | 1360 Br(scratch1); |
| 1361 } | 1361 } |
| 1362 | 1362 |
| 1363 | 1363 |
| 1364 void MacroAssembler::InNewSpace(Register object, | 1364 void MacroAssembler::InNewSpace(Register object, |
| 1365 Condition cond, | 1365 Condition cond, |
| 1366 Label* branch) { | 1366 Label* branch) { |
| 1367 ASSERT(cond == eq || cond == ne); | 1367 ASSERT(cond == eq || cond == ne); |
| 1368 UseScratchRegisterScope temps(this); | 1368 UseScratchRegisterScope temps(this); |
| 1369 Register temp = temps.AcquireX(); | 1369 Register temp = temps.AcquireX(); |
| 1370 And(temp, object, Operand(ExternalReference::new_space_mask(isolate()))); | 1370 And(temp, object, ExternalReference::new_space_mask(isolate())); |
| 1371 Cmp(temp, Operand(ExternalReference::new_space_start(isolate()))); | 1371 Cmp(temp, ExternalReference::new_space_start(isolate())); |
| 1372 B(cond, branch); | 1372 B(cond, branch); |
| 1373 } | 1373 } |
| 1374 | 1374 |
| 1375 | 1375 |
| 1376 void MacroAssembler::Throw(Register value, | 1376 void MacroAssembler::Throw(Register value, |
| 1377 Register scratch1, | 1377 Register scratch1, |
| 1378 Register scratch2, | 1378 Register scratch2, |
| 1379 Register scratch3, | 1379 Register scratch3, |
| 1380 Register scratch4) { | 1380 Register scratch4) { |
| 1381 // Adjust this code if not the case. | 1381 // Adjust this code if not the case. |
| (...skipping 82 matching lines...) | (...skipping 82 matching lines...) |
| 1464 | 1464 |
| 1465 void MacroAssembler::Throw(BailoutReason reason) { | 1465 void MacroAssembler::Throw(BailoutReason reason) { |
| 1466 Label throw_start; | 1466 Label throw_start; |
| 1467 Bind(&throw_start); | 1467 Bind(&throw_start); |
| 1468 #ifdef DEBUG | 1468 #ifdef DEBUG |
| 1469 const char* msg = GetBailoutReason(reason); | 1469 const char* msg = GetBailoutReason(reason); |
| 1470 RecordComment("Throw message: "); | 1470 RecordComment("Throw message: "); |
| 1471 RecordComment((msg != NULL) ? msg : "UNKNOWN"); | 1471 RecordComment((msg != NULL) ? msg : "UNKNOWN"); |
| 1472 #endif | 1472 #endif |
| 1473 | 1473 |
| 1474 Mov(x0, Operand(Smi::FromInt(reason))); | 1474 Mov(x0, Smi::FromInt(reason)); |
| 1475 Push(x0); | 1475 Push(x0); |
| 1476 | 1476 |
| 1477 // Disable stub call restrictions to always allow calls to throw. | 1477 // Disable stub call restrictions to always allow calls to throw. |
| 1478 if (!has_frame_) { | 1478 if (!has_frame_) { |
| 1479 // We don't actually want to generate a pile of code for this, so just | 1479 // We don't actually want to generate a pile of code for this, so just |
| 1480 // claim there is a stack frame, without generating one. | 1480 // claim there is a stack frame, without generating one. |
| 1481 FrameScope scope(this, StackFrame::NONE); | 1481 FrameScope scope(this, StackFrame::NONE); |
| 1482 CallRuntime(Runtime::kThrowMessage, 1); | 1482 CallRuntime(Runtime::kThrowMessage, 1); |
| 1483 } else { | 1483 } else { |
| 1484 CallRuntime(Runtime::kThrowMessage, 1); | 1484 CallRuntime(Runtime::kThrowMessage, 1); |
| (...skipping 108 matching lines...) | (...skipping 108 matching lines...) |
| 1593 // Illegal operation: drop the stack arguments and return undefined. | 1593 // Illegal operation: drop the stack arguments and return undefined. |
| 1594 if (num_arguments > 0) { | 1594 if (num_arguments > 0) { |
| 1595 Drop(num_arguments); | 1595 Drop(num_arguments); |
| 1596 } | 1596 } |
| 1597 LoadRoot(x0, Heap::kUndefinedValueRootIndex); | 1597 LoadRoot(x0, Heap::kUndefinedValueRootIndex); |
| 1598 return; | 1598 return; |
| 1599 } | 1599 } |
| 1600 | 1600 |
| 1601 // Place the necessary arguments. | 1601 // Place the necessary arguments. |
| 1602 Mov(x0, num_arguments); | 1602 Mov(x0, num_arguments); |
| 1603 Mov(x1, Operand(ExternalReference(f, isolate()))); | 1603 Mov(x1, ExternalReference(f, isolate())); |
| 1604 | 1604 |
| 1605 CEntryStub stub(1, save_doubles); | 1605 CEntryStub stub(1, save_doubles); |
| 1606 CallStub(&stub); | 1606 CallStub(&stub); |
| 1607 } | 1607 } |
| 1608 | 1608 |
| 1609 | 1609 |
| 1610 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) { | 1610 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) { |
| 1611 return ref0.address() - ref1.address(); | 1611 return ref0.address() - ref1.address(); |
| 1612 } | 1612 } |
| 1613 | 1613 |
| (...skipping 18 matching lines...) | (...skipping 18 matching lines...) |
| 1632 | 1632 |
| 1633 ASSERT(function_address.is(x1) || function_address.is(x2)); | 1633 ASSERT(function_address.is(x1) || function_address.is(x2)); |
| 1634 | 1634 |
| 1635 Label profiler_disabled; | 1635 Label profiler_disabled; |
| 1636 Label end_profiler_check; | 1636 Label end_profiler_check; |
| 1637 bool* is_profiling_flag = isolate()->cpu_profiler()->is_profiling_address(); | 1637 bool* is_profiling_flag = isolate()->cpu_profiler()->is_profiling_address(); |
| 1638 STATIC_ASSERT(sizeof(*is_profiling_flag) == 1); | 1638 STATIC_ASSERT(sizeof(*is_profiling_flag) == 1); |
| 1639 Mov(x10, reinterpret_cast<uintptr_t>(is_profiling_flag)); | 1639 Mov(x10, reinterpret_cast<uintptr_t>(is_profiling_flag)); |
| 1640 Ldrb(w10, MemOperand(x10)); | 1640 Ldrb(w10, MemOperand(x10)); |
| 1641 Cbz(w10, &profiler_disabled); | 1641 Cbz(w10, &profiler_disabled); |
| 1642 Mov(x3, Operand(thunk_ref)); | 1642 Mov(x3, thunk_ref); |
| 1643 B(&end_profiler_check); | 1643 B(&end_profiler_check); |
| 1644 | 1644 |
| 1645 Bind(&profiler_disabled); | 1645 Bind(&profiler_disabled); |
| 1646 Mov(x3, function_address); | 1646 Mov(x3, function_address); |
| 1647 Bind(&end_profiler_check); | 1647 Bind(&end_profiler_check); |
| 1648 | 1648 |
| 1649 // Save the callee-save registers we are going to use. | 1649 // Save the callee-save registers we are going to use. |
| 1650 // TODO(all): Is this necessary? ARM doesn't do it. | 1650 // TODO(all): Is this necessary? ARM doesn't do it. |
| 1651 STATIC_ASSERT(kCallApiFunctionSpillSpace == 4); | 1651 STATIC_ASSERT(kCallApiFunctionSpillSpace == 4); |
| 1652 Poke(x19, (spill_offset + 0) * kXRegSize); | 1652 Poke(x19, (spill_offset + 0) * kXRegSize); |
| 1653 Poke(x20, (spill_offset + 1) * kXRegSize); | 1653 Poke(x20, (spill_offset + 1) * kXRegSize); |
| 1654 Poke(x21, (spill_offset + 2) * kXRegSize); | 1654 Poke(x21, (spill_offset + 2) * kXRegSize); |
| 1655 Poke(x22, (spill_offset + 3) * kXRegSize); | 1655 Poke(x22, (spill_offset + 3) * kXRegSize); |
| 1656 | 1656 |
| 1657 // Allocate HandleScope in callee-save registers. | 1657 // Allocate HandleScope in callee-save registers. |
| 1658 // We will need to restore the HandleScope after the call to the API function; | 1658 // We will need to restore the HandleScope after the call to the API function; |
| 1659 // by allocating it in callee-save registers it will be preserved by C code. | 1659 // by allocating it in callee-save registers it will be preserved by C code. |
| 1660 Register handle_scope_base = x22; | 1660 Register handle_scope_base = x22; |
| 1661 Register next_address_reg = x19; | 1661 Register next_address_reg = x19; |
| 1662 Register limit_reg = x20; | 1662 Register limit_reg = x20; |
| 1663 Register level_reg = w21; | 1663 Register level_reg = w21; |
| 1664 | 1664 |
| 1665 Mov(handle_scope_base, Operand(next_address)); | 1665 Mov(handle_scope_base, next_address); |
| 1666 Ldr(next_address_reg, MemOperand(handle_scope_base, kNextOffset)); | 1666 Ldr(next_address_reg, MemOperand(handle_scope_base, kNextOffset)); |
| 1667 Ldr(limit_reg, MemOperand(handle_scope_base, kLimitOffset)); | 1667 Ldr(limit_reg, MemOperand(handle_scope_base, kLimitOffset)); |
| 1668 Ldr(level_reg, MemOperand(handle_scope_base, kLevelOffset)); | 1668 Ldr(level_reg, MemOperand(handle_scope_base, kLevelOffset)); |
| 1669 Add(level_reg, level_reg, 1); | 1669 Add(level_reg, level_reg, 1); |
| 1670 Str(level_reg, MemOperand(handle_scope_base, kLevelOffset)); | 1670 Str(level_reg, MemOperand(handle_scope_base, kLevelOffset)); |
| 1671 | 1671 |
| 1672 if (FLAG_log_timer_events) { | 1672 if (FLAG_log_timer_events) { |
| 1673 FrameScope frame(this, StackFrame::MANUAL); | 1673 FrameScope frame(this, StackFrame::MANUAL); |
| 1674 PushSafepointRegisters(); | 1674 PushSafepointRegisters(); |
| 1675 Mov(x0, Operand(ExternalReference::isolate_address(isolate()))); | 1675 Mov(x0, ExternalReference::isolate_address(isolate())); |
| 1676 CallCFunction(ExternalReference::log_enter_external_function(isolate()), 1); | 1676 CallCFunction(ExternalReference::log_enter_external_function(isolate()), 1); |
| 1677 PopSafepointRegisters(); | 1677 PopSafepointRegisters(); |
| 1678 } | 1678 } |
| 1679 | 1679 |
| 1680 // Native call returns to the DirectCEntry stub which redirects to the | 1680 // Native call returns to the DirectCEntry stub which redirects to the |
| 1681 // return address pushed on stack (could have moved after GC). | 1681 // return address pushed on stack (could have moved after GC). |
| 1682 // DirectCEntry stub itself is generated early and never moves. | 1682 // DirectCEntry stub itself is generated early and never moves. |
| 1683 DirectCEntryStub stub; | 1683 DirectCEntryStub stub; |
| 1684 stub.GenerateCall(this, x3); | 1684 stub.GenerateCall(this, x3); |
| 1685 | 1685 |
| 1686 if (FLAG_log_timer_events) { | 1686 if (FLAG_log_timer_events) { |
| 1687 FrameScope frame(this, StackFrame::MANUAL); | 1687 FrameScope frame(this, StackFrame::MANUAL); |
| 1688 PushSafepointRegisters(); | 1688 PushSafepointRegisters(); |
| 1689 Mov(x0, Operand(ExternalReference::isolate_address(isolate()))); | 1689 Mov(x0, ExternalReference::isolate_address(isolate())); |
| 1690 CallCFunction(ExternalReference::log_leave_external_function(isolate()), 1); | 1690 CallCFunction(ExternalReference::log_leave_external_function(isolate()), 1); |
| 1691 PopSafepointRegisters(); | 1691 PopSafepointRegisters(); |
| 1692 } | 1692 } |
| 1693 | 1693 |
| 1694 Label promote_scheduled_exception; | 1694 Label promote_scheduled_exception; |
| 1695 Label exception_handled; | 1695 Label exception_handled; |
| 1696 Label delete_allocated_handles; | 1696 Label delete_allocated_handles; |
| 1697 Label leave_exit_frame; | 1697 Label leave_exit_frame; |
| 1698 Label return_value_loaded; | 1698 Label return_value_loaded; |
| 1699 | 1699 |
| (...skipping 15 matching lines...) | (...skipping 15 matching lines...) |
| 1715 B(ne, &delete_allocated_handles); | 1715 B(ne, &delete_allocated_handles); |
| 1716 | 1716 |
| 1717 Bind(&leave_exit_frame); | 1717 Bind(&leave_exit_frame); |
| 1718 // Restore callee-saved registers. | 1718 // Restore callee-saved registers. |
| 1719 Peek(x19, (spill_offset + 0) * kXRegSize); | 1719 Peek(x19, (spill_offset + 0) * kXRegSize); |
| 1720 Peek(x20, (spill_offset + 1) * kXRegSize); | 1720 Peek(x20, (spill_offset + 1) * kXRegSize); |
| 1721 Peek(x21, (spill_offset + 2) * kXRegSize); | 1721 Peek(x21, (spill_offset + 2) * kXRegSize); |
| 1722 Peek(x22, (spill_offset + 3) * kXRegSize); | 1722 Peek(x22, (spill_offset + 3) * kXRegSize); |
| 1723 | 1723 |
| 1724 // Check if the function scheduled an exception. | 1724 // Check if the function scheduled an exception. |
| 1725 Mov(x5, Operand(ExternalReference::scheduled_exception_address(isolate()))); | 1725 Mov(x5, ExternalReference::scheduled_exception_address(isolate())); |
| 1726 Ldr(x5, MemOperand(x5)); | 1726 Ldr(x5, MemOperand(x5)); |
| 1727 JumpIfNotRoot(x5, Heap::kTheHoleValueRootIndex, &promote_scheduled_exception); | 1727 JumpIfNotRoot(x5, Heap::kTheHoleValueRootIndex, &promote_scheduled_exception); |
| 1728 Bind(&exception_handled); | 1728 Bind(&exception_handled); |
| 1729 | 1729 |
| 1730 bool restore_context = context_restore_operand != NULL; | 1730 bool restore_context = context_restore_operand != NULL; |
| 1731 if (restore_context) { | 1731 if (restore_context) { |
| 1732 Ldr(cp, *context_restore_operand); | 1732 Ldr(cp, *context_restore_operand); |
| 1733 } | 1733 } |
| 1734 | 1734 |
| 1735 LeaveExitFrame(false, x1, !restore_context); | 1735 LeaveExitFrame(false, x1, !restore_context); |
| 1736 Drop(stack_space); | 1736 Drop(stack_space); |
| 1737 Ret(); | 1737 Ret(); |
| 1738 | 1738 |
| 1739 Bind(&promote_scheduled_exception); | 1739 Bind(&promote_scheduled_exception); |
| 1740 { | 1740 { |
| 1741 FrameScope frame(this, StackFrame::INTERNAL); | 1741 FrameScope frame(this, StackFrame::INTERNAL); |
| 1742 CallExternalReference( | 1742 CallExternalReference( |
| 1743 ExternalReference(Runtime::kPromoteScheduledException, isolate()), 0); | 1743 ExternalReference(Runtime::kPromoteScheduledException, isolate()), 0); |
| 1744 } | 1744 } |
| 1745 B(&exception_handled); | 1745 B(&exception_handled); |
| 1746 | 1746 |
| 1747 // HandleScope limit has changed. Delete allocated extensions. | 1747 // HandleScope limit has changed. Delete allocated extensions. |
| 1748 Bind(&delete_allocated_handles); | 1748 Bind(&delete_allocated_handles); |
| 1749 Str(limit_reg, MemOperand(handle_scope_base, kLimitOffset)); | 1749 Str(limit_reg, MemOperand(handle_scope_base, kLimitOffset)); |
| 1750 // Save the return value in a callee-save register. | 1750 // Save the return value in a callee-save register. |
| 1751 Register saved_result = x19; | 1751 Register saved_result = x19; |
| 1752 Mov(saved_result, x0); | 1752 Mov(saved_result, x0); |
| 1753 Mov(x0, Operand(ExternalReference::isolate_address(isolate()))); | 1753 Mov(x0, ExternalReference::isolate_address(isolate())); |
| 1754 CallCFunction( | 1754 CallCFunction( |
| 1755 ExternalReference::delete_handle_scope_extensions(isolate()), 1); | 1755 ExternalReference::delete_handle_scope_extensions(isolate()), 1); |
| 1756 Mov(x0, saved_result); | 1756 Mov(x0, saved_result); |
| 1757 B(&leave_exit_frame); | 1757 B(&leave_exit_frame); |
| 1758 } | 1758 } |
| 1759 | 1759 |
| 1760 | 1760 |
| 1761 void MacroAssembler::CallExternalReference(const ExternalReference& ext, | 1761 void MacroAssembler::CallExternalReference(const ExternalReference& ext, |
| 1762 int num_arguments) { | 1762 int num_arguments) { |
| 1763 Mov(x0, num_arguments); | 1763 Mov(x0, num_arguments); |
| 1764 Mov(x1, Operand(ext)); | 1764 Mov(x1, ext); |
| 1765 | 1765 |
| 1766 CEntryStub stub(1); | 1766 CEntryStub stub(1); |
| 1767 CallStub(&stub); | 1767 CallStub(&stub); |
| 1768 } | 1768 } |
| 1769 | 1769 |
| 1770 | 1770 |
| 1771 void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin) { | 1771 void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin) { |
| 1772 Mov(x1, Operand(builtin)); | 1772 Mov(x1, builtin); |
| 1773 CEntryStub stub(1); | 1773 CEntryStub stub(1); |
| 1774 Jump(stub.GetCode(isolate()), RelocInfo::CODE_TARGET); | 1774 Jump(stub.GetCode(isolate()), RelocInfo::CODE_TARGET); |
| 1775 } | 1775 } |
| 1776 | 1776 |
| 1777 | 1777 |
| 1778 void MacroAssembler::GetBuiltinFunction(Register target, | 1778 void MacroAssembler::GetBuiltinFunction(Register target, |
| 1779 Builtins::JavaScript id) { | 1779 Builtins::JavaScript id) { |
| 1780 // Load the builtins object into target register. | 1780 // Load the builtins object into target register. |
| 1781 Ldr(target, GlobalObjectMemOperand()); | 1781 Ldr(target, GlobalObjectMemOperand()); |
| 1782 Ldr(target, FieldMemOperand(target, GlobalObject::kBuiltinsOffset)); | 1782 Ldr(target, FieldMemOperand(target, GlobalObject::kBuiltinsOffset)); |
| (...skipping 91 matching lines...) | (...skipping 91 matching lines...) |
| 1874 int num_of_reg_args) { | 1874 int num_of_reg_args) { |
| 1875 CallCFunction(function, num_of_reg_args, 0); | 1875 CallCFunction(function, num_of_reg_args, 0); |
| 1876 } | 1876 } |
| 1877 | 1877 |
| 1878 | 1878 |
| 1879 void MacroAssembler::CallCFunction(ExternalReference function, | 1879 void MacroAssembler::CallCFunction(ExternalReference function, |
| 1880 int num_of_reg_args, | 1880 int num_of_reg_args, |
| 1881 int num_of_double_args) { | 1881 int num_of_double_args) { |
| 1882 UseScratchRegisterScope temps(this); | 1882 UseScratchRegisterScope temps(this); |
| 1883 Register temp = temps.AcquireX(); | 1883 Register temp = temps.AcquireX(); |
| 1884 Mov(temp, Operand(function)); | 1884 Mov(temp, function); |
| 1885 CallCFunction(temp, num_of_reg_args, num_of_double_args); | 1885 CallCFunction(temp, num_of_reg_args, num_of_double_args); |
| 1886 } | 1886 } |
| 1887 | 1887 |
| 1888 | 1888 |
| 1889 void MacroAssembler::CallCFunction(Register function, | 1889 void MacroAssembler::CallCFunction(Register function, |
| 1890 int num_of_reg_args, | 1890 int num_of_reg_args, |
| 1891 int num_of_double_args) { | 1891 int num_of_double_args) { |
| 1892 ASSERT(has_frame()); | 1892 ASSERT(has_frame()); |
| 1893 // We can pass 8 integer arguments in registers. If we need to pass more than | 1893 // We can pass 8 integer arguments in registers. If we need to pass more than |
| 1894 // that, we'll need to implement support for passing them on the stack. | 1894 // that, we'll need to implement support for passing them on the stack. |
| (...skipping 1010 matching lines...) | (...skipping 1010 matching lines...) |
| 2905 | 2905 |
| 2906 Bind(&done); | 2906 Bind(&done); |
| 2907 } | 2907 } |
| 2908 | 2908 |
| 2909 | 2909 |
| 2910 void MacroAssembler::Prologue(PrologueFrameMode frame_mode) { | 2910 void MacroAssembler::Prologue(PrologueFrameMode frame_mode) { |
| 2911 if (frame_mode == BUILD_STUB_FRAME) { | 2911 if (frame_mode == BUILD_STUB_FRAME) { |
| 2912 ASSERT(StackPointer().Is(jssp)); | 2912 ASSERT(StackPointer().Is(jssp)); |
| 2913 UseScratchRegisterScope temps(this); | 2913 UseScratchRegisterScope temps(this); |
| 2914 Register temp = temps.AcquireX(); | 2914 Register temp = temps.AcquireX(); |
| 2915 __ Mov(temp, Operand(Smi::FromInt(StackFrame::STUB))); | 2915 __ Mov(temp, Smi::FromInt(StackFrame::STUB)); |
| 2916 // Compiled stubs don't age, and so they don't need the predictable code | 2916 // Compiled stubs don't age, and so they don't need the predictable code |
| 2917 // ageing sequence. | 2917 // ageing sequence. |
| 2918 __ Push(lr, fp, cp, temp); | 2918 __ Push(lr, fp, cp, temp); |
| 2919 __ Add(fp, jssp, StandardFrameConstants::kFixedFrameSizeFromFp); | 2919 __ Add(fp, jssp, StandardFrameConstants::kFixedFrameSizeFromFp); |
| 2920 } else { | 2920 } else { |
| 2921 if (isolate()->IsCodePreAgingActive()) { | 2921 if (isolate()->IsCodePreAgingActive()) { |
| 2922 Code* stub = Code::GetPreAgedCodeAgeStub(isolate()); | 2922 Code* stub = Code::GetPreAgedCodeAgeStub(isolate()); |
| 2923 __ EmitCodeAgeSequence(stub); | 2923 __ EmitCodeAgeSequence(stub); |
| 2924 } else { | 2924 } else { |
| 2925 __ EmitFrameSetupForCodeAgePatching(); | 2925 __ EmitFrameSetupForCodeAgePatching(); |
| 2926 } | 2926 } |
| 2927 } | 2927 } |
| 2928 } | 2928 } |
| 2929 | 2929 |
| 2930 | 2930 |
| 2931 void MacroAssembler::EnterFrame(StackFrame::Type type) { | 2931 void MacroAssembler::EnterFrame(StackFrame::Type type) { |
| 2932 ASSERT(jssp.Is(StackPointer())); | 2932 ASSERT(jssp.Is(StackPointer())); |
| 2933 UseScratchRegisterScope temps(this); | 2933 UseScratchRegisterScope temps(this); |
| 2934 Register type_reg = temps.AcquireX(); | 2934 Register type_reg = temps.AcquireX(); |
| 2935 Register code_reg = temps.AcquireX(); | 2935 Register code_reg = temps.AcquireX(); |
| 2936 | 2936 |
| 2937 Push(lr, fp, cp); | 2937 Push(lr, fp, cp); |
| 2938 Mov(type_reg, Operand(Smi::FromInt(type))); | 2938 Mov(type_reg, Smi::FromInt(type)); |
| 2939 Mov(code_reg, Operand(CodeObject())); | 2939 Mov(code_reg, Operand(CodeObject())); |
| 2940 Push(type_reg, code_reg); | 2940 Push(type_reg, code_reg); |
| 2941 // jssp[4] : lr | 2941 // jssp[4] : lr |
| 2942 // jssp[3] : fp | 2942 // jssp[3] : fp |
| 2943 // jssp[2] : cp | 2943 // jssp[2] : cp |
| 2944 // jssp[1] : type | 2944 // jssp[1] : type |
| 2945 // jssp[0] : code object | 2945 // jssp[0] : code object |
| 2946 | 2946 |
| 2947 // Adjust FP to point to saved FP. | 2947 // Adjust FP to point to saved FP. |
| 2948 Add(fp, jssp, StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize); | 2948 Add(fp, jssp, StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize); |
| (...skipping 136 matching lines...) | (...skipping 136 matching lines...) |
| 3085 SetStackPointer(jssp); | 3085 SetStackPointer(jssp); |
| 3086 AssertStackConsistency(); | 3086 AssertStackConsistency(); |
| 3087 Pop(fp, lr); | 3087 Pop(fp, lr); |
| 3088 } | 3088 } |
| 3089 | 3089 |
| 3090 | 3090 |
| 3091 void MacroAssembler::SetCounter(StatsCounter* counter, int value, | 3091 void MacroAssembler::SetCounter(StatsCounter* counter, int value, |
| 3092 Register scratch1, Register scratch2) { | 3092 Register scratch1, Register scratch2) { |
| 3093 if (FLAG_native_code_counters && counter->Enabled()) { | 3093 if (FLAG_native_code_counters && counter->Enabled()) { |
| 3094 Mov(scratch1, value); | 3094 Mov(scratch1, value); |
| 3095 Mov(scratch2, Operand(ExternalReference(counter))); | 3095 Mov(scratch2, ExternalReference(counter)); |
| 3096 Str(scratch1, MemOperand(scratch2)); | 3096 Str(scratch1, MemOperand(scratch2)); |
| 3097 } | 3097 } |
| 3098 } | 3098 } |
| 3099 | 3099 |
| 3100 | 3100 |
| 3101 void MacroAssembler::IncrementCounter(StatsCounter* counter, int value, | 3101 void MacroAssembler::IncrementCounter(StatsCounter* counter, int value, |
| 3102 Register scratch1, Register scratch2) { | 3102 Register scratch1, Register scratch2) { |
| 3103 ASSERT(value != 0); | 3103 ASSERT(value != 0); |
| 3104 if (FLAG_native_code_counters && counter->Enabled()) { | 3104 if (FLAG_native_code_counters && counter->Enabled()) { |
| 3105 Mov(scratch2, Operand(ExternalReference(counter))); | 3105 Mov(scratch2, ExternalReference(counter)); |
| 3106 Ldr(scratch1, MemOperand(scratch2)); | 3106 Ldr(scratch1, MemOperand(scratch2)); |
| 3107 Add(scratch1, scratch1, value); | 3107 Add(scratch1, scratch1, value); |
| 3108 Str(scratch1, MemOperand(scratch2)); | 3108 Str(scratch1, MemOperand(scratch2)); |
| 3109 } | 3109 } |
| 3110 } | 3110 } |
| 3111 | 3111 |
| 3112 | 3112 |
| 3113 void MacroAssembler::DecrementCounter(StatsCounter* counter, int value, | 3113 void MacroAssembler::DecrementCounter(StatsCounter* counter, int value, |
| 3114 Register scratch1, Register scratch2) { | 3114 Register scratch1, Register scratch2) { |
| 3115 IncrementCounter(counter, -value, scratch1, scratch2); | 3115 IncrementCounter(counter, -value, scratch1, scratch2); |
| (...skipping 12 matching lines...) | (...skipping 12 matching lines...) |
| 3128 // destination register in case we store into it (the write barrier | 3128 // destination register in case we store into it (the write barrier |
| 3129 // cannot be allowed to destroy the context in cp). | 3129 // cannot be allowed to destroy the context in cp). |
| 3130 Mov(dst, cp); | 3130 Mov(dst, cp); |
| 3131 } | 3131 } |
| 3132 } | 3132 } |
| 3133 | 3133 |
| 3134 | 3134 |
| 3135 #ifdef ENABLE_DEBUGGER_SUPPORT | 3135 #ifdef ENABLE_DEBUGGER_SUPPORT |
| 3136 void MacroAssembler::DebugBreak() { | 3136 void MacroAssembler::DebugBreak() { |
| 3137 Mov(x0, 0); | 3137 Mov(x0, 0); |
| 3138 Mov(x1, Operand(ExternalReference(Runtime::kDebugBreak, isolate()))); | 3138 Mov(x1, ExternalReference(Runtime::kDebugBreak, isolate())); |
| 3139 CEntryStub ces(1); | 3139 CEntryStub ces(1); |
| 3140 ASSERT(AllowThisStubCall(&ces)); | 3140 ASSERT(AllowThisStubCall(&ces)); |
| 3141 Call(ces.GetCode(isolate()), RelocInfo::DEBUG_BREAK); | 3141 Call(ces.GetCode(isolate()), RelocInfo::DEBUG_BREAK); |
| 3142 } | 3142 } |
| 3143 #endif | 3143 #endif |
| 3144 | 3144 |
| 3145 | 3145 |
| 3146 void MacroAssembler::PushTryHandler(StackHandler::Kind kind, | 3146 void MacroAssembler::PushTryHandler(StackHandler::Kind kind, |
| 3147 int handler_index) { | 3147 int handler_index) { |
| 3148 ASSERT(jssp.Is(StackPointer())); | 3148 ASSERT(jssp.Is(StackPointer())); |
| (...skipping 18 matching lines...) | (...skipping 18 matching lines...) |
| 3167 | 3167 |
| 3168 // Push the frame pointer, context, state, and code object. | 3168 // Push the frame pointer, context, state, and code object. |
| 3169 if (kind == StackHandler::JS_ENTRY) { | 3169 if (kind == StackHandler::JS_ENTRY) { |
| 3170 ASSERT(Smi::FromInt(0) == 0); | 3170 ASSERT(Smi::FromInt(0) == 0); |
| 3171 Push(xzr, xzr, x11, x10); | 3171 Push(xzr, xzr, x11, x10); |
| 3172 } else { | 3172 } else { |
| 3173 Push(fp, cp, x11, x10); | 3173 Push(fp, cp, x11, x10); |
| 3174 } | 3174 } |
| 3175 | 3175 |
| 3176 // Link the current handler as the next handler. | 3176 // Link the current handler as the next handler. |
| 3177 Mov(x11, Operand(ExternalReference(Isolate::kHandlerAddress, isolate()))); | 3177 Mov(x11, ExternalReference(Isolate::kHandlerAddress, isolate())); |
| 3178 Ldr(x10, MemOperand(x11)); | 3178 Ldr(x10, MemOperand(x11)); |
| 3179 Push(x10); | 3179 Push(x10); |
| 3180 // Set this new handler as the current one. | 3180 // Set this new handler as the current one. |
| 3181 Str(jssp, MemOperand(x11)); | 3181 Str(jssp, MemOperand(x11)); |
| 3182 } | 3182 } |
| 3183 | 3183 |
| 3184 | 3184 |
| 3185 void MacroAssembler::PopTryHandler() { | 3185 void MacroAssembler::PopTryHandler() { |
| 3186 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); | 3186 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); |
| 3187 Pop(x10); | 3187 Pop(x10); |
| 3188 Mov(x11, Operand(ExternalReference(Isolate::kHandlerAddress, isolate()))); | 3188 Mov(x11, ExternalReference(Isolate::kHandlerAddress, isolate())); |
| 3189 Drop(StackHandlerConstants::kSize - kXRegSize, kByteSizeInBytes); | 3189 Drop(StackHandlerConstants::kSize - kXRegSize, kByteSizeInBytes); |
| 3190 Str(x10, MemOperand(x11)); | 3190 Str(x10, MemOperand(x11)); |
| 3191 } | 3191 } |
| 3192 | 3192 |
| 3193 | 3193 |
| 3194 void MacroAssembler::Allocate(int object_size, | 3194 void MacroAssembler::Allocate(int object_size, |
| 3195 Register result, | 3195 Register result, |
| 3196 Register scratch1, | 3196 Register scratch1, |
| 3197 Register scratch2, | 3197 Register scratch2, |
| 3198 Label* gc_required, | 3198 Label* gc_required, |
| (...skipping 101 matching lines...) | (...skipping 101 matching lines...) |
| 3300 AllocationUtils::GetAllocationTopReference(isolate(), flags); | 3300 AllocationUtils::GetAllocationTopReference(isolate(), flags); |
| 3301 ExternalReference heap_allocation_limit = | 3301 ExternalReference heap_allocation_limit = |
| 3302 AllocationUtils::GetAllocationLimitReference(isolate(), flags); | 3302 AllocationUtils::GetAllocationLimitReference(isolate(), flags); |
| 3303 intptr_t top = reinterpret_cast<intptr_t>(heap_allocation_top.address()); | 3303 intptr_t top = reinterpret_cast<intptr_t>(heap_allocation_top.address()); |
| 3304 intptr_t limit = reinterpret_cast<intptr_t>(heap_allocation_limit.address()); | 3304 intptr_t limit = reinterpret_cast<intptr_t>(heap_allocation_limit.address()); |
| 3305 ASSERT((limit - top) == kPointerSize); | 3305 ASSERT((limit - top) == kPointerSize); |
| 3306 | 3306 |
| 3307 // Set up allocation top address and object size registers. | 3307 // Set up allocation top address and object size registers. |
| 3308 Register top_address = scratch1; | 3308 Register top_address = scratch1; |
| 3309 Register allocation_limit = scratch2; | 3309 Register allocation_limit = scratch2; |
| 3310 Mov(top_address, Operand(heap_allocation_top)); | 3310 Mov(top_address, heap_allocation_top); |
| 3311 | 3311 |
| 3312 if ((flags & RESULT_CONTAINS_TOP) == 0) { | 3312 if ((flags & RESULT_CONTAINS_TOP) == 0) { |
| 3313 // Load allocation top into result and the allocation limit. | 3313 // Load allocation top into result and the allocation limit. |
| 3314 Ldp(result, allocation_limit, MemOperand(top_address)); | 3314 Ldp(result, allocation_limit, MemOperand(top_address)); |
| 3315 } else { | 3315 } else { |
| 3316 if (emit_debug_code()) { | 3316 if (emit_debug_code()) { |
| 3317 // Assert that result actually contains top on entry. | 3317 // Assert that result actually contains top on entry. |
| 3318 Ldr(scratch3, MemOperand(top_address)); | 3318 Ldr(scratch3, MemOperand(top_address)); |
| 3319 Cmp(result, scratch3); | 3319 Cmp(result, scratch3); |
| 3320 Check(eq, kUnexpectedAllocationTop); | 3320 Check(eq, kUnexpectedAllocationTop); |
| (...skipping 32 matching lines...) | (...skipping 32 matching lines...) |
| 3353 | 3353 |
| 3354 void MacroAssembler::UndoAllocationInNewSpace(Register object, | 3354 void MacroAssembler::UndoAllocationInNewSpace(Register object, |
| 3355 Register scratch) { | 3355 Register scratch) { |
| 3356 ExternalReference new_space_allocation_top = | 3356 ExternalReference new_space_allocation_top = |
| 3357 ExternalReference::new_space_allocation_top_address(isolate()); | 3357 ExternalReference::new_space_allocation_top_address(isolate()); |
| 3358 | 3358 |
| 3359 // Make sure the object has no tag before resetting top. | 3359 // Make sure the object has no tag before resetting top. |
| 3360 Bic(object, object, kHeapObjectTagMask); | 3360 Bic(object, object, kHeapObjectTagMask); |
| 3361 #ifdef DEBUG | 3361 #ifdef DEBUG |
| 3362 // Check that the object being un-allocated is below the current top. | 3362 // Check that the object being un-allocated is below the current top. |
| 3363 Mov(scratch, Operand(new_space_allocation_top)); | 3363 Mov(scratch, new_space_allocation_top); |
| 3364 Ldr(scratch, MemOperand(scratch)); | 3364 Ldr(scratch, MemOperand(scratch)); |
| 3365 Cmp(object, scratch); | 3365 Cmp(object, scratch); |
| 3366 Check(lt, kUndoAllocationOfNonAllocatedMemory); | 3366 Check(lt, kUndoAllocationOfNonAllocatedMemory); |
| 3367 #endif | 3367 #endif |
| 3368 // Write the address of the object to un-allocate as the current top. | 3368 // Write the address of the object to un-allocate as the current top. |
| 3369 Mov(scratch, Operand(new_space_allocation_top)); | 3369 Mov(scratch, new_space_allocation_top); |
| 3370 Str(object, MemOperand(scratch)); | 3370 Str(object, MemOperand(scratch)); |
| 3371 } | 3371 } |
| 3372 | 3372 |
| 3373 | 3373 |
| 3374 void MacroAssembler::AllocateTwoByteString(Register result, | 3374 void MacroAssembler::AllocateTwoByteString(Register result, |
| 3375 Register length, | 3375 Register length, |
| 3376 Register scratch1, | 3376 Register scratch1, |
| 3377 Register scratch2, | 3377 Register scratch2, |
| 3378 Register scratch3, | 3378 Register scratch3, |
| 3379 Label* gc_required) { | 3379 Label* gc_required) { |
| (...skipping 72 matching lines...) | (...skipping 72 matching lines...) |
| 3452 void MacroAssembler::AllocateAsciiConsString(Register result, | 3452 void MacroAssembler::AllocateAsciiConsString(Register result, |
| 3453 Register length, | 3453 Register length, |
| 3454 Register scratch1, | 3454 Register scratch1, |
| 3455 Register scratch2, | 3455 Register scratch2, |
| 3456 Label* gc_required) { | 3456 Label* gc_required) { |
| 3457 Label allocate_new_space, install_map; | 3457 Label allocate_new_space, install_map; |
| 3458 AllocationFlags flags = TAG_OBJECT; | 3458 AllocationFlags flags = TAG_OBJECT; |
| 3459 | 3459 |
| 3460 ExternalReference high_promotion_mode = ExternalReference:: | 3460 ExternalReference high_promotion_mode = ExternalReference:: |
| 3461 new_space_high_promotion_mode_active_address(isolate()); | 3461 new_space_high_promotion_mode_active_address(isolate()); |
| 3462 Mov(scratch1, Operand(high_promotion_mode)); | 3462 Mov(scratch1, high_promotion_mode); |
| 3463 Ldr(scratch1, MemOperand(scratch1)); | 3463 Ldr(scratch1, MemOperand(scratch1)); |
| 3464 Cbz(scratch1, &allocate_new_space); | 3464 Cbz(scratch1, &allocate_new_space); |
| 3465 | 3465 |
| 3466 Allocate(ConsString::kSize, | 3466 Allocate(ConsString::kSize, |
| 3467 result, | 3467 result, |
| 3468 scratch1, | 3468 scratch1, |
| 3469 scratch2, | 3469 scratch2, |
| 3470 gc_required, | 3470 gc_required, |
| 3471 static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE)); | 3471 static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE)); |
| 3472 | 3472 |
| (...skipping 632 matching lines...) | (...skipping 632 matching lines...) |
| 4105 if (emit_debug_code()) { | 4105 if (emit_debug_code()) { |
| 4106 Label ok; | 4106 Label ok; |
| 4107 JumpIfNotInNewSpace(object, &ok); | 4107 JumpIfNotInNewSpace(object, &ok); |
| 4108 Abort(kRememberedSetPointerInNewSpace); | 4108 Abort(kRememberedSetPointerInNewSpace); |
| 4109 bind(&ok); | 4109 bind(&ok); |
| 4110 } | 4110 } |
| 4111 UseScratchRegisterScope temps(this); | 4111 UseScratchRegisterScope temps(this); |
| 4112 Register scratch2 = temps.AcquireX(); | 4112 Register scratch2 = temps.AcquireX(); |
| 4113 | 4113 |
| 4114 // Load store buffer top. | 4114 // Load store buffer top. |
| 4115 Mov(scratch2, Operand(ExternalReference::store_buffer_top(isolate()))); | 4115 Mov(scratch2, ExternalReference::store_buffer_top(isolate())); |
| 4116 Ldr(scratch1, MemOperand(scratch2)); | 4116 Ldr(scratch1, MemOperand(scratch2)); |
| 4117 // Store pointer to buffer and increment buffer top. | 4117 // Store pointer to buffer and increment buffer top. |
| 4118 Str(address, MemOperand(scratch1, kPointerSize, PostIndex)); | 4118 Str(address, MemOperand(scratch1, kPointerSize, PostIndex)); |
| 4119 // Write back new top of buffer. | 4119 // Write back new top of buffer. |
| 4120 Str(scratch1, MemOperand(scratch2)); | 4120 Str(scratch1, MemOperand(scratch2)); |
| 4121 // Call stub on end of buffer. | 4121 // Call stub on end of buffer. |
| 4122 // Check for end of buffer. | 4122 // Check for end of buffer. |
| 4123 ASSERT(StoreBuffer::kStoreBufferOverflowBit == | 4123 ASSERT(StoreBuffer::kStoreBufferOverflowBit == |
| 4124 (1 << (14 + kPointerSizeLog2))); | 4124 (1 << (14 + kPointerSizeLog2))); |
| 4125 if (and_then == kFallThroughAtEnd) { | 4125 if (and_then == kFallThroughAtEnd) { |
| (...skipping 488 matching lines...) | (...skipping 488 matching lines...) |
| 4614 Bind(&ok); | 4614 Bind(&ok); |
| 4615 } | 4615 } |
| 4616 } | 4616 } |
| 4617 | 4617 |
| 4618 | 4618 |
| 4619 void MacroAssembler::AssertIsString(const Register& object) { | 4619 void MacroAssembler::AssertIsString(const Register& object) { |
| 4620 if (emit_debug_code()) { | 4620 if (emit_debug_code()) { |
| 4621 UseScratchRegisterScope temps(this); | 4621 UseScratchRegisterScope temps(this); |
| 4622 Register temp = temps.AcquireX(); | 4622 Register temp = temps.AcquireX(); |
| 4623 STATIC_ASSERT(kSmiTag == 0); | 4623 STATIC_ASSERT(kSmiTag == 0); |
| 4624 Tst(object, Operand(kSmiTagMask)); | 4624 Tst(object, kSmiTagMask); |
| 4625 Check(ne, kOperandIsNotAString); | 4625 Check(ne, kOperandIsNotAString); |
| 4626 Ldr(temp, FieldMemOperand(object, HeapObject::kMapOffset)); | 4626 Ldr(temp, FieldMemOperand(object, HeapObject::kMapOffset)); |
| 4627 CompareInstanceType(temp, temp, FIRST_NONSTRING_TYPE); | 4627 CompareInstanceType(temp, temp, FIRST_NONSTRING_TYPE); |
| 4628 Check(lo, kOperandIsNotAString); | 4628 Check(lo, kOperandIsNotAString); |
| 4629 } | 4629 } |
| 4630 } | 4630 } |
| 4631 | 4631 |
| 4632 | 4632 |
| 4633 void MacroAssembler::Check(Condition cond, BailoutReason reason) { | 4633 void MacroAssembler::Check(Condition cond, BailoutReason reason) { |
| 4634 Label ok; | 4634 Label ok; |
| (...skipping 34 matching lines...) | (...skipping 34 matching lines...) |
| 4669 // We need some scratch registers for the MacroAssembler, so make sure we have | 4669 // We need some scratch registers for the MacroAssembler, so make sure we have |
| 4670 // some. This is safe here because Abort never returns. | 4670 // some. This is safe here because Abort never returns. |
| 4671 RegList old_tmp_list = TmpList()->list(); | 4671 RegList old_tmp_list = TmpList()->list(); |
| 4672 TmpList()->Combine(ip0); | 4672 TmpList()->Combine(ip0); |
| 4673 TmpList()->Combine(ip1); | 4673 TmpList()->Combine(ip1); |
| 4674 | 4674 |
| 4675 if (use_real_aborts()) { | 4675 if (use_real_aborts()) { |
| 4676 // Avoid infinite recursion; Push contains some assertions that use Abort. | 4676 // Avoid infinite recursion; Push contains some assertions that use Abort. |
| 4677 NoUseRealAbortsScope no_real_aborts(this); | 4677 NoUseRealAbortsScope no_real_aborts(this); |
| 4678 | 4678 |
| 4679 Mov(x0, Operand(Smi::FromInt(reason))); | 4679 Mov(x0, Smi::FromInt(reason)); |
| 4680 Push(x0); | 4680 Push(x0); |
| 4681 | 4681 |
| 4682 if (!has_frame_) { | 4682 if (!has_frame_) { |
| 4683 // We don't actually want to generate a pile of code for this, so just | 4683 // We don't actually want to generate a pile of code for this, so just |
| 4684 // claim there is a stack frame, without generating one. | 4684 // claim there is a stack frame, without generating one. |
| 4685 FrameScope scope(this, StackFrame::NONE); | 4685 FrameScope scope(this, StackFrame::NONE); |
| 4686 CallRuntime(Runtime::kAbort, 1); | 4686 CallRuntime(Runtime::kAbort, 1); |
| 4687 } else { | 4687 } else { |
| 4688 CallRuntime(Runtime::kAbort, 1); | 4688 CallRuntime(Runtime::kAbort, 1); |
| 4689 } | 4689 } |
| (...skipping 381 matching lines...) | (...skipping 381 matching lines...) |
| 5071 } | 5071 } |
| 5072 #endif | 5072 #endif |
| 5073 | 5073 |
| 5074 | 5074 |
| 5075 void MacroAssembler::TruncatingDiv(Register result, | 5075 void MacroAssembler::TruncatingDiv(Register result, |
| 5076 Register dividend, | 5076 Register dividend, |
| 5077 int32_t divisor) { | 5077 int32_t divisor) { |
| 5078 ASSERT(!AreAliased(result, dividend)); | 5078 ASSERT(!AreAliased(result, dividend)); |
| 5079 ASSERT(result.Is32Bits() && dividend.Is32Bits()); | 5079 ASSERT(result.Is32Bits() && dividend.Is32Bits()); |
| 5080 MultiplierAndShift ms(divisor); | 5080 MultiplierAndShift ms(divisor); |
| 5081 Mov(result, Operand(ms.multiplier())); | 5081 Mov(result, ms.multiplier()); |
| 5082 Smull(result.X(), dividend, result); | 5082 Smull(result.X(), dividend, result); |
| 5083 Asr(result.X(), result.X(), 32); | 5083 Asr(result.X(), result.X(), 32); |
| 5084 if (divisor > 0 && ms.multiplier() < 0) Add(result, result, dividend); | 5084 if (divisor > 0 && ms.multiplier() < 0) Add(result, result, dividend); |
| 5085 if (divisor < 0 && ms.multiplier() > 0) Sub(result, result, dividend); | 5085 if (divisor < 0 && ms.multiplier() > 0) Sub(result, result, dividend); |
| 5086 if (ms.shift() > 0) Asr(result, result, ms.shift()); | 5086 if (ms.shift() > 0) Asr(result, result, ms.shift()); |
| 5087 Add(result, result, Operand(dividend, LSR, 31)); | 5087 Add(result, result, Operand(dividend, LSR, 31)); |
| 5088 } | 5088 } |
| 5089 | 5089 |
| 5090 | 5090 |
| 5091 #undef __ | 5091 #undef __ |
| (...skipping 70 matching lines...) | (...skipping 70 matching lines...) |
| 5162 } | 5162 } |
| 5163 } | 5163 } |
| 5164 | 5164 |
| 5165 | 5165 |
| 5166 #undef __ | 5166 #undef __ |
| 5167 | 5167 |
| 5168 | 5168 |
| 5169 } } // namespace v8::internal | 5169 } } // namespace v8::internal |
| 5170 | 5170 |
| 5171 #endif // V8_TARGET_ARCH_A64 | 5171 #endif // V8_TARGET_ARCH_A64 |
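The recurring change in this diff drops explicit `Operand(...)` wrappers around external references, Smis, and immediate masks at call sites such as `Mov`, `Cmp`, `And`, and `Tst`. That only compiles because `Operand` (or an equivalent overload) can be constructed implicitly from those argument types, so the wrapper adds nothing at the call site. Below is a minimal, self-contained sketch of the idea; the `Operand`, `ExternalReference`, and `Mov` declarations are illustrative stand-ins, not the actual V8 A64 definitions.

```cpp
// Minimal sketch of implicit Operand construction. The names below are
// illustrative stand-ins, not the real V8 declarations.
#include <cstdint>
#include <iostream>

struct ExternalReference {   // stand-in for v8::internal::ExternalReference
  uintptr_t address;
};

class Operand {              // stand-in for the A64 macro-assembler Operand
 public:
  Operand(int64_t immediate) : value_(immediate) {}        // implicit
  Operand(const ExternalReference& ref)                    // implicit
      : value_(static_cast<int64_t>(ref.address)) {}
  int64_t value() const { return value_; }

 private:
  int64_t value_;
};

// A helper in the style of MacroAssembler::Mov: because it takes an Operand,
// any argument that converts to Operand (immediate, ExternalReference, ...)
// can be passed with or without an explicit Operand(...) wrapper.
void Mov(const char* dst, const Operand& operand) {
  std::cout << "mov " << dst << ", #" << operand.value() << "\n";
}

int main() {
  ExternalReference ref{0x1234};
  Mov("x1", Operand(ref));   // old style: explicit wrapper
  Mov("x1", ref);            // new style: implicit conversion, same overload
  Mov("x0", 42);             // immediates convert the same way
  return 0;
}
```

Under that assumption, the old explicit form and the new bare-argument form resolve to the same overload, which is why the patch can strip the wrappers mechanically without changing the generated code.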