OLD | NEW |
1 // Copyright 2006-2009 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2009 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1604 matching lines...)
1615 // Interleave bits from both instance types and compare them in one check. | 1615 // Interleave bits from both instance types and compare them in one check. |
1616 ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3)); | 1616 ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3)); |
1617 and_(scratch1, kFlatAsciiStringMask); | 1617 and_(scratch1, kFlatAsciiStringMask); |
1618 and_(scratch2, kFlatAsciiStringMask); | 1618 and_(scratch2, kFlatAsciiStringMask); |
1619 lea(scratch1, Operand(scratch1, scratch2, times_8, 0)); | 1619 lea(scratch1, Operand(scratch1, scratch2, times_8, 0)); |
1620 cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 3)); | 1620 cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 3)); |
1621 j(not_equal, failure); | 1621 j(not_equal, failure); |
1622 } | 1622 } |
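A quick note on the interleaved check above: the lea with scale times_8 computes scratch1 + scratch2 * 8, which is the same as scratch1 | (scratch2 << 3) because the ASSERT guarantees the mask and its shifted copy do not overlap, so both instance types are tested with a single compare. Below is a minimal C++ sketch of the same arithmetic, using made-up mask/tag values since the real constants are defined elsewhere in the object headers.

#include <cstdint>

// Illustrative values only; the actual kFlatAsciiStringMask/Tag come from
// the V8 object headers and are not reproduced here.  The mask must satisfy
// (mask & (mask << 3)) == 0, mirroring the ASSERT above.
const uint32_t kFlatAsciiStringMask = 0x7;
const uint32_t kFlatAsciiStringTag = 0x4;

// Returns true when both instance types pass the flat-ASCII test,
// mirroring the and_/lea/cmp sequence in the patch.
bool BothFlatAscii(uint32_t type1, uint32_t type2) {
  uint32_t combined = (type1 & kFlatAsciiStringMask) +
                      ((type2 & kFlatAsciiStringMask) << 3);
  return combined == (kFlatAsciiStringTag | (kFlatAsciiStringTag << 3));
}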
1623 | 1623 |
1624 | 1624 |
| 1625 void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) { |
| 1626 int frame_alignment = OS::ActivationFrameAlignment(); |
| 1627 if (frame_alignment != 0) { |
| 1628 // Make stack end at alignment and make room for num_arguments words |
| 1629 // and the original value of esp. |
| 1630 mov(scratch, esp); |
| 1631 sub(Operand(esp), Immediate((num_arguments + 1) * kPointerSize)); |
| 1632 ASSERT(IsPowerOf2(frame_alignment)); |
| 1633 and_(esp, -frame_alignment); |
| 1634 mov(Operand(esp, num_arguments * kPointerSize), scratch); |
| 1635 } else { |
| 1636 sub(Operand(esp), Immediate(num_arguments * kPointerSize)); |
| 1637 } |
| 1638 } |
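For reference, here is a small sketch (not part of the patch) of the esp adjustment PrepareCallCFunction performs when OS::ActivationFrameAlignment() is non-zero. The alignment value is assumed, and kPointerSize is 4 on ia32.

#include <stdint.h>

// Sketch of the aligned-frame computation; frame_alignment must be a power
// of two, matching the ASSERT(IsPowerOf2(...)) in the patch.
uintptr_t AlignedStackTop(uintptr_t old_esp, int num_arguments,
                          uintptr_t frame_alignment) {
  const uintptr_t kPointerSize = 4;  // ia32
  // Reserve num_arguments slots plus one extra slot for the saved esp.
  uintptr_t esp = old_esp - (num_arguments + 1) * kPointerSize;
  // and_(esp, -frame_alignment) rounds down to the alignment boundary.
  esp &= ~(frame_alignment - 1);
  // The original esp ends up at [esp + num_arguments * kPointerSize],
  // which is the slot CallCFunction reloads after the call.
  return esp;
}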
| 1639 |
| 1640 |
| 1641 void MacroAssembler::CallCFunction(ExternalReference function, |
| 1642 int num_arguments) { |
| 1643 // Trashing eax is ok as it will be the return value. |
| 1644 mov(Operand(eax), Immediate(function)); |
| 1645 CallCFunction(eax, num_arguments); |
| 1646 } |
| 1647 |
| 1648 |
| 1649 void MacroAssembler::CallCFunction(Register function, |
| 1650 int num_arguments) { |
| 1651 call(Operand(function)); |
| 1652 if (OS::ActivationFrameAlignment() != 0) { |
| 1653 mov(esp, Operand(esp, num_arguments * kPointerSize)); |
| 1654 } else { |
| 1655 add(Operand(esp), Immediate(num_arguments * kPointerSize)); |
| 1656 } |
| 1657 } |
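To make the intended calling convention explicit, here is a hypothetical call site (the helper and reference names are made up, not from this patch): the frame is prepared, the C arguments are stored into the reserved slots, and CallCFunction then either restores the saved esp or simply pops the argument slots, depending on whether alignment was applied.

// Hypothetical helper illustrating the expected usage of the pair above.
// ExternalReference::some_runtime_helper() is an assumed name.
void GenerateHelperCall(MacroAssembler* masm, Register arg0, Register arg1) {
  masm->PrepareCallCFunction(2, ecx);               // ecx used as scratch
  masm->mov(Operand(esp, 0 * kPointerSize), arg0);  // first C argument
  masm->mov(Operand(esp, 1 * kPointerSize), arg1);  // second C argument
  masm->CallCFunction(ExternalReference::some_runtime_helper(), 2);
}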
| 1658 |
| 1659 |
1625 CodePatcher::CodePatcher(byte* address, int size) | 1660 CodePatcher::CodePatcher(byte* address, int size) |
1626 : address_(address), size_(size), masm_(address, size + Assembler::kGap) { | 1661 : address_(address), size_(size), masm_(address, size + Assembler::kGap) { |
1627 // Create a new macro assembler pointing to the address of the code to patch. | 1662 // Create a new macro assembler pointing to the address of the code to patch. |
1628 // The size is adjusted with kGap in order for the assembler to generate size | 1663 // The size is adjusted with kGap in order for the assembler to generate size |
1629 // bytes of instructions without failing with buffer size constraints. | 1664 // bytes of instructions without failing with buffer size constraints. |
1630 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); | 1665 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); |
1631 } | 1666 } |
1632 | 1667 |
1633 | 1668 |
1634 CodePatcher::~CodePatcher() { | 1669 CodePatcher::~CodePatcher() { |
1635 // Indicate that code has changed. | 1670 // Indicate that code has changed. |
1636 CPU::FlushICache(address_, size_); | 1671 CPU::FlushICache(address_, size_); |
1637 | 1672 |
1638 // Check that the code was patched as expected. | 1673 // Check that the code was patched as expected. |
1639 ASSERT(masm_.pc_ == address_ + size_); | 1674 ASSERT(masm_.pc_ == address_ + size_); |
1640 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); | 1675 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); |
1641 } | 1676 } |
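For completeness, a sketch of how CodePatcher is typically driven (the patched instructions here are made up): the constructor points a fresh assembler at existing code with kGap bytes of slack, and the destructor flushes the instruction cache and asserts that exactly the declared number of bytes was emitted.

// Hypothetical patch of two bytes at 'address'; any instruction sequence
// totalling exactly the declared size would do.
void PatchWithBreakpoint(byte* address) {
  CodePatcher patcher(address, 2);
  patcher.masm()->int3();  // one byte
  patcher.masm()->nop();   // one byte
}  // ~CodePatcher flushes the icache and checks that 2 bytes were written.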
1642 | 1677 |
1643 | 1678 |
1644 } } // namespace v8::internal | 1679 } } // namespace v8::internal |