| OLD | NEW |
| 1 // Copyright 2006-2009 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2009 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 156 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 167 // The mask isn't really an address. We load it as an external reference in | 167 // The mask isn't really an address. We load it as an external reference in |
| 168 // case the size of the new space is different between the snapshot maker | 168 // case the size of the new space is different between the snapshot maker |
| 169 // and the running system. | 169 // and the running system. |
| 170 and_(Operand(value), Immediate(ExternalReference::new_space_mask())); | 170 and_(Operand(value), Immediate(ExternalReference::new_space_mask())); |
| 171 cmp(Operand(value), Immediate(ExternalReference::new_space_start())); | 171 cmp(Operand(value), Immediate(ExternalReference::new_space_start())); |
| 172 j(equal, &done); | 172 j(equal, &done); |
| 173 } else { | 173 } else { |
| 174 int32_t new_space_start = reinterpret_cast<int32_t>( | 174 int32_t new_space_start = reinterpret_cast<int32_t>( |
| 175 ExternalReference::new_space_start().address()); | 175 ExternalReference::new_space_start().address()); |
| 176 lea(value, Operand(object, -new_space_start)); | 176 lea(value, Operand(object, -new_space_start)); |
| 177 and_(value, Heap::NewSpaceMask()); | 177 // The mask isn't really an address. We load it as an external reference in |
| 178 // case the size of the new space is different between the snapshot maker |
| 179 // and the running system. |
| 180 and_(Operand(value), Immediate(ExternalReference::new_space_mask())); |
| 178 j(equal, &done); | 181 j(equal, &done); |
| 179 } | 182 } |
| 180 | 183 |
| 181 if ((offset > 0) && (offset < Page::kMaxHeapObjectSize)) { | 184 if ((offset > 0) && (offset < Page::kMaxHeapObjectSize)) { |
| 182 // Compute the bit offset in the remembered set, leave it in 'value'. | 185 // Compute the bit offset in the remembered set, leave it in 'value'. |
| 183 lea(value, Operand(object, offset)); | 186 lea(value, Operand(object, offset)); |
| 184 and_(value, Page::kPageAlignmentMask); | 187 and_(value, Page::kPageAlignmentMask); |
| 185 shr(value, kPointerSizeLog2); | 188 shr(value, kPointerSizeLog2); |
| 186 | 189 |
| 187 // Compute the page address from the heap object pointer, leave it in | 190 // Compute the page address from the heap object pointer, leave it in |
| (...skipping 994 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1182 // arguments passed in because it is constant. At some point we | 1185 // arguments passed in because it is constant. At some point we |
| 1183 // should remove this need and make the runtime routine entry code | 1186 // should remove this need and make the runtime routine entry code |
| 1184 // smarter. | 1187 // smarter. |
| 1185 Set(eax, Immediate(num_arguments)); | 1188 Set(eax, Immediate(num_arguments)); |
| 1186 mov(ebx, Immediate(ExternalReference(f))); | 1189 mov(ebx, Immediate(ExternalReference(f))); |
| 1187 CEntryStub ces(1); | 1190 CEntryStub ces(1); |
| 1188 return TryCallStub(&ces); | 1191 return TryCallStub(&ces); |
| 1189 } | 1192 } |
| 1190 | 1193 |
| 1191 | 1194 |
| 1192 void MacroAssembler::TailCallRuntime(const ExternalReference& ext, | 1195 void MacroAssembler::TailCallExternalReference(const ExternalReference& ext, |
| 1193 int num_arguments, | 1196 int num_arguments, |
| 1194 int result_size) { | 1197 int result_size) { |
| 1195 // TODO(1236192): Most runtime routines don't need the number of | 1198 // TODO(1236192): Most runtime routines don't need the number of |
| 1196 // arguments passed in because it is constant. At some point we | 1199 // arguments passed in because it is constant. At some point we |
| 1197 // should remove this need and make the runtime routine entry code | 1200 // should remove this need and make the runtime routine entry code |
| 1198 // smarter. | 1201 // smarter. |
| 1199 Set(eax, Immediate(num_arguments)); | 1202 Set(eax, Immediate(num_arguments)); |
| 1200 JumpToRuntime(ext); | 1203 JumpToExternalReference(ext); |
| 1204 } |
| 1205 |
| 1206 |
| 1207 void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid, |
| 1208 int num_arguments, |
| 1209 int result_size) { |
| 1210 TailCallExternalReference(ExternalReference(fid), num_arguments, result_size); |
| 1201 } | 1211 } |
| 1202 | 1212 |
| 1203 | 1213 |
| 1204 void MacroAssembler::PushHandleScope(Register scratch) { | 1214 void MacroAssembler::PushHandleScope(Register scratch) { |
| 1205 // Push the number of extensions, smi-tagged so the gc will ignore it. | 1215 // Push the number of extensions, smi-tagged so the gc will ignore it. |
| 1206 ExternalReference extensions_address = | 1216 ExternalReference extensions_address = |
| 1207 ExternalReference::handle_scope_extensions_address(); | 1217 ExternalReference::handle_scope_extensions_address(); |
| 1208 mov(scratch, Operand::StaticVariable(extensions_address)); | 1218 mov(scratch, Operand::StaticVariable(extensions_address)); |
| 1209 ASSERT_EQ(0, kSmiTag); | 1219 ASSERT_EQ(0, kSmiTag); |
| 1210 shl(scratch, kSmiTagSize); | 1220 shl(scratch, kSmiTagSize); |
| (...skipping 49 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1260 void MacroAssembler::PopHandleScope(Register saved, Register scratch) { | 1270 void MacroAssembler::PopHandleScope(Register saved, Register scratch) { |
| 1261 PopHandleScopeHelper(saved, scratch, true); | 1271 PopHandleScopeHelper(saved, scratch, true); |
| 1262 } | 1272 } |
| 1263 | 1273 |
| 1264 | 1274 |
| 1265 Object* MacroAssembler::TryPopHandleScope(Register saved, Register scratch) { | 1275 Object* MacroAssembler::TryPopHandleScope(Register saved, Register scratch) { |
| 1266 return PopHandleScopeHelper(saved, scratch, false); | 1276 return PopHandleScopeHelper(saved, scratch, false); |
| 1267 } | 1277 } |
| 1268 | 1278 |
| 1269 | 1279 |
| 1270 void MacroAssembler::JumpToRuntime(const ExternalReference& ext) { | 1280 void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) { |
| 1271 // Set the entry point and jump to the C entry runtime stub. | 1281 // Set the entry point and jump to the C entry runtime stub. |
| 1272 mov(ebx, Immediate(ext)); | 1282 mov(ebx, Immediate(ext)); |
| 1273 CEntryStub ces(1); | 1283 CEntryStub ces(1); |
| 1274 jmp(ces.GetCode(), RelocInfo::CODE_TARGET); | 1284 jmp(ces.GetCode(), RelocInfo::CODE_TARGET); |
| 1275 } | 1285 } |
| 1276 | 1286 |
| 1277 | 1287 |
| 1278 void MacroAssembler::InvokePrologue(const ParameterCount& expected, | 1288 void MacroAssembler::InvokePrologue(const ParameterCount& expected, |
| 1279 const ParameterCount& actual, | 1289 const ParameterCount& actual, |
| 1280 Handle<Code> code_constant, | 1290 Handle<Code> code_constant, |
| (...skipping 330 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1611 // Interleave bits from both instance types and compare them in one check. | 1621 // Interleave bits from both instance types and compare them in one check. |
| 1612 ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3)); | 1622 ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3)); |
| 1613 and_(scratch1, kFlatAsciiStringMask); | 1623 and_(scratch1, kFlatAsciiStringMask); |
| 1614 and_(scratch2, kFlatAsciiStringMask); | 1624 and_(scratch2, kFlatAsciiStringMask); |
| 1615 lea(scratch1, Operand(scratch1, scratch2, times_8, 0)); | 1625 lea(scratch1, Operand(scratch1, scratch2, times_8, 0)); |
| 1616 cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 3)); | 1626 cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 3)); |
| 1617 j(not_equal, failure); | 1627 j(not_equal, failure); |
| 1618 } | 1628 } |
| 1619 | 1629 |
| 1620 | 1630 |
| 1631 void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) { |
| 1632 int frameAlignment = OS::ActivationFrameAlignment(); |
| 1633 if (frameAlignment != 0) { |
| 1634 // Make stack end at alignment and make room for num_arguments words |
| 1635 // and the original value of esp. |
| 1636 mov(scratch, esp); |
| 1637 sub(Operand(esp), Immediate((num_arguments + 1) * kPointerSize)); |
| 1638 ASSERT(IsPowerOf2(frameAlignment)); |
| 1639 and_(esp, -frameAlignment); |
| 1640 mov(Operand(esp, num_arguments * kPointerSize), scratch); |
| 1641 } else { |
| 1642 sub(Operand(esp), Immediate(num_arguments * kPointerSize)); |
| 1643 } |
| 1644 } |
| 1645 |
| 1646 |
| 1647 void MacroAssembler::CallCFunction(ExternalReference function, |
| 1648 int num_arguments) { |
| 1649 // Trashing eax is ok as it will be the return value. |
| 1650 mov(Operand(eax), Immediate(function)); |
| 1651 CallCFunction(eax, num_arguments); |
| 1652 } |
| 1653 |
| 1654 |
| 1655 void MacroAssembler::CallCFunction(Register function, |
| 1656 int num_arguments) { |
| 1657 call(Operand(function)); |
| 1658 if (OS::ActivationFrameAlignment() != 0) { |
| 1659 mov(esp, Operand(esp, num_arguments * kPointerSize)); |
| 1660 } else { |
| 1661 add(Operand(esp), Immediate(num_arguments * sizeof(int32_t))); |
| 1662 } |
| 1663 } |
| 1664 |
| 1665 |
| 1621 CodePatcher::CodePatcher(byte* address, int size) | 1666 CodePatcher::CodePatcher(byte* address, int size) |
| 1622 : address_(address), size_(size), masm_(address, size + Assembler::kGap) { | 1667 : address_(address), size_(size), masm_(address, size + Assembler::kGap) { |
| 1623 // Create a new macro assembler pointing to the address of the code to patch. | 1668 // Create a new macro assembler pointing to the address of the code to patch. |
| 1624 // The size is adjusted with kGap in order for the assembler to generate size | 1669 // The size is adjusted with kGap in order for the assembler to generate size |
| 1625 // bytes of instructions without failing with buffer size constraints. | 1670 // bytes of instructions without failing with buffer size constraints. |
| 1626 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); | 1671 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); |
| 1627 } | 1672 } |
| 1628 | 1673 |
| 1629 | 1674 |
| 1630 CodePatcher::~CodePatcher() { | 1675 CodePatcher::~CodePatcher() { |
| 1631 // Indicate that code has changed. | 1676 // Indicate that code has changed. |
| 1632 CPU::FlushICache(address_, size_); | 1677 CPU::FlushICache(address_, size_); |
| 1633 | 1678 |
| 1634 // Check that the code was patched as expected. | 1679 // Check that the code was patched as expected. |
| 1635 ASSERT(masm_.pc_ == address_ + size_); | 1680 ASSERT(masm_.pc_ == address_ + size_); |
| 1636 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); | 1681 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); |
| 1637 } | 1682 } |
| 1638 | 1683 |
| 1639 | 1684 |
| 1640 } } // namespace v8::internal | 1685 } } // namespace v8::internal |
| OLD | NEW |