OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 26 matching lines...) Expand all Loading... |
37 | 37 |
38 namespace v8 { | 38 namespace v8 { |
39 namespace internal { | 39 namespace internal { |
40 | 40 |
41 // ------------------------------------------------------------------------- | 41 // ------------------------------------------------------------------------- |
42 // MacroAssembler implementation. | 42 // MacroAssembler implementation. |
43 | 43 |
44 MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size) | 44 MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size) |
45 : Assembler(arg_isolate, buffer, size), | 45 : Assembler(arg_isolate, buffer, size), |
46 generating_stub_(false), | 46 generating_stub_(false), |
47 allow_stub_calls_(true) { | 47 allow_stub_calls_(true), |
| 48 has_frame_(false) { |
48 if (isolate() != NULL) { | 49 if (isolate() != NULL) { |
49 code_object_ = Handle<Object>(isolate()->heap()->undefined_value(), | 50 code_object_ = Handle<Object>(isolate()->heap()->undefined_value(), |
50 isolate()); | 51 isolate()); |
51 } | 52 } |
52 } | 53 } |
53 | 54 |
54 | 55 |
55 void MacroAssembler::RecordWriteHelper(Register object, | 56 void MacroAssembler::RecordWriteHelper(Register object, |
56 Register addr, | 57 Register addr, |
57 Register scratch) { | 58 Register scratch) { |
(...skipping 1143 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1201 // in initial map. | 1202 // in initial map. |
1202 bind(&non_instance); | 1203 bind(&non_instance); |
1203 mov(result, FieldOperand(result, Map::kConstructorOffset)); | 1204 mov(result, FieldOperand(result, Map::kConstructorOffset)); |
1204 | 1205 |
1205 // All done. | 1206 // All done. |
1206 bind(&done); | 1207 bind(&done); |
1207 } | 1208 } |
1208 | 1209 |
1209 | 1210 |
1210 void MacroAssembler::CallStub(CodeStub* stub, unsigned ast_id) { | 1211 void MacroAssembler::CallStub(CodeStub* stub, unsigned ast_id) { |
1211 ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs. | 1212 ASSERT(AllowThisStubCall(stub)); // Calls are not allowed in some stubs. |
1212 call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id); | 1213 call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id); |
1213 } | 1214 } |
1214 | 1215 |
1215 | 1216 |
1216 MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub) { | 1217 MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub) { |
1217 ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs. | 1218 ASSERT(AllowThisStubCall(stub)); // Calls are not allowed in some stubs. |
1218 Object* result; | 1219 Object* result; |
1219 { MaybeObject* maybe_result = stub->TryGetCode(); | 1220 { MaybeObject* maybe_result = stub->TryGetCode(); |
1220 if (!maybe_result->ToObject(&result)) return maybe_result; | 1221 if (!maybe_result->ToObject(&result)) return maybe_result; |
1221 } | 1222 } |
1222 call(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET); | 1223 call(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET); |
1223 return result; | 1224 return result; |
1224 } | 1225 } |
1225 | 1226 |
1226 | 1227 |
1227 void MacroAssembler::TailCallStub(CodeStub* stub) { | 1228 void MacroAssembler::TailCallStub(CodeStub* stub) { |
1228 ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs. | 1229 ASSERT(stub->CompilingCallsToThisStubIsGCSafe() || allow_stub_calls_); |
1229 jmp(stub->GetCode(), RelocInfo::CODE_TARGET); | 1230 jmp(stub->GetCode(), RelocInfo::CODE_TARGET); |
1230 } | 1231 } |
1231 | 1232 |
1232 | 1233 |
1233 MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub) { | 1234 MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub) { |
1234 ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs. | |
1235 Object* result; | 1235 Object* result; |
1236 { MaybeObject* maybe_result = stub->TryGetCode(); | 1236 { MaybeObject* maybe_result = stub->TryGetCode(); |
1237 if (!maybe_result->ToObject(&result)) return maybe_result; | 1237 if (!maybe_result->ToObject(&result)) return maybe_result; |
1238 } | 1238 } |
1239 jmp(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET); | 1239 jmp(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET); |
1240 return result; | 1240 return result; |
1241 } | 1241 } |
1242 | 1242 |
1243 | 1243 |
1244 void MacroAssembler::StubReturn(int argc) { | 1244 void MacroAssembler::StubReturn(int argc) { |
1245 ASSERT(argc >= 1 && generating_stub()); | 1245 ASSERT(argc >= 1 && generating_stub()); |
1246 ret((argc - 1) * kPointerSize); | 1246 ret((argc - 1) * kPointerSize); |
1247 } | 1247 } |
1248 | 1248 |
1249 | 1249 |
| 1250 bool MacroAssembler::AllowThisStubCall(CodeStub* stub) { |
| 1251 if (!has_frame_ && stub->SometimesSetsUpAFrame()) return false; |
| 1252 return stub->CompilingCallsToThisStubIsGCSafe() || allow_stub_calls_; |
| 1253 } |
| 1254 |
| 1255 |
1250 void MacroAssembler::IllegalOperation(int num_arguments) { | 1256 void MacroAssembler::IllegalOperation(int num_arguments) { |
1251 if (num_arguments > 0) { | 1257 if (num_arguments > 0) { |
1252 add(Operand(esp), Immediate(num_arguments * kPointerSize)); | 1258 add(Operand(esp), Immediate(num_arguments * kPointerSize)); |
1253 } | 1259 } |
1254 mov(eax, Immediate(isolate()->factory()->undefined_value())); | 1260 mov(eax, Immediate(isolate()->factory()->undefined_value())); |
1255 } | 1261 } |
1256 | 1262 |
1257 | 1263 |
1258 void MacroAssembler::IndexFromHash(Register hash, Register index) { | 1264 void MacroAssembler::IndexFromHash(Register hash, Register index) { |
1259 // The assert checks that the constants for the maximum number of digits | 1265 // The assert checks that the constants for the maximum number of digits |
(...skipping 360 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1620 } | 1626 } |
1621 } | 1627 } |
1622 | 1628 |
1623 | 1629 |
1624 void MacroAssembler::InvokeCode(const Operand& code, | 1630 void MacroAssembler::InvokeCode(const Operand& code, |
1625 const ParameterCount& expected, | 1631 const ParameterCount& expected, |
1626 const ParameterCount& actual, | 1632 const ParameterCount& actual, |
1627 InvokeFlag flag, | 1633 InvokeFlag flag, |
1628 const CallWrapper& call_wrapper, | 1634 const CallWrapper& call_wrapper, |
1629 CallKind call_kind) { | 1635 CallKind call_kind) { |
| 1636 // You can't call a function without a valid frame. |
| 1637 ASSERT(flag == JUMP_FUNCTION || has_frame()); |
| 1638 |
1630 Label done; | 1639 Label done; |
1631 InvokePrologue(expected, actual, Handle<Code>::null(), code, | 1640 InvokePrologue(expected, actual, Handle<Code>::null(), code, |
1632 &done, flag, Label::kNear, call_wrapper, | 1641 &done, flag, Label::kNear, call_wrapper, |
1633 call_kind); | 1642 call_kind); |
1634 if (flag == CALL_FUNCTION) { | 1643 if (flag == CALL_FUNCTION) { |
1635 call_wrapper.BeforeCall(CallSize(code)); | 1644 call_wrapper.BeforeCall(CallSize(code)); |
1636 SetCallKind(ecx, call_kind); | 1645 SetCallKind(ecx, call_kind); |
1637 call(code); | 1646 call(code); |
1638 call_wrapper.AfterCall(); | 1647 call_wrapper.AfterCall(); |
1639 } else { | 1648 } else { |
1640 ASSERT(flag == JUMP_FUNCTION); | 1649 ASSERT(flag == JUMP_FUNCTION); |
1641 SetCallKind(ecx, call_kind); | 1650 SetCallKind(ecx, call_kind); |
1642 jmp(code); | 1651 jmp(code); |
1643 } | 1652 } |
1644 bind(&done); | 1653 bind(&done); |
1645 } | 1654 } |
1646 | 1655 |
1647 | 1656 |
1648 void MacroAssembler::InvokeCode(Handle<Code> code, | 1657 void MacroAssembler::InvokeCode(Handle<Code> code, |
1649 const ParameterCount& expected, | 1658 const ParameterCount& expected, |
1650 const ParameterCount& actual, | 1659 const ParameterCount& actual, |
1651 RelocInfo::Mode rmode, | 1660 RelocInfo::Mode rmode, |
1652 InvokeFlag flag, | 1661 InvokeFlag flag, |
1653 const CallWrapper& call_wrapper, | 1662 const CallWrapper& call_wrapper, |
1654 CallKind call_kind) { | 1663 CallKind call_kind) { |
| 1664 // You can't call a function without a valid frame. |
| 1665 ASSERT(flag == JUMP_FUNCTION || has_frame()); |
| 1666 |
1655 Label done; | 1667 Label done; |
1656 Operand dummy(eax); | 1668 Operand dummy(eax); |
1657 InvokePrologue(expected, actual, code, dummy, &done, flag, Label::kNear, | 1669 InvokePrologue(expected, actual, code, dummy, &done, flag, Label::kNear, |
1658 call_wrapper, call_kind); | 1670 call_wrapper, call_kind); |
1659 if (flag == CALL_FUNCTION) { | 1671 if (flag == CALL_FUNCTION) { |
1660 call_wrapper.BeforeCall(CallSize(code, rmode)); | 1672 call_wrapper.BeforeCall(CallSize(code, rmode)); |
1661 SetCallKind(ecx, call_kind); | 1673 SetCallKind(ecx, call_kind); |
1662 call(code, rmode); | 1674 call(code, rmode); |
1663 call_wrapper.AfterCall(); | 1675 call_wrapper.AfterCall(); |
1664 } else { | 1676 } else { |
1665 ASSERT(flag == JUMP_FUNCTION); | 1677 ASSERT(flag == JUMP_FUNCTION); |
1666 SetCallKind(ecx, call_kind); | 1678 SetCallKind(ecx, call_kind); |
1667 jmp(code, rmode); | 1679 jmp(code, rmode); |
1668 } | 1680 } |
1669 bind(&done); | 1681 bind(&done); |
1670 } | 1682 } |
1671 | 1683 |
1672 | 1684 |
1673 void MacroAssembler::InvokeFunction(Register fun, | 1685 void MacroAssembler::InvokeFunction(Register fun, |
1674 const ParameterCount& actual, | 1686 const ParameterCount& actual, |
1675 InvokeFlag flag, | 1687 InvokeFlag flag, |
1676 const CallWrapper& call_wrapper, | 1688 const CallWrapper& call_wrapper, |
1677 CallKind call_kind) { | 1689 CallKind call_kind) { |
| 1690 // You can't call a function without a valid frame. |
| 1691 ASSERT(flag == JUMP_FUNCTION || has_frame()); |
| 1692 |
1678 ASSERT(fun.is(edi)); | 1693 ASSERT(fun.is(edi)); |
1679 mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset)); | 1694 mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset)); |
1680 mov(esi, FieldOperand(edi, JSFunction::kContextOffset)); | 1695 mov(esi, FieldOperand(edi, JSFunction::kContextOffset)); |
1681 mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset)); | 1696 mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset)); |
1682 SmiUntag(ebx); | 1697 SmiUntag(ebx); |
1683 | 1698 |
1684 ParameterCount expected(ebx); | 1699 ParameterCount expected(ebx); |
1685 InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset), | 1700 InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset), |
1686 expected, actual, flag, call_wrapper, call_kind); | 1701 expected, actual, flag, call_wrapper, call_kind); |
1687 } | 1702 } |
1688 | 1703 |
1689 | 1704 |
1690 void MacroAssembler::InvokeFunction(JSFunction* function, | 1705 void MacroAssembler::InvokeFunction(JSFunction* function, |
1691 const ParameterCount& actual, | 1706 const ParameterCount& actual, |
1692 InvokeFlag flag, | 1707 InvokeFlag flag, |
1693 const CallWrapper& call_wrapper, | 1708 const CallWrapper& call_wrapper, |
1694 CallKind call_kind) { | 1709 CallKind call_kind) { |
| 1710 // You can't call a function without a valid frame. |
| 1711 ASSERT(flag == JUMP_FUNCTION || has_frame()); |
| 1712 |
1695 ASSERT(function->is_compiled()); | 1713 ASSERT(function->is_compiled()); |
1696 // Get the function and setup the context. | 1714 // Get the function and setup the context. |
1697 mov(edi, Immediate(Handle<JSFunction>(function))); | 1715 mov(edi, Immediate(Handle<JSFunction>(function))); |
1698 mov(esi, FieldOperand(edi, JSFunction::kContextOffset)); | 1716 mov(esi, FieldOperand(edi, JSFunction::kContextOffset)); |
1699 | 1717 |
1700 ParameterCount expected(function->shared()->formal_parameter_count()); | 1718 ParameterCount expected(function->shared()->formal_parameter_count()); |
1701 if (V8::UseCrankshaft()) { | 1719 if (V8::UseCrankshaft()) { |
1702 // TODO(kasperl): For now, we always call indirectly through the | 1720 // TODO(kasperl): For now, we always call indirectly through the |
1703 // code field in the function to allow recompilation to take effect | 1721 // code field in the function to allow recompilation to take effect |
1704 // without changing any of the call sites. | 1722 // without changing any of the call sites. |
1705 InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset), | 1723 InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset), |
1706 expected, actual, flag, call_wrapper, call_kind); | 1724 expected, actual, flag, call_wrapper, call_kind); |
1707 } else { | 1725 } else { |
1708 Handle<Code> code(function->code()); | 1726 Handle<Code> code(function->code()); |
1709 InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, | 1727 InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, |
1710 flag, call_wrapper, call_kind); | 1728 flag, call_wrapper, call_kind); |
1711 } | 1729 } |
1712 } | 1730 } |
1713 | 1731 |
1714 | 1732 |
1715 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, | 1733 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, |
1716 InvokeFlag flag, | 1734 InvokeFlag flag, |
1717 const CallWrapper& call_wrapper) { | 1735 const CallWrapper& call_wrapper) { |
1718 // Calls are not allowed in some stubs. | 1736 // You can't call a builtin without a valid frame. |
1719 ASSERT(flag == JUMP_FUNCTION || allow_stub_calls()); | 1737 ASSERT(flag == JUMP_FUNCTION || has_frame()); |
1720 | 1738 |
1721 // Rely on the assertion to check that the number of provided | 1739 // Rely on the assertion to check that the number of provided |
1722 // arguments match the expected number of arguments. Fake a | 1740 // arguments match the expected number of arguments. Fake a |
1723 // parameter count to avoid emitting code to do the check. | 1741 // parameter count to avoid emitting code to do the check. |
1724 ParameterCount expected(0); | 1742 ParameterCount expected(0); |
1725 GetBuiltinFunction(edi, id); | 1743 GetBuiltinFunction(edi, id); |
1726 InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset), | 1744 InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset), |
1727 expected, expected, flag, call_wrapper, CALL_AS_METHOD); | 1745 expected, expected, flag, call_wrapper, CALL_AS_METHOD); |
1728 } | 1746 } |
1729 | 1747 |
| 1748 |
1730 void MacroAssembler::GetBuiltinFunction(Register target, | 1749 void MacroAssembler::GetBuiltinFunction(Register target, |
1731 Builtins::JavaScript id) { | 1750 Builtins::JavaScript id) { |
1732 // Load the JavaScript builtin function from the builtins object. | 1751 // Load the JavaScript builtin function from the builtins object. |
1733 mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX))); | 1752 mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX))); |
1734 mov(target, FieldOperand(target, GlobalObject::kBuiltinsOffset)); | 1753 mov(target, FieldOperand(target, GlobalObject::kBuiltinsOffset)); |
1735 mov(target, FieldOperand(target, | 1754 mov(target, FieldOperand(target, |
1736 JSBuiltinsObject::OffsetOfFunctionWithId(id))); | 1755 JSBuiltinsObject::OffsetOfFunctionWithId(id))); |
1737 } | 1756 } |
1738 | 1757 |
| 1758 |
1739 void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) { | 1759 void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) { |
1740 ASSERT(!target.is(edi)); | 1760 ASSERT(!target.is(edi)); |
1741 // Load the JavaScript builtin function from the builtins object. | 1761 // Load the JavaScript builtin function from the builtins object. |
1742 GetBuiltinFunction(edi, id); | 1762 GetBuiltinFunction(edi, id); |
1743 // Load the code entry point from the function into the target register. | 1763 // Load the code entry point from the function into the target register. |
1744 mov(target, FieldOperand(edi, JSFunction::kCodeEntryOffset)); | 1764 mov(target, FieldOperand(edi, JSFunction::kCodeEntryOffset)); |
1745 } | 1765 } |
1746 | 1766 |
1747 | 1767 |
1748 void MacroAssembler::LoadContext(Register dst, int context_chain_length) { | 1768 void MacroAssembler::LoadContext(Register dst, int context_chain_length) { |
(...skipping 235 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1984 // from the real pointer as a smi. | 2004 // from the real pointer as a smi. |
1985 intptr_t p1 = reinterpret_cast<intptr_t>(msg); | 2005 intptr_t p1 = reinterpret_cast<intptr_t>(msg); |
1986 intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag; | 2006 intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag; |
1987 ASSERT(reinterpret_cast<Object*>(p0)->IsSmi()); | 2007 ASSERT(reinterpret_cast<Object*>(p0)->IsSmi()); |
1988 #ifdef DEBUG | 2008 #ifdef DEBUG |
1989 if (msg != NULL) { | 2009 if (msg != NULL) { |
1990 RecordComment("Abort message: "); | 2010 RecordComment("Abort message: "); |
1991 RecordComment(msg); | 2011 RecordComment(msg); |
1992 } | 2012 } |
1993 #endif | 2013 #endif |
1994 // Disable stub call restrictions to always allow calls to abort. | |
1995 AllowStubCallsScope allow_scope(this, true); | |
1996 | 2014 |
1997 push(eax); | 2015 push(eax); |
1998 push(Immediate(p0)); | 2016 push(Immediate(p0)); |
1999 push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0)))); | 2017 push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0)))); |
2000 CallRuntime(Runtime::kAbort, 2); | 2018 // CallRuntime requires a frame; fake one here if none exists so that |
| 2019 if (!has_frame_) { |
| 2020 // We don't actually want to generate a pile of code for this, so just |
| 2021 // claim there is a stack frame, without generating one. |
| 2022 FrameScope scope(this, StackFrame::NONE); |
| 2023 CallRuntime(Runtime::kAbort, 2); |
| 2024 } else { |
| 2025 CallRuntime(Runtime::kAbort, 2); |
| 2026 } |
2001 // will not return here | 2027 // will not return here |
2002 int3(); | 2028 int3(); |
2003 } | 2029 } |
2004 | 2030 |
2005 | 2031 |
2006 void MacroAssembler::LoadInstanceDescriptors(Register map, | 2032 void MacroAssembler::LoadInstanceDescriptors(Register map, |
2007 Register descriptors) { | 2033 Register descriptors) { |
2008 mov(descriptors, | 2034 mov(descriptors, |
2009 FieldOperand(map, Map::kInstanceDescriptorsOrBitField3Offset)); | 2035 FieldOperand(map, Map::kInstanceDescriptorsOrBitField3Offset)); |
2010 Label not_smi; | 2036 Label not_smi; |
(...skipping 79 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2090 void MacroAssembler::CallCFunction(ExternalReference function, | 2116 void MacroAssembler::CallCFunction(ExternalReference function, |
2091 int num_arguments) { | 2117 int num_arguments) { |
2092 // Trashing eax is ok as it will be the return value. | 2118 // Trashing eax is ok as it will be the return value. |
2093 mov(Operand(eax), Immediate(function)); | 2119 mov(Operand(eax), Immediate(function)); |
2094 CallCFunction(eax, num_arguments); | 2120 CallCFunction(eax, num_arguments); |
2095 } | 2121 } |
2096 | 2122 |
2097 | 2123 |
2098 void MacroAssembler::CallCFunction(Register function, | 2124 void MacroAssembler::CallCFunction(Register function, |
2099 int num_arguments) { | 2125 int num_arguments) { |
| 2126 ASSERT(has_frame()); |
2100 // Check stack alignment. | 2127 // Check stack alignment. |
2101 if (emit_debug_code()) { | 2128 if (emit_debug_code()) { |
2102 CheckStackAlignment(); | 2129 CheckStackAlignment(); |
2103 } | 2130 } |
2104 | 2131 |
2105 call(Operand(function)); | 2132 call(Operand(function)); |
2106 if (OS::ActivationFrameAlignment() != 0) { | 2133 if (OS::ActivationFrameAlignment() != 0) { |
2107 mov(esp, Operand(esp, num_arguments * kPointerSize)); | 2134 mov(esp, Operand(esp, num_arguments * kPointerSize)); |
2108 } else { | 2135 } else { |
2109 add(Operand(esp), Immediate(num_arguments * kPointerSize)); | 2136 add(Operand(esp), Immediate(num_arguments * kPointerSize)); |
(...skipping 18 matching lines...) Expand all Loading... |
2128 | 2155 |
2129 // Check that the code was patched as expected. | 2156 // Check that the code was patched as expected. |
2130 ASSERT(masm_.pc_ == address_ + size_); | 2157 ASSERT(masm_.pc_ == address_ + size_); |
2131 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); | 2158 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); |
2132 } | 2159 } |
2133 | 2160 |
2134 | 2161 |
2135 } } // namespace v8::internal | 2162 } } // namespace v8::internal |
2136 | 2163 |
2137 #endif // V8_TARGET_ARCH_IA32 | 2164 #endif // V8_TARGET_ARCH_IA32 |
OLD | NEW |