| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 26 matching lines...) |
| 37 | 37 |
| 38 namespace v8 { | 38 namespace v8 { |
| 39 namespace internal { | 39 namespace internal { |
| 40 | 40 |
| 41 // ------------------------------------------------------------------------- | 41 // ------------------------------------------------------------------------- |
| 42 // MacroAssembler implementation. | 42 // MacroAssembler implementation. |
| 43 | 43 |
| 44 MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size) | 44 MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size) |
| 45 : Assembler(arg_isolate, buffer, size), | 45 : Assembler(arg_isolate, buffer, size), |
| 46 generating_stub_(false), | 46 generating_stub_(false), |
| 47 allow_stub_calls_(true) { | 47 allow_stub_calls_(true), |
| 48 has_frame_(false) { |
| 48 if (isolate() != NULL) { | 49 if (isolate() != NULL) { |
| 49 code_object_ = Handle<Object>(isolate()->heap()->undefined_value(), | 50 code_object_ = Handle<Object>(isolate()->heap()->undefined_value(), |
| 50 isolate()); | 51 isolate()); |
| 51 } | 52 } |
| 52 } | 53 } |
| 53 | 54 |
| 54 | 55 |
| 55 void MacroAssembler::RecordWriteHelper(Register object, | 56 void MacroAssembler::RecordWriteHelper(Register object, |
| 56 Register addr, | 57 Register addr, |
| 57 Register scratch) { | 58 Register scratch) { |
| (...skipping 1302 matching lines...) |
| 1360 // in initial map. | 1361 // in initial map. |
| 1361 bind(&non_instance); | 1362 bind(&non_instance); |
| 1362 mov(result, FieldOperand(result, Map::kConstructorOffset)); | 1363 mov(result, FieldOperand(result, Map::kConstructorOffset)); |
| 1363 | 1364 |
| 1364 // All done. | 1365 // All done. |
| 1365 bind(&done); | 1366 bind(&done); |
| 1366 } | 1367 } |
| 1367 | 1368 |
| 1368 | 1369 |
| 1369 void MacroAssembler::CallStub(CodeStub* stub, unsigned ast_id) { | 1370 void MacroAssembler::CallStub(CodeStub* stub, unsigned ast_id) { |
| 1370 ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs. | 1371 ASSERT(AllowThisStubCall(stub)); // Calls are not allowed in some stubs. |
| 1371 call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id); | 1372 call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id); |
| 1372 } | 1373 } |
| 1373 | 1374 |
| 1374 | 1375 |
| 1375 MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub) { | 1376 MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub) { |
| 1376 ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs. | 1377 ASSERT(AllowThisStubCall(stub)); // Calls are not allowed in some stubs. |
| 1377 Object* result; | 1378 Object* result; |
| 1378 { MaybeObject* maybe_result = stub->TryGetCode(); | 1379 { MaybeObject* maybe_result = stub->TryGetCode(); |
| 1379 if (!maybe_result->ToObject(&result)) return maybe_result; | 1380 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 1380 } | 1381 } |
| 1381 call(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET); | 1382 call(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET); |
| 1382 return result; | 1383 return result; |
| 1383 } | 1384 } |
| 1384 | 1385 |
| 1385 | 1386 |
| 1386 void MacroAssembler::TailCallStub(CodeStub* stub) { | 1387 void MacroAssembler::TailCallStub(CodeStub* stub) { |
| 1387 ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs. | 1388 ASSERT(stub->CompilingCallsToThisStubIsGCSafe() || allow_stub_calls_); |
| 1388 jmp(stub->GetCode(), RelocInfo::CODE_TARGET); | 1389 jmp(stub->GetCode(), RelocInfo::CODE_TARGET); |
| 1389 } | 1390 } |
| 1390 | 1391 |
| 1391 | 1392 |
| 1392 MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub) { | 1393 MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub) { |
| 1393 ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs. | |
| 1394 Object* result; | 1394 Object* result; |
| 1395 { MaybeObject* maybe_result = stub->TryGetCode(); | 1395 { MaybeObject* maybe_result = stub->TryGetCode(); |
| 1396 if (!maybe_result->ToObject(&result)) return maybe_result; | 1396 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 1397 } | 1397 } |
| 1398 jmp(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET); | 1398 jmp(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET); |
| 1399 return result; | 1399 return result; |
| 1400 } | 1400 } |
| 1401 | 1401 |
| 1402 | 1402 |
| 1403 void MacroAssembler::StubReturn(int argc) { | 1403 void MacroAssembler::StubReturn(int argc) { |
| 1404 ASSERT(argc >= 1 && generating_stub()); | 1404 ASSERT(argc >= 1 && generating_stub()); |
| 1405 ret((argc - 1) * kPointerSize); | 1405 ret((argc - 1) * kPointerSize); |
| 1406 } | 1406 } |
| 1407 | 1407 |
| 1408 | 1408 |
| 1409 bool MacroAssembler::AllowThisStubCall(CodeStub* stub) { |
| 1410 if (!has_frame_ && stub->SometimesSetsUpAFrame()) return false; |
| 1411 return stub->CompilingCallsToThisStubIsGCSafe() || allow_stub_calls_; |
| 1412 } |
| 1413 |
| 1414 |
| 1409 void MacroAssembler::IllegalOperation(int num_arguments) { | 1415 void MacroAssembler::IllegalOperation(int num_arguments) { |
| 1410 if (num_arguments > 0) { | 1416 if (num_arguments > 0) { |
| 1411 add(Operand(esp), Immediate(num_arguments * kPointerSize)); | 1417 add(Operand(esp), Immediate(num_arguments * kPointerSize)); |
| 1412 } | 1418 } |
| 1413 mov(eax, Immediate(isolate()->factory()->undefined_value())); | 1419 mov(eax, Immediate(isolate()->factory()->undefined_value())); |
| 1414 } | 1420 } |
| 1415 | 1421 |
| 1416 | 1422 |
| 1417 void MacroAssembler::IndexFromHash(Register hash, Register index) { | 1423 void MacroAssembler::IndexFromHash(Register hash, Register index) { |
| 1418 // The assert checks that the constants for the maximum number of digits | 1424 // The assert checks that the constants for the maximum number of digits |
| (...skipping 358 matching lines...) |
| 1777 } | 1783 } |
| 1778 } | 1784 } |
| 1779 | 1785 |
| 1780 | 1786 |
| 1781 void MacroAssembler::InvokeCode(const Operand& code, | 1787 void MacroAssembler::InvokeCode(const Operand& code, |
| 1782 const ParameterCount& expected, | 1788 const ParameterCount& expected, |
| 1783 const ParameterCount& actual, | 1789 const ParameterCount& actual, |
| 1784 InvokeFlag flag, | 1790 InvokeFlag flag, |
| 1785 const CallWrapper& call_wrapper, | 1791 const CallWrapper& call_wrapper, |
| 1786 CallKind call_kind) { | 1792 CallKind call_kind) { |
| 1793 // You can't call a function without a valid frame. |
| 1794 ASSERT(flag == JUMP_FUNCTION || has_frame()); |
| 1795 |
| 1787 Label done; | 1796 Label done; |
| 1788 InvokePrologue(expected, actual, Handle<Code>::null(), code, | 1797 InvokePrologue(expected, actual, Handle<Code>::null(), code, |
| 1789 &done, flag, Label::kNear, call_wrapper, | 1798 &done, flag, Label::kNear, call_wrapper, |
| 1790 call_kind); | 1799 call_kind); |
| 1791 if (flag == CALL_FUNCTION) { | 1800 if (flag == CALL_FUNCTION) { |
| 1792 call_wrapper.BeforeCall(CallSize(code)); | 1801 call_wrapper.BeforeCall(CallSize(code)); |
| 1793 SetCallKind(ecx, call_kind); | 1802 SetCallKind(ecx, call_kind); |
| 1794 call(code); | 1803 call(code); |
| 1795 call_wrapper.AfterCall(); | 1804 call_wrapper.AfterCall(); |
| 1796 } else { | 1805 } else { |
| 1797 ASSERT(flag == JUMP_FUNCTION); | 1806 ASSERT(flag == JUMP_FUNCTION); |
| 1798 SetCallKind(ecx, call_kind); | 1807 SetCallKind(ecx, call_kind); |
| 1799 jmp(code); | 1808 jmp(code); |
| 1800 } | 1809 } |
| 1801 bind(&done); | 1810 bind(&done); |
| 1802 } | 1811 } |
| 1803 | 1812 |
| 1804 | 1813 |
| 1805 void MacroAssembler::InvokeCode(Handle<Code> code, | 1814 void MacroAssembler::InvokeCode(Handle<Code> code, |
| 1806 const ParameterCount& expected, | 1815 const ParameterCount& expected, |
| 1807 const ParameterCount& actual, | 1816 const ParameterCount& actual, |
| 1808 RelocInfo::Mode rmode, | 1817 RelocInfo::Mode rmode, |
| 1809 InvokeFlag flag, | 1818 InvokeFlag flag, |
| 1810 const CallWrapper& call_wrapper, | 1819 const CallWrapper& call_wrapper, |
| 1811 CallKind call_kind) { | 1820 CallKind call_kind) { |
| 1821 // You can't call a function without a valid frame. |
| 1822 ASSERT(flag == JUMP_FUNCTION || has_frame()); |
| 1823 |
| 1812 Label done; | 1824 Label done; |
| 1813 Operand dummy(eax); | 1825 Operand dummy(eax); |
| 1814 InvokePrologue(expected, actual, code, dummy, &done, flag, Label::kNear, | 1826 InvokePrologue(expected, actual, code, dummy, &done, flag, Label::kNear, |
| 1815 call_wrapper, call_kind); | 1827 call_wrapper, call_kind); |
| 1816 if (flag == CALL_FUNCTION) { | 1828 if (flag == CALL_FUNCTION) { |
| 1817 call_wrapper.BeforeCall(CallSize(code, rmode)); | 1829 call_wrapper.BeforeCall(CallSize(code, rmode)); |
| 1818 SetCallKind(ecx, call_kind); | 1830 SetCallKind(ecx, call_kind); |
| 1819 call(code, rmode); | 1831 call(code, rmode); |
| 1820 call_wrapper.AfterCall(); | 1832 call_wrapper.AfterCall(); |
| 1821 } else { | 1833 } else { |
| 1822 ASSERT(flag == JUMP_FUNCTION); | 1834 ASSERT(flag == JUMP_FUNCTION); |
| 1823 SetCallKind(ecx, call_kind); | 1835 SetCallKind(ecx, call_kind); |
| 1824 jmp(code, rmode); | 1836 jmp(code, rmode); |
| 1825 } | 1837 } |
| 1826 bind(&done); | 1838 bind(&done); |
| 1827 } | 1839 } |
| 1828 | 1840 |
| 1829 | 1841 |
| 1830 void MacroAssembler::InvokeFunction(Register fun, | 1842 void MacroAssembler::InvokeFunction(Register fun, |
| 1831 const ParameterCount& actual, | 1843 const ParameterCount& actual, |
| 1832 InvokeFlag flag, | 1844 InvokeFlag flag, |
| 1833 const CallWrapper& call_wrapper, | 1845 const CallWrapper& call_wrapper, |
| 1834 CallKind call_kind) { | 1846 CallKind call_kind) { |
| 1847 // You can't call a function without a valid frame. |
| 1848 ASSERT(flag == JUMP_FUNCTION || has_frame()); |
| 1849 |
| 1835 ASSERT(fun.is(edi)); | 1850 ASSERT(fun.is(edi)); |
| 1836 mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset)); | 1851 mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset)); |
| 1837 mov(esi, FieldOperand(edi, JSFunction::kContextOffset)); | 1852 mov(esi, FieldOperand(edi, JSFunction::kContextOffset)); |
| 1838 mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset)); | 1853 mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset)); |
| 1839 SmiUntag(ebx); | 1854 SmiUntag(ebx); |
| 1840 | 1855 |
| 1841 ParameterCount expected(ebx); | 1856 ParameterCount expected(ebx); |
| 1842 InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset), | 1857 InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset), |
| 1843 expected, actual, flag, call_wrapper, call_kind); | 1858 expected, actual, flag, call_wrapper, call_kind); |
| 1844 } | 1859 } |
| 1845 | 1860 |
| 1846 | 1861 |
| 1847 void MacroAssembler::InvokeFunction(JSFunction* function, | 1862 void MacroAssembler::InvokeFunction(JSFunction* function, |
| 1848 const ParameterCount& actual, | 1863 const ParameterCount& actual, |
| 1849 InvokeFlag flag, | 1864 InvokeFlag flag, |
| 1850 const CallWrapper& call_wrapper, | 1865 const CallWrapper& call_wrapper, |
| 1851 CallKind call_kind) { | 1866 CallKind call_kind) { |
| 1867 // You can't call a function without a valid frame. |
| 1868 ASSERT(flag == JUMP_FUNCTION || has_frame()); |
| 1869 |
| 1852 ASSERT(function->is_compiled()); | 1870 ASSERT(function->is_compiled()); |
| 1853 // Get the function and setup the context. | 1871 // Get the function and setup the context. |
| 1854 mov(edi, Immediate(Handle<JSFunction>(function))); | 1872 mov(edi, Immediate(Handle<JSFunction>(function))); |
| 1855 mov(esi, FieldOperand(edi, JSFunction::kContextOffset)); | 1873 mov(esi, FieldOperand(edi, JSFunction::kContextOffset)); |
| 1856 | 1874 |
| 1857 ParameterCount expected(function->shared()->formal_parameter_count()); | 1875 ParameterCount expected(function->shared()->formal_parameter_count()); |
| 1858 if (V8::UseCrankshaft()) { | 1876 if (V8::UseCrankshaft()) { |
| 1859 // TODO(kasperl): For now, we always call indirectly through the | 1877 // TODO(kasperl): For now, we always call indirectly through the |
| 1860 // code field in the function to allow recompilation to take effect | 1878 // code field in the function to allow recompilation to take effect |
| 1861 // without changing any of the call sites. | 1879 // without changing any of the call sites. |
| 1862 InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset), | 1880 InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset), |
| 1863 expected, actual, flag, call_wrapper, call_kind); | 1881 expected, actual, flag, call_wrapper, call_kind); |
| 1864 } else { | 1882 } else { |
| 1865 Handle<Code> code(function->code()); | 1883 Handle<Code> code(function->code()); |
| 1866 InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, | 1884 InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, |
| 1867 flag, call_wrapper, call_kind); | 1885 flag, call_wrapper, call_kind); |
| 1868 } | 1886 } |
| 1869 } | 1887 } |
| 1870 | 1888 |
| 1871 | 1889 |
| 1872 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, | 1890 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, |
| 1873 InvokeFlag flag, | 1891 InvokeFlag flag, |
| 1874 const CallWrapper& call_wrapper) { | 1892 const CallWrapper& call_wrapper) { |
| 1875 // Calls are not allowed in some stubs. | 1893 // You can't call a builtin without a valid frame. |
| 1876 ASSERT(flag == JUMP_FUNCTION || allow_stub_calls()); | 1894 ASSERT(flag == JUMP_FUNCTION || has_frame()); |
| 1877 | 1895 |
| 1878 // Rely on the assertion to check that the number of provided | 1896 // Rely on the assertion to check that the number of provided |
| 1879 // arguments match the expected number of arguments. Fake a | 1897 // arguments match the expected number of arguments. Fake a |
| 1880 // parameter count to avoid emitting code to do the check. | 1898 // parameter count to avoid emitting code to do the check. |
| 1881 ParameterCount expected(0); | 1899 ParameterCount expected(0); |
| 1882 GetBuiltinFunction(edi, id); | 1900 GetBuiltinFunction(edi, id); |
| 1883 InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset), | 1901 InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset), |
| 1884 expected, expected, flag, call_wrapper, CALL_AS_METHOD); | 1902 expected, expected, flag, call_wrapper, CALL_AS_METHOD); |
| 1885 } | 1903 } |
| 1886 | 1904 |
| 1905 |
| 1887 void MacroAssembler::GetBuiltinFunction(Register target, | 1906 void MacroAssembler::GetBuiltinFunction(Register target, |
| 1888 Builtins::JavaScript id) { | 1907 Builtins::JavaScript id) { |
| 1889 // Load the JavaScript builtin function from the builtins object. | 1908 // Load the JavaScript builtin function from the builtins object. |
| 1890 mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX))); | 1909 mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX))); |
| 1891 mov(target, FieldOperand(target, GlobalObject::kBuiltinsOffset)); | 1910 mov(target, FieldOperand(target, GlobalObject::kBuiltinsOffset)); |
| 1892 mov(target, FieldOperand(target, | 1911 mov(target, FieldOperand(target, |
| 1893 JSBuiltinsObject::OffsetOfFunctionWithId(id))); | 1912 JSBuiltinsObject::OffsetOfFunctionWithId(id))); |
| 1894 } | 1913 } |
| 1895 | 1914 |
| 1915 |
| 1896 void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) { | 1916 void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) { |
| 1897 ASSERT(!target.is(edi)); | 1917 ASSERT(!target.is(edi)); |
| 1898 // Load the JavaScript builtin function from the builtins object. | 1918 // Load the JavaScript builtin function from the builtins object. |
| 1899 GetBuiltinFunction(edi, id); | 1919 GetBuiltinFunction(edi, id); |
| 1900 // Load the code entry point from the function into the target register. | 1920 // Load the code entry point from the function into the target register. |
| 1901 mov(target, FieldOperand(edi, JSFunction::kCodeEntryOffset)); | 1921 mov(target, FieldOperand(edi, JSFunction::kCodeEntryOffset)); |
| 1902 } | 1922 } |
| 1903 | 1923 |
| 1904 | 1924 |
| 1905 void MacroAssembler::LoadContext(Register dst, int context_chain_length) { | 1925 void MacroAssembler::LoadContext(Register dst, int context_chain_length) { |
| (...skipping 235 matching lines...) |
| 2141 // from the real pointer as a smi. | 2161 // from the real pointer as a smi. |
| 2142 intptr_t p1 = reinterpret_cast<intptr_t>(msg); | 2162 intptr_t p1 = reinterpret_cast<intptr_t>(msg); |
| 2143 intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag; | 2163 intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag; |
| 2144 ASSERT(reinterpret_cast<Object*>(p0)->IsSmi()); | 2164 ASSERT(reinterpret_cast<Object*>(p0)->IsSmi()); |
| 2145 #ifdef DEBUG | 2165 #ifdef DEBUG |
| 2146 if (msg != NULL) { | 2166 if (msg != NULL) { |
| 2147 RecordComment("Abort message: "); | 2167 RecordComment("Abort message: "); |
| 2148 RecordComment(msg); | 2168 RecordComment(msg); |
| 2149 } | 2169 } |
| 2150 #endif | 2170 #endif |
| 2151 // Disable stub call restrictions to always allow calls to abort. | |
| 2152 AllowStubCallsScope allow_scope(this, true); | |
| 2153 | 2171 |
| 2154 push(eax); | 2172 push(eax); |
| 2155 push(Immediate(p0)); | 2173 push(Immediate(p0)); |
| 2156 push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0)))); | 2174 push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0)))); |
| 2157 CallRuntime(Runtime::kAbort, 2); | 2175 // Disable stub call restrictions to always allow calls to abort. |
| 2176 if (!has_frame_) { |
| 2177 // We don't actually want to generate a pile of code for this, so just |
| 2178 // claim there is a stack frame, without generating one. |
| 2179 FrameScope scope(this, StackFrame::NONE); |
| 2180 CallRuntime(Runtime::kAbort, 2); |
| 2181 } else { |
| 2182 CallRuntime(Runtime::kAbort, 2); |
| 2183 } |
| 2158 // will not return here | 2184 // will not return here |
| 2159 int3(); | 2185 int3(); |
| 2160 } | 2186 } |
| 2161 | 2187 |
| 2162 | 2188 |
| 2163 void MacroAssembler::LoadInstanceDescriptors(Register map, | 2189 void MacroAssembler::LoadInstanceDescriptors(Register map, |
| 2164 Register descriptors) { | 2190 Register descriptors) { |
| 2165 mov(descriptors, | 2191 mov(descriptors, |
| 2166 FieldOperand(map, Map::kInstanceDescriptorsOrBitField3Offset)); | 2192 FieldOperand(map, Map::kInstanceDescriptorsOrBitField3Offset)); |
| 2167 Label not_smi; | 2193 Label not_smi; |
| (...skipping 78 matching lines...) |
| 2246 void MacroAssembler::CallCFunction(ExternalReference function, | 2272 void MacroAssembler::CallCFunction(ExternalReference function, |
| 2247 int num_arguments) { | 2273 int num_arguments) { |
| 2248 // Trashing eax is ok as it will be the return value. | 2274 // Trashing eax is ok as it will be the return value. |
| 2249 mov(Operand(eax), Immediate(function)); | 2275 mov(Operand(eax), Immediate(function)); |
| 2250 CallCFunction(eax, num_arguments); | 2276 CallCFunction(eax, num_arguments); |
| 2251 } | 2277 } |
| 2252 | 2278 |
| 2253 | 2279 |
| 2254 void MacroAssembler::CallCFunction(Register function, | 2280 void MacroAssembler::CallCFunction(Register function, |
| 2255 int num_arguments) { | 2281 int num_arguments) { |
| 2282 ASSERT(has_frame()); |
| 2256 // Check stack alignment. | 2283 // Check stack alignment. |
| 2257 if (emit_debug_code()) { | 2284 if (emit_debug_code()) { |
| 2258 CheckStackAlignment(); | 2285 CheckStackAlignment(); |
| 2259 } | 2286 } |
| 2260 | 2287 |
| 2261 call(Operand(function)); | 2288 call(Operand(function)); |
| 2262 if (OS::ActivationFrameAlignment() != 0) { | 2289 if (OS::ActivationFrameAlignment() != 0) { |
| 2263 mov(esp, Operand(esp, num_arguments * kPointerSize)); | 2290 mov(esp, Operand(esp, num_arguments * kPointerSize)); |
| 2264 } else { | 2291 } else { |
| 2265 add(Operand(esp), Immediate(num_arguments * kPointerSize)); | 2292 add(Operand(esp), Immediate(num_arguments * kPointerSize)); |
| (...skipping 18 matching lines...) |
| 2284 | 2311 |
| 2285 // Check that the code was patched as expected. | 2312 // Check that the code was patched as expected. |
| 2286 ASSERT(masm_.pc_ == address_ + size_); | 2313 ASSERT(masm_.pc_ == address_ + size_); |
| 2287 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); | 2314 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); |
| 2288 } | 2315 } |
| 2289 | 2316 |
| 2290 | 2317 |
| 2291 } } // namespace v8::internal | 2318 } } // namespace v8::internal |
| 2292 | 2319 |
| 2293 #endif // V8_TARGET_ARCH_IA32 | 2320 #endif // V8_TARGET_ARCH_IA32 |
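
The substance of this change: stub and runtime calls are now gated on whether the assembler has set up a frame (`has_frame_`) rather than on `allow_stub_calls_` alone. `AllowThisStubCall` refuses a call when no frame exists and the stub might build one, the `Invoke*` helpers assert `has_frame()` before calling, and `Abort` claims a `StackFrame::NONE` frame so it can still reach the runtime without emitting frame setup code. Below is a minimal standalone sketch of that gating logic, using stand-in members in place of the real `CodeStub` interface (`SometimesSetsUpAFrame`, `CompilingCallsToThisStubIsGCSafe`); it is an illustration of the predicate, not the actual v8::internal sources.

```cpp
// Sketch only: stand-in types, not the real v8::internal classes.
#include <cassert>

struct CodeStubSketch {
  bool sometimes_sets_up_a_frame;  // stands in for SometimesSetsUpAFrame()
  bool gc_safe;                    // stands in for CompilingCallsToThisStubIsGCSafe()
};

class MacroAssemblerSketch {
 public:
  MacroAssemblerSketch()
      : generating_stub_(false), allow_stub_calls_(true), has_frame_(false) {}

  // Mirrors the new predicate: with no frame, a stub that may build one is
  // refused outright; otherwise compiling the call must be GC-safe or stub
  // calls must be explicitly allowed.
  bool AllowThisStubCall(const CodeStubSketch* stub) const {
    if (!has_frame_ && stub->sometimes_sets_up_a_frame) return false;
    return stub->gc_safe || allow_stub_calls_;
  }

  void CallStub(const CodeStubSketch* stub) {
    assert(AllowThisStubCall(stub));  // calls are not allowed in some stubs
    // ... emit the actual call to the stub's code object here ...
  }

  void EnterFrame() { has_frame_ = true; }   // what a FrameScope would toggle
  void LeaveFrame() { has_frame_ = false; }

 private:
  bool generating_stub_;
  bool allow_stub_calls_;
  bool has_frame_;
};
```

Under the same assumptions, `Abort` is the one caller that deliberately sidesteps the frame requirement: rather than emitting frame setup, it pretends a `StackFrame::NONE` frame exists for the duration of the `CallRuntime(Runtime::kAbort, 2)` call, since abort paths must always be able to reach the runtime.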