Chromium Code Reviews

Side by Side Diff: src/mips/macro-assembler-mips.cc

Issue 7328013: MIPS: Cleaned up calling-related methods in the assembler. (Closed)
Patch Set: Created 9 years, 5 months ago
OLD | NEW
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 32 matching lines...)
43 : Assembler(arg_isolate, buffer, size), 43 : Assembler(arg_isolate, buffer, size),
44 generating_stub_(false), 44 generating_stub_(false),
45 allow_stub_calls_(true) { 45 allow_stub_calls_(true) {
46 if (isolate() != NULL) { 46 if (isolate() != NULL) {
47 code_object_ = Handle<Object>(isolate()->heap()->undefined_value(), 47 code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
48 isolate()); 48 isolate());
49 } 49 }
50 } 50 }
51 51
52 52
53 // Arguments macros.
54 #define COND_TYPED_ARGS Condition cond, Register r1, const Operand& r2
55 #define COND_ARGS cond, r1, r2
56
57 #define REGISTER_TARGET_BODY(Name) \
58 void MacroAssembler::Name(Register target, \
59 BranchDelaySlot bd) { \
60 Name(Operand(target), bd); \
61 } \
62 void MacroAssembler::Name(Register target, COND_TYPED_ARGS, \
63 BranchDelaySlot bd) { \
64 Name(Operand(target), COND_ARGS, bd); \
65 }
66
67
68 #define INT_PTR_TARGET_BODY(Name) \
69 void MacroAssembler::Name(intptr_t target, RelocInfo::Mode rmode, \
70 BranchDelaySlot bd) { \
71 Name(Operand(target, rmode), bd); \
72 } \
73 void MacroAssembler::Name(intptr_t target, \
74 RelocInfo::Mode rmode, \
75 COND_TYPED_ARGS, \
76 BranchDelaySlot bd) { \
77 Name(Operand(target, rmode), COND_ARGS, bd); \
78 }
79
80
81 #define BYTE_PTR_TARGET_BODY(Name) \
82 void MacroAssembler::Name(byte* target, RelocInfo::Mode rmode, \
83 BranchDelaySlot bd) { \
84 Name(reinterpret_cast<intptr_t>(target), rmode, bd); \
85 } \
86 void MacroAssembler::Name(byte* target, \
87 RelocInfo::Mode rmode, \
88 COND_TYPED_ARGS, \
89 BranchDelaySlot bd) { \
90 Name(reinterpret_cast<intptr_t>(target), rmode, COND_ARGS, bd); \
91 }
92
93
94 #define CODE_TARGET_BODY(Name) \
95 void MacroAssembler::Name(Handle<Code> target, RelocInfo::Mode rmode, \
96 BranchDelaySlot bd) { \
97 Name(reinterpret_cast<intptr_t>(target.location()), rmode, bd); \
98 } \
99 void MacroAssembler::Name(Handle<Code> target, \
100 RelocInfo::Mode rmode, \
101 COND_TYPED_ARGS, \
102 BranchDelaySlot bd) { \
103 Name(reinterpret_cast<intptr_t>(target.location()), rmode, COND_ARGS, bd); \
104 }
105
106
107 REGISTER_TARGET_BODY(Jump)
108 REGISTER_TARGET_BODY(Call)
109 INT_PTR_TARGET_BODY(Jump)
110 INT_PTR_TARGET_BODY(Call)
111 BYTE_PTR_TARGET_BODY(Jump)
112 BYTE_PTR_TARGET_BODY(Call)
113 CODE_TARGET_BODY(Jump)
114 CODE_TARGET_BODY(Call)
115
116 #undef COND_TYPED_ARGS
117 #undef COND_ARGS
118 #undef REGISTER_TARGET_BODY
119 #undef BYTE_PTR_TARGET_BODY
120 #undef CODE_TARGET_BODY
121
122
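// Illustration only (not part of the patch): expanding the macros above by
// hand, REGISTER_TARGET_BODY(Call) produces a pair of thin forwarding
// overloads:
//
//   void MacroAssembler::Call(Register target, BranchDelaySlot bd) {
//     Call(Operand(target), bd);
//   }
//   void MacroAssembler::Call(Register target,
//                             Condition cond, Register r1, const Operand& r2,
//                             BranchDelaySlot bd) {
//     Call(Operand(target), cond, r1, r2, bd);
//   }
//
// The INT_PTR_/BYTE_PTR_/CODE_TARGET_BODY variants follow the same pattern,
// each forwarding one level down. These macro-generated bodies are what the
// explicit Jump/Call definitions later in this patch replace.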
123 void MacroAssembler::Ret(BranchDelaySlot bd) {
124 Jump(Operand(ra), bd);
125 }
126
127
128 void MacroAssembler::Ret(Condition cond, Register r1, const Operand& r2,
129 BranchDelaySlot bd) {
130 Jump(Operand(ra), cond, r1, r2, bd);
131 }
132
133
134 void MacroAssembler::LoadRoot(Register destination, 53 void MacroAssembler::LoadRoot(Register destination,
135 Heap::RootListIndex index) { 54 Heap::RootListIndex index) {
136 lw(destination, MemOperand(s6, index << kPointerSizeLog2)); 55 lw(destination, MemOperand(s6, index << kPointerSizeLog2));
137 } 56 }
138 57
139 58
140 void MacroAssembler::LoadRoot(Register destination, 59 void MacroAssembler::LoadRoot(Register destination,
141 Heap::RootListIndex index, 60 Heap::RootListIndex index,
142 Condition cond, 61 Condition cond,
143 Register src1, const Operand& src2) { 62 Register src1, const Operand& src2) {
(...skipping 1750 matching lines...)
1894 1813
1895 // Check that offset could actually hold on an int16_t. 1814 // Check that offset could actually hold on an int16_t.
1896 ASSERT(is_int16(offset)); 1815 ASSERT(is_int16(offset));
1897 1816
1898 // Emit a nop in the branch delay slot if required. 1817 // Emit a nop in the branch delay slot if required.
1899 if (bdslot == PROTECT) 1818 if (bdslot == PROTECT)
1900 nop(); 1819 nop();
1901 } 1820 }
1902 1821
1903 1822
1823 void MacroAssembler::Jump(Register target,
1824 Condition cond,
1825 Register rs,
1826 const Operand& rt,
1827 BranchDelaySlot bd) {
1828 BlockTrampolinePoolScope block_trampoline_pool(this);
1829 if (cond == cc_always) {
1830 jr(target);
1831 } else {
1832 BRANCH_ARGS_CHECK(cond, rs, rt);
1833 Branch(2, NegateCondition(cond), rs, rt);
1834 jr(target);
1835 }
1836 // Emit a nop in the branch delay slot if required.
1837 if (bd == PROTECT)
1838 nop();
1839 }
1840
1841
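// Illustration only: with bd == PROTECT the jump above is self-contained,
// padding the branch delay slot with a nop:
//
//   jr    target
//   nop                    // delay slot kept harmless
//
// With the non-protecting value no nop is emitted, and the caller is expected
// to schedule a useful instruction into the delay slot itself.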
1842 void MacroAssembler::Jump(intptr_t target,
1843 RelocInfo::Mode rmode,
1844 Condition cond,
1845 Register rs,
1846 const Operand& rt,
1847 BranchDelaySlot bd) {
1848 li(t9, Operand(target, rmode));
1849 Jump(t9, cond, rs, rt, bd);
1850 }
1851
1852
1853 void MacroAssembler::Jump(Address target,
1854 RelocInfo::Mode rmode,
1855 Condition cond,
1856 Register rs,
1857 const Operand& rt,
1858 BranchDelaySlot bd) {
1859 ASSERT(!RelocInfo::IsCodeTarget(rmode));
1860 Jump(reinterpret_cast<intptr_t>(target), rmode, cond, rs, rt, bd);
1861 }
1862
1863
1864 void MacroAssembler::Jump(Handle<Code> code,
1865 RelocInfo::Mode rmode,
1866 Condition cond,
1867 Register rs,
1868 const Operand& rt,
1869 BranchDelaySlot bd) {
1870 ASSERT(RelocInfo::IsCodeTarget(rmode));
1871 Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond, rs, rt, bd);
1872 }
1873
1874
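// Illustration only: the four Jump() overloads above form a single funnel.
// A call such as
//
//   Jump(code, RelocInfo::CODE_TARGET, eq, a0, Operand(zero_reg));
//
// forwards Handle<Code> -> intptr_t -> Register: the target address is
// materialized into t9 with li(), and Jump(Register) then emits "jr t9"
// (or a negated branch over it when a condition is given), plus a delay-slot
// nop when bd == PROTECT.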
1875 int MacroAssembler::CallSize(Register target,
1876 Condition cond,
1877 Register rs,
1878 const Operand& rt,
1879 BranchDelaySlot bd) {
1880 int size = 0;
1881
1882 if (cond == cc_always) {
1883 size += 1;
1884 } else {
1885 size += 3;
1886 }
1887
1888 if (bd == PROTECT)
1889 size += 1;
1890
1891 return size * kInstrSize;
1892 }
1893
1894
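// Worked example (illustration only): an unconditional call with a protected
// delay slot is sized as
//   (1 /* jalr */ + 1 /* delay-slot nop */) * kInstrSize
// and a conditional one as
//   (3 /* negated branch, its delay-slot nop, jalr */ + 1 /* nop */) * kInstrSize.
// Call(Register) below asserts that the code it actually emits matches this
// via SizeOfCodeGeneratedSince().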
1895 // Note: To call gcc-compiled C code on mips, you must call thru t9.
1896 void MacroAssembler::Call(Register target,
1897 Condition cond,
1898 Register rs,
1899 const Operand& rt,
1900 BranchDelaySlot bd) {
1901 BlockTrampolinePoolScope block_trampoline_pool(this);
1902 Label start;
1903 bind(&start);
1904 if (cond == cc_always) {
1905 jalr(target);
1906 } else {
1907 BRANCH_ARGS_CHECK(cond, rs, rt);
1908 Branch(2, NegateCondition(cond), rs, rt);
1909 jalr(target);
1910 }
1911 // Emit a nop in the branch delay slot if required.
1912 if (bd == PROTECT)
1913 nop();
1914
1915 ASSERT_EQ(CallSize(target, cond, rs, rt, bd),
1916 SizeOfCodeGeneratedSince(&start));
1917 }
1918
1919
1920 int MacroAssembler::CallSize(Address target,
1921 RelocInfo::Mode rmode,
1922 Condition cond,
1923 Register rs,
1924 const Operand& rt,
1925 BranchDelaySlot bd) {
1926 int size = CallSize(t9, cond, rs, rt, bd);
1927 return size + 2 * kInstrSize;
1928 }
1929
1930
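// Illustration only: CallSize(Address) above adds 2 * kInstrSize on top of
// the register form because the target address must first be loaded into t9.
// Call(Address) below passes true to li(), requesting the fixed two-
// instruction (lui/ori) encoding even for small immediates, so the emitted
// size stays predictable and the ASSERT_EQ against CallSize() holds.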
1931 void MacroAssembler::Call(Address target,
1932 RelocInfo::Mode rmode,
1933 Condition cond,
1934 Register rs,
1935 const Operand& rt,
1936 BranchDelaySlot bd) {
1937 BlockTrampolinePoolScope block_trampoline_pool(this);
1938 Label start;
1939 bind(&start);
1940 int32_t target_int = reinterpret_cast<int32_t>(target);
1941 // Must record previous source positions before the
1942 // li() generates a new code target.
1943 positions_recorder()->WriteRecordedPositions();
1944 li(t9, Operand(target_int, rmode), true);
1945 Call(t9, cond, rs, rt, bd);
1946 ASSERT_EQ(CallSize(target, rmode, cond, rs, rt, bd),
1947 SizeOfCodeGeneratedSince(&start));
1948 }
1949
1950
1951 int MacroAssembler::CallSize(Handle<Code> code,
1952 RelocInfo::Mode rmode,
1953 unsigned ast_id,
1954 Condition cond,
1955 Register rs,
1956 const Operand& rt,
1957 BranchDelaySlot bd) {
1958 return CallSize(reinterpret_cast<Address>(code.location()),
1959 rmode, cond, rs, rt, bd);
1960 }
1961
1962
1963 void MacroAssembler::Call(Handle<Code> code,
1964 RelocInfo::Mode rmode,
1965 unsigned ast_id,
1966 Condition cond,
1967 Register rs,
1968 const Operand& rt,
1969 BranchDelaySlot bd) {
1970 BlockTrampolinePoolScope block_trampoline_pool(this);
1971 Label start;
1972 bind(&start);
1973 ASSERT(RelocInfo::IsCodeTarget(rmode));
1974 if (rmode == RelocInfo::CODE_TARGET && ast_id != kNoASTId) {
1975 ASSERT(ast_id_for_reloc_info_ == kNoASTId);
1976 ast_id_for_reloc_info_ = ast_id;
1977 rmode = RelocInfo::CODE_TARGET_WITH_ID;
1978 }
1979 Call(reinterpret_cast<Address>(code.location()), rmode, cond, rs, rt, bd);
1980 ASSERT_EQ(CallSize(code, rmode, ast_id, cond, rs, rt),
1981 SizeOfCodeGeneratedSince(&start));
1982 }
1983
1984
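// Illustration only: a call site that wants an AST id recorded might invoke
// the overload above as, e.g.,
//
//   Call(ic, RelocInfo::CODE_TARGET, expr_id, eq, a2, Operand(at));
//
// where ic and expr_id are hypothetical caller-side names. Supplying an id
// other than kNoASTId upgrades the reloc mode to CODE_TARGET_WITH_ID so the
// id accompanies the call's relocation information.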
1985 void MacroAssembler::Ret(Condition cond,
1986 Register rs,
1987 const Operand& rt,
1988 BranchDelaySlot bd) {
1989 Jump(ra, cond, rs, rt, bd);
1990 }
1991
1992
1904 void MacroAssembler::J(Label* L, BranchDelaySlot bdslot) { 1993 void MacroAssembler::J(Label* L, BranchDelaySlot bdslot) {
1905 BlockTrampolinePoolScope block_trampoline_pool(this); 1994 BlockTrampolinePoolScope block_trampoline_pool(this);
1906 1995
1907 uint32_t imm28; 1996 uint32_t imm28;
1908 imm28 = jump_address(L); 1997 imm28 = jump_address(L);
1909 imm28 &= kImm28Mask; 1998 imm28 &= kImm28Mask;
1910 { BlockGrowBufferScope block_buf_growth(this); 1999 { BlockGrowBufferScope block_buf_growth(this);
1911 // Buffer growth (and relocation) must be blocked for internal references 2000 // Buffer growth (and relocation) must be blocked for internal references
1912 // until associated instructions are emitted and available to be patched. 2001 // until associated instructions are emitted and available to be patched.
1913 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE); 2002 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE);
(...skipping 38 matching lines...)
1952 ori(at, at, (imm32 & kImm16Mask)); 2041 ori(at, at, (imm32 & kImm16Mask));
1953 } 2042 }
1954 jalr(at); 2043 jalr(at);
1955 2044
1956 // Emit a nop in the branch delay slot if required. 2045 // Emit a nop in the branch delay slot if required.
1957 if (bdslot == PROTECT) 2046 if (bdslot == PROTECT)
1958 nop(); 2047 nop();
1959 } 2048 }
1960 2049
1961 2050
1962 void MacroAssembler::Jump(const Operand& target, BranchDelaySlot bdslot) { 2051 void MacroAssembler::DropAndRet(int drop,
1963 BlockTrampolinePoolScope block_trampoline_pool(this); 2052 Condition cond,
1964 if (target.is_reg()) { 2053 Register r1,
1965 jr(target.rm()); 2054 const Operand& r2) {
1966 } else { 2055 // This is a workaround to make sure only one branch instruction is
1967 if (!MustUseReg(target.rmode_)) { 2056 // generated. It relies on Drop and Ret not creating branches if
1968 j(target.imm32_); 2057 // cond == cc_always.
1969 } else { 2058 Label skip;
1970 li(t9, target); 2059 if (cond != cc_always) {
1971 jr(t9); 2060 Branch(&skip, NegateCondition(cond), r1, r2);
1972 }
1973 } 2061 }
1974 // Emit a nop in the branch delay slot if required. 2062
1975 if (bdslot == PROTECT) 2063 Drop(drop);
1976 nop(); 2064 Ret();
2065
2066 if (cond != cc_always) {
2067 bind(&skip);
2068 }
1977 } 2069 }
1978 2070
1979 2071
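// Illustration only: DropAndRet(2) above reduces to roughly
//
//   addiu(sp, sp, 2 * kPointerSize);   // Drop(2)
//   Ret();                             // jr ra (+ delay-slot nop)
//
// The explicit skip label is there because the conditional forms of Drop()
// and Ret() would otherwise each emit a branch of their own.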
1980 void MacroAssembler::Jump(const Operand& target,
1981 Condition cond, Register rs, const Operand& rt,
1982 BranchDelaySlot bdslot) {
1983 BlockTrampolinePoolScope block_trampoline_pool(this);
1984 BRANCH_ARGS_CHECK(cond, rs, rt);
1985 if (target.is_reg()) {
1986 if (cond == cc_always) {
1987 jr(target.rm());
1988 } else {
1989 Branch(2, NegateCondition(cond), rs, rt);
1990 jr(target.rm());
1991 }
1992 } else { // Not register target.
1993 if (!MustUseReg(target.rmode_)) {
1994 if (cond == cc_always) {
1995 j(target.imm32_);
1996 } else {
1997 Branch(2, NegateCondition(cond), rs, rt);
1998 j(target.imm32_); // Will generate only one instruction.
1999 }
2000 } else { // MustUseReg(target).
2001 li(t9, target);
2002 if (cond == cc_always) {
2003 jr(t9);
2004 } else {
2005 Branch(2, NegateCondition(cond), rs, rt);
2006 jr(t9); // Will generate only one instruction.
2007 }
2008 }
2009 }
2010 // Emit a nop in the branch delay slot if required.
2011 if (bdslot == PROTECT)
2012 nop();
2013 }
2014
2015
2016 int MacroAssembler::CallSize(Handle<Code> code, RelocInfo::Mode rmode) {
2017 return 4 * kInstrSize;
2018 }
2019
2020
2021 int MacroAssembler::CallSize(Register reg) {
2022 return 2 * kInstrSize;
2023 }
2024
2025
2026 // Note: To call gcc-compiled C code on mips, you must call thru t9.
2027 void MacroAssembler::Call(const Operand& target, BranchDelaySlot bdslot) {
2028 BlockTrampolinePoolScope block_trampoline_pool(this);
2029 if (target.is_reg()) {
2030 jalr(target.rm());
2031 } else { // !target.is_reg().
2032 if (!MustUseReg(target.rmode_)) {
2033 jal(target.imm32_);
2034 } else { // MustUseReg(target).
2035 // Must record previous source positions before the
2036 // li() generates a new code target.
2037 positions_recorder()->WriteRecordedPositions();
2038 li(t9, target);
2039 jalr(t9);
2040 }
2041 }
2042 // Emit a nop in the branch delay slot if required.
2043 if (bdslot == PROTECT)
2044 nop();
2045 }
2046
2047
2048 // Note: To call gcc-compiled C code on mips, you must call thru t9.
2049 void MacroAssembler::Call(const Operand& target,
2050 Condition cond, Register rs, const Operand& rt,
2051 BranchDelaySlot bdslot) {
2052 BlockTrampolinePoolScope block_trampoline_pool(this);
2053 BRANCH_ARGS_CHECK(cond, rs, rt);
2054 if (target.is_reg()) {
2055 if (cond == cc_always) {
2056 jalr(target.rm());
2057 } else {
2058 Branch(2, NegateCondition(cond), rs, rt);
2059 jalr(target.rm());
2060 }
2061 } else { // !target.is_reg().
2062 if (!MustUseReg(target.rmode_)) {
2063 if (cond == cc_always) {
2064 jal(target.imm32_);
2065 } else {
2066 Branch(2, NegateCondition(cond), rs, rt);
2067 jal(target.imm32_); // Will generate only one instruction.
2068 }
2069 } else { // MustUseReg(target)
2070 li(t9, target);
2071 if (cond == cc_always) {
2072 jalr(t9);
2073 } else {
2074 Branch(2, NegateCondition(cond), rs, rt);
2075 jalr(t9); // Will generate only one instruction.
2076 }
2077 }
2078 }
2079 // Emit a nop in the branch delay slot if required.
2080 if (bdslot == PROTECT)
2081 nop();
2082 }
2083
2084
2085 void MacroAssembler::CallWithAstId(Handle<Code> code,
2086 RelocInfo::Mode rmode,
2087 unsigned ast_id,
2088 Condition cond,
2089 Register r1,
2090 const Operand& r2) {
2091 ASSERT(RelocInfo::IsCodeTarget(rmode));
2092 if (rmode == RelocInfo::CODE_TARGET && ast_id != kNoASTId) {
2093 ASSERT(ast_id_for_reloc_info_ == kNoASTId);
2094 ast_id_for_reloc_info_ = ast_id;
2095 rmode = RelocInfo::CODE_TARGET_WITH_ID;
2096 }
2097 Call(reinterpret_cast<intptr_t>(code.location()), rmode, cond, r1, r2);
2098 }
2099
2100
2101 void MacroAssembler::Drop(int count, 2072 void MacroAssembler::Drop(int count,
2102 Condition cond, 2073 Condition cond,
2103 Register reg, 2074 Register reg,
2104 const Operand& op) { 2075 const Operand& op) {
2105 if (count <= 0) { 2076 if (count <= 0) {
2106 return; 2077 return;
2107 } 2078 }
2108 2079
2109 Label skip; 2080 Label skip;
2110 2081
2111 if (cond != al) { 2082 if (cond != al) {
2112 Branch(&skip, NegateCondition(cond), reg, op); 2083 Branch(&skip, NegateCondition(cond), reg, op);
2113 } 2084 }
2114 2085
2115 if (count > 0) { 2086 addiu(sp, sp, count * kPointerSize);
2116 addiu(sp, sp, count * kPointerSize);
2117 }
2118 2087
2119 if (cond != al) { 2088 if (cond != al) {
2120 bind(&skip); 2089 bind(&skip);
2121 } 2090 }
2122 } 2091 }
2123 2092
2124 2093
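// Illustration only: MIPS has no predicated instructions, so the conditional
// form of Drop() above branches over the stack adjustment. For example,
//
//   Drop(3, ne, t0, Operand(zero_reg));
//
// emits roughly "beq t0, zero_reg, skip; nop; addiu sp, sp, 3 * kPointerSize"
// and, with kPointerSize == 4 on MIPS32, releases 12 bytes of stack.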
2125 void MacroAssembler::DropAndRet(int drop,
2126 Condition cond,
2127 Register r1,
2128 const Operand& r2) {
2129 // This is a workaround to make sure only one branch instruction is
2130 // generated. It relies on Drop and Ret not creating branches if
2131 // cond == cc_always.
2132 Label skip;
2133 if (cond != cc_always) {
2134 Branch(&skip, NegateCondition(cond), r1, r2);
2135 }
2136
2137 Drop(drop);
2138 Ret();
2139
2140 if (cond != cc_always) {
2141 bind(&skip);
2142 }
2143 }
2144
2145 2094
2146 void MacroAssembler::Swap(Register reg1, 2095 void MacroAssembler::Swap(Register reg1,
2147 Register reg2, 2096 Register reg2,
2148 Register scratch) { 2097 Register scratch) {
2149 if (scratch.is(no_reg)) { 2098 if (scratch.is(no_reg)) {
2150 Xor(reg1, reg1, Operand(reg2)); 2099 Xor(reg1, reg1, Operand(reg2));
2151 Xor(reg2, reg2, Operand(reg1)); 2100 Xor(reg2, reg2, Operand(reg1));
2152 Xor(reg1, reg1, Operand(reg2)); 2101 Xor(reg1, reg1, Operand(reg2));
2153 } else { 2102 } else {
2154 mov(scratch, reg1); 2103 mov(scratch, reg1);
(...skipping 817 matching lines...)
2972 2921
2973 if (!definitely_matches) { 2922 if (!definitely_matches) {
2974 if (!code_constant.is_null()) { 2923 if (!code_constant.is_null()) {
2975 li(a3, Operand(code_constant)); 2924 li(a3, Operand(code_constant));
2976 addiu(a3, a3, Code::kHeaderSize - kHeapObjectTag); 2925 addiu(a3, a3, Code::kHeaderSize - kHeapObjectTag);
2977 } 2926 }
2978 2927
2979 Handle<Code> adaptor = 2928 Handle<Code> adaptor =
2980 isolate()->builtins()->ArgumentsAdaptorTrampoline(); 2929 isolate()->builtins()->ArgumentsAdaptorTrampoline();
2981 if (flag == CALL_FUNCTION) { 2930 if (flag == CALL_FUNCTION) {
2982 call_wrapper.BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET)); 2931 call_wrapper.BeforeCall(CallSize(adaptor));
2983 SetCallKind(t1, call_kind); 2932 SetCallKind(t1, call_kind);
2984 Call(adaptor, RelocInfo::CODE_TARGET); 2933 Call(adaptor);
2985 call_wrapper.AfterCall(); 2934 call_wrapper.AfterCall();
2986 jmp(done); 2935 jmp(done);
2987 } else { 2936 } else {
2988 SetCallKind(t1, call_kind); 2937 SetCallKind(t1, call_kind);
2989 Jump(adaptor, RelocInfo::CODE_TARGET); 2938 Jump(adaptor, RelocInfo::CODE_TARGET);
2990 } 2939 }
2991 bind(&regular_invoke); 2940 bind(&regular_invoke);
2992 } 2941 }
2993 } 2942 }
2994 2943
(...skipping 176 matching lines...)
3171 lbu(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset)); 3120 lbu(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset));
3172 } 3121 }
3173 3122
3174 3123
3175 // ----------------------------------------------------------------------------- 3124 // -----------------------------------------------------------------------------
3176 // Runtime calls. 3125 // Runtime calls.
3177 3126
3178 void MacroAssembler::CallStub(CodeStub* stub, Condition cond, 3127 void MacroAssembler::CallStub(CodeStub* stub, Condition cond,
3179 Register r1, const Operand& r2) { 3128 Register r1, const Operand& r2) {
3180 ASSERT(allow_stub_calls()); // Stub calls are not allowed in some stubs. 3129 ASSERT(allow_stub_calls()); // Stub calls are not allowed in some stubs.
3181 Call(stub->GetCode(), RelocInfo::CODE_TARGET, cond, r1, r2); 3130 Call(stub->GetCode(), RelocInfo::CODE_TARGET, kNoASTId, cond, r1, r2);
3182 } 3131 }
3183 3132
3184 3133
3185 MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub, Condition cond, 3134 MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub, Condition cond,
3186 Register r1, const Operand& r2) { 3135 Register r1, const Operand& r2) {
3187 ASSERT(allow_stub_calls()); // Stub calls are not allowed in some stubs. 3136 ASSERT(allow_stub_calls()); // Stub calls are not allowed in some stubs.
3188 Object* result; 3137 Object* result;
3189 { MaybeObject* maybe_result = stub->TryGetCode(); 3138 { MaybeObject* maybe_result = stub->TryGetCode();
3190 if (!maybe_result->ToObject(&result)) return maybe_result; 3139 if (!maybe_result->ToObject(&result)) return maybe_result;
3191 } 3140 }
3192 Call(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET, cond, r1, r2); 3141 Call(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET,
3142 kNoASTId, cond, r1, r2);
3193 return result; 3143 return result;
3194 } 3144 }
3195 3145
3196 3146
3197 void MacroAssembler::TailCallStub(CodeStub* stub) { 3147 void MacroAssembler::TailCallStub(CodeStub* stub) {
3198 ASSERT(allow_stub_calls()); // Stub calls are not allowed in some stubs. 3148 ASSERT(allow_stub_calls()); // Stub calls are not allowed in some stubs.
3199 Jump(stub->GetCode(), RelocInfo::CODE_TARGET); 3149 Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
3200 } 3150 }
3201 3151
3202 3152
(...skipping 1029 matching lines...)
4232 opcode == BGTZL); 4182 opcode == BGTZL);
4233 opcode = (cond == eq) ? BEQ : BNE; 4183 opcode = (cond == eq) ? BEQ : BNE;
4234 instr = (instr & ~kOpcodeMask) | opcode; 4184 instr = (instr & ~kOpcodeMask) | opcode;
4235 masm_.emit(instr); 4185 masm_.emit(instr);
4236 } 4186 }
4237 4187
4238 4188
4239 } } // namespace v8::internal 4189 } } // namespace v8::internal
4240 4190
4241 #endif // V8_TARGET_ARCH_MIPS 4191 #endif // V8_TARGET_ARCH_MIPS