OLD | NEW |
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_MIPS. | 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_MIPS. |
6 #if defined(TARGET_ARCH_MIPS) | 6 #if defined(TARGET_ARCH_MIPS) |
7 | 7 |
8 #include "vm/intrinsifier.h" | 8 #include "vm/intrinsifier.h" |
9 | 9 |
10 #include "vm/assembler.h" | 10 #include "vm/assembler.h" |
(...skipping 161 matching lines...)
172 __ LoadImmediate(T7, reinterpret_cast<int32_t>(Object::null())); | 172 __ LoadImmediate(T7, reinterpret_cast<int32_t>(Object::null())); |
173 __ Ret(); | 173 __ Ret(); |
174 __ delay_slot()->mov(V0, T7); | 174 __ delay_slot()->mov(V0, T7); |
175 __ Bind(&fall_through); | 175 __ Bind(&fall_through); |
176 } | 176 } |
177 | 177 |
178 | 178 |
179 #define TYPED_ARRAY_ALLOCATION(type_name, cid, max_len, scale_shift) \ | 179 #define TYPED_ARRAY_ALLOCATION(type_name, cid, max_len, scale_shift) \ |
180 Label fall_through; \ | 180 Label fall_through; \ |
181 const intptr_t kArrayLengthStackOffset = 0 * kWordSize; \ | 181 const intptr_t kArrayLengthStackOffset = 0 * kWordSize; \ |
182 __ MaybeTraceAllocation(cid, T2, &fall_through); \ | 182 __ MaybeTraceAllocation(cid, T2, &fall_through, \ |
| 183 /* inline_isolate = */ false); \ |
183 __ lw(T2, Address(SP, kArrayLengthStackOffset)); /* Array length. */ \ | 184 __ lw(T2, Address(SP, kArrayLengthStackOffset)); /* Array length. */ \ |
184 /* Check that length is a positive Smi. */ \ | 185 /* Check that length is a positive Smi. */ \ |
185 /* T2: requested array length argument. */ \ | 186 /* T2: requested array length argument. */ \ |
186 __ andi(CMPRES1, T2, Immediate(kSmiTagMask)); \ | 187 __ andi(CMPRES1, T2, Immediate(kSmiTagMask)); \ |
187 __ bne(CMPRES1, ZR, &fall_through); \ | 188 __ bne(CMPRES1, ZR, &fall_through); \ |
188 __ BranchSignedLess(T2, Immediate(0), &fall_through); \ | 189 __ BranchSignedLess(T2, Immediate(0), &fall_through); \ |
189 __ SmiUntag(T2); \ | 190 __ SmiUntag(T2); \ |
190 /* Check for maximum allowed length. */ \ | 191 /* Check for maximum allowed length. */ \ |
191 /* T2: untagged array length. */ \ | 192 /* T2: untagged array length. */ \ |
192 __ BranchSignedGreater(T2, Immediate(max_len), &fall_through); \ | 193 __ BranchSignedGreater(T2, Immediate(max_len), &fall_through); \ |
193 __ sll(T2, T2, scale_shift); \ | 194 __ sll(T2, T2, scale_shift); \ |
194 const intptr_t fixed_size = sizeof(Raw##type_name) + kObjectAlignment - 1; \ | 195 const intptr_t fixed_size = sizeof(Raw##type_name) + kObjectAlignment - 1; \ |
195 __ AddImmediate(T2, fixed_size); \ | 196 __ AddImmediate(T2, fixed_size); \ |
196 __ LoadImmediate(TMP, -kObjectAlignment); \ | 197 __ LoadImmediate(TMP, -kObjectAlignment); \ |
197 __ and_(T2, T2, TMP); \ | 198 __ and_(T2, T2, TMP); \ |
198 Heap* heap = Isolate::Current()->heap(); \ | 199 Heap::Space space = Heap::SpaceForAllocation(cid); \ |
199 Heap::Space space = heap->SpaceForAllocation(cid); \ | 200 __ lw(T3, Address(THR, Thread::heap_offset())); \ |
200 __ LoadImmediate(V0, heap->TopAddress(space)); \ | 201 __ lw(V0, Address(T3, Heap::TopOffset(space))); \ |
201 __ lw(V0, Address(V0, 0)); \ | |
202 \ | 202 \ |
203 /* T2: allocation size. */ \ | 203 /* T2: allocation size. */ \ |
204 __ addu(T1, V0, T2); \ | 204 __ addu(T1, V0, T2); \ |
205 /* Branch on unsigned overflow. */ \ | 205 /* Branch on unsigned overflow. */ \ |
206 __ BranchUnsignedLess(T1, V0, &fall_through); \ | 206 __ BranchUnsignedLess(T1, V0, &fall_through); \ |
207 \ | 207 \ |
208 /* Check if the allocation fits into the remaining space. */ \ | 208 /* Check if the allocation fits into the remaining space. */ \ |
209 /* V0: potential new object start. */ \ | 209 /* V0: potential new object start. */ \ |
210 /* T1: potential next object start. */ \ | 210 /* T1: potential next object start. */ \ |
211 /* T2: allocation size. */ \ | 211 /* T2: allocation size. */ \ |
212 __ LoadImmediate(T3, heap->EndAddress(space)); \ | 212 /* T3: heap. */ \ |
213 __ lw(T3, Address(T3, 0)); \ | 213 __ lw(T4, Address(T3, Heap::EndOffset(space))); \ |
214 __ BranchUnsignedGreaterEqual(T1, T3, &fall_through); \ | 214 __ BranchUnsignedGreaterEqual(T1, T4, &fall_through); \ |
215 \ | 215 \ |
216 /* Successfully allocated the object(s), now update top to point to */ \ | 216 /* Successfully allocated the object(s), now update top to point to */ \ |
217 /* next object start and initialize the object. */ \ | 217 /* next object start and initialize the object. */ \ |
218 __ LoadImmediate(T3, heap->TopAddress(space)); \ | 218 __ sw(T1, Address(T3, Heap::TopOffset(space))); \ |
219 __ sw(T1, Address(T3, 0)); \ | |
220 __ AddImmediate(V0, kHeapObjectTag); \ | 219 __ AddImmediate(V0, kHeapObjectTag); \ |
221 __ UpdateAllocationStatsWithSize(cid, T2, T4, space); \ | 220 __ UpdateAllocationStatsWithSize(cid, T2, T4, space, \ |
| 221 /* inline_isolate = */ false); \ |
222 /* Initialize the tags. */ \ | 222 /* Initialize the tags. */ \ |
223 /* V0: new object start as a tagged pointer. */ \ | 223 /* V0: new object start as a tagged pointer. */ \ |
224 /* T1: new object end address. */ \ | 224 /* T1: new object end address. */ \ |
225 /* T2: allocation size. */ \ | 225 /* T2: allocation size. */ \ |
226 { \ | 226 { \ |
227 Label size_tag_overflow, done; \ | 227 Label size_tag_overflow, done; \ |
228 __ BranchUnsignedGreater(T2, Immediate(RawObject::SizeTag::kMaxSizeTag), \ | 228 __ BranchUnsignedGreater(T2, Immediate(RawObject::SizeTag::kMaxSizeTag), \ |
229 &size_tag_overflow); \ | 229 &size_tag_overflow); \ |
230 __ b(&done); \ | 230 __ b(&done); \ |
231 __ delay_slot()->sll(T2, T2, \ | 231 __ delay_slot()->sll(T2, T2, \ |
(...skipping 1658 matching lines...)
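Review note: the substance of the hunk above is where the allocation fast path gets its heap bounds from. The old code baked the absolute addresses returned by heap->TopAddress(space) and heap->EndAddress(space) into the instruction stream (which requires Isolate::Current() at code-generation time), while the new code loads the Heap* from the thread register at Thread::heap_offset() and indexes it with the static Heap::TopOffset(space) / Heap::EndOffset(space) offsets, so the stub no longer hard-codes a particular isolate. Below is a minimal C++ sketch of the bump-pointer fast path the macro emits; ScratchHeap and its fields are hypothetical stand-ins for the VM's heap words, and fixed_size is assumed to already include kObjectAlignment - 1 so the mask rounds the total up, exactly as in the macro.

#include <cstdint>

struct ScratchHeap {        // hypothetical stand-in for the VM's heap top/end words
  uintptr_t top;            // next free address in the space
  uintptr_t end;            // first address past the space
};

static const uintptr_t kObjectAlignment = 8;   // illustrative value
static const uintptr_t kHeapObjectTag = 1;     // illustrative value

// Returns a tagged pointer on success, or 0 to signal "take the slow path"
// (where the assembly instead branches to &fall_through).
uintptr_t TryBumpAllocate(ScratchHeap* heap, uintptr_t untagged_length,
                          int scale_shift, uintptr_t fixed_size) {
  // sll / AddImmediate / and_: scale the length, add the header slack, round up.
  uintptr_t size = (untagged_length << scale_shift) + fixed_size;
  size &= ~(kObjectAlignment - 1);

  uintptr_t start = heap->top;
  uintptr_t next = start + size;
  if (next < start) return 0;        // unsigned overflow -> slow path
  if (next >= heap->end) return 0;   // does not fit before end -> slow path

  heap->top = next;                  // publish the new top
  return start + kHeapObjectTag;     // tagged pointer, as the macro leaves in V0
}

The size-tag block at the end of the hunk then shifts the allocation size into the tags word unless it exceeds RawObject::SizeTag::kMaxSizeTag, in which case control goes to the size_tag_overflow path (the remainder is in the skipped lines).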
1890 | 1890 |
1891 | 1891 |
1892 // Allocates one-byte string of length 'end - start'. The content is not | 1892 // Allocates one-byte string of length 'end - start'. The content is not |
1893 // initialized. | 1893 // initialized. |
1894 // 'length-reg' (T2) contains tagged length. | 1894 // 'length-reg' (T2) contains tagged length. |
1895 // Returns new string as tagged pointer in V0. | 1895 // Returns new string as tagged pointer in V0. |
1896 static void TryAllocateOnebyteString(Assembler* assembler, | 1896 static void TryAllocateOnebyteString(Assembler* assembler, |
1897 Label* ok, | 1897 Label* ok, |
1898 Label* failure) { | 1898 Label* failure) { |
1899 const Register length_reg = T2; | 1899 const Register length_reg = T2; |
1900 __ MaybeTraceAllocation(kOneByteStringCid, V0, failure); | 1900 __ MaybeTraceAllocation(kOneByteStringCid, V0, failure, |
| 1901 /* inline_isolate = */ false); |
1901 __ mov(T6, length_reg); // Save the length register. | 1902 __ mov(T6, length_reg); // Save the length register. |
1902 // TODO(koda): Protect against negative length and overflow here. | 1903 // TODO(koda): Protect against negative length and overflow here. |
1903 __ SmiUntag(length_reg); | 1904 __ SmiUntag(length_reg); |
1904 const intptr_t fixed_size = sizeof(RawString) + kObjectAlignment - 1; | 1905 const intptr_t fixed_size = sizeof(RawString) + kObjectAlignment - 1; |
1905 __ AddImmediate(length_reg, fixed_size); | 1906 __ AddImmediate(length_reg, fixed_size); |
1906 __ LoadImmediate(TMP, ~(kObjectAlignment - 1)); | 1907 __ LoadImmediate(TMP, ~(kObjectAlignment - 1)); |
1907 __ and_(length_reg, length_reg, TMP); | 1908 __ and_(length_reg, length_reg, TMP); |
1908 | 1909 |
1909 Isolate* isolate = Isolate::Current(); | |
1910 Heap* heap = isolate->heap(); | |
1911 const intptr_t cid = kOneByteStringCid; | 1910 const intptr_t cid = kOneByteStringCid; |
1912 Heap::Space space = heap->SpaceForAllocation(cid); | 1911 Heap::Space space = Heap::SpaceForAllocation(cid); |
1913 __ LoadImmediate(T3, heap->TopAddress(space)); | 1912 __ lw(T3, Address(THR, Thread::heap_offset())); |
1914 __ lw(V0, Address(T3, 0)); | 1913 __ lw(V0, Address(T3, Heap::TopOffset(space))); |
1915 | 1914 |
1916 // length_reg: allocation size. | 1915 // length_reg: allocation size. |
1917 __ addu(T1, V0, length_reg); | 1916 __ addu(T1, V0, length_reg); |
1918 __ BranchUnsignedLess(T1, V0, failure); // Fail on unsigned overflow. | 1917 __ BranchUnsignedLess(T1, V0, failure); // Fail on unsigned overflow. |
1919 | 1918 |
1920 // Check if the allocation fits into the remaining space. | 1919 // Check if the allocation fits into the remaining space. |
1921 // V0: potential new object start. | 1920 // V0: potential new object start. |
1922 // T1: potential next object start. | 1921 // T1: potential next object start. |
1923 // T2: allocation size. | 1922 // T2: allocation size. |
1924 // T3: heap->TopAddress(space). | 1923 // T3: heap. |
1925 __ LoadImmediate(T4, heap->EndAddress(space)); | 1924 __ lw(T4, Address(T3, Heap::EndOffset(space))); |
1926 __ lw(T4, Address(T4, 0)); | |
1927 __ BranchUnsignedGreaterEqual(T1, T4, failure); | 1925 __ BranchUnsignedGreaterEqual(T1, T4, failure); |
1928 | 1926 |
1929 // Successfully allocated the object(s), now update top to point to | 1927 // Successfully allocated the object(s), now update top to point to |
1930 // next object start and initialize the object. | 1928 // next object start and initialize the object. |
1931 __ sw(T1, Address(T3, 0)); | 1929 __ sw(T1, Address(T3, Heap::TopOffset(space))); |
1932 __ AddImmediate(V0, kHeapObjectTag); | 1930 __ AddImmediate(V0, kHeapObjectTag); |
1933 | 1931 |
1934 __ UpdateAllocationStatsWithSize(cid, T2, T3, space); | 1932 __ UpdateAllocationStatsWithSize(cid, T2, T3, space, |
| 1933 /* inline_isolate = */ false); |
1935 | 1934 |
1936 // Initialize the tags. | 1935 // Initialize the tags. |
1937 // V0: new object start as a tagged pointer. | 1936 // V0: new object start as a tagged pointer. |
1938 // T1: new object end address. | 1937 // T1: new object end address. |
1939 // T2: allocation size. | 1938 // T2: allocation size. |
1940 { | 1939 { |
1941 Label overflow, done; | 1940 Label overflow, done; |
1942 const intptr_t shift = RawObject::kSizeTagPos - kObjectAlignmentLog2; | 1941 const intptr_t shift = RawObject::kSizeTagPos - kObjectAlignmentLog2; |
1943 | 1942 |
1944 __ BranchUnsignedGreater( | 1943 __ BranchUnsignedGreater( |
(...skipping 231 matching lines...)
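Review note: TryAllocateOnebyteString above follows the same pattern: MaybeTraceAllocation and UpdateAllocationStatsWithSize now pass inline_isolate = false, and the heap is reached through THR + Thread::heap_offset() with Heap::TopOffset/EndOffset rather than through addresses obtained from Isolate::Current()->heap(). T6 saves the original tagged length before T2 is untagged and rounded up into the allocation size. A small arithmetic sketch of that size computation, using illustrative constants rather than the VM's real headers:

#include <cstdint>

static const uintptr_t kSmiTagShift = 1;       // Smis stored as value << 1 (illustrative)
static const uintptr_t kObjectAlignment = 8;   // illustrative value

// header_size plays the role of sizeof(RawString) in the assembly.
uintptr_t OneByteStringAllocationSize(uintptr_t tagged_length,
                                      uintptr_t header_size) {
  uintptr_t length = tagged_length >> kSmiTagShift;              // SmiUntag
  uintptr_t size = length + header_size + kObjectAlignment - 1;  // AddImmediate(fixed_size)
  return size & ~(kObjectAlignment - 1);                         // and_ with the alignment mask
}

For example, a 5-character string with an (assumed) 12-byte header rounds up to 24 bytes: (5 + 12 + 7) & ~7 = 24.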
2176 | 2175 |
2177 void Intrinsifier::Profiler_getCurrentTag(Assembler* assembler) { | 2176 void Intrinsifier::Profiler_getCurrentTag(Assembler* assembler) { |
2178 __ LoadIsolate(V0); | 2177 __ LoadIsolate(V0); |
2179 __ Ret(); | 2178 __ Ret(); |
2180 __ delay_slot()->lw(V0, Address(V0, Isolate::current_tag_offset())); | 2179 __ delay_slot()->lw(V0, Address(V0, Isolate::current_tag_offset())); |
2181 } | 2180 } |
2182 | 2181 |
2183 } // namespace dart | 2182 } // namespace dart |
2184 | 2183 |
2185 #endif // defined TARGET_ARCH_MIPS | 2184 #endif // defined TARGET_ARCH_MIPS |