OLD | NEW |
---|---|
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 86 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
97 __ ldr(r4, FieldMemOperand(r2, JSObject::kElementsOffset)); | 97 __ ldr(r4, FieldMemOperand(r2, JSObject::kElementsOffset)); |
98 __ ldr(r5, FieldMemOperand(r4, FixedArray::kLengthOffset)); | 98 __ ldr(r5, FieldMemOperand(r4, FixedArray::kLengthOffset)); |
99 // r4: source FixedArray | 99 // r4: source FixedArray |
100 // r5: number of elements (smi-tagged) | 100 // r5: number of elements (smi-tagged) |
101 | 101 |
102 // Allocate new FixedDoubleArray. | 102 // Allocate new FixedDoubleArray. |
103 __ mov(lr, Operand(FixedDoubleArray::kHeaderSize)); | 103 __ mov(lr, Operand(FixedDoubleArray::kHeaderSize)); |
104 __ add(lr, lr, Operand(r5, LSL, 2)); | 104 __ add(lr, lr, Operand(r5, LSL, 2)); |
105 __ AllocateInNewSpace(lr, r6, r7, r9, &gc_required, NO_ALLOCATION_FLAGS); | 105 __ AllocateInNewSpace(lr, r6, r7, r9, &gc_required, NO_ALLOCATION_FLAGS); |
106 // r6: destination FixedDoubleArray, not tagged as heap object | 106 // r6: destination FixedDoubleArray, not tagged as heap object |
107 // Set destination FixedDoubleArray's length and map. | |
107 __ LoadRoot(r9, Heap::kFixedDoubleArrayMapRootIndex); | 108 __ LoadRoot(r9, Heap::kFixedDoubleArrayMapRootIndex); |
109 __ str(r5, MemOperand(r6, FixedDoubleArray::kLengthOffset)); | |
108 __ str(r9, MemOperand(r6, HeapObject::kMapOffset)); | 110 __ str(r9, MemOperand(r6, HeapObject::kMapOffset)); |
109 // Set destination FixedDoubleArray's length. | |
110 __ str(r5, MemOperand(r6, FixedDoubleArray::kLengthOffset)); | |
111 // Update receiver's map. | 111 // Update receiver's map. |
112 | 112 |
113 __ str(r3, FieldMemOperand(r2, HeapObject::kMapOffset)); | 113 __ str(r3, FieldMemOperand(r2, HeapObject::kMapOffset)); |
114 __ RecordWriteField(r2, | 114 __ RecordWriteField(r2, |
115 HeapObject::kMapOffset, | 115 HeapObject::kMapOffset, |
116 r3, | 116 r3, |
117 r9, | 117 r9, |
118 kLRHasBeenSaved, | 118 kLRHasBeenSaved, |
119 kDontSaveFPRegs, | 119 kDontSaveFPRegs, |
120 EMIT_REMEMBERED_SET, | 120 EMIT_REMEMBERED_SET, |
(...skipping 27 matching lines...) Expand all Loading... | |
148 | 148 |
149 // Call into runtime if GC is required. | 149 // Call into runtime if GC is required. |
150 __ bind(&gc_required); | 150 __ bind(&gc_required); |
151 __ pop(lr); | 151 __ pop(lr); |
152 __ b(fail); | 152 __ b(fail); |
153 | 153 |
154 // Convert and copy elements. | 154 // Convert and copy elements. |
155 __ bind(&loop); | 155 __ bind(&loop); |
156 __ ldr(r9, MemOperand(r3, 4, PostIndex)); | 156 __ ldr(r9, MemOperand(r3, 4, PostIndex)); |
157 // r9: current element | 157 // r9: current element |
158 __ JumpIfNotSmi(r9, &convert_hole); | 158 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0); |
ulan
2012/01/27 13:19:11
Can we introduce UntagAndJumpIfNotSmi for these tr… [reviewer comment truncated in capture]
 | |
159 __ mov(r9, Operand(r9, ASR, kSmiShiftSize), SetCC); | |
160 __ b(cs, &convert_hole); // Non-smi sets shifter carry. | |
159 | 161 |
160 // Normal smi, convert to double and store. | 162 // Normal smi, convert to double and store. |
161 __ SmiUntag(r9); | |
162 if (vfp3_supported) { | 163 if (vfp3_supported) { |
163 CpuFeatures::Scope scope(VFP3); | 164 CpuFeatures::Scope scope(VFP3); |
164 __ vmov(s0, r9); | 165 __ vmov(s0, r9); |
165 __ vcvt_f64_s32(d0, s0); | 166 __ vcvt_f64_s32(d0, s0); |
166 __ vstr(d0, r7, 0); | 167 __ vstr(d0, r7, 0); |
167 __ add(r7, r7, Operand(8)); | 168 __ add(r7, r7, Operand(8)); |
168 } else { | 169 } else { |
169 FloatingPointHelper::ConvertIntToDouble(masm, | 170 FloatingPointHelper::ConvertIntToDouble(masm, |
170 r9, | 171 r9, |
171 FloatingPointHelper::kCoreRegisters, | 172 FloatingPointHelper::kCoreRegisters, |
172 d0, | 173 d0, |
173 r0, | 174 r0, |
174 r1, | 175 r1, |
175 lr, | 176 lr, |
176 s0); | 177 s0); |
177 __ Strd(r0, r1, MemOperand(r7, 8, PostIndex)); | 178 __ Strd(r0, r1, MemOperand(r7, 8, PostIndex)); |
178 } | 179 } |
179 __ b(&entry); | 180 __ b(&entry); |
180 | 181 |
181 // Hole found, store the-hole NaN. | 182 // Hole found, store the-hole NaN. |
182 __ bind(&convert_hole); | 183 __ bind(&convert_hole); |
183 if (FLAG_debug_code) { | 184 if (FLAG_debug_code) { |
185 // Restore a "smi-untagged" heap object. | |
186 __ SmiTag(r9); | |
187 __ orr(r9, r9, Operand(1)); | |
184 __ CompareRoot(r9, Heap::kTheHoleValueRootIndex); | 188 __ CompareRoot(r9, Heap::kTheHoleValueRootIndex); |
185 __ Assert(eq, "object found in smi-only array"); | 189 __ Assert(eq, "object found in smi-only array"); |
186 } | 190 } |
187 __ Strd(r4, r5, MemOperand(r7, 8, PostIndex)); | 191 __ Strd(r4, r5, MemOperand(r7, 8, PostIndex)); |
188 | 192 |
189 __ bind(&entry); | 193 __ bind(&entry); |
190 __ cmp(r7, r6); | 194 __ cmp(r7, r6); |
191 __ b(lt, &loop); | 195 __ b(lt, &loop); |
192 | 196 |
193 if (!vfp3_supported) __ Pop(r1, r0); | 197 if (!vfp3_supported) __ Pop(r1, r0); |
194 __ pop(lr); | 198 __ pop(lr); |
195 } | 199 } |
196 | 200 |
197 | 201 |
198 void ElementsTransitionGenerator::GenerateDoubleToObject( | 202 void ElementsTransitionGenerator::GenerateDoubleToObject( |
199 MacroAssembler* masm, Label* fail) { | 203 MacroAssembler* masm, Label* fail) { |
200 // ----------- S t a t e ------------- | 204 // ----------- S t a t e ------------- |
201 // -- r0 : value | 205 // -- r0 : value |
202 // -- r1 : key | 206 // -- r1 : key |
203 // -- r2 : receiver | 207 // -- r2 : receiver |
204 // -- lr : return address | 208 // -- lr : return address |
205 // -- r3 : target map, scratch for subsequent call | 209 // -- r3 : target map, scratch for subsequent call |
206 // -- r4 : scratch (elements) | 210 // -- r4 : scratch (elements) |
207 // ----------------------------------- | 211 // ----------------------------------- |
208 Label entry, loop, convert_hole, gc_required; | 212 Label entry, loop, convert_hole, gc_required; |
209 | 213 |
210 __ push(lr); | 214 __ push(lr); |
215 __ ldr(r4, FieldMemOperand(r2, JSObject::kElementsOffset)); | |
211 __ Push(r3, r2, r1, r0); | 216 __ Push(r3, r2, r1, r0); |
212 | |
213 __ ldr(r4, FieldMemOperand(r2, JSObject::kElementsOffset)); | |
214 __ ldr(r5, FieldMemOperand(r4, FixedArray::kLengthOffset)); | 217 __ ldr(r5, FieldMemOperand(r4, FixedArray::kLengthOffset)); |
215 // r4: source FixedDoubleArray | 218 // r4: source FixedDoubleArray |
216 // r5: number of elements (smi-tagged) | 219 // r5: number of elements (smi-tagged) |
217 | 220 |
218 // Allocate new FixedArray. | 221 // Allocate new FixedArray. |
219 __ mov(r0, Operand(FixedDoubleArray::kHeaderSize)); | 222 __ mov(r0, Operand(FixedDoubleArray::kHeaderSize)); |
220 __ add(r0, r0, Operand(r5, LSL, 1)); | 223 __ add(r0, r0, Operand(r5, LSL, 1)); |
221 __ AllocateInNewSpace(r0, r6, r7, r9, &gc_required, NO_ALLOCATION_FLAGS); | 224 __ AllocateInNewSpace(r0, r6, r7, r9, &gc_required, NO_ALLOCATION_FLAGS); |
222 // r6: destination FixedArray, not tagged as heap object | 225 // r6: destination FixedArray, not tagged as heap object |
226 // Set destination FixedDoubleArray's length and map. | |
223 __ LoadRoot(r9, Heap::kFixedArrayMapRootIndex); | 227 __ LoadRoot(r9, Heap::kFixedArrayMapRootIndex); |
228 __ str(r5, MemOperand(r6, FixedDoubleArray::kLengthOffset)); | |
224 __ str(r9, MemOperand(r6, HeapObject::kMapOffset)); | 229 __ str(r9, MemOperand(r6, HeapObject::kMapOffset)); |
225 // Set destination FixedDoubleArray's length. | |
226 __ str(r5, MemOperand(r6, FixedDoubleArray::kLengthOffset)); | |
227 | 230 |
228 // Prepare for conversion loop. | 231 // Prepare for conversion loop. |
229 __ add(r4, r4, Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag + 4)); | 232 __ add(r4, r4, Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag + 4)); |
230 __ add(r3, r6, Operand(FixedArray::kHeaderSize)); | 233 __ add(r3, r6, Operand(FixedArray::kHeaderSize)); |
231 __ add(r6, r6, Operand(kHeapObjectTag)); | 234 __ add(r6, r6, Operand(kHeapObjectTag)); |
232 __ add(r5, r3, Operand(r5, LSL, 1)); | 235 __ add(r5, r3, Operand(r5, LSL, 1)); |
233 __ LoadRoot(r7, Heap::kTheHoleValueRootIndex); | 236 __ LoadRoot(r7, Heap::kTheHoleValueRootIndex); |
234 __ LoadRoot(r9, Heap::kHeapNumberMapRootIndex); | 237 __ LoadRoot(r9, Heap::kHeapNumberMapRootIndex); |
235 // Using offsetted addresses in r4 to fully take advantage of post-indexing. | 238 // Using offsetted addresses in r4 to fully take advantage of post-indexing. |
236 // r3: begin of destination FixedArray element fields, not tagged | 239 // r3: begin of destination FixedArray element fields, not tagged |
(...skipping 81 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
318 __ b(eq, &check_sequential); | 321 __ b(eq, &check_sequential); |
319 | 322 |
320 // Dispatch on the indirect string shape: slice or cons. | 323 // Dispatch on the indirect string shape: slice or cons. |
321 Label cons_string; | 324 Label cons_string; |
322 __ tst(result, Operand(kSlicedNotConsMask)); | 325 __ tst(result, Operand(kSlicedNotConsMask)); |
323 __ b(eq, &cons_string); | 326 __ b(eq, &cons_string); |
324 | 327 |
325 // Handle slices. | 328 // Handle slices. |
326 Label indirect_string_loaded; | 329 Label indirect_string_loaded; |
327 __ ldr(result, FieldMemOperand(string, SlicedString::kOffsetOffset)); | 330 __ ldr(result, FieldMemOperand(string, SlicedString::kOffsetOffset)); |
331 __ ldr(string, FieldMemOperand(string, SlicedString::kParentOffset)); | |
328 __ add(index, index, Operand(result, ASR, kSmiTagSize)); | 332 __ add(index, index, Operand(result, ASR, kSmiTagSize)); |
329 __ ldr(string, FieldMemOperand(string, SlicedString::kParentOffset)); | |
330 __ jmp(&indirect_string_loaded); | 333 __ jmp(&indirect_string_loaded); |
331 | 334 |
332 // Handle cons strings. | 335 // Handle cons strings. |
333 // Check whether the right hand side is the empty string (i.e. if | 336 // Check whether the right hand side is the empty string (i.e. if |
334 // this is really a flat string in a cons string). If that is not | 337 // this is really a flat string in a cons string). If that is not |
335 // the case we would rather go to the runtime system now to flatten | 338 // the case we would rather go to the runtime system now to flatten |
336 // the string. | 339 // the string. |
337 __ bind(&cons_string); | 340 __ bind(&cons_string); |
338 __ ldr(result, FieldMemOperand(string, ConsString::kSecondOffset)); | 341 __ ldr(result, FieldMemOperand(string, ConsString::kSecondOffset)); |
339 __ LoadRoot(ip, Heap::kEmptyStringRootIndex); | 342 __ CompareRoot(result, Heap::kEmptyStringRootIndex); |
340 __ cmp(result, ip); | |
341 __ b(ne, call_runtime); | |
342 // Get the first of the two strings and load its instance type. | 343 // Get the first of the two strings and load its instance type. |
344 __ b(ne, call_runtime); // This branch refers to the previous CompareRoot. | |
ulan
2012/01/27 13:19:11
This branch should be above the comment in line 343.
 | |
343 __ ldr(string, FieldMemOperand(string, ConsString::kFirstOffset)); | 345 __ ldr(string, FieldMemOperand(string, ConsString::kFirstOffset)); |
344 | 346 |
345 __ bind(&indirect_string_loaded); | 347 __ bind(&indirect_string_loaded); |
346 __ ldr(result, FieldMemOperand(string, HeapObject::kMapOffset)); | 348 __ ldr(result, FieldMemOperand(string, HeapObject::kMapOffset)); |
347 __ ldrb(result, FieldMemOperand(result, Map::kInstanceTypeOffset)); | 349 __ ldrb(result, FieldMemOperand(result, Map::kInstanceTypeOffset)); |
348 | 350 |
349 // Distinguish sequential and external strings. Only these two string | 351 // Distinguish sequential and external strings. Only these two string |
350 // representations can reach here (slices and flat cons strings have been | 352 // representations can reach here (slices and flat cons strings have been |
351 // reduced to the underlying sequential or external string). | 353 // reduced to the underlying sequential or external string). |
352 Label external_string, check_encoding; | 354 Label external_string, check_encoding; |
(...skipping 35 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
388 // Ascii string. | 390 // Ascii string. |
389 __ ldrb(result, MemOperand(string, index)); | 391 __ ldrb(result, MemOperand(string, index)); |
390 __ bind(&done); | 392 __ bind(&done); |
391 } | 393 } |
392 | 394 |
393 #undef __ | 395 #undef __ |
394 | 396 |
395 } } // namespace v8::internal | 397 } } // namespace v8::internal |
396 | 398 |
397 #endif // V8_TARGET_ARCH_ARM | 399 #endif // V8_TARGET_ARCH_ARM |
OLD | NEW |