OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 237 matching lines...)
248 __ mov(cp, r0); | 248 __ mov(cp, r0); |
249 __ add(sp, sp, Operand(2 * kPointerSize)); | 249 __ add(sp, sp, Operand(2 * kPointerSize)); |
250 __ Ret(); | 250 __ Ret(); |
251 | 251 |
252 // Need to collect. Call into runtime system. | 252 // Need to collect. Call into runtime system. |
253 __ bind(&gc); | 253 __ bind(&gc); |
254 __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1); | 254 __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1); |
255 } | 255 } |
256 | 256 |
257 | 257 |
| 258 static void GenerateFastCloneShallowArrayCommon( |
| 259 MacroAssembler* masm, |
| 260 int length, |
| 261 FastCloneShallowArrayStub::Mode mode, |
| 262 Label* fail) { |
| 263 // Registers on entry: |
| 264 // |
| 265 // r3: boilerplate literal array. |
| 266 ASSERT(mode != FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS); |
| 267 |
| 268 // All sizes here are multiples of kPointerSize. |
| 269 int elements_size = 0; |
| 270 if (length > 0) { |
| 271 elements_size = mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS |
| 272 ? FixedDoubleArray::SizeFor(length) |
| 273 : FixedArray::SizeFor(length); |
| 274 } |
| 275 int size = JSArray::kSize + elements_size; |
| 276 |
| 277 // Allocate both the JS array and the elements array in one big |
| 278 // allocation. This avoids multiple limit checks. |
| 279 __ AllocateInNewSpace(size, |
| 280 r0, |
| 281 r1, |
| 282 r2, |
| 283 fail, |
| 284 TAG_OBJECT); |
| 285 |
| 286 // Copy the JS array part. |
| 287 for (int i = 0; i < JSArray::kSize; i += kPointerSize) { |
| 288 if ((i != JSArray::kElementsOffset) || (length == 0)) { |
| 289 __ ldr(r1, FieldMemOperand(r3, i)); |
| 290 __ str(r1, FieldMemOperand(r0, i)); |
| 291 } |
| 292 } |
| 293 |
| 294 if (length > 0) { |
| 295 // Get hold of the elements array of the boilerplate and set up the |
| 296 // elements pointer in the resulting object. |
| 297 __ ldr(r3, FieldMemOperand(r3, JSArray::kElementsOffset)); |
| 298 __ add(r2, r0, Operand(JSArray::kSize)); |
| 299 __ str(r2, FieldMemOperand(r0, JSArray::kElementsOffset)); |
| 300 |
| 301 // Copy the elements array. |
| 302 ASSERT((elements_size % kPointerSize) == 0); |
| 303 __ CopyFields(r2, r3, r1.bit(), elements_size / kPointerSize); |
| 304 } |
| 305 } |
| 306 |
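Note: as a reading aid, the fast path emitted by GenerateFastCloneShallowArrayCommon corresponds roughly to the standalone C++ sketch below. It uses a plain-word model (raw word arrays standing in for tagged objects) and hypothetical parameter names, so it is not V8 API; the real stub additionally tags the allocation result (TAG_OBJECT) and branches to the fail label when new space is exhausted.

    // Standalone sketch (plain-word model, hypothetical names; not V8 API) of
    // the clone the emitted fast path performs: one allocation holding the
    // JSArray header followed by its elements, header copied word by word with
    // the elements slot skipped, then the elements copied and the slot patched
    // to point at the new backing store.
    #include <cstddef>
    #include <cstdint>
    #include <cstring>

    static void CloneShallowArraySketch(uintptr_t* dst,
                                        const uintptr_t* boilerplate,
                                        size_t header_words,
                                        size_t elements_slot,
                                        size_t element_words) {
      // Copy the JSArray header; keep the boilerplate's elements pointer only
      // when there is nothing to copy (length == 0, e.g. shared COW store).
      for (size_t i = 0; i < header_words; ++i) {
        if (i != elements_slot || element_words == 0) dst[i] = boilerplate[i];
      }
      if (element_words > 0) {
        uintptr_t* new_elements = dst + header_words;  // same allocation
        const uintptr_t* old_elements =
            reinterpret_cast<const uintptr_t*>(boilerplate[elements_slot]);
        dst[elements_slot] = reinterpret_cast<uintptr_t>(new_elements);
        std::memcpy(new_elements, old_elements,
                    element_words * sizeof(uintptr_t));
      }
    }

Allocating header and elements in one bump, as the comment above says, is what lets the generated code get away with a single new-space limit check.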
258 void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) { | 307 void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) { |
259 // Stack layout on entry: | 308 // Stack layout on entry: |
260 // | 309 // |
261 // [sp]: constant elements. | 310 // [sp]: constant elements. |
262 // [sp + kPointerSize]: literal index. | 311 // [sp + kPointerSize]: literal index. |
263 // [sp + (2 * kPointerSize)]: literals array. | 312 // [sp + (2 * kPointerSize)]: literals array. |
264 | 313 |
265 // All sizes here are multiples of kPointerSize. | |
266 int elements_size = 0; | |
267 if (length_ > 0) { | |
268 elements_size = mode_ == CLONE_DOUBLE_ELEMENTS | |
269 ? FixedDoubleArray::SizeFor(length_) | |
270 : FixedArray::SizeFor(length_); | |
271 } | |
272 int size = JSArray::kSize + elements_size; | |
273 | |
274 // Load boilerplate object into r3 and check if we need to create a | 314 // Load boilerplate object into r3 and check if we need to create a |
275 // boilerplate. | 315 // boilerplate. |
276 Label slow_case; | 316 Label slow_case; |
277 __ ldr(r3, MemOperand(sp, 2 * kPointerSize)); | 317 __ ldr(r3, MemOperand(sp, 2 * kPointerSize)); |
278 __ ldr(r0, MemOperand(sp, 1 * kPointerSize)); | 318 __ ldr(r0, MemOperand(sp, 1 * kPointerSize)); |
279 __ add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 319 __ add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
280 __ ldr(r3, MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize)); | 320 __ ldr(r3, MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize)); |
281 __ CompareRoot(r3, Heap::kUndefinedValueRootIndex); | 321 __ CompareRoot(r3, Heap::kUndefinedValueRootIndex); |
282 __ b(eq, &slow_case); | 322 __ b(eq, &slow_case); |
283 | 323 |
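Note: the scaled load above, ldr(r3, MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize)), indexes the literals FixedArray with the still Smi-tagged literal index. A standalone sketch of the arithmetic, under the assumed 32-bit constants kSmiTagSize == 1 and kPointerSizeLog2 == 2 (assumptions, not taken from this file):

    // Standalone sketch of the Smi-index scaling in
    //   ldr(r3, MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize))
    // assuming kSmiTagSize == 1 and kPointerSizeLog2 == 2 (32-bit ARM).
    #include <cstdint>

    static uint32_t SmiIndexToByteOffset(uint32_t smi_tagged_index) {
      const int kSmiTagSize = 1;       // a Smi stores (value << 1)
      const int kPointerSizeLog2 = 2;  // 4-byte pointers
      // (value << 1) << (2 - 1) == value << 2 == value * kPointerSize,
      // so the tagged index becomes a byte offset without untagging first.
      return smi_tagged_index << (kPointerSizeLog2 - kSmiTagSize);
    }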
| 324 FastCloneShallowArrayStub::Mode mode = mode_; |
| 325 if (mode == CLONE_ANY_ELEMENTS) { |
| 326 Label double_elements, check_fast_elements; |
| 327 __ ldr(r0, FieldMemOperand(r3, JSArray::kElementsOffset)); |
| 328 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); |
| 329 __ LoadRoot(ip, Heap::kFixedCOWArrayMapRootIndex); |
| 330 __ cmp(r0, ip); |
| 331 __ b(ne, &check_fast_elements); |
| 332 GenerateFastCloneShallowArrayCommon(masm, 0, |
| 333 COPY_ON_WRITE_ELEMENTS, &slow_case); |
| 334 // Return and remove the on-stack parameters. |
| 335 __ add(sp, sp, Operand(3 * kPointerSize)); |
| 336 __ Ret(); |
| 337 |
| 338 __ bind(&check_fast_elements); |
| 339 __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex); |
| 340 __ cmp(r0, ip); |
| 341 __ b(ne, &double_elements); |
| 342 GenerateFastCloneShallowArrayCommon(masm, length_, |
| 343 CLONE_ELEMENTS, &slow_case); |
| 344 // Return and remove the on-stack parameters. |
| 345 __ add(sp, sp, Operand(3 * kPointerSize)); |
| 346 __ Ret(); |
| 347 |
| 348 __ bind(&double_elements); |
| 349 mode = CLONE_DOUBLE_ELEMENTS; |
| 350 // Fall through to generate the code to handle double elements. |
| 351 } |
| 352 |
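Note: the new CLONE_ANY_ELEMENTS branch above dispatches on the boilerplate's elements map at runtime. The decision it encodes is roughly the standalone sketch below; the enum and function names are illustrative only, not V8's.

    // Standalone sketch of the runtime element-kind dispatch performed for
    // CLONE_ANY_ELEMENTS. Enum and function names are illustrative, not V8's.
    enum CloneModeSketch { kCopyOnWrite, kCloneElements, kCloneDoubleElements };
    enum ElementsMapSketch { kFixedCOWArrayMap, kFixedArrayMap, kOtherMap };

    static CloneModeSketch PickCloneMode(ElementsMapSketch elements_map) {
      if (elements_map == kFixedCOWArrayMap) {
        return kCopyOnWrite;        // share the backing store, clone header only
      }
      if (elements_map == kFixedArrayMap) {
        return kCloneElements;      // writable fast elements: copy them
      }
      return kCloneDoubleElements;  // fall through: treat as double elements
    }

Copy-on-write backing stores are shared rather than copied, which is why that branch calls the common helper with length 0.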
284 if (FLAG_debug_code) { | 353 if (FLAG_debug_code) { |
285 const char* message; | 354 const char* message; |
286 Heap::RootListIndex expected_map_index; | 355 Heap::RootListIndex expected_map_index; |
287 if (mode_ == CLONE_ELEMENTS) { | 356 if (mode == CLONE_ELEMENTS) { |
288 message = "Expected (writable) fixed array"; | 357 message = "Expected (writable) fixed array"; |
289 expected_map_index = Heap::kFixedArrayMapRootIndex; | 358 expected_map_index = Heap::kFixedArrayMapRootIndex; |
290 } else if (mode_ == CLONE_DOUBLE_ELEMENTS) { | 359 } else if (mode == CLONE_DOUBLE_ELEMENTS) { |
291 message = "Expected (writable) fixed double array"; | 360 message = "Expected (writable) fixed double array"; |
292 expected_map_index = Heap::kFixedDoubleArrayMapRootIndex; | 361 expected_map_index = Heap::kFixedDoubleArrayMapRootIndex; |
293 } else { | 362 } else { |
294 ASSERT(mode_ == COPY_ON_WRITE_ELEMENTS); | 363 ASSERT(mode == COPY_ON_WRITE_ELEMENTS); |
295 message = "Expected copy-on-write fixed array"; | 364 message = "Expected copy-on-write fixed array"; |
296 expected_map_index = Heap::kFixedCOWArrayMapRootIndex; | 365 expected_map_index = Heap::kFixedCOWArrayMapRootIndex; |
297 } | 366 } |
298 __ push(r3); | 367 __ push(r3); |
299 __ ldr(r3, FieldMemOperand(r3, JSArray::kElementsOffset)); | 368 __ ldr(r3, FieldMemOperand(r3, JSArray::kElementsOffset)); |
300 __ ldr(r3, FieldMemOperand(r3, HeapObject::kMapOffset)); | 369 __ ldr(r3, FieldMemOperand(r3, HeapObject::kMapOffset)); |
301 __ CompareRoot(r3, expected_map_index); | 370 __ CompareRoot(r3, expected_map_index); |
302 __ Assert(eq, message); | 371 __ Assert(eq, message); |
303 __ pop(r3); | 372 __ pop(r3); |
304 } | 373 } |
305 | 374 |
306 // Allocate both the JS array and the elements array in one big | 375 GenerateFastCloneShallowArrayCommon(masm, length_, mode, &slow_case); |
307 // allocation. This avoids multiple limit checks. | |
308 __ AllocateInNewSpace(size, | |
309 r0, | |
310 r1, | |
311 r2, | |
312 &slow_case, | |
313 TAG_OBJECT); | |
314 | |
315 // Copy the JS array part. | |
316 for (int i = 0; i < JSArray::kSize; i += kPointerSize) { | |
317 if ((i != JSArray::kElementsOffset) || (length_ == 0)) { | |
318 __ ldr(r1, FieldMemOperand(r3, i)); | |
319 __ str(r1, FieldMemOperand(r0, i)); | |
320 } | |
321 } | |
322 | |
323 if (length_ > 0) { | |
324 // Get hold of the elements array of the boilerplate and setup the | |
325 // elements pointer in the resulting object. | |
326 __ ldr(r3, FieldMemOperand(r3, JSArray::kElementsOffset)); | |
327 __ add(r2, r0, Operand(JSArray::kSize)); | |
328 __ str(r2, FieldMemOperand(r0, JSArray::kElementsOffset)); | |
329 | |
330 // Copy the elements array. | |
331 ASSERT((elements_size % kPointerSize) == 0); | |
332 __ CopyFields(r2, r3, r1.bit(), elements_size / kPointerSize); | |
333 } | |
334 | 376 |
335 // Return and remove the on-stack parameters. | 377 // Return and remove the on-stack parameters. |
336 __ add(sp, sp, Operand(3 * kPointerSize)); | 378 __ add(sp, sp, Operand(3 * kPointerSize)); |
337 __ Ret(); | 379 __ Ret(); |
338 | 380 |
339 __ bind(&slow_case); | 381 __ bind(&slow_case); |
340 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1); | 382 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1); |
341 } | 383 } |
342 | 384 |
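Note: for context on the "Return and remove the on-stack parameters" sites, the stub's incoming frame holds three pointer-sized arguments, which the fast path drops explicitly before returning while the slow path leaves them for TailCallRuntime to consume. A standalone layout sketch, assuming 32-bit ARM with kPointerSize == 4:

    // Standalone sketch of the stub's incoming stack layout (assumed
    // kPointerSize == 4); offsets mirror the MemOperand(sp, ...) loads at the
    // top of FastCloneShallowArrayStub::Generate.
    struct FastCloneShallowArrayArgsSketch {
      void* constant_elements;  // [sp + 0 * kPointerSize]
      void* literal_index;      // [sp + 1 * kPointerSize], Smi-tagged
      void* literals_array;     // [sp + 2 * kPointerSize]
    };
    // Fast path: add(sp, sp, Operand(3 * kPointerSize)) pops all three, then Ret().
    // Slow path: TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1)
    // hands the same three arguments to the runtime instead.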
343 | 385 |
(...skipping 6862 matching lines...)
7206 __ StoreNumberToDoubleElements(r0, r3, r1, r5, r6, r7, r9, r10, | 7248 __ StoreNumberToDoubleElements(r0, r3, r1, r5, r6, r7, r9, r10, |
7207 &slow_elements); | 7249 &slow_elements); |
7208 __ Ret(); | 7250 __ Ret(); |
7209 } | 7251 } |
7210 | 7252 |
7211 #undef __ | 7253 #undef __ |
7212 | 7254 |
7213 } } // namespace v8::internal | 7255 } } // namespace v8::internal |
7214 | 7256 |
7215 #endif // V8_TARGET_ARCH_ARM | 7257 #endif // V8_TARGET_ARCH_ARM |