OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 325 matching lines...)
336 // Need to collect. Call into runtime system. | 336 // Need to collect. Call into runtime system. |
337 __ bind(&gc); | 337 __ bind(&gc); |
338 __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1); | 338 __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1); |
339 } | 339 } |
340 | 340 |
341 | 341 |
342 static void GenerateFastCloneShallowArrayCommon( | 342 static void GenerateFastCloneShallowArrayCommon( |
343 MacroAssembler* masm, | 343 MacroAssembler* masm, |
344 int length, | 344 int length, |
345 FastCloneShallowArrayStub::Mode mode, | 345 FastCloneShallowArrayStub::Mode mode, |
| 346 AllocationSiteInfoMode allocation_site_info_mode, |
346 Label* fail) { | 347 Label* fail) { |
347 // Registers on entry: | 348 // Registers on entry: |
348 // a3: boilerplate literal array. | 349 // a3: boilerplate literal array. |
349 ASSERT(mode != FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS); | 350 ASSERT(mode != FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS); |
350 | 351 |
351 // All sizes here are multiples of kPointerSize. | 352 // All sizes here are multiples of kPointerSize. |
352 int elements_size = 0; | 353 int elements_size = 0; |
353 if (length > 0) { | 354 if (length > 0) { |
354 elements_size = mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS | 355 elements_size = mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS |
355 ? FixedDoubleArray::SizeFor(length) | 356 ? FixedDoubleArray::SizeFor(length) |
356 : FixedArray::SizeFor(length); | 357 : FixedArray::SizeFor(length); |
357 } | 358 } |
358 int size = JSArray::kSize + elements_size; | 359 int size = JSArray::kSize; |
| 360 int allocation_info_start = size; |
| 361 if (allocation_site_info_mode == TRACK_ALLOCATION_SITE_INFO) { |
| 362 size += AllocationSiteInfo::kSize; |
| 363 } |
| 364 size += elements_size; |
359 | 365 |
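The refactored arithmetic on the new side reserves room for an AllocationSiteInfo record between the array header and its elements. A minimal sketch of that computation, with hypothetical constants standing in for the real values from V8's objects.h (MIPS32 word size assumed):

    #include <cstdio>

    const int kPointerSize = 4;                            // assumed: MIPS32
    const int kJSArraySize = 4 * kPointerSize;             // assumed: map, properties, elements, length
    const int kAllocationSiteInfoSize = 2 * kPointerSize;  // assumed: map word + payload word

    // Mirrors the new-side size computation: the info record, when present,
    // starts at allocation_info_start and pushes the elements past it.
    int TotalSize(int elements_size, bool track_allocation_site) {
      int size = kJSArraySize;
      int allocation_info_start = size;
      if (track_allocation_site) size += kAllocationSiteInfoSize;
      size += elements_size;
      std::printf("info at %d, total %d\n", allocation_info_start, size);
      return size;
    }

    int main() {
      // A 4-element FixedArray (two-word header assumed) is 24 bytes:
      TotalSize(24, true);  // prints "info at 16, total 48"
    }
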
360 // Allocate both the JS array and the elements array in one big | 366 // Allocate both the JS array and the elements array in one big |
361 // allocation. This avoids multiple limit checks. | 367 // allocation. This avoids multiple limit checks. |
362 __ AllocateInNewSpace(size, | 368 __ AllocateInNewSpace(size, |
363 v0, | 369 v0, |
364 a1, | 370 a1, |
365 a2, | 371 a2, |
366 fail, | 372 fail, |
367 TAG_OBJECT); | 373 TAG_OBJECT); |
368 | 374 |
| 375 if (allocation_site_info_mode == TRACK_ALLOCATION_SITE_INFO) { |
| 376 __ li(a2, Operand(Handle<Map>(masm->isolate()->heap()-> |
| 377 allocation_site_info_map()))); |
| 378 __ sw(a2, FieldMemOperand(v0, allocation_info_start)); |
| 379 __ sw(a3, FieldMemOperand(v0, allocation_info_start + kPointerSize)); |
| 380 } |
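Once AllocateInNewSpace hands back a tagged pointer in v0, these two stores stamp down the record: first its map, then a payload word pointing back at the boilerplate still held in a3. A hedged view of the two words being written (field order inferred from the stores, not from an authoritative V8 definition):

    // Hypothetical C++ view of what the two sw instructions lay down.
    struct AllocationSiteInfoRecord {
      void* map;          // allocation_site_info_map: marks this slot as site info
      void* boilerplate;  // a3: the literal boilerplate the array is cloned from
    };

Note that FieldMemOperand(v0, allocation_info_start) folds the heap-object tag out of v0, so allocation_info_start is a plain byte offset from the start of the object.
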
| 381 |
369 // Copy the JS array part. | 382 // Copy the JS array part. |
370 for (int i = 0; i < JSArray::kSize; i += kPointerSize) { | 383 for (int i = 0; i < JSArray::kSize; i += kPointerSize) { |
371 if ((i != JSArray::kElementsOffset) || (length == 0)) { | 384 if ((i != JSArray::kElementsOffset) || (length == 0)) { |
372 __ lw(a1, FieldMemOperand(a3, i)); | 385 __ lw(a1, FieldMemOperand(a3, i)); |
373 __ sw(a1, FieldMemOperand(v0, i)); | 386 __ sw(a1, FieldMemOperand(v0, i)); |
374 } | 387 } |
375 } | 388 } |
376 | 389 |
377 if (length > 0) { | 390 if (length > 0) { |
378 // Get hold of the elements array of the boilerplate and set up the | 391 // Get hold of the elements array of the boilerplate and set up the |
379 // elements pointer in the resulting object. | 392 // elements pointer in the resulting object. |
380 __ lw(a3, FieldMemOperand(a3, JSArray::kElementsOffset)); | 393 __ lw(a3, FieldMemOperand(a3, JSArray::kElementsOffset)); |
381 __ Addu(a2, v0, Operand(JSArray::kSize)); | 394 if (allocation_site_info_mode == TRACK_ALLOCATION_SITE_INFO) { |
| 395 __ Addu(a2, v0, Operand(JSArray::kSize + AllocationSiteInfo::kSize)); |
| 396 } else { |
| 397 __ Addu(a2, v0, Operand(JSArray::kSize)); |
| 398 } |
382 __ sw(a2, FieldMemOperand(v0, JSArray::kElementsOffset)); | 399 __ sw(a2, FieldMemOperand(v0, JSArray::kElementsOffset)); |
383 | 400 |
384 // Copy the elements array. | 401 // Copy the elements array. |
385 ASSERT((elements_size % kPointerSize) == 0); | 402 ASSERT((elements_size % kPointerSize) == 0); |
386 __ CopyFields(a2, a3, a1.bit(), elements_size / kPointerSize); | 403 __ CopyFields(a2, a3, a1.bit(), elements_size / kPointerSize); |
387 } | 404 } |
388 } | 405 } |
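When tracking is enabled the cloned elements no longer begin at JSArray::kSize, so the elements pointer written back into the header must skip the info record; that is all the new if/else does. The offset choice, restated as a self-contained sketch (sizes assumed as before):

    const int kJSArraySize = 16;            // assumed: 4 words on MIPS32
    const int kAllocationSiteInfoSize = 8;  // assumed: map + payload

    // a2 = v0 + ElementsOffset(...); v0 keeps its heap-object tag, so the
    // stored elements pointer is tagged exactly like v0 itself.
    int ElementsOffset(bool track_allocation_site) {
      return track_allocation_site ? kJSArraySize + kAllocationSiteInfoSize
                                   : kJSArraySize;
    }
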
389 | 406 |
390 void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) { | 407 void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) { |
391 // Stack layout on entry: | 408 // Stack layout on entry: |
392 // | 409 // |
393 // [sp]: constant elements. | 410 // [sp]: constant elements. |
394 // [sp + kPointerSize]: literal index. | 411 // [sp + kPointerSize]: literal index. |
395 // [sp + (2 * kPointerSize)]: literals array. | 412 // [sp + (2 * kPointerSize)]: literals array. |
396 | 413 |
397 // Load boilerplate object into a3 and check if we need to create a | 414 // Load boilerplate object into a3 and check if we need to create a |
398 // boilerplate. | 415 // boilerplate. |
399 Label slow_case; | 416 Label slow_case; |
400 __ lw(a3, MemOperand(sp, 2 * kPointerSize)); | 417 __ lw(a3, MemOperand(sp, 2 * kPointerSize)); |
401 __ lw(a0, MemOperand(sp, 1 * kPointerSize)); | 418 __ lw(a0, MemOperand(sp, 1 * kPointerSize)); |
402 __ Addu(a3, a3, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 419 __ Addu(a3, a3, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
403 __ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize); | 420 __ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize); |
404 __ Addu(t0, a3, t0); | 421 __ Addu(t0, a3, t0); |
405 __ lw(a3, MemOperand(t0)); | 422 __ lw(a3, MemOperand(t0)); |
406 __ LoadRoot(t1, Heap::kUndefinedValueRootIndex); | 423 __ LoadRoot(t1, Heap::kUndefinedValueRootIndex); |
407 __ Branch(&slow_case, eq, a3, Operand(t1)); | 424 __ Branch(&slow_case, eq, a3, Operand(t1)); |
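The literal index is loaded as a smi, which on 32-bit targets already carries one left shift, so the single sll above finishes the job: shifting a smi by kPointerSizeLog2 - kSmiTagSize yields the same byte offset as scaling the raw index by kPointerSize. A quick check of that identity (constants assumed for MIPS32):

    #include <cassert>

    int main() {
      const int kSmiTagSize = 1;       // assumed: 32-bit smi is value << 1
      const int kPointerSizeLog2 = 2;  // assumed: kPointerSize == 4
      for (int index = 0; index < 1000; ++index) {
        int smi = index << kSmiTagSize;                        // tagged index (a0)
        int offset = smi << (kPointerSizeLog2 - kSmiTagSize);  // the sll above
        assert(offset == index * 4);                           // index * kPointerSize
      }
    }
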
408 | 425 |
409 FastCloneShallowArrayStub::Mode mode = mode_; | 426 FastCloneShallowArrayStub::Mode mode = mode_; |
| 427 AllocationSiteInfoMode allocation_site_info_mode = |
| 428 DONT_TRACK_ALLOCATION_SITE_INFO; |
| 429 if (mode == CLONE_ANY_ELEMENTS_WITH_ALLOCATION_SITE_INFO) { |
| 430 mode = CLONE_ANY_ELEMENTS; |
| 431 allocation_site_info_mode = TRACK_ALLOCATION_SITE_INFO; |
| 432 } |
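The stub encodes "also track the allocation site" as an extra Mode enumerator rather than a separate parameter, and the new block unpacks it before the dispatch so the rest of the function only ever sees the plain clone modes. A sketch of that decode (enumerator names taken from the code here; their order and the full set are assumptions):

    enum Mode {
      CLONE_ELEMENTS,
      CLONE_DOUBLE_ELEMENTS,
      COPY_ON_WRITE_ELEMENTS,
      CLONE_ANY_ELEMENTS,
      CLONE_ANY_ELEMENTS_WITH_ALLOCATION_SITE_INFO
    };
    enum AllocationSiteInfoMode {
      DONT_TRACK_ALLOCATION_SITE_INFO,
      TRACK_ALLOCATION_SITE_INFO
    };

    // Splits the combined enumerator back into (mode, tracking flag).
    void DecodeCloneMode(Mode* mode, AllocationSiteInfoMode* tracking) {
      *tracking = DONT_TRACK_ALLOCATION_SITE_INFO;
      if (*mode == CLONE_ANY_ELEMENTS_WITH_ALLOCATION_SITE_INFO) {
        *mode = CLONE_ANY_ELEMENTS;
        *tracking = TRACK_ALLOCATION_SITE_INFO;
      }
    }
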
410 if (mode == CLONE_ANY_ELEMENTS) { | 433 if (mode == CLONE_ANY_ELEMENTS) { |
411 Label double_elements, check_fast_elements; | 434 Label double_elements, check_fast_elements; |
412 __ lw(v0, FieldMemOperand(a3, JSArray::kElementsOffset)); | 435 __ lw(v0, FieldMemOperand(a3, JSArray::kElementsOffset)); |
413 __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset)); | 436 __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset)); |
414 __ LoadRoot(t1, Heap::kFixedCOWArrayMapRootIndex); | 437 __ LoadRoot(t1, Heap::kFixedCOWArrayMapRootIndex); |
415 __ Branch(&check_fast_elements, ne, v0, Operand(t1)); | 438 __ Branch(&check_fast_elements, ne, v0, Operand(t1)); |
416 GenerateFastCloneShallowArrayCommon(masm, 0, | 439 GenerateFastCloneShallowArrayCommon(masm, 0, |
417 COPY_ON_WRITE_ELEMENTS, &slow_case); | 440 COPY_ON_WRITE_ELEMENTS, |
| 441 allocation_site_info_mode, |
| 442 &slow_case); |
418 // Return and remove the on-stack parameters. | 443 // Return and remove the on-stack parameters. |
419 __ DropAndRet(3); | 444 __ DropAndRet(3); |
420 | 445 |
421 __ bind(&check_fast_elements); | 446 __ bind(&check_fast_elements); |
422 __ LoadRoot(t1, Heap::kFixedArrayMapRootIndex); | 447 __ LoadRoot(t1, Heap::kFixedArrayMapRootIndex); |
423 __ Branch(&double_elements, ne, v0, Operand(t1)); | 448 __ Branch(&double_elements, ne, v0, Operand(t1)); |
424 GenerateFastCloneShallowArrayCommon(masm, length_, | 449 GenerateFastCloneShallowArrayCommon(masm, length_, |
425 CLONE_ELEMENTS, &slow_case); | 450 CLONE_ELEMENTS, |
| 451 allocation_site_info_mode, |
| 452 &slow_case); |
426 // Return and remove the on-stack parameters. | 453 // Return and remove the on-stack parameters. |
427 __ DropAndRet(3); | 454 __ DropAndRet(3); |
428 | 455 |
429 __ bind(&double_elements); | 456 __ bind(&double_elements); |
430 mode = CLONE_DOUBLE_ELEMENTS; | 457 mode = CLONE_DOUBLE_ELEMENTS; |
431 // Fall through to generate the code to handle double elements. | 458 // Fall through to generate the code to handle double elements. |
432 } | 459 } |
433 | 460 |
434 if (FLAG_debug_code) { | 461 if (FLAG_debug_code) { |
435 const char* message; | 462 const char* message; |
(...skipping 10 matching lines...)
446 expected_map_index = Heap::kFixedCOWArrayMapRootIndex; | 473 expected_map_index = Heap::kFixedCOWArrayMapRootIndex; |
447 } | 474 } |
448 __ push(a3); | 475 __ push(a3); |
449 __ lw(a3, FieldMemOperand(a3, JSArray::kElementsOffset)); | 476 __ lw(a3, FieldMemOperand(a3, JSArray::kElementsOffset)); |
450 __ lw(a3, FieldMemOperand(a3, HeapObject::kMapOffset)); | 477 __ lw(a3, FieldMemOperand(a3, HeapObject::kMapOffset)); |
451 __ LoadRoot(at, expected_map_index); | 478 __ LoadRoot(at, expected_map_index); |
452 __ Assert(eq, message, a3, Operand(at)); | 479 __ Assert(eq, message, a3, Operand(at)); |
453 __ pop(a3); | 480 __ pop(a3); |
454 } | 481 } |
455 | 482 |
456 GenerateFastCloneShallowArrayCommon(masm, length_, mode, &slow_case); | 483 GenerateFastCloneShallowArrayCommon(masm, length_, mode, |
| 484 allocation_site_info_mode, &slow_case); |
457 | 485 |
458 // Return and remove the on-stack parameters. | 486 // Return and remove the on-stack parameters. |
459 __ DropAndRet(3); | 487 __ DropAndRet(3); |
460 | 488 |
461 __ bind(&slow_case); | 489 __ bind(&slow_case); |
462 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1); | 490 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1); |
463 } | 491 } |
464 | 492 |
465 | 493 |
466 void FastCloneShallowObjectStub::Generate(MacroAssembler* masm) { | 494 void FastCloneShallowObjectStub::Generate(MacroAssembler* masm) { |
(...skipping 7374 matching lines...)
7841 __ Pop(ra, t1, a1); | 7869 __ Pop(ra, t1, a1); |
7842 __ Ret(); | 7870 __ Ret(); |
7843 } | 7871 } |
7844 | 7872 |
7845 | 7873 |
7846 #undef __ | 7874 #undef __ |
7847 | 7875 |
7848 } } // namespace v8::internal | 7876 } } // namespace v8::internal |
7849 | 7877 |
7850 #endif // V8_TARGET_ARCH_MIPS | 7878 #endif // V8_TARGET_ARCH_MIPS |