OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_ARM | 7 #if V8_TARGET_ARCH_ARM |
8 | 8 |
9 #include "src/arm/simulator-arm.h" | 9 #include "src/arm/simulator-arm.h" |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 356 matching lines...)
367 masm->set_has_frame(false); | 367 masm->set_has_frame(false); |
368 } | 368 } |
369 | 369 |
370 | 370 |
371 // ------------------------------------------------------------------------- | 371 // ------------------------------------------------------------------------- |
372 // Code generators | 372 // Code generators |
373 | 373 |
374 #define __ ACCESS_MASM(masm) | 374 #define __ ACCESS_MASM(masm) |
375 | 375 |
376 void ElementsTransitionGenerator::GenerateMapChangeElementsTransition( | 376 void ElementsTransitionGenerator::GenerateMapChangeElementsTransition( |
377 MacroAssembler* masm, AllocationSiteMode mode, | 377 MacroAssembler* masm, |
| 378 Register receiver, |
| 379 Register key, |
| 380 Register value, |
| 381 Register target_map, |
| 382 AllocationSiteMode mode, |
378 Label* allocation_memento_found) { | 383 Label* allocation_memento_found) { |
379 // ----------- S t a t e ------------- | 384 Register scratch_elements = r4; |
380 // -- r0 : value | 385 ASSERT(!AreAliased(receiver, key, value, target_map, |
381 // -- r1 : key | 386 scratch_elements)); |
382 // -- r2 : receiver | 387 |
383 // -- lr : return address | |
384 // -- r3 : target map, scratch for subsequent call | |
385 // -- r4 : scratch (elements) | |
386 // ----------------------------------- | |
387 if (mode == TRACK_ALLOCATION_SITE) { | 388 if (mode == TRACK_ALLOCATION_SITE) { |
388 ASSERT(allocation_memento_found != NULL); | 389 ASSERT(allocation_memento_found != NULL); |
389 __ JumpIfJSArrayHasAllocationMemento(r2, r4, allocation_memento_found); | 390 __ JumpIfJSArrayHasAllocationMemento( |
| 391 receiver, scratch_elements, allocation_memento_found); |
390 } | 392 } |
391 | 393 |
392 // Set transitioned map. | 394 // Set transitioned map. |
393 __ str(r3, FieldMemOperand(r2, HeapObject::kMapOffset)); | 395 __ str(target_map, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
394 __ RecordWriteField(r2, | 396 __ RecordWriteField(receiver, |
395 HeapObject::kMapOffset, | 397 HeapObject::kMapOffset, |
396 r3, | 398 target_map, |
397 r9, | 399 r9, |
398 kLRHasNotBeenSaved, | 400 kLRHasNotBeenSaved, |
399 kDontSaveFPRegs, | 401 kDontSaveFPRegs, |
400 EMIT_REMEMBERED_SET, | 402 EMIT_REMEMBERED_SET, |
401 OMIT_SMI_CHECK); | 403 OMIT_SMI_CHECK); |
402 } | 404 } |
403 | 405 |
404 | 406 |
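
Note: the transition generators now take receiver, key, value and target map as explicit Register parameters instead of relying on the fixed r0-r4 convention documented in the old state comments, and they guard against accidental overlap with ASSERT(!AreAliased(...)). The following is a minimal, self-contained sketch of what such an aliasing guard amounts to, assuming registers can be compared by a register code; it is illustrative only, not V8's actual AreAliased implementation, and RegsAlias/Reg are hypothetical names.

// Hypothetical aliasing guard in the spirit of ASSERT(!AreAliased(...)).
#include <cassert>
#include <initializer_list>

struct Reg { int code; };  // stand-in for a machine register id

static bool RegsAlias(std::initializer_list<Reg> regs) {
  for (const Reg* i = regs.begin(); i != regs.end(); ++i) {
    for (const Reg* j = i + 1; j != regs.end(); ++j) {
      if (i->code == j->code) return true;  // two parameters name the same register
    }
  }
  return false;
}

// Usage mirroring the generator's precondition:
//   assert(!RegsAlias({receiver, key, value, target_map, scratch_elements}));
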
405 void ElementsTransitionGenerator::GenerateSmiToDouble( | 407 void ElementsTransitionGenerator::GenerateSmiToDouble( |
406 MacroAssembler* masm, AllocationSiteMode mode, Label* fail) { | 408 MacroAssembler* masm, |
407 // ----------- S t a t e ------------- | 409 Register receiver, |
408 // -- r0 : value | 410 Register key, |
409 // -- r1 : key | 411 Register value, |
410 // -- r2 : receiver | 412 Register target_map, |
411 // -- lr : return address | 413 AllocationSiteMode mode, |
412 // -- r3 : target map, scratch for subsequent call | 414 Label* fail) { |
413 // -- r4 : scratch (elements) | 415 // Register lr contains the return address. |
414 // ----------------------------------- | |
415 Label loop, entry, convert_hole, gc_required, only_change_map, done; | 416 Label loop, entry, convert_hole, gc_required, only_change_map, done; |
| 417 Register elements = r4; |
| 418 Register length = r5; |
| 419 Register array = r6; |
| 420 Register array_end = array; |
| 421 |
| 422 // target_map parameter can be clobbered. |
| 423 Register scratch1 = target_map; |
| 424 Register scratch2 = r9; |
| 425 |
| 426 // Verify input registers don't conflict with locals. |
| 427 ASSERT(!AreAliased(receiver, key, value, target_map, |
| 428 elements, length, array, scratch2)); |
416 | 429 |
417 if (mode == TRACK_ALLOCATION_SITE) { | 430 if (mode == TRACK_ALLOCATION_SITE) { |
418 __ JumpIfJSArrayHasAllocationMemento(r2, r4, fail); | 431 __ JumpIfJSArrayHasAllocationMemento(receiver, elements, fail); |
419 } | 432 } |
420 | 433 |
421 // Check for empty arrays, which only require a map transition and no changes | 434 // Check for empty arrays, which only require a map transition and no changes |
422 // to the backing store. | 435 // to the backing store. |
423 __ ldr(r4, FieldMemOperand(r2, JSObject::kElementsOffset)); | 436 __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset)); |
424 __ CompareRoot(r4, Heap::kEmptyFixedArrayRootIndex); | 437 __ CompareRoot(elements, Heap::kEmptyFixedArrayRootIndex); |
425 __ b(eq, &only_change_map); | 438 __ b(eq, &only_change_map); |
426 | 439 |
427 __ push(lr); | 440 __ push(lr); |
428 __ ldr(r5, FieldMemOperand(r4, FixedArray::kLengthOffset)); | 441 __ ldr(length, FieldMemOperand(elements, FixedArray::kLengthOffset)); |
429 // r5: number of elements (smi-tagged) | 442 // length: number of elements (smi-tagged) |
430 | 443 |
431 // Allocate new FixedDoubleArray. | 444 // Allocate new FixedDoubleArray. |
432 // Use lr as a temporary register. | 445 // Use lr as a temporary register. |
433 __ mov(lr, Operand(r5, LSL, 2)); | 446 __ mov(lr, Operand(length, LSL, 2)); |
434 __ add(lr, lr, Operand(FixedDoubleArray::kHeaderSize)); | 447 __ add(lr, lr, Operand(FixedDoubleArray::kHeaderSize)); |
435 __ Allocate(lr, r6, r4, r9, &gc_required, DOUBLE_ALIGNMENT); | 448 __ Allocate(lr, array, elements, scratch2, &gc_required, DOUBLE_ALIGNMENT); |
436 // r6: destination FixedDoubleArray, not tagged as heap object. | 449 // array: destination FixedDoubleArray, not tagged as heap object. |
437 __ ldr(r4, FieldMemOperand(r2, JSObject::kElementsOffset)); | 450 __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset)); |
438 // r4: source FixedArray. | 451 // elements: source FixedArray. |
439 | 452 |
440 // Set destination FixedDoubleArray's length and map. | 453 // Set destination FixedDoubleArray's length and map. |
441 __ LoadRoot(r9, Heap::kFixedDoubleArrayMapRootIndex); | 454 __ LoadRoot(scratch2, Heap::kFixedDoubleArrayMapRootIndex); |
442 __ str(r5, MemOperand(r6, FixedDoubleArray::kLengthOffset)); | 455 __ str(length, MemOperand(array, FixedDoubleArray::kLengthOffset)); |
443 // Update receiver's map. | 456 // Set destination FixedDoubleArray's map. |
444 __ str(r9, MemOperand(r6, HeapObject::kMapOffset)); | 457 __ str(scratch2, MemOperand(array, HeapObject::kMapOffset)); |
445 | 458 |
446 __ str(r3, FieldMemOperand(r2, HeapObject::kMapOffset)); | 459 __ str(target_map, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
447 __ RecordWriteField(r2, | 460 __ RecordWriteField(receiver, |
448 HeapObject::kMapOffset, | 461 HeapObject::kMapOffset, |
449 r3, | 462 target_map, |
450 r9, | 463 scratch2, |
451 kLRHasBeenSaved, | 464 kLRHasBeenSaved, |
452 kDontSaveFPRegs, | 465 kDontSaveFPRegs, |
453 OMIT_REMEMBERED_SET, | 466 OMIT_REMEMBERED_SET, |
454 OMIT_SMI_CHECK); | 467 OMIT_SMI_CHECK); |
455 // Replace receiver's backing store with newly created FixedDoubleArray. | 468 // Replace receiver's backing store with newly created FixedDoubleArray. |
456 __ add(r3, r6, Operand(kHeapObjectTag)); | 469 __ add(scratch1, array, Operand(kHeapObjectTag)); |
457 __ str(r3, FieldMemOperand(r2, JSObject::kElementsOffset)); | 470 __ str(scratch1, FieldMemOperand(receiver, JSObject::kElementsOffset)); |
458 __ RecordWriteField(r2, | 471 __ RecordWriteField(receiver, |
459 JSObject::kElementsOffset, | 472 JSObject::kElementsOffset, |
460 r3, | 473 scratch1, |
461 r9, | 474 scratch2, |
462 kLRHasBeenSaved, | 475 kLRHasBeenSaved, |
463 kDontSaveFPRegs, | 476 kDontSaveFPRegs, |
464 EMIT_REMEMBERED_SET, | 477 EMIT_REMEMBERED_SET, |
465 OMIT_SMI_CHECK); | 478 OMIT_SMI_CHECK); |
466 | 479 |
467 // Prepare for conversion loop. | 480 // Prepare for conversion loop. |
468 __ add(r3, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 481 __ add(scratch1, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
469 __ add(r9, r6, Operand(FixedDoubleArray::kHeaderSize)); | 482 __ add(scratch2, array, Operand(FixedDoubleArray::kHeaderSize)); |
470 __ add(r6, r9, Operand(r5, LSL, 2)); | 483 __ add(array_end, scratch2, Operand(length, LSL, 2)); |
471 __ mov(r4, Operand(kHoleNanLower32)); | 484 |
472 __ mov(r5, Operand(kHoleNanUpper32)); | 485 // Repurpose registers no longer in use. |
473 // r3: begin of source FixedArray element fields, not tagged | 486 Register hole_lower = elements; |
474 // r4: kHoleNanLower32 | 487 Register hole_upper = length; |
475 // r5: kHoleNanUpper32 | 488 |
476 // r6: end of destination FixedDoubleArray, not tagged | 489 __ mov(hole_lower, Operand(kHoleNanLower32)); |
477 // r9: begin of FixedDoubleArray element fields, not tagged | 490 __ mov(hole_upper, Operand(kHoleNanUpper32)); |
| 491 // scratch1: begin of source FixedArray element fields, not tagged |
| 492 // hole_lower: kHoleNanLower32 |
| 493 // hole_upper: kHoleNanUpper32 |
| 494 // array_end: end of destination FixedDoubleArray, not tagged |
| 495 // scratch2: begin of FixedDoubleArray element fields, not tagged |
478 | 496 |
479 __ b(&entry); | 497 __ b(&entry); |
480 | 498 |
481 __ bind(&only_change_map); | 499 __ bind(&only_change_map); |
482 __ str(r3, FieldMemOperand(r2, HeapObject::kMapOffset)); | 500 __ str(target_map, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
483 __ RecordWriteField(r2, | 501 __ RecordWriteField(receiver, |
484 HeapObject::kMapOffset, | 502 HeapObject::kMapOffset, |
485 r3, | 503 target_map, |
486 r9, | 504 scratch2, |
487 kLRHasNotBeenSaved, | 505 kLRHasNotBeenSaved, |
488 kDontSaveFPRegs, | 506 kDontSaveFPRegs, |
489 OMIT_REMEMBERED_SET, | 507 OMIT_REMEMBERED_SET, |
490 OMIT_SMI_CHECK); | 508 OMIT_SMI_CHECK); |
491 __ b(&done); | 509 __ b(&done); |
492 | 510 |
493 // Call into runtime if GC is required. | 511 // Call into runtime if GC is required. |
494 __ bind(&gc_required); | 512 __ bind(&gc_required); |
495 __ pop(lr); | 513 __ pop(lr); |
496 __ b(fail); | 514 __ b(fail); |
497 | 515 |
498 // Convert and copy elements. | 516 // Convert and copy elements. |
499 __ bind(&loop); | 517 __ bind(&loop); |
500 __ ldr(lr, MemOperand(r3, 4, PostIndex)); | 518 __ ldr(lr, MemOperand(scratch1, 4, PostIndex)); |
501 // lr: current element | 519 // lr: current element |
502 __ UntagAndJumpIfNotSmi(lr, lr, &convert_hole); | 520 __ UntagAndJumpIfNotSmi(lr, lr, &convert_hole); |
503 | 521 |
504 // Normal smi, convert to double and store. | 522 // Normal smi, convert to double and store. |
505 __ vmov(s0, lr); | 523 __ vmov(s0, lr); |
506 __ vcvt_f64_s32(d0, s0); | 524 __ vcvt_f64_s32(d0, s0); |
507 __ vstr(d0, r9, 0); | 525 __ vstr(d0, scratch2, 0); |
508 __ add(r9, r9, Operand(8)); | 526 __ add(scratch2, scratch2, Operand(8)); |
509 __ b(&entry); | 527 __ b(&entry); |
510 | 528 |
511 // Hole found, store the-hole NaN. | 529 // Hole found, store the-hole NaN. |
512 __ bind(&convert_hole); | 530 __ bind(&convert_hole); |
513 if (FLAG_debug_code) { | 531 if (FLAG_debug_code) { |
514 // Restore a "smi-untagged" heap object. | 532 // Restore a "smi-untagged" heap object. |
515 __ SmiTag(lr); | 533 __ SmiTag(lr); |
516 __ orr(lr, lr, Operand(1)); | 534 __ orr(lr, lr, Operand(1)); |
517 __ CompareRoot(lr, Heap::kTheHoleValueRootIndex); | 535 __ CompareRoot(lr, Heap::kTheHoleValueRootIndex); |
518 __ Assert(eq, kObjectFoundInSmiOnlyArray); | 536 __ Assert(eq, kObjectFoundInSmiOnlyArray); |
519 } | 537 } |
520 __ Strd(r4, r5, MemOperand(r9, 8, PostIndex)); | 538 __ Strd(hole_lower, hole_upper, MemOperand(scratch2, 8, PostIndex)); |
521 | 539 |
522 __ bind(&entry); | 540 __ bind(&entry); |
523 __ cmp(r9, r6); | 541 __ cmp(scratch2, array_end); |
524 __ b(lt, &loop); | 542 __ b(lt, &loop); |
525 | 543 |
526 __ pop(lr); | 544 __ pop(lr); |
527 __ bind(&done); | 545 __ bind(&done); |
528 } | 546 } |
529 | 547 |
530 | 548 |
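
GenerateSmiToDouble walks the source FixedArray, untagging each element with UntagAndJumpIfNotSmi and converting it with vcvt_f64_s32, while holes are written out as the dedicated hole NaN via Strd of kHoleNanLower32/kHoleNanUpper32. Below is a minimal sketch of the 32-bit smi convention that loop relies on (smis carry the value shifted left by one with a clear low bit, heap pointers have the low bit set, which is what the debug-mode SmiTag plus orr #1 restores); a sketch only, not V8's tagging code.

#include <cstdint>

constexpr uint32_t kSmiTagMask = 1;  // low bit distinguishes smis from heap objects

inline bool IsSmi(uint32_t word) { return (word & kSmiTagMask) == 0; }

inline int32_t UntagSmi(uint32_t word) {
  // Arithmetic shift right by one recovers the signed 31-bit payload,
  // which the loop then converts to a double (vcvt_f64_s32).
  return static_cast<int32_t>(word) >> 1;
}
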
531 void ElementsTransitionGenerator::GenerateDoubleToObject( | 549 void ElementsTransitionGenerator::GenerateDoubleToObject( |
532 MacroAssembler* masm, AllocationSiteMode mode, Label* fail) { | 550 MacroAssembler* masm, |
533 // ----------- S t a t e ------------- | 551 Register receiver, |
534 // -- r0 : value | 552 Register key, |
535 // -- r1 : key | 553 Register value, |
536 // -- r2 : receiver | 554 Register target_map, |
537 // -- lr : return address | 555 AllocationSiteMode mode, |
538 // -- r3 : target map, scratch for subsequent call | 556 Label* fail) { |
539 // -- r4 : scratch (elements) | 557 // Register lr contains the return address. |
540 // ----------------------------------- | |
541 Label entry, loop, convert_hole, gc_required, only_change_map; | 558 Label entry, loop, convert_hole, gc_required, only_change_map; |
| 559 Register elements = r4; |
| 560 Register array = r6; |
| 561 Register length = r5; |
| 562 Register scratch = r9; |
| 563 |
| 564 // Verify input registers don't conflict with locals. |
| 565 ASSERT(!AreAliased(receiver, key, value, target_map, |
| 566 elements, array, length, scratch)); |
542 | 567 |
543 if (mode == TRACK_ALLOCATION_SITE) { | 568 if (mode == TRACK_ALLOCATION_SITE) { |
544 __ JumpIfJSArrayHasAllocationMemento(r2, r4, fail); | 569 __ JumpIfJSArrayHasAllocationMemento(receiver, elements, fail); |
545 } | 570 } |
546 | 571 |
547 // Check for empty arrays, which only require a map transition and no changes | 572 // Check for empty arrays, which only require a map transition and no changes |
548 // to the backing store. | 573 // to the backing store. |
549 __ ldr(r4, FieldMemOperand(r2, JSObject::kElementsOffset)); | 574 __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset)); |
550 __ CompareRoot(r4, Heap::kEmptyFixedArrayRootIndex); | 575 __ CompareRoot(elements, Heap::kEmptyFixedArrayRootIndex); |
551 __ b(eq, &only_change_map); | 576 __ b(eq, &only_change_map); |
552 | 577 |
553 __ push(lr); | 578 __ push(lr); |
554 __ Push(r3, r2, r1, r0); | 579 __ Push(target_map, receiver, key, value); |
555 __ ldr(r5, FieldMemOperand(r4, FixedArray::kLengthOffset)); | 580 __ ldr(length, FieldMemOperand(elements, FixedArray::kLengthOffset)); |
556 // r4: source FixedDoubleArray | 581 // elements: source FixedDoubleArray |
557 // r5: number of elements (smi-tagged) | 582 // length: number of elements (smi-tagged) |
558 | 583 |
559 // Allocate new FixedArray. | 584 // Allocate new FixedArray. |
560 __ mov(r0, Operand(FixedDoubleArray::kHeaderSize)); | 585 // Re-use value and target_map registers, as they have been saved on the |
561 __ add(r0, r0, Operand(r5, LSL, 1)); | 586 // stack. |
562 __ Allocate(r0, r6, r3, r9, &gc_required, NO_ALLOCATION_FLAGS); | 587 Register array_size = value; |
563 // r6: destination FixedArray, not tagged as heap object | 588 Register allocate_scratch = target_map; |
| 589 __ mov(array_size, Operand(FixedDoubleArray::kHeaderSize)); |
| 590 __ add(array_size, array_size, Operand(length, LSL, 1)); |
| 591 __ Allocate(array_size, array, allocate_scratch, scratch, &gc_required, |
| 592 NO_ALLOCATION_FLAGS); |
| 593 // array: destination FixedArray, not tagged as heap object |
564 // Set destination FixedDoubleArray's length and map. | 594 // Set destination FixedDoubleArray's length and map. |
565 __ LoadRoot(r9, Heap::kFixedArrayMapRootIndex); | 595 __ LoadRoot(scratch, Heap::kFixedArrayMapRootIndex); |
566 __ str(r5, MemOperand(r6, FixedDoubleArray::kLengthOffset)); | 596 __ str(length, MemOperand(array, FixedDoubleArray::kLengthOffset)); |
567 __ str(r9, MemOperand(r6, HeapObject::kMapOffset)); | 597 __ str(scratch, MemOperand(array, HeapObject::kMapOffset)); |
568 | 598 |
569 // Prepare for conversion loop. | 599 // Prepare for conversion loop. |
570 __ add(r4, r4, Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag + 4)); | 600 Register src_elements = elements; |
571 __ add(r3, r6, Operand(FixedArray::kHeaderSize)); | 601 Register dst_elements = target_map; |
572 __ add(r6, r6, Operand(kHeapObjectTag)); | 602 Register dst_end = length; |
573 __ add(r5, r3, Operand(r5, LSL, 1)); | 603 Register heap_number_map = scratch; |
574 __ LoadRoot(r9, Heap::kHeapNumberMapRootIndex); | 604 __ add(src_elements, elements, |
575 // Using offsetted addresses in r4 to fully take advantage of post-indexing. | 605 Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag + 4)); |
576 // r3: begin of destination FixedArray element fields, not tagged | 606 __ add(dst_elements, array, Operand(FixedArray::kHeaderSize)); |
577 // r4: begin of source FixedDoubleArray element fields, not tagged, +4 | 607 __ add(array, array, Operand(kHeapObjectTag)); |
578 // r5: end of destination FixedArray, not tagged | 608 __ add(dst_end, dst_elements, Operand(length, LSL, 1)); |
579 // r6: destination FixedArray | 609 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex); |
580 // r9: heap number map | 610 // Using offsetted addresses in src_elements to fully take advantage of |
| 611 // post-indexing. |
| 612 // dst_elements: begin of destination FixedArray element fields, not tagged |
| 613 // src_elements: begin of source FixedDoubleArray element fields, |
| 614 // not tagged, +4 |
| 615 // dst_end: end of destination FixedArray, not tagged |
| 616 // array: destination FixedArray |
| 617 // heap_number_map: heap number map |
581 __ b(&entry); | 618 __ b(&entry); |
582 | 619 |
583 // Call into runtime if GC is required. | 620 // Call into runtime if GC is required. |
584 __ bind(&gc_required); | 621 __ bind(&gc_required); |
585 __ Pop(r3, r2, r1, r0); | 622 __ Pop(target_map, receiver, key, value); |
586 __ pop(lr); | 623 __ pop(lr); |
587 __ b(fail); | 624 __ b(fail); |
588 | 625 |
589 __ bind(&loop); | 626 __ bind(&loop); |
590 __ ldr(r1, MemOperand(r4, 8, PostIndex)); | 627 Register upper_bits = key; |
591 // r1: current element's upper 32 bit | 628 __ ldr(upper_bits, MemOperand(src_elements, 8, PostIndex)); |
592 // r4: address of next element's upper 32 bit | 629 // upper_bits: current element's upper 32 bit |
593 __ cmp(r1, Operand(kHoleNanUpper32)); | 630 // src_elements: address of next element's upper 32 bit |
| 631 __ cmp(upper_bits, Operand(kHoleNanUpper32)); |
594 __ b(eq, &convert_hole); | 632 __ b(eq, &convert_hole); |
595 | 633 |
596 // Non-hole double, copy value into a heap number. | 634 // Non-hole double, copy value into a heap number. |
597 __ AllocateHeapNumber(r2, r0, lr, r9, &gc_required); | 635 Register heap_number = receiver; |
598 // r2: new heap number | 636 Register scratch2 = value; |
599 __ ldr(r0, MemOperand(r4, 12, NegOffset)); | 637 __ AllocateHeapNumber(heap_number, scratch2, lr, heap_number_map, |
600 __ Strd(r0, r1, FieldMemOperand(r2, HeapNumber::kValueOffset)); | 638 &gc_required); |
601 __ mov(r0, r3); | 639 // heap_number: new heap number |
602 __ str(r2, MemOperand(r3, 4, PostIndex)); | 640 __ ldr(scratch2, MemOperand(src_elements, 12, NegOffset)); |
603 __ RecordWrite(r6, | 641 __ Strd(scratch2, upper_bits, |
604 r0, | 642 FieldMemOperand(heap_number, HeapNumber::kValueOffset)); |
605 r2, | 643 __ mov(scratch2, dst_elements); |
| 644 __ str(heap_number, MemOperand(dst_elements, 4, PostIndex)); |
| 645 __ RecordWrite(array, |
| 646 scratch2, |
| 647 heap_number, |
606 kLRHasBeenSaved, | 648 kLRHasBeenSaved, |
607 kDontSaveFPRegs, | 649 kDontSaveFPRegs, |
608 EMIT_REMEMBERED_SET, | 650 EMIT_REMEMBERED_SET, |
609 OMIT_SMI_CHECK); | 651 OMIT_SMI_CHECK); |
610 __ b(&entry); | 652 __ b(&entry); |
611 | 653 |
612 // Replace the-hole NaN with the-hole pointer. | 654 // Replace the-hole NaN with the-hole pointer. |
613 __ bind(&convert_hole); | 655 __ bind(&convert_hole); |
614 __ LoadRoot(r0, Heap::kTheHoleValueRootIndex); | 656 __ LoadRoot(scratch2, Heap::kTheHoleValueRootIndex); |
615 __ str(r0, MemOperand(r3, 4, PostIndex)); | 657 __ str(scratch2, MemOperand(dst_elements, 4, PostIndex)); |
616 | 658 |
617 __ bind(&entry); | 659 __ bind(&entry); |
618 __ cmp(r3, r5); | 660 __ cmp(dst_elements, dst_end); |
619 __ b(lt, &loop); | 661 __ b(lt, &loop); |
620 | 662 |
621 __ Pop(r3, r2, r1, r0); | 663 __ Pop(target_map, receiver, key, value); |
622 // Replace receiver's backing store with newly created and filled FixedArray. | 664 // Replace receiver's backing store with newly created and filled FixedArray. |
623 __ str(r6, FieldMemOperand(r2, JSObject::kElementsOffset)); | 665 __ str(array, FieldMemOperand(receiver, JSObject::kElementsOffset)); |
624 __ RecordWriteField(r2, | 666 __ RecordWriteField(receiver, |
625 JSObject::kElementsOffset, | 667 JSObject::kElementsOffset, |
626 r6, | 668 array, |
627 r9, | 669 scratch, |
628 kLRHasBeenSaved, | 670 kLRHasBeenSaved, |
629 kDontSaveFPRegs, | 671 kDontSaveFPRegs, |
630 EMIT_REMEMBERED_SET, | 672 EMIT_REMEMBERED_SET, |
631 OMIT_SMI_CHECK); | 673 OMIT_SMI_CHECK); |
632 __ pop(lr); | 674 __ pop(lr); |
633 | 675 |
634 __ bind(&only_change_map); | 676 __ bind(&only_change_map); |
635 // Update receiver's map. | 677 // Update receiver's map. |
636 __ str(r3, FieldMemOperand(r2, HeapObject::kMapOffset)); | 678 __ str(target_map, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
637 __ RecordWriteField(r2, | 679 __ RecordWriteField(receiver, |
638 HeapObject::kMapOffset, | 680 HeapObject::kMapOffset, |
639 r3, | 681 target_map, |
640 r9, | 682 scratch, |
641 kLRHasNotBeenSaved, | 683 kLRHasNotBeenSaved, |
642 kDontSaveFPRegs, | 684 kDontSaveFPRegs, |
643 OMIT_REMEMBERED_SET, | 685 OMIT_REMEMBERED_SET, |
644 OMIT_SMI_CHECK); | 686 OMIT_SMI_CHECK); |
645 } | 687 } |
646 | 688 |
647 | 689 |
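
GenerateDoubleToObject recognises holes by loading only the upper 32 bits of each double and comparing them against kHoleNanUpper32 (the "+ 4" offset and post-index of 8 on src_elements arrange exactly that on little-endian ARM). A minimal sketch of that detection in portable C++ follows; the sentinel constant is a placeholder for illustration, not V8's actual kHoleNanUpper32 value.

#include <cstdint>
#include <cstring>

constexpr uint32_t kExampleHoleNanUpper32 = 0x7FF7FFFFu;  // placeholder sentinel, not V8's value

inline bool IsHoleNan(double d) {
  uint64_t bits;
  std::memcpy(&bits, &d, sizeof bits);  // reinterpret the double's bit pattern
  // Comparing only the upper word is enough because ordinary heap numbers
  // never carry this exact NaN payload.
  return static_cast<uint32_t>(bits >> 32) == kExampleHoleNanUpper32;
}
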
648 void StringCharLoadGenerator::Generate(MacroAssembler* masm, | 690 void StringCharLoadGenerator::Generate(MacroAssembler* masm, |
649 Register string, | 691 Register string, |
650 Register index, | 692 Register index, |
(...skipping 234 matching lines...)
885 patcher.masm()->add(r0, pc, Operand(-8)); | 927 patcher.masm()->add(r0, pc, Operand(-8)); |
886 patcher.masm()->ldr(pc, MemOperand(pc, -4)); | 928 patcher.masm()->ldr(pc, MemOperand(pc, -4)); |
887 patcher.masm()->emit_code_stub_address(stub); | 929 patcher.masm()->emit_code_stub_address(stub); |
888 } | 930 } |
889 } | 931 } |
890 | 932 |
891 | 933 |
892 } } // namespace v8::internal | 934 } } // namespace v8::internal |
893 | 935 |
894 #endif // V8_TARGET_ARCH_ARM | 936 #endif // V8_TARGET_ARCH_ARM |