| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 426 matching lines...) |
| 437 } | 437 } |
| 438 | 438 |
| 439 // Check for empty arrays, which only require a map transition and no changes | 439 // Check for empty arrays, which only require a map transition and no changes |
| 440 // to the backing store. | 440 // to the backing store. |
| 441 __ ldr(r4, FieldMemOperand(r2, JSObject::kElementsOffset)); | 441 __ ldr(r4, FieldMemOperand(r2, JSObject::kElementsOffset)); |
| 442 __ CompareRoot(r4, Heap::kEmptyFixedArrayRootIndex); | 442 __ CompareRoot(r4, Heap::kEmptyFixedArrayRootIndex); |
| 443 __ b(eq, &only_change_map); | 443 __ b(eq, &only_change_map); |
| 444 | 444 |
| 445 __ push(lr); | 445 __ push(lr); |
| 446 __ ldr(r5, FieldMemOperand(r4, FixedArray::kLengthOffset)); | 446 __ ldr(r5, FieldMemOperand(r4, FixedArray::kLengthOffset)); |
| 447 // r4: source FixedArray | |
| 448 // r5: number of elements (smi-tagged) | 447 // r5: number of elements (smi-tagged) |
| 449 | 448 |
| 450 // Allocate new FixedDoubleArray. | 449 // Allocate new FixedDoubleArray. |
| 451 // Use lr as a temporary register. | 450 // Use lr as a temporary register. |
| 452 __ mov(lr, Operand(r5, LSL, 2)); | 451 __ mov(lr, Operand(r5, LSL, 2)); |
| 453 __ add(lr, lr, Operand(FixedDoubleArray::kHeaderSize)); | 452 __ add(lr, lr, Operand(FixedDoubleArray::kHeaderSize)); |
| 454 __ Allocate(lr, r6, r7, r9, &gc_required, DOUBLE_ALIGNMENT); | 453 __ Allocate(lr, r6, r4, r9, &gc_required, DOUBLE_ALIGNMENT); |
| 455 // r6: destination FixedDoubleArray, not tagged as heap object. | 454 // r6: destination FixedDoubleArray, not tagged as heap object. |
| | 455 __ ldr(r4, FieldMemOperand(r2, JSObject::kElementsOffset)); |
| | 456 // r4: source FixedArray. |
| 456 | 457 |
| 457 // Set destination FixedDoubleArray's length and map. | 458 // Set destination FixedDoubleArray's length and map. |
| 458 __ LoadRoot(r9, Heap::kFixedDoubleArrayMapRootIndex); | 459 __ LoadRoot(r9, Heap::kFixedDoubleArrayMapRootIndex); |
| 459 __ str(r5, MemOperand(r6, FixedDoubleArray::kLengthOffset)); | 460 __ str(r5, MemOperand(r6, FixedDoubleArray::kLengthOffset)); |
| 460 // Update receiver's map. | 461 // Update receiver's map. |
| 461 __ str(r9, MemOperand(r6, HeapObject::kMapOffset)); | 462 __ str(r9, MemOperand(r6, HeapObject::kMapOffset)); |
| 462 | 463 |
| 463 __ str(r3, FieldMemOperand(r2, HeapObject::kMapOffset)); | 464 __ str(r3, FieldMemOperand(r2, HeapObject::kMapOffset)); |
| 464 __ RecordWriteField(r2, | 465 __ RecordWriteField(r2, |
| 465 HeapObject::kMapOffset, | 466 HeapObject::kMapOffset, |
| (...skipping 10 matching lines...) |
| 476 JSObject::kElementsOffset, | 477 JSObject::kElementsOffset, |
| 477 r3, | 478 r3, |
| 478 r9, | 479 r9, |
| 479 kLRHasBeenSaved, | 480 kLRHasBeenSaved, |
| 480 kDontSaveFPRegs, | 481 kDontSaveFPRegs, |
| 481 EMIT_REMEMBERED_SET, | 482 EMIT_REMEMBERED_SET, |
| 482 OMIT_SMI_CHECK); | 483 OMIT_SMI_CHECK); |
| 483 | 484 |
| 484 // Prepare for conversion loop. | 485 // Prepare for conversion loop. |
| 485 __ add(r3, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 486 __ add(r3, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
| 486 __ add(r7, r6, Operand(FixedDoubleArray::kHeaderSize)); | 487 __ add(r9, r6, Operand(FixedDoubleArray::kHeaderSize)); |
| 487 __ add(r6, r7, Operand(r5, LSL, 2)); | 488 __ add(r6, r9, Operand(r5, LSL, 2)); |
| 488 __ mov(r4, Operand(kHoleNanLower32)); | 489 __ mov(r4, Operand(kHoleNanLower32)); |
| 489 __ mov(r5, Operand(kHoleNanUpper32)); | 490 __ mov(r5, Operand(kHoleNanUpper32)); |
| 490 // r3: begin of source FixedArray element fields, not tagged | 491 // r3: begin of source FixedArray element fields, not tagged |
| 491 // r4: kHoleNanLower32 | 492 // r4: kHoleNanLower32 |
| 492 // r5: kHoleNanUpper32 | 493 // r5: kHoleNanUpper32 |
| 493 // r6: end of destination FixedDoubleArray, not tagged | 494 // r6: end of destination FixedDoubleArray, not tagged |
| 494 // r7: begin of FixedDoubleArray element fields, not tagged | 495 // r9: begin of FixedDoubleArray element fields, not tagged |
| 495 | 496 |
| 496 __ b(&entry); | 497 __ b(&entry); |
| 497 | 498 |
| 498 __ bind(&only_change_map); | 499 __ bind(&only_change_map); |
| 499 __ str(r3, FieldMemOperand(r2, HeapObject::kMapOffset)); | 500 __ str(r3, FieldMemOperand(r2, HeapObject::kMapOffset)); |
| 500 __ RecordWriteField(r2, | 501 __ RecordWriteField(r2, |
| 501 HeapObject::kMapOffset, | 502 HeapObject::kMapOffset, |
| 502 r3, | 503 r3, |
| 503 r9, | 504 r9, |
| 504 kLRHasNotBeenSaved, | 505 kLRHasNotBeenSaved, |
| 505 kDontSaveFPRegs, | 506 kDontSaveFPRegs, |
| 506 OMIT_REMEMBERED_SET, | 507 OMIT_REMEMBERED_SET, |
| 507 OMIT_SMI_CHECK); | 508 OMIT_SMI_CHECK); |
| 508 __ b(&done); | 509 __ b(&done); |
| 509 | 510 |
| 510 // Call into runtime if GC is required. | 511 // Call into runtime if GC is required. |
| 511 __ bind(&gc_required); | 512 __ bind(&gc_required); |
| 512 __ pop(lr); | 513 __ pop(lr); |
| 513 __ b(fail); | 514 __ b(fail); |
| 514 | 515 |
| 515 // Convert and copy elements. | 516 // Convert and copy elements. |
| 516 __ bind(&loop); | 517 __ bind(&loop); |
| 517 __ ldr(r9, MemOperand(r3, 4, PostIndex)); | 518 __ ldr(lr, MemOperand(r3, 4, PostIndex)); |
| 518 // r9: current element | 519 // lr: current element |
| 519 __ UntagAndJumpIfNotSmi(r9, r9, &convert_hole); | 520 __ UntagAndJumpIfNotSmi(lr, lr, &convert_hole); |
| 520 | 521 |
| 521 // Normal smi, convert to double and store. | 522 // Normal smi, convert to double and store. |
| 522 __ vmov(s0, r9); | 523 __ vmov(s0, lr); |
| 523 __ vcvt_f64_s32(d0, s0); | 524 __ vcvt_f64_s32(d0, s0); |
| 524 __ vstr(d0, r7, 0); | 525 __ vstr(d0, r9, 0); |
| 525 __ add(r7, r7, Operand(8)); | 526 __ add(r9, r9, Operand(8)); |
| 526 __ b(&entry); | 527 __ b(&entry); |
| 527 | 528 |
| 528 // Hole found, store the-hole NaN. | 529 // Hole found, store the-hole NaN. |
| 529 __ bind(&convert_hole); | 530 __ bind(&convert_hole); |
| 530 if (FLAG_debug_code) { | 531 if (FLAG_debug_code) { |
| 531 // Restore a "smi-untagged" heap object. | 532 // Restore a "smi-untagged" heap object. |
| 532 __ SmiTag(r9); | 533 __ SmiTag(lr); |
| 533 __ orr(r9, r9, Operand(1)); | 534 __ orr(lr, lr, Operand(1)); |
| 534 __ CompareRoot(r9, Heap::kTheHoleValueRootIndex); | 535 __ CompareRoot(lr, Heap::kTheHoleValueRootIndex); |
| 535 __ Assert(eq, kObjectFoundInSmiOnlyArray); | 536 __ Assert(eq, kObjectFoundInSmiOnlyArray); |
| 536 } | 537 } |
| 537 __ Strd(r4, r5, MemOperand(r7, 8, PostIndex)); | 538 __ Strd(r4, r5, MemOperand(r9, 8, PostIndex)); |
| 538 | 539 |
| 539 __ bind(&entry); | 540 __ bind(&entry); |
| 540 __ cmp(r7, r6); | 541 __ cmp(r9, r6); |
| 541 __ b(lt, &loop); | 542 __ b(lt, &loop); |
| 542 | 543 |
| 543 __ pop(lr); | 544 __ pop(lr); |
| 544 __ bind(&done); | 545 __ bind(&done); |
| 545 } | 546 } |
| 546 | 547 |
| 547 | 548 |
| 548 void ElementsTransitionGenerator::GenerateDoubleToObject( | 549 void ElementsTransitionGenerator::GenerateDoubleToObject( |
| 549 MacroAssembler* masm, AllocationSiteMode mode, Label* fail) { | 550 MacroAssembler* masm, AllocationSiteMode mode, Label* fail) { |
| 550 // ----------- S t a t e ------------- | 551 // ----------- S t a t e ------------- |
| (...skipping 19 matching lines...) |
| 570 | 571 |
| 571 __ push(lr); | 572 __ push(lr); |
| 572 __ Push(r3, r2, r1, r0); | 573 __ Push(r3, r2, r1, r0); |
| 573 __ ldr(r5, FieldMemOperand(r4, FixedArray::kLengthOffset)); | 574 __ ldr(r5, FieldMemOperand(r4, FixedArray::kLengthOffset)); |
| 574 // r4: source FixedDoubleArray | 575 // r4: source FixedDoubleArray |
| 575 // r5: number of elements (smi-tagged) | 576 // r5: number of elements (smi-tagged) |
| 576 | 577 |
| 577 // Allocate new FixedArray. | 578 // Allocate new FixedArray. |
| 578 __ mov(r0, Operand(FixedDoubleArray::kHeaderSize)); | 579 __ mov(r0, Operand(FixedDoubleArray::kHeaderSize)); |
| 579 __ add(r0, r0, Operand(r5, LSL, 1)); | 580 __ add(r0, r0, Operand(r5, LSL, 1)); |
| 580 __ Allocate(r0, r6, r7, r9, &gc_required, NO_ALLOCATION_FLAGS); | 581 __ Allocate(r0, r6, r3, r9, &gc_required, NO_ALLOCATION_FLAGS); |
| 581 // r6: destination FixedArray, not tagged as heap object | 582 // r6: destination FixedArray, not tagged as heap object |
| 582 // Set destination FixedDoubleArray's length and map. | 583 // Set destination FixedDoubleArray's length and map. |
| 583 __ LoadRoot(r9, Heap::kFixedArrayMapRootIndex); | 584 __ LoadRoot(r9, Heap::kFixedArrayMapRootIndex); |
| 584 __ str(r5, MemOperand(r6, FixedDoubleArray::kLengthOffset)); | 585 __ str(r5, MemOperand(r6, FixedDoubleArray::kLengthOffset)); |
| 585 __ str(r9, MemOperand(r6, HeapObject::kMapOffset)); | 586 __ str(r9, MemOperand(r6, HeapObject::kMapOffset)); |
| 586 | 587 |
| 587 // Prepare for conversion loop. | 588 // Prepare for conversion loop. |
| 588 __ add(r4, r4, Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag + 4)); | 589 __ add(r4, r4, Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag + 4)); |
| 589 __ add(r3, r6, Operand(FixedArray::kHeaderSize)); | 590 __ add(r3, r6, Operand(FixedArray::kHeaderSize)); |
| 590 __ add(r6, r6, Operand(kHeapObjectTag)); | 591 __ add(r6, r6, Operand(kHeapObjectTag)); |
| 591 __ add(r5, r3, Operand(r5, LSL, 1)); | 592 __ add(r5, r3, Operand(r5, LSL, 1)); |
| 592 __ LoadRoot(r7, Heap::kTheHoleValueRootIndex); | |
| 593 __ LoadRoot(r9, Heap::kHeapNumberMapRootIndex); | 593 __ LoadRoot(r9, Heap::kHeapNumberMapRootIndex); |
| 594 // Using offsetted addresses in r4 to fully take advantage of post-indexing. | 594 // Using offsetted addresses in r4 to fully take advantage of post-indexing. |
| 595 // r3: begin of destination FixedArray element fields, not tagged | 595 // r3: begin of destination FixedArray element fields, not tagged |
| 596 // r4: begin of source FixedDoubleArray element fields, not tagged, +4 | 596 // r4: begin of source FixedDoubleArray element fields, not tagged, +4 |
| 597 // r5: end of destination FixedArray, not tagged | 597 // r5: end of destination FixedArray, not tagged |
| 598 // r6: destination FixedArray | 598 // r6: destination FixedArray |
| 599 // r7: the-hole pointer | |
| 600 // r9: heap number map | 599 // r9: heap number map |
| 601 __ b(&entry); | 600 __ b(&entry); |
| 602 | 601 |
| 603 // Call into runtime if GC is required. | 602 // Call into runtime if GC is required. |
| 604 __ bind(&gc_required); | 603 __ bind(&gc_required); |
| 605 __ Pop(r3, r2, r1, r0); | 604 __ Pop(r3, r2, r1, r0); |
| 606 __ pop(lr); | 605 __ pop(lr); |
| 607 __ b(fail); | 606 __ b(fail); |
| 608 | 607 |
| 609 __ bind(&loop); | 608 __ bind(&loop); |
| 610 __ ldr(r1, MemOperand(r4, 8, PostIndex)); | 609 __ ldr(r1, MemOperand(r4, 8, PostIndex)); |
| 611 // lr: current element's upper 32 bit | 610 // r1: current element's upper 32 bit |
| 612 // r4: address of next element's upper 32 bit | 611 // r4: address of next element's upper 32 bit |
| 613 __ cmp(r1, Operand(kHoleNanUpper32)); | 612 __ cmp(r1, Operand(kHoleNanUpper32)); |
| 614 __ b(eq, &convert_hole); | 613 __ b(eq, &convert_hole); |
| 615 | 614 |
| 616 // Non-hole double, copy value into a heap number. | 615 // Non-hole double, copy value into a heap number. |
| 617 __ AllocateHeapNumber(r2, r0, lr, r9, &gc_required); | 616 __ AllocateHeapNumber(r2, r0, lr, r9, &gc_required); |
| 618 // r2: new heap number | 617 // r2: new heap number |
| 619 __ ldr(r0, MemOperand(r4, 12, NegOffset)); | 618 __ ldr(r0, MemOperand(r4, 12, NegOffset)); |
| 620 __ Strd(r0, r1, FieldMemOperand(r2, HeapNumber::kValueOffset)); | 619 __ Strd(r0, r1, FieldMemOperand(r2, HeapNumber::kValueOffset)); |
| 621 __ mov(r0, r3); | 620 __ mov(r0, r3); |
| 622 __ str(r2, MemOperand(r3, 4, PostIndex)); | 621 __ str(r2, MemOperand(r3, 4, PostIndex)); |
| 623 __ RecordWrite(r6, | 622 __ RecordWrite(r6, |
| 624 r0, | 623 r0, |
| 625 r2, | 624 r2, |
| 626 kLRHasBeenSaved, | 625 kLRHasBeenSaved, |
| 627 kDontSaveFPRegs, | 626 kDontSaveFPRegs, |
| 628 EMIT_REMEMBERED_SET, | 627 EMIT_REMEMBERED_SET, |
| 629 OMIT_SMI_CHECK); | 628 OMIT_SMI_CHECK); |
| 630 __ b(&entry); | 629 __ b(&entry); |
| 631 | 630 |
| 632 // Replace the-hole NaN with the-hole pointer. | 631 // Replace the-hole NaN with the-hole pointer. |
| 633 __ bind(&convert_hole); | 632 __ bind(&convert_hole); |
| 634 __ str(r7, MemOperand(r3, 4, PostIndex)); | 633 __ LoadRoot(r0, Heap::kTheHoleValueRootIndex); |
| | 634 __ str(r0, MemOperand(r3, 4, PostIndex)); |
| 635 | 635 |
| 636 __ bind(&entry); | 636 __ bind(&entry); |
| 637 __ cmp(r3, r5); | 637 __ cmp(r3, r5); |
| 638 __ b(lt, &loop); | 638 __ b(lt, &loop); |
| 639 | 639 |
| 640 __ Pop(r3, r2, r1, r0); | 640 __ Pop(r3, r2, r1, r0); |
| 641 // Replace receiver's backing store with newly created and filled FixedArray. | 641 // Replace receiver's backing store with newly created and filled FixedArray. |
| 642 __ str(r6, FieldMemOperand(r2, JSObject::kElementsOffset)); | 642 __ str(r6, FieldMemOperand(r2, JSObject::kElementsOffset)); |
| 643 __ RecordWriteField(r2, | 643 __ RecordWriteField(r2, |
| 644 JSObject::kElementsOffset, | 644 JSObject::kElementsOffset, |
| (...skipping 123 matching lines...) |
| 768 ASSERT(!input.is(double_scratch1)); | 768 ASSERT(!input.is(double_scratch1)); |
| 769 ASSERT(!input.is(double_scratch2)); | 769 ASSERT(!input.is(double_scratch2)); |
| 770 ASSERT(!result.is(double_scratch1)); | 770 ASSERT(!result.is(double_scratch1)); |
| 771 ASSERT(!result.is(double_scratch2)); | 771 ASSERT(!result.is(double_scratch2)); |
| 772 ASSERT(!double_scratch1.is(double_scratch2)); | 772 ASSERT(!double_scratch1.is(double_scratch2)); |
| 773 ASSERT(!temp1.is(temp2)); | 773 ASSERT(!temp1.is(temp2)); |
| 774 ASSERT(!temp1.is(temp3)); | 774 ASSERT(!temp1.is(temp3)); |
| 775 ASSERT(!temp2.is(temp3)); | 775 ASSERT(!temp2.is(temp3)); |
| 776 ASSERT(ExternalReference::math_exp_constants(0).address() != NULL); | 776 ASSERT(ExternalReference::math_exp_constants(0).address() != NULL); |
| 777 | 777 |
| 778 Label done; | 778 Label zero, infinity, done; |
| 779 | 779 |
| 780 __ mov(temp3, Operand(ExternalReference::math_exp_constants(0))); | 780 __ mov(temp3, Operand(ExternalReference::math_exp_constants(0))); |
| 781 | 781 |
| 782 __ vldr(double_scratch1, ExpConstant(0, temp3)); | 782 __ vldr(double_scratch1, ExpConstant(0, temp3)); |
| 783 __ vmov(result, kDoubleRegZero); | |
| 784 __ VFPCompareAndSetFlags(double_scratch1, input); | 783 __ VFPCompareAndSetFlags(double_scratch1, input); |
| 785 __ b(ge, &done); | 784 __ b(ge, &zero); |
| | 785 |
| 786 __ vldr(double_scratch2, ExpConstant(1, temp3)); | 786 __ vldr(double_scratch2, ExpConstant(1, temp3)); |
| 787 __ VFPCompareAndSetFlags(input, double_scratch2); | 787 __ VFPCompareAndSetFlags(input, double_scratch2); |
| 788 __ vldr(result, ExpConstant(2, temp3)); | 788 __ b(ge, &infinity); |
| 789 __ b(ge, &done); | 789 |
| 790 __ vldr(double_scratch1, ExpConstant(3, temp3)); | 790 __ vldr(double_scratch1, ExpConstant(3, temp3)); |
| 791 __ vldr(result, ExpConstant(4, temp3)); | 791 __ vldr(result, ExpConstant(4, temp3)); |
| 792 __ vmul(double_scratch1, double_scratch1, input); | 792 __ vmul(double_scratch1, double_scratch1, input); |
| 793 __ vadd(double_scratch1, double_scratch1, result); | 793 __ vadd(double_scratch1, double_scratch1, result); |
| 794 __ vmov(temp2, temp1, double_scratch1); | 794 __ VmovLow(temp2, double_scratch1); |
| 795 __ vsub(double_scratch1, double_scratch1, result); | 795 __ vsub(double_scratch1, double_scratch1, result); |
| 796 __ vldr(result, ExpConstant(6, temp3)); | 796 __ vldr(result, ExpConstant(6, temp3)); |
| 797 __ vldr(double_scratch2, ExpConstant(5, temp3)); | 797 __ vldr(double_scratch2, ExpConstant(5, temp3)); |
| 798 __ vmul(double_scratch1, double_scratch1, double_scratch2); | 798 __ vmul(double_scratch1, double_scratch1, double_scratch2); |
| 799 __ vsub(double_scratch1, double_scratch1, input); | 799 __ vsub(double_scratch1, double_scratch1, input); |
| 800 __ vsub(result, result, double_scratch1); | 800 __ vsub(result, result, double_scratch1); |
| 801 __ vmul(input, double_scratch1, double_scratch1); | 801 __ vmul(double_scratch2, double_scratch1, double_scratch1); |
| 802 __ vmul(result, result, input); | 802 __ vmul(result, result, double_scratch2); |
| 803 __ mov(temp1, Operand(temp2, LSR, 11)); | |
| 804 __ vldr(double_scratch2, ExpConstant(7, temp3)); | 803 __ vldr(double_scratch2, ExpConstant(7, temp3)); |
| 805 __ vmul(result, result, double_scratch2); | 804 __ vmul(result, result, double_scratch2); |
| 806 __ vsub(result, result, double_scratch1); | 805 __ vsub(result, result, double_scratch1); |
| 807 __ vldr(double_scratch2, ExpConstant(8, temp3)); | 806 // Move 1 into double_scratch2, as math_exp_constants_array[8] == 1. |
| | 807 ASSERT(*reinterpret_cast<double*> |
| | 808 (ExternalReference::math_exp_constants(8).address()) == 1); |
| | 809 __ vmov(double_scratch2, 1); |
| 808 __ vadd(result, result, double_scratch2); | 810 __ vadd(result, result, double_scratch2); |
| 809 __ movw(ip, 0x7ff); | 811 __ mov(temp1, Operand(temp2, LSR, 11)); |
| 810 __ and_(temp2, temp2, Operand(ip)); | 812 __ Ubfx(temp2, temp2, 0, 11); |
| 811 __ add(temp1, temp1, Operand(0x3ff)); | 813 __ add(temp1, temp1, Operand(0x3ff)); |
| 812 __ mov(temp1, Operand(temp1, LSL, 20)); | |
| 813 | 814 |
| 814 // Must not call ExpConstant() after overwriting temp3! | 815 // Must not call ExpConstant() after overwriting temp3! |
| 815 __ mov(temp3, Operand(ExternalReference::math_exp_log_table())); | 816 __ mov(temp3, Operand(ExternalReference::math_exp_log_table())); |
| 816 __ ldr(ip, MemOperand(temp3, temp2, LSL, 3)); | 817 __ add(temp3, temp3, Operand(temp2, LSL, 3)); |
| 817 __ add(temp3, temp3, Operand(kPointerSize)); | 818 __ ldm(ia, temp3, temp2.bit() | temp3.bit()); |
| 818 __ ldr(temp2, MemOperand(temp3, temp2, LSL, 3)); | 819 // The first word loaded goes into the lower-numbered register. |
| 819 __ orr(temp1, temp1, temp2); | 820 if (temp2.code() < temp3.code()) { |
| 820 __ vmov(input, ip, temp1); | 821 __ orr(temp1, temp3, Operand(temp1, LSL, 20)); |
| 821 __ vmul(result, result, input); | 822 __ vmov(double_scratch1, temp2, temp1); |
| | 823 } else { |
| | 824 __ orr(temp1, temp2, Operand(temp1, LSL, 20)); |
| | 825 __ vmov(double_scratch1, temp3, temp1); |
| | 826 } |
| | 827 __ vmul(result, result, double_scratch1); |
| | 828 __ b(&done); |
| | 829 |
| | 830 __ bind(&zero); |
| | 831 __ vmov(result, kDoubleRegZero); |
| | 832 __ b(&done); |
| | 833 |
| | 834 __ bind(&infinity); |
| | 835 __ vldr(result, ExpConstant(2, temp3)); |
| | 836 |
| 822 __ bind(&done); | 837 __ bind(&done); |
| 823 } | 838 } |
| 824 | 839 |
| 825 #undef __ | 840 #undef __ |
| 826 | 841 |
| 827 // add(r0, pc, Operand(-8)) | 842 // add(r0, pc, Operand(-8)) |
| 828 static const uint32_t kCodeAgePatchFirstInstruction = 0xe24f0008; | 843 static const uint32_t kCodeAgePatchFirstInstruction = 0xe24f0008; |
| 829 | 844 |
| 830 static byte* GetNoCodeAgeSequence(uint32_t* length) { | 845 static byte* GetNoCodeAgeSequence(uint32_t* length) { |
| 831 // The sequence of instructions that is patched out for aging code is the | 846 // The sequence of instructions that is patched out for aging code is the |
| (...skipping 53 matching lines...) |
| 885 patcher.masm()->add(r0, pc, Operand(-8)); | 900 patcher.masm()->add(r0, pc, Operand(-8)); |
| 886 patcher.masm()->ldr(pc, MemOperand(pc, -4)); | 901 patcher.masm()->ldr(pc, MemOperand(pc, -4)); |
| 887 patcher.masm()->dd(reinterpret_cast<uint32_t>(stub->instruction_start())); | 902 patcher.masm()->dd(reinterpret_cast<uint32_t>(stub->instruction_start())); |
| 888 } | 903 } |
| 889 } | 904 } |
| 890 | 905 |
| 891 | 906 |
| 892 } } // namespace v8::internal | 907 } } // namespace v8::internal |
| 893 | 908 |
| 894 #endif // V8_TARGET_ARCH_ARM | 909 #endif // V8_TARGET_ARCH_ARM |