Chromium Code Reviews

Unified Diff: src/arm/code-stubs-arm.cc

Issue 7778013: NewGC: Merge bleeding edge up to 9009. (Closed) Base URL: http://v8.googlecode.com/svn/branches/experimental/gc/
Patch Set: Created 9 years, 3 months ago
 // Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 531 matching lines...)
     if (destination == kCoreRegisters) {
       __ vmov(dst1, dst2, double_dst);
     }
   } else {
     Label fewer_than_20_useful_bits;
     // Expected output:
     // |        dst2        |        dst1        |
     // | s |  exp  |           mantissa          |

     // Check for zero.
-    __ cmp(int_scratch, Operand(0));
+    __ cmp(int_scratch, Operand::Zero());
     __ mov(dst2, int_scratch);
     __ mov(dst1, int_scratch);
     __ b(eq, &done);

     // Preload the sign of the value.
     __ and_(dst2, int_scratch, Operand(HeapNumber::kSignMask), SetCC);
     // Get the absolute value of the object (as an unsigned integer).
-    __ rsb(int_scratch, int_scratch, Operand(0), SetCC, mi);
+    __ rsb(int_scratch, int_scratch, Operand::Zero(), SetCC, mi);

     // Get mantissa[51:20].

     // Get the position of the first set bit.
     __ CountLeadingZeros(dst1, int_scratch, scratch2);
     __ rsb(dst1, dst1, Operand(31));

     // Set the exponent.
     __ add(scratch2, dst1, Operand(HeapNumber::kExponentBias));
     __ Bfi(dst2, scratch2, scratch2,
(...skipping 11 matching lines...)
     __ orr(dst2, dst2, Operand(int_scratch, LSR, scratch2));
     __ rsb(scratch2, scratch2, Operand(32));
     __ mov(dst1, Operand(int_scratch, LSL, scratch2));
     __ b(&done);

     __ bind(&fewer_than_20_useful_bits);
     __ rsb(scratch2, dst1, Operand(HeapNumber::kMantissaBitsInTopWord));
     __ mov(scratch2, Operand(int_scratch, LSL, scratch2));
     __ orr(dst2, dst2, scratch2);
     // Set dst1 to 0.
-    __ mov(dst1, Operand(0));
+    __ mov(dst1, Operand::Zero());
   }
   __ bind(&done);
 }

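For context, the fallback path above assembles the IEEE-754 double by hand: it biases the index of the highest set bit to form the exponent and splits the mantissa across the two destination words. Below is a minimal standalone C++ sketch of the same bit manipulation; the function name and the GCC/Clang __builtin_clz intrinsic are choices made here for illustration, not V8 code.

    #include <stdint.h>

    // Sketch of the non-VFP path above: convert a non-zero int32 into the
    // high and low words of an IEEE-754 double.
    void IntToDoubleWords(int32_t value, uint32_t* hi, uint32_t* lo) {
      uint32_t sign = static_cast<uint32_t>(value) & 0x80000000u;  // kSignMask
      uint32_t mag = sign ? 0u - static_cast<uint32_t>(value)
                          : static_cast<uint32_t>(value);
      int top_bit = 31 - __builtin_clz(mag);       // CountLeadingZeros + rsb
      uint32_t exponent = static_cast<uint32_t>(top_bit) + 1023u;  // bias
      // Drop the implicit leading 1 and align the remaining bits with bit 51.
      uint64_t mantissa =
          (static_cast<uint64_t>(mag) << (52 - top_bit)) & ((1ull << 52) - 1);
      *hi = sign | (exponent << 20) | static_cast<uint32_t>(mantissa >> 32);
      *lo = static_cast<uint32_t>(mantissa);
    }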

 void FloatingPointHelper::LoadNumberAsInt32Double(MacroAssembler* masm,
                                                   Register object,
                                                   Destination destination,
                                                   DwVfpRegister double_dst,
                                                   Register dst1,
(...skipping 47 matching lines...)
     }

   } else {
     ASSERT(!scratch1.is(object) && !scratch2.is(object));
     // Load the double value into the destination registers.
     __ Ldrd(dst1, dst2, FieldMemOperand(object, HeapNumber::kValueOffset));

     // Check for 0 and -0.
     __ bic(scratch1, dst1, Operand(HeapNumber::kSignMask));
     __ orr(scratch1, scratch1, Operand(dst2));
-    __ cmp(scratch1, Operand(0));
+    __ cmp(scratch1, Operand::Zero());
     __ b(eq, &done);

     // Check that the value can be exactly represented by a 32-bit integer.
     // Jump to not_int32 if that's not the case.
     DoubleIs32BitInteger(masm, dst1, dst2, scratch1, scratch2, not_int32);

     // dst1 and dst2 were trashed. Reload the double value.
     __ Ldrd(dst1, dst2, FieldMemOperand(object, HeapNumber::kValueOffset));
   }
(...skipping 52 matching lines...)
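One detail worth calling out in the hunk above: the bic/orr/cmp sequence detects both zeros at once. Clearing the sign bit of the high word and OR-ing in the low word leaves zero exactly for +0.0 and -0.0, the only doubles whose remaining 63 bits are all clear. A C++ equivalent (the helper name is hypothetical):

    #include <stdint.h>

    // True iff the word pair (hi, lo) encodes +0.0 or -0.0.
    bool IsPlusOrMinusZero(uint32_t hi, uint32_t lo) {
      return ((hi & ~0x80000000u) | lo) == 0;  // bic sign bit, then orr low word
    }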
     __ vmov(dst, single_scratch);

   } else {
     // Load the double value into the destination registers.
     __ ldr(scratch1, FieldMemOperand(object, HeapNumber::kExponentOffset));
     __ ldr(scratch2, FieldMemOperand(object, HeapNumber::kMantissaOffset));

     // Check for 0 and -0.
     __ bic(dst, scratch1, Operand(HeapNumber::kSignMask));
     __ orr(dst, scratch2, Operand(dst));
-    __ cmp(dst, Operand(0));
+    __ cmp(dst, Operand::Zero());
     __ b(eq, &done);

     DoubleIs32BitInteger(masm, scratch1, scratch2, dst, scratch3, not_int32);

     // Register state after DoubleIs32BitInteger:
     // dst: mantissa[51:20].
     // scratch2: 1

     // Shift back the higher bits of the mantissa.
     __ mov(dst, Operand(dst, LSR, scratch3));
     // Set the implicit first bit.
     __ rsb(scratch3, scratch3, Operand(32));
     __ orr(dst, dst, Operand(scratch2, LSL, scratch3));
     // Set the sign.
     __ ldr(scratch1, FieldMemOperand(object, HeapNumber::kExponentOffset));
     __ tst(scratch1, Operand(HeapNumber::kSignMask));
-    __ rsb(dst, dst, Operand(0), LeaveCC, mi);
+    __ rsb(dst, dst, Operand::Zero(), LeaveCC, mi);
   }

   __ bind(&done);
 }


 void FloatingPointHelper::DoubleIs32BitInteger(MacroAssembler* masm,
                                                Register src1,
                                                Register src2,
                                                Register dst,
(...skipping 845 matching lines...)
 // The stub expects its argument in the tos_ register and returns its result in
 // it, too: zero for false, and a non-zero value for true.
 void ToBooleanStub::Generate(MacroAssembler* masm) {
   // This stub uses VFP3 instructions.
   CpuFeatures::Scope scope(VFP3);

   Label patch;
   const Register map = r9.is(tos_) ? r7 : r9;

   // undefined -> false.
-  CheckOddball(masm, UNDEFINED, Heap::kUndefinedValueRootIndex, false, &patch);
+  CheckOddball(masm, UNDEFINED, Heap::kUndefinedValueRootIndex, false);

   // Boolean -> its value.
-  CheckOddball(masm, BOOLEAN, Heap::kFalseValueRootIndex, false, &patch);
-  CheckOddball(masm, BOOLEAN, Heap::kTrueValueRootIndex, true, &patch);
+  CheckOddball(masm, BOOLEAN, Heap::kFalseValueRootIndex, false);
+  CheckOddball(masm, BOOLEAN, Heap::kTrueValueRootIndex, true);

   // 'null' -> false.
-  CheckOddball(masm, NULL_TYPE, Heap::kNullValueRootIndex, false, &patch);
+  CheckOddball(masm, NULL_TYPE, Heap::kNullValueRootIndex, false);

   if (types_.Contains(SMI)) {
     // Smis: 0 -> false, all other -> true.
     __ tst(tos_, Operand(kSmiTagMask));
     // tos_ contains the correct return value already.
     __ Ret(eq);
   } else if (types_.NeedsMap()) {
     // If we need a map later and have a Smi -> patch.
     __ JumpIfSmi(tos_, &patch);
   }

   if (types_.NeedsMap()) {
     __ ldr(map, FieldMemOperand(tos_, HeapObject::kMapOffset));

-    // Everything with a map could be undetectable, so check this now.
-    __ ldrb(ip, FieldMemOperand(map, Map::kBitFieldOffset));
-    __ tst(ip, Operand(1 << Map::kIsUndetectable));
-    // Undetectable -> false.
-    __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, ne);
-    __ Ret(ne);
+    if (types_.CanBeUndetectable()) {
+      __ ldrb(ip, FieldMemOperand(map, Map::kBitFieldOffset));
+      __ tst(ip, Operand(1 << Map::kIsUndetectable));
+      // Undetectable -> false.
+      __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, ne);
+      __ Ret(ne);
+    }
   }

   if (types_.Contains(SPEC_OBJECT)) {
     // Spec object -> true.
     __ CompareInstanceType(map, ip, FIRST_SPEC_OBJECT_TYPE);
     // tos_ contains the correct non-zero return value already.
     __ Ret(ge);
-  } else if (types_.Contains(INTERNAL_OBJECT)) {
-    // We've seen a spec object for the first time -> patch.
-    __ CompareInstanceType(map, ip, FIRST_SPEC_OBJECT_TYPE);
-    __ b(ge, &patch);
   }

   if (types_.Contains(STRING)) {
     // String value -> false iff empty.
     __ CompareInstanceType(map, ip, FIRST_NONSTRING_TYPE);
     __ ldr(tos_, FieldMemOperand(tos_, String::kLengthOffset), lt);
     __ Ret(lt);  // The string length is OK as the return value.
-  } else if (types_.Contains(INTERNAL_OBJECT)) {
-    // We've seen a string for the first time -> patch.
-    __ CompareInstanceType(map, ip, FIRST_NONSTRING_TYPE);
-    __ b(lt, &patch);
   }

   if (types_.Contains(HEAP_NUMBER)) {
     // Heap number -> false iff +0, -0, or NaN.
     Label not_heap_number;
     __ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
     __ b(ne, &not_heap_number);
     __ vldr(d1, FieldMemOperand(tos_, HeapNumber::kValueOffset));
     __ VFPCompareAndSetFlags(d1, 0.0);
     // "tos_" is a register, and contains a non-zero value by default.
     // Hence we only need to overwrite "tos_" with zero to return false for
     // FP_ZERO or FP_NAN cases. Otherwise, by default it returns true.
     __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, eq);  // for FP_ZERO
     __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, vs);  // for FP_NAN
     __ Ret();
     __ bind(&not_heap_number);
-  } else if (types_.Contains(INTERNAL_OBJECT)) {
-    // We've seen a heap number for the first time -> patch.
-    __ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
-    __ b(eq, &patch);
   }

-  if (types_.Contains(INTERNAL_OBJECT)) {
-    // Internal objects -> true.
-    __ mov(tos_, Operand(1, RelocInfo::NONE));
-    __ Ret();
-  }
-
-  if (!types_.IsAll()) {
-    __ bind(&patch);
-    GenerateTypeTransition(masm);
-  }
+  __ bind(&patch);
+  GenerateTypeTransition(masm);
 }

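For readers new to this stub, the branches above together encode JavaScript's ToBoolean conversion. A rough standalone C++ model of the decision table follows; the Value variant and its alternatives are illustrative stand-ins, not V8 types.

    #include <cmath>
    #include <cstdint>
    #include <string>
    #include <variant>

    struct Undefined {};
    struct Null {};
    struct JSObject { bool undetectable; };  // e.g. document.all is undetectable

    using Value = std::variant<Undefined, Null, bool, int32_t /* smi */,
                               double /* heap number */, std::string, JSObject>;

    bool ToBoolean(const Value& v) {
      if (std::holds_alternative<Undefined>(v)) return false;  // undefined -> false
      if (std::holds_alternative<Null>(v)) return false;       // null -> false
      if (auto* b = std::get_if<bool>(&v)) return *b;          // boolean -> itself
      if (auto* s = std::get_if<int32_t>(&v)) return *s != 0;  // smi: 0 -> false
      if (auto* d = std::get_if<double>(&v))                   // +0, -0, NaN -> false
        return *d != 0.0 && !std::isnan(*d);
      if (auto* str = std::get_if<std::string>(&v))            // "" -> false
        return !str->empty();
      return !std::get<JSObject>(v).undetectable;              // objects -> true
    }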
1700 1681
1701 void ToBooleanStub::CheckOddball(MacroAssembler* masm, 1682 void ToBooleanStub::CheckOddball(MacroAssembler* masm,
1702 Type type, 1683 Type type,
1703 Heap::RootListIndex value, 1684 Heap::RootListIndex value,
1704 bool result, 1685 bool result) {
1705 Label* patch) {
1706 if (types_.Contains(type)) { 1686 if (types_.Contains(type)) {
1707 // If we see an expected oddball, return its ToBoolean value tos_. 1687 // If we see an expected oddball, return its ToBoolean value tos_.
1708 __ LoadRoot(ip, value); 1688 __ LoadRoot(ip, value);
1709 __ cmp(tos_, ip); 1689 __ cmp(tos_, ip);
1710 // The value of a root is never NULL, so we can avoid loading a non-null 1690 // The value of a root is never NULL, so we can avoid loading a non-null
1711 // value into tos_ when we want to return 'true'. 1691 // value into tos_ when we want to return 'true'.
1712 if (!result) { 1692 if (!result) {
1713 __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, eq); 1693 __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, eq);
1714 } 1694 }
1715 __ Ret(eq); 1695 __ Ret(eq);
1716 } else if (types_.Contains(INTERNAL_OBJECT)) {
1717 // If we see an unexpected oddball and handle internal objects, we must
1718 // patch because the code for internal objects doesn't handle it explictly.
1719 __ LoadRoot(ip, value);
1720 __ cmp(tos_, ip);
1721 __ b(eq, patch);
1722 } 1696 }
1723 } 1697 }
1724 1698
1725 1699
1726 void ToBooleanStub::GenerateTypeTransition(MacroAssembler* masm) { 1700 void ToBooleanStub::GenerateTypeTransition(MacroAssembler* masm) {
1727 if (!tos_.is(r3)) { 1701 if (!tos_.is(r3)) {
1728 __ mov(r3, Operand(tos_)); 1702 __ mov(r3, Operand(tos_));
1729 } 1703 }
1730 __ mov(r2, Operand(Smi::FromInt(tos_.code()))); 1704 __ mov(r2, Operand(Smi::FromInt(tos_.code())));
1731 __ mov(r1, Operand(Smi::FromInt(types_.ToByte()))); 1705 __ mov(r1, Operand(Smi::FromInt(types_.ToByte())));
(...skipping 744 matching lines...) Expand 10 before | Expand all | Expand 10 after
 void BinaryOpStub::GenerateSmiCode(
     MacroAssembler* masm,
     Label* use_runtime,
     Label* gc_required,
     SmiCodeGenerateHeapNumberResults allow_heapnumber_results) {
   Label not_smis;

   Register left = r1;
   Register right = r0;
   Register scratch1 = r7;
-  Register scratch2 = r9;

   // Perform combined smi check on both operands.
   __ orr(scratch1, left, Operand(right));
   STATIC_ASSERT(kSmiTag == 0);
   __ JumpIfNotSmi(scratch1, &not_smis);

   // If the smi-smi operation results in a smi, a return is generated.
   GenerateSmiSmiOperation(masm);

   // If heap number results are possible, generate the result in an allocated
(...skipping 173 matching lines...)
     __ b(ne, &transition);
   }

   // Check if the result fits in a smi.
   __ vmov(scratch1, single_scratch);
   __ add(scratch2, scratch1, Operand(0x40000000), SetCC);
   // If not, try to return a heap number.
   __ b(mi, &return_heap_number);
   // Check for minus zero. Return heap number for minus zero.
   Label not_zero;
-  __ cmp(scratch1, Operand(0));
+  __ cmp(scratch1, Operand::Zero());
   __ b(ne, &not_zero);
   __ vmov(scratch2, d5.high());
   __ tst(scratch2, Operand(HeapNumber::kSignMask));
   __ b(ne, &return_heap_number);
   __ bind(&not_zero);

   // Tag the result and return.
   __ SmiTag(r0, scratch1);
   __ Ret();
 } else {
(...skipping 471 matching lines...)
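A note on the add with 0x40000000 above: on 32-bit targets a smi carries a 31-bit payload, so a result is taggable iff it lies in [-2^30, 2^30). Adding 0x40000000 maps exactly that interval onto values with a clear sign bit, so the single mi branch rejects everything out of range. A standalone sketch of the same test:

    #include <stdint.h>

    // True iff v fits in a 31-bit smi payload, i.e. v is in [-2^30, 2^30).
    bool FitsInSmi(int32_t v) {
      uint32_t biased = static_cast<uint32_t>(v) + 0x40000000u;
      return (biased & 0x80000000u) == 0;  // sign bit clear <=> in range
    }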
     __ bind(&invalid_cache);
     ExternalReference runtime_function =
         ExternalReference(RuntimeFunction(), masm->isolate());
     __ TailCallExternalReference(runtime_function, 1, 1);
   } else {
     if (!CpuFeatures::IsSupported(VFP3)) UNREACHABLE();
     CpuFeatures::Scope scope(VFP3);

     Label no_update;
     Label skip_cache;
-    const Register heap_number_map = r5;

     // Call C function to calculate the result and update the cache.
     // Register r0 holds precalculated cache entry address; preserve
     // it on the stack and pop it into register cache_entry after the
     // call.
     __ push(cache_entry);
     GenerateCallCFunction(masm, scratch0);
     __ GetCFunctionDoubleResult(d2);

     // Try to update the cache. If we cannot allocate a
(...skipping 450 matching lines...)
   __ Push(r8, r7, r6, r5);

   // Set up the frame pointer for the frame to be pushed.
   __ add(fp, sp, Operand(-EntryFrameConstants::kCallerFPOffset));

   // If this is the outermost JS call, set js_entry_sp value.
   Label non_outermost_js;
   ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address, isolate);
   __ mov(r5, Operand(ExternalReference(js_entry_sp)));
   __ ldr(r6, MemOperand(r5));
-  __ cmp(r6, Operand(0));
+  __ cmp(r6, Operand::Zero());
   __ b(ne, &non_outermost_js);
   __ str(fp, MemOperand(r5));
   __ mov(ip, Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
   Label cont;
   __ b(&cont);
   __ bind(&non_outermost_js);
   __ mov(ip, Operand(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));
   __ bind(&cont);
   __ push(ip);

(...skipping 54 matching lines...)
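In plain terms, the sequence above records whether this is the outermost JS entry on the thread: js_entry_sp is written only when it was still zero, and the pushed marker tells the matching epilogue (visible in the next hunk) whether to clear it again. A sketch with descriptive names, not the real V8 identifiers:

    // Descriptive sketch of the entry-frame bookkeeping above.
    enum Marker { INNER_JSENTRY_FRAME, OUTERMOST_JSENTRY_FRAME };

    Marker EnterJS(void** js_entry_sp, void* fp) {
      if (*js_entry_sp == nullptr) {  // first JS entry: remember our frame
        *js_entry_sp = fp;
        return OUTERMOST_JSENTRY_FRAME;
      }
      return INNER_JSENTRY_FRAME;     // nested entry: leave js_entry_sp alone
    }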

   // Unlink this frame from the handler chain.
   __ PopTryHandler();

   __ bind(&exit);  // r0 holds result
   // Check if the current stack frame is marked as the outermost JS frame.
   Label non_outermost_js_2;
   __ pop(r5);
   __ cmp(r5, Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
   __ b(ne, &non_outermost_js_2);
-  __ mov(r6, Operand(0));
+  __ mov(r6, Operand::Zero());
   __ mov(r5, Operand(ExternalReference(js_entry_sp)));
   __ str(r6, MemOperand(r5));
   __ bind(&non_outermost_js_2);

   // Restore the top frame descriptors from the stack.
   __ pop(r3);
   __ mov(ip,
          Operand(ExternalReference(Isolate::k_c_entry_fp_address, isolate)));
   __ str(r3, MemOperand(ip));

(...skipping 180 matching lines...)
   if (!ReturnTrueFalseObject()) {
     if (HasArgsInRegisters()) {
       __ Push(r0, r1);
     }
     __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
   } else {
     __ EnterInternalFrame();
     __ Push(r0, r1);
     __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
     __ LeaveInternalFrame();
-    __ cmp(r0, Operand(0));
+    __ cmp(r0, Operand::Zero());
     __ LoadRoot(r0, Heap::kTrueValueRootIndex, eq);
     __ LoadRoot(r0, Heap::kFalseValueRootIndex, ne);
     __ Ret(HasArgsInRegisters() ? 0 : 2);
   }
 }


 Register InstanceofStub::left() { return r0; }


(...skipping 113 matching lines...)
   __ mov(r1, Operand(r2), LeaveCC, gt);

   __ bind(&try_allocate);

   // Compute the sizes of backing store, parameter map, and arguments object.
   // 1. Parameter map: has 2 extra words containing context and backing store.
   const int kParameterMapHeaderSize =
       FixedArray::kHeaderSize + 2 * kPointerSize;
   // If there are no mapped parameters, we do not need the parameter_map.
   __ cmp(r1, Operand(Smi::FromInt(0)));
-  __ mov(r9, Operand(0), LeaveCC, eq);
+  __ mov(r9, Operand::Zero(), LeaveCC, eq);
   __ mov(r9, Operand(r1, LSL, 1), LeaveCC, ne);
   __ add(r9, r9, Operand(kParameterMapHeaderSize), LeaveCC, ne);

   // 2. Backing store.
   __ add(r9, r9, Operand(r2, LSL, 1));
   __ add(r9, r9, Operand(FixedArray::kHeaderSize));

   // 3. Arguments object.
   __ add(r9, r9, Operand(Heap::kArgumentsObjectSize));

   // Do the allocation of all three objects in one go.
   __ AllocateInNewSpace(r9, r0, r3, r4, &runtime, TAG_OBJECT);

   // r0 = address of new object(s) (tagged)
   // r2 = argument count (tagged)
   // Get the arguments boilerplate from the current (global) context into r4.
   const int kNormalOffset =
       Context::SlotOffset(Context::ARGUMENTS_BOILERPLATE_INDEX);
   const int kAliasedOffset =
       Context::SlotOffset(Context::ALIASED_ARGUMENTS_BOILERPLATE_INDEX);

   __ ldr(r4, MemOperand(r8, Context::SlotOffset(Context::GLOBAL_INDEX)));
   __ ldr(r4, FieldMemOperand(r4, GlobalObject::kGlobalContextOffset));
-  __ cmp(r1, Operand(0));
+  __ cmp(r1, Operand::Zero());
   __ ldr(r4, MemOperand(r4, kNormalOffset), eq);
   __ ldr(r4, MemOperand(r4, kAliasedOffset), ne);

   // r0 = address of new object (tagged)
   // r1 = mapped parameter count (tagged)
   // r2 = argument count (tagged)
   // r4 = address of boilerplate object (tagged)
   // Copy the JS object part.
   for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
     __ ldr(r3, FieldMemOperand(r4, i));
(...skipping 1670 matching lines...)
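To make the size arithmetic above concrete: r1 and r2 hold smi-tagged counts (value << 1), so `LSL 1` turns each into a byte count of pointers. The same computation in untagged pseudo-C++, reusing the constants named in the code above:

    // Total bytes allocated for the three objects, for mapped parameter
    // count m and argument count a (both untagged here).
    int ArgumentsAllocationSize(int m, int a) {
      const int kParameterMapHeaderSize =
          FixedArray::kHeaderSize + 2 * kPointerSize;
      int size = 0;
      if (m > 0)  // no parameter map needed when nothing is mapped
        size += kParameterMapHeaderSize + m * kPointerSize;
      size += FixedArray::kHeaderSize + a * kPointerSize;  // 2. backing store
      size += Heap::kArgumentsObjectSize;                  // 3. arguments object
      return size;
    }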
                                               Register scratch2,
                                               Label* chars_not_equal) {
   // Change index to run from -length to -1 by adding length to string
   // start. This means that the loop ends when index reaches zero, which
   // doesn't need an additional compare.
   __ SmiUntag(length);
   __ add(scratch1, length,
          Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
   __ add(left, left, Operand(scratch1));
   __ add(right, right, Operand(scratch1));
-  __ rsb(length, length, Operand(0));
+  __ rsb(length, length, Operand::Zero());
   Register index = length;  // index = -length;

   // Compare loop.
   Label loop;
   __ bind(&loop);
   __ ldrb(scratch1, MemOperand(left, index));
   __ ldrb(scratch2, MemOperand(right, index));
   __ cmp(scratch1, scratch2);
   __ b(ne, chars_not_equal);
   __ add(index, index, Operand(1), SetCC);
(...skipping 597 matching lines...)
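The indexing trick above is worth spelling out: both string pointers are advanced one past the last character, and the index runs from -length up to 0, so the add with SetCC doubles as the loop-termination test. The same loop in portable C++ (hypothetical helper):

    #include <stddef.h>

    // Compare two equal-length byte strings the way the stub does.
    bool CharsEqual(const unsigned char* left, const unsigned char* right,
                    ptrdiff_t length) {
      left += length;   // point one past the last character
      right += length;
      for (ptrdiff_t index = -length; index != 0; index++) {  // add ..., SetCC
        if (left[index] != right[index]) return false;  // b(ne, chars_not_equal)
      }
      return true;      // index reached zero: no extra compare needed
    }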
 }


 void DirectCEntryStub::Generate(MacroAssembler* masm) {
   __ ldr(pc, MemOperand(sp, 0));
 }


 void DirectCEntryStub::GenerateCall(MacroAssembler* masm,
                                     ExternalReference function) {
-  __ mov(lr, Operand(reinterpret_cast<intptr_t>(GetCode().location()),
-                     RelocInfo::CODE_TARGET));
   __ mov(r2, Operand(function));
-  // Push return address (accessible to GC through exit frame pc).
-  __ str(pc, MemOperand(sp, 0));
-  __ Jump(r2);  // Call the api function.
+  GenerateCall(masm, r2);
 }


 void DirectCEntryStub::GenerateCall(MacroAssembler* masm,
                                     Register target) {
   __ mov(lr, Operand(reinterpret_cast<intptr_t>(GetCode().location()),
                      RelocInfo::CODE_TARGET));
   // Push return address (accessible to GC through exit frame pc).
-  __ str(pc, MemOperand(sp, 0));
+  // Note that using pc with str is deprecated.
+  Label start;
+  __ bind(&start);
+  __ add(ip, pc, Operand(Assembler::kInstrSize));
+  __ str(ip, MemOperand(sp, 0));
   __ Jump(target);  // Call the C++ function.
+  ASSERT_EQ(Assembler::kInstrSize + Assembler::kPcLoadDelta,
+            masm->SizeOfCodeGeneratedSince(&start));
 }
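The replacement sequence works because reading pc on ARM yields the address of the current instruction plus 8, which is what Assembler::kPcLoadDelta accounts for. If the add is emitted at address A, the stored return address is A + 12, the instruction right after the jump, and that is exactly what the new ASSERT_EQ pins down:

    // Assuming the add is emitted at address A (Assembler::kInstrSize == 4):
    //   A + 0:  add ip, pc, #4     ; pc reads as A + 8, so ip = A + 12
    //   A + 4:  str ip, [sp, #0]   ; store the computed return address
    //   A + 8:  jump to target     ; the callee returns to A + 12
    // Hence SizeOfCodeGeneratedSince(&start) == 12 == kInstrSize + kPcLoadDelta.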


 MaybeObject* StringDictionaryLookupStub::GenerateNegativeLookup(
     MacroAssembler* masm,
     Label* miss,
     Label* done,
     Register receiver,
     Register properties,
     String* name,
(...skipping 202 matching lines...)
       __ tst(entry_key, Operand(kIsSymbolMask));
       __ b(eq, &maybe_in_dictionary);
     }
   }

   __ bind(&maybe_in_dictionary);
   // If we are doing a negative lookup, probing failure should be treated as
   // a lookup success. For a positive lookup, probing failure should be
   // treated as a lookup failure.
   if (mode_ == POSITIVE_LOOKUP) {
-    __ mov(result, Operand(0));
+    __ mov(result, Operand::Zero());
     __ Ret();
   }

   __ bind(&in_dictionary);
   __ mov(result, Operand(1));
   __ Ret();

   __ bind(&not_in_dictionary);
-  __ mov(result, Operand(0));
+  __ mov(result, Operand::Zero());
   __ Ret();
 }


 // Takes the input in 3 registers: address_ value_ and object_. A pointer to
 // the value has just been written into the object; now this stub makes sure
 // we keep the GC informed. The word in the object where the value has been
 // written is in the address register.
 void RecordWriteStub::Generate(MacroAssembler* masm) {
   Label skip_to_incremental_noncompacting;
(...skipping 167 matching lines...)
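As background for the elided body below (a sketch only; the skipped 167 lines hold the real logic): a record-write stub implements a write barrier. The generic shape, with every helper name hypothetical, is roughly:

    // Generic write-barrier shape, not this stub's exact logic. The store
    // *address = value has already happened; the barrier tells the collector
    // about pointers it might otherwise miss.
    void RecordWrite(void* object, void** address, void* value) {
      if (PointsIntoYoungGeneration(value) &&
          !PointsIntoYoungGeneration(object)) {
        RememberSlot(address);           // old-to-new pointer: remembered set
      }
      if (IncrementalMarkingIsActive()) {
        MarkValue(value);                // preserve the marking invariant
      }
    }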

   // Fall through when we need to inform the incremental marker.
 }


 #undef __

 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_ARM