Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 106 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 117 void MacroAssembler::RecordWrite(Register object, | 117 void MacroAssembler::RecordWrite(Register object, |
| 118 int offset, | 118 int offset, |
| 119 Register value, | 119 Register value, |
| 120 Register index) { | 120 Register index) { |
| 121 // The compiled code assumes that record write doesn't change the | 121 // The compiled code assumes that record write doesn't change the |
| 122 // context register, so we check that none of the clobbered | 122 // context register, so we check that none of the clobbered |
| 123 // registers are rsi. | 123 // registers are rsi. |
| 124 ASSERT(!object.is(rsi) && !value.is(rsi) && !index.is(rsi)); | 124 ASSERT(!object.is(rsi) && !value.is(rsi) && !index.is(rsi)); |
| 125 | 125 |
| 126 // First, check if a write barrier is even needed. The tests below | 126 // First, check if a write barrier is even needed. The tests below |
| 127 // catch stores of Smis and stores into young gen. | 127 // catch stores of smis and stores into young gen. |
|
Lasse Reichstein
2011/03/15 09:07:01
gen -> generation.
Erik Corry
2011/03/15 10:00:50
Done.
| |
| 128 Label done; | 128 Label done; |
| 129 JumpIfSmi(value, &done); | 129 JumpIfSmi(value, &done); |
| 130 | 130 |
| 131 RecordWriteNonSmi(object, offset, value, index); | 131 RecordWriteNonSmi(object, offset, value, index); |
| 132 bind(&done); | 132 bind(&done); |
| 133 | 133 |
| 134 // Clobber all input registers when running with the debug-code flag | 134 // Clobber all input registers when running with the debug-code flag |
| 135 // turned on to provoke errors. This clobbering repeats the | 135 // turned on to provoke errors. This clobbering repeats the |
| 136 // clobbering done inside RecordWriteNonSmi but it's necessary to | 136 // clobbering done inside RecordWriteNonSmi but it's necessary to |
| 137 // avoid having the fast case for smis leave the registers | 137 // avoid having the fast case for smis leave the registers |
| 138 // unchanged. | 138 // unchanged. |
| 139 if (FLAG_debug_code) { | 139 if (FLAG_debug_code) { |
| 140 movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE); | 140 movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE); |
| 141 movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE); | 141 movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE); |
| 142 movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE); | 142 movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE); |
| 143 } | 143 } |
| 144 } | 144 } |
| 145 | 145 |
| 146 | 146 |
| 147 void MacroAssembler::RecordWrite(Register object, | 147 void MacroAssembler::RecordWrite(Register object, |
| 148 Register address, | 148 Register address, |
| 149 Register value) { | 149 Register value) { |
| 150 // The compiled code assumes that record write doesn't change the | 150 // The compiled code assumes that record write doesn't change the |
| 151 // context register, so we check that none of the clobbered | 151 // context register, so we check that none of the clobbered |
| 152 // registers are rsi. | 152 // registers are rsi. |
| 153 ASSERT(!object.is(rsi) && !value.is(rsi) && !address.is(rsi)); | 153 ASSERT(!object.is(rsi) && !value.is(rsi) && !address.is(rsi)); |
| 154 | 154 |
| 155 // First, check if a write barrier is even needed. The tests below | 155 // First, check if a write barrier is even needed. The tests below |
| 156 // catch stores of Smis and stores into young gen. | 156 // catch stores of smis and stores into young gen. |
|
Lasse Reichstein
2011/03/15 09:07:01
ditto.
Erik Corry
2011/03/15 10:00:50
Done.
| |
| 157 Label done; | 157 Label done; |
| 158 JumpIfSmi(value, &done); | 158 JumpIfSmi(value, &done); |
| 159 | 159 |
| 160 InNewSpace(object, value, equal, &done); | 160 InNewSpace(object, value, equal, &done); |
| 161 | 161 |
| 162 RecordWriteHelper(object, address, value); | 162 RecordWriteHelper(object, address, value); |
| 163 | 163 |
| 164 bind(&done); | 164 bind(&done); |
| 165 | 165 |
| 166 // Clobber all input registers when running with the debug-code flag | 166 // Clobber all input registers when running with the debug-code flag |
| (...skipping 663 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 830 void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) { | 830 void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) { |
| 831 movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte)); | 831 movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte)); |
| 832 } | 832 } |
| 833 | 833 |
| 834 | 834 |
| 835 void MacroAssembler::SmiTest(Register src) { | 835 void MacroAssembler::SmiTest(Register src) { |
| 836 testq(src, src); | 836 testq(src, src); |
| 837 } | 837 } |
| 838 | 838 |
| 839 | 839 |
| 840 void MacroAssembler::SmiCompare(Register dst, Register src) { | 840 void MacroAssembler::SmiCompare(Register smi1, Register smi2) { |
| 841 cmpq(dst, src); | 841 if (FLAG_debug_code) { |
| 842 AbortIfNotSmi(smi1); | |
| 843 AbortIfNotSmi(smi2); | |
|
Lasse Reichstein
2011/03/15 09:07:01
Good stuff!
Erik Corry
2011/03/15 10:00:50
yup
| |
| 844 } | |
| 845 cmpq(smi1, smi2); | |
| 842 } | 846 } |
| 843 | 847 |
| 844 | 848 |
| 845 void MacroAssembler::SmiCompare(Register dst, Smi* src) { | 849 void MacroAssembler::SmiCompare(Register dst, Smi* src) { |
| 850 if (FLAG_debug_code) { | |
| 851 AbortIfNotSmi(dst); | |
| 852 } | |
| 853 // Actually, knowing the register is a smi doesn't enable any optimizations | |
| 854 // with the current tagging scheme. | |
| 855 Cmp(dst, src); | |
|
Lasse Reichstein
2011/03/15 09:07:01
... apart from the optimizations that Cmp also does
Erik Corry
2011/03/15 10:00:50
Comment deleted.
| |
| 856 } | |
| 857 | |
| 858 | |
| 859 void MacroAssembler::Cmp(Register dst, Smi* src) { | |
| 846 ASSERT(!dst.is(kScratchRegister)); | 860 ASSERT(!dst.is(kScratchRegister)); |
| 847 if (src->value() == 0) { | 861 if (src->value() == 0) { |
| 848 testq(dst, dst); | 862 testq(dst, dst); |
| 849 } else { | 863 } else { |
| 850 Register constant_reg = GetSmiConstant(src); | 864 Register constant_reg = GetSmiConstant(src); |
| 851 cmpq(dst, constant_reg); | 865 cmpq(dst, constant_reg); |
| 852 } | 866 } |
| 853 } | 867 } |
| 854 | 868 |
| 855 | 869 |
| 856 void MacroAssembler::SmiCompare(Register dst, const Operand& src) { | 870 void MacroAssembler::SmiCompare(Register dst, const Operand& src) { |
| 871 if (FLAG_debug_code) { | |
| 872 AbortIfNotSmi(dst); | |
| 873 AbortIfNotSmi(src); | |
| 874 } | |
| 857 cmpq(dst, src); | 875 cmpq(dst, src); |
| 858 } | 876 } |
| 859 | 877 |
| 860 | 878 |
| 861 void MacroAssembler::SmiCompare(const Operand& dst, Register src) { | 879 void MacroAssembler::SmiCompare(const Operand& dst, Register src) { |
| 880 if (FLAG_debug_code) { | |
| 881 AbortIfNotSmi(dst); | |
| 882 AbortIfNotSmi(src); | |
| 883 } | |
| 862 cmpq(dst, src); | 884 cmpq(dst, src); |
| 863 } | 885 } |
| 864 | 886 |
| 865 | 887 |
| 866 void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) { | 888 void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) { |
| 889 if (FLAG_debug_code) { | |
| 890 AbortIfNotSmi(dst); | |
| 891 } | |
| 867 cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value())); | 892 cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value())); |
| 868 } | 893 } |
| 869 | 894 |
| 870 | 895 |
| 896 void MacroAssembler::Cmp(const Operand& dst, Smi* src) { | |
| 897 // The Operand cannot use the smi register, since we may use the scratch | |
|
Lasse Reichstein
2011/03/15 09:07:01
Notice that smi_reg is either kScratchRegister or
Erik Corry
2011/03/15 10:00:50
Comment updated.
| |
| 898 // register to get around the lack of 64 bit immediates in the instruction | |
| 899 // set. | |
| 900 Register smi_reg = GetSmiConstant(src); | |
| 901 ASSERT(!dst.AddressUsesRegister(smi_reg)); | |
| 902 cmpq(dst, smi_reg); | |
| 903 } | |
| 904 | |
| 905 | |
| 871 void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) { | 906 void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) { |
| 872 cmpl(Operand(dst, kSmiShift / kBitsPerByte), src); | 907 cmpl(Operand(dst, kSmiShift / kBitsPerByte), src); |
| 873 } | 908 } |
| 874 | 909 |
| 875 | 910 |
| 876 void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst, | 911 void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst, |
| 877 Register src, | 912 Register src, |
| 878 int power) { | 913 int power) { |
| 879 ASSERT(power >= 0); | 914 ASSERT(power >= 0); |
| 880 ASSERT(power < 64); | 915 ASSERT(power < 64); |
| (...skipping 464 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 1345 Move(dst, Smi::cast(*source)); | 1380 Move(dst, Smi::cast(*source)); |
| 1346 } else { | 1381 } else { |
| 1347 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT); | 1382 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT); |
| 1348 movq(dst, kScratchRegister); | 1383 movq(dst, kScratchRegister); |
| 1349 } | 1384 } |
| 1350 } | 1385 } |
| 1351 | 1386 |
| 1352 | 1387 |
| 1353 void MacroAssembler::Cmp(Register dst, Handle<Object> source) { | 1388 void MacroAssembler::Cmp(Register dst, Handle<Object> source) { |
| 1354 if (source->IsSmi()) { | 1389 if (source->IsSmi()) { |
| 1355 SmiCompare(dst, Smi::cast(*source)); | 1390 Cmp(dst, Smi::cast(*source)); |
| 1356 } else { | 1391 } else { |
| 1357 Move(kScratchRegister, source); | 1392 Move(kScratchRegister, source); |
| 1358 cmpq(dst, kScratchRegister); | 1393 cmpq(dst, kScratchRegister); |
| 1359 } | 1394 } |
| 1360 } | 1395 } |
| 1361 | 1396 |
| 1362 | 1397 |
| 1363 void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) { | 1398 void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) { |
| 1364 if (source->IsSmi()) { | 1399 if (source->IsSmi()) { |
| 1365 SmiCompare(dst, Smi::cast(*source)); | 1400 Cmp(dst, Smi::cast(*source)); |
| 1366 } else { | 1401 } else { |
| 1367 ASSERT(source->IsHeapObject()); | 1402 ASSERT(source->IsHeapObject()); |
| 1368 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT); | 1403 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT); |
| 1369 cmpq(dst, kScratchRegister); | 1404 cmpq(dst, kScratchRegister); |
| 1370 } | 1405 } |
| 1371 } | 1406 } |
| 1372 | 1407 |
| 1373 | 1408 |
| 1374 void MacroAssembler::Push(Handle<Object> source) { | 1409 void MacroAssembler::Push(Handle<Object> source) { |
| 1375 if (source->IsSmi()) { | 1410 if (source->IsSmi()) { |
| (...skipping 370 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 1746 | 1781 |
| 1747 | 1782 |
| 1748 void MacroAssembler::AbortIfSmi(Register object) { | 1783 void MacroAssembler::AbortIfSmi(Register object) { |
| 1749 NearLabel ok; | 1784 NearLabel ok; |
| 1750 Condition is_smi = CheckSmi(object); | 1785 Condition is_smi = CheckSmi(object); |
| 1751 Assert(NegateCondition(is_smi), "Operand is a smi"); | 1786 Assert(NegateCondition(is_smi), "Operand is a smi"); |
| 1752 } | 1787 } |
| 1753 | 1788 |
| 1754 | 1789 |
| 1755 void MacroAssembler::AbortIfNotSmi(Register object) { | 1790 void MacroAssembler::AbortIfNotSmi(Register object) { |
| 1756 NearLabel ok; | 1791 Condition is_smi = CheckSmi(object); |
| 1792 Assert(is_smi, "Operand is not a smi"); | |
| 1793 } | |
| 1794 | |
| 1795 | |
| 1796 void MacroAssembler::AbortIfNotSmi(const Operand& object) { | |
| 1757 Condition is_smi = CheckSmi(object); | 1797 Condition is_smi = CheckSmi(object); |
|
Lasse Reichstein
2011/03/15 09:07:01
Negate condition?
Lasse Reichstein
2011/03/15 09:08:19
Ignore me, this is correct.
Erik Corry
2011/03/15 10:00:50
Done.
Erik Corry
2011/03/15 10:00:50
Not done.
| |
| 1758 Assert(is_smi, "Operand is not a smi"); | 1798 Assert(is_smi, "Operand is not a smi"); |
| 1759 } | 1799 } |
| 1760 | 1800 |
| 1761 | 1801 |
| 1762 void MacroAssembler::AbortIfNotString(Register object) { | 1802 void MacroAssembler::AbortIfNotString(Register object) { |
| 1763 testb(object, Immediate(kSmiTagMask)); | 1803 testb(object, Immediate(kSmiTagMask)); |
| 1764 Assert(not_equal, "Operand is not a string"); | 1804 Assert(not_equal, "Operand is not a string"); |
| 1765 push(object); | 1805 push(object); |
| 1766 movq(object, FieldOperand(object, HeapObject::kMapOffset)); | 1806 movq(object, FieldOperand(object, HeapObject::kMapOffset)); |
| 1767 CmpInstanceType(object, FIRST_NONSTRING_TYPE); | 1807 CmpInstanceType(object, FIRST_NONSTRING_TYPE); |
| (...skipping 918 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 2686 CPU::FlushICache(address_, size_); | 2726 CPU::FlushICache(address_, size_); |
| 2687 | 2727 |
| 2688 // Check that the code was patched as expected. | 2728 // Check that the code was patched as expected. |
| 2689 ASSERT(masm_.pc_ == address_ + size_); | 2729 ASSERT(masm_.pc_ == address_ + size_); |
| 2690 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); | 2730 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); |
| 2691 } | 2731 } |
| 2692 | 2732 |
| 2693 } } // namespace v8::internal | 2733 } } // namespace v8::internal |
| 2694 | 2734 |
| 2695 #endif // V8_TARGET_ARCH_X64 | 2735 #endif // V8_TARGET_ARCH_X64 |
| OLD | NEW |