Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(324)

Side by Side Diff: src/x64/macro-assembler-x64.cc

Issue 7776010: Convert a bunch of ASSERTs to STATIC_ASSERTs (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 9 years, 3 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/x64/ic-x64.cc ('k') | src/x64/stub-cache-x64.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show/Hide Comments ('s')
OLDNEW
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 905 matching lines...) Expand 10 before | Expand all | Expand 10 after
916 movq(dst, reinterpret_cast<uint64_t>(source), RelocInfo::NONE); 916 movq(dst, reinterpret_cast<uint64_t>(source), RelocInfo::NONE);
917 return; 917 return;
918 } 918 }
919 if (negative) { 919 if (negative) {
920 neg(dst); 920 neg(dst);
921 } 921 }
922 } 922 }
923 923
924 924
925 void MacroAssembler::Integer32ToSmi(Register dst, Register src) { 925 void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
926 ASSERT_EQ(0, kSmiTag); 926 STATIC_ASSERT(kSmiTag == 0);
927 if (!dst.is(src)) { 927 if (!dst.is(src)) {
928 movl(dst, src); 928 movl(dst, src);
929 } 929 }
930 shl(dst, Immediate(kSmiShift)); 930 shl(dst, Immediate(kSmiShift));
931 } 931 }
932 932
933 933
934 void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) { 934 void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
935 if (emit_debug_code()) { 935 if (emit_debug_code()) {
936 testb(dst, Immediate(0x01)); 936 testb(dst, Immediate(0x01));
(...skipping 17 matching lines...) Expand all
954 if (dst.is(src)) { 954 if (dst.is(src)) {
955 addl(dst, Immediate(constant)); 955 addl(dst, Immediate(constant));
956 } else { 956 } else {
957 leal(dst, Operand(src, constant)); 957 leal(dst, Operand(src, constant));
958 } 958 }
959 shl(dst, Immediate(kSmiShift)); 959 shl(dst, Immediate(kSmiShift));
960 } 960 }
961 961
962 962
963 void MacroAssembler::SmiToInteger32(Register dst, Register src) { 963 void MacroAssembler::SmiToInteger32(Register dst, Register src) {
964 ASSERT_EQ(0, kSmiTag); 964 STATIC_ASSERT(kSmiTag == 0);
965 if (!dst.is(src)) { 965 if (!dst.is(src)) {
966 movq(dst, src); 966 movq(dst, src);
967 } 967 }
968 shr(dst, Immediate(kSmiShift)); 968 shr(dst, Immediate(kSmiShift));
969 } 969 }
970 970
971 971
972 void MacroAssembler::SmiToInteger32(Register dst, const Operand& src) { 972 void MacroAssembler::SmiToInteger32(Register dst, const Operand& src) {
973 movl(dst, Operand(src, kSmiShift / kBitsPerByte)); 973 movl(dst, Operand(src, kSmiShift / kBitsPerByte));
974 } 974 }
975 975
976 976
977 void MacroAssembler::SmiToInteger64(Register dst, Register src) { 977 void MacroAssembler::SmiToInteger64(Register dst, Register src) {
978 ASSERT_EQ(0, kSmiTag); 978 STATIC_ASSERT(kSmiTag == 0);
979 if (!dst.is(src)) { 979 if (!dst.is(src)) {
980 movq(dst, src); 980 movq(dst, src);
981 } 981 }
982 sar(dst, Immediate(kSmiShift)); 982 sar(dst, Immediate(kSmiShift));
983 } 983 }
984 984
985 985
986 void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) { 986 void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) {
987 movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte)); 987 movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte));
988 } 988 }
(...skipping 115 matching lines...) Expand 10 before | Expand all | Expand 10 after
1104 movq(dst, kScratchRegister); 1104 movq(dst, kScratchRegister);
1105 } else { 1105 } else {
1106 movq(dst, src1); 1106 movq(dst, src1);
1107 or_(dst, src2); 1107 or_(dst, src2);
1108 JumpIfNotSmi(dst, on_not_smis, near_jump); 1108 JumpIfNotSmi(dst, on_not_smis, near_jump);
1109 } 1109 }
1110 } 1110 }
1111 1111
1112 1112
1113 Condition MacroAssembler::CheckSmi(Register src) { 1113 Condition MacroAssembler::CheckSmi(Register src) {
1114 ASSERT_EQ(0, kSmiTag); 1114 STATIC_ASSERT(kSmiTag == 0);
1115 testb(src, Immediate(kSmiTagMask)); 1115 testb(src, Immediate(kSmiTagMask));
1116 return zero; 1116 return zero;
1117 } 1117 }
1118 1118
1119 1119
1120 Condition MacroAssembler::CheckSmi(const Operand& src) { 1120 Condition MacroAssembler::CheckSmi(const Operand& src) {
1121 ASSERT_EQ(0, kSmiTag); 1121 STATIC_ASSERT(kSmiTag == 0);
1122 testb(src, Immediate(kSmiTagMask)); 1122 testb(src, Immediate(kSmiTagMask));
1123 return zero; 1123 return zero;
1124 } 1124 }
1125 1125
1126 1126
1127 Condition MacroAssembler::CheckNonNegativeSmi(Register src) { 1127 Condition MacroAssembler::CheckNonNegativeSmi(Register src) {
1128 ASSERT_EQ(0, kSmiTag); 1128 STATIC_ASSERT(kSmiTag == 0);
1129 // Test that both bits of the mask 0x8000000000000001 are zero. 1129 // Test that both bits of the mask 0x8000000000000001 are zero.
1130 movq(kScratchRegister, src); 1130 movq(kScratchRegister, src);
1131 rol(kScratchRegister, Immediate(1)); 1131 rol(kScratchRegister, Immediate(1));
1132 testb(kScratchRegister, Immediate(3)); 1132 testb(kScratchRegister, Immediate(3));
1133 return zero; 1133 return zero;
1134 } 1134 }
1135 1135
1136 1136
1137 Condition MacroAssembler::CheckBothSmi(Register first, Register second) { 1137 Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
1138 if (first.is(second)) { 1138 if (first.is(second)) {
1139 return CheckSmi(first); 1139 return CheckSmi(first);
1140 } 1140 }
1141 ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3); 1141 STATIC_ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3);
1142 leal(kScratchRegister, Operand(first, second, times_1, 0)); 1142 leal(kScratchRegister, Operand(first, second, times_1, 0));
1143 testb(kScratchRegister, Immediate(0x03)); 1143 testb(kScratchRegister, Immediate(0x03));
1144 return zero; 1144 return zero;
1145 } 1145 }
1146 1146
1147 1147
1148 Condition MacroAssembler::CheckBothNonNegativeSmi(Register first, 1148 Condition MacroAssembler::CheckBothNonNegativeSmi(Register first,
1149 Register second) { 1149 Register second) {
1150 if (first.is(second)) { 1150 if (first.is(second)) {
1151 return CheckNonNegativeSmi(first); 1151 return CheckNonNegativeSmi(first);
(...skipping 135 matching lines...) Expand 10 before | Expand all | Expand 10 after
1287 } 1287 }
1288 1288
1289 1289
1290 void MacroAssembler::SmiTryAddConstant(Register dst, 1290 void MacroAssembler::SmiTryAddConstant(Register dst,
1291 Register src, 1291 Register src,
1292 Smi* constant, 1292 Smi* constant,
1293 Label* on_not_smi_result, 1293 Label* on_not_smi_result,
1294 Label::Distance near_jump) { 1294 Label::Distance near_jump) {
1295 // Does not assume that src is a smi. 1295 // Does not assume that src is a smi.
1296 ASSERT_EQ(static_cast<int>(1), static_cast<int>(kSmiTagMask)); 1296 ASSERT_EQ(static_cast<int>(1), static_cast<int>(kSmiTagMask));
1297 ASSERT_EQ(0, kSmiTag); 1297 STATIC_ASSERT(kSmiTag == 0);
1298 ASSERT(!dst.is(kScratchRegister)); 1298 ASSERT(!dst.is(kScratchRegister));
1299 ASSERT(!src.is(kScratchRegister)); 1299 ASSERT(!src.is(kScratchRegister));
1300 1300
1301 JumpIfNotSmi(src, on_not_smi_result, near_jump); 1301 JumpIfNotSmi(src, on_not_smi_result, near_jump);
1302 Register tmp = (dst.is(src) ? kScratchRegister : dst); 1302 Register tmp = (dst.is(src) ? kScratchRegister : dst);
1303 LoadSmiConstant(tmp, constant); 1303 LoadSmiConstant(tmp, constant);
1304 addq(tmp, src); 1304 addq(tmp, src);
1305 j(overflow, on_not_smi_result, near_jump); 1305 j(overflow, on_not_smi_result, near_jump);
1306 if (dst.is(src)) { 1306 if (dst.is(src)) {
1307 movq(dst, tmp); 1307 movq(dst, tmp);
(...skipping 683 matching lines...) Expand 10 before | Expand all | Expand 10 after
1991 ASSERT(!src2.is(kScratchRegister)); 1991 ASSERT(!src2.is(kScratchRegister));
1992 ASSERT(!dst.is(src1)); 1992 ASSERT(!dst.is(src1));
1993 ASSERT(!dst.is(src2)); 1993 ASSERT(!dst.is(src2));
1994 // Both operands must not be smis. 1994 // Both operands must not be smis.
1995 #ifdef DEBUG 1995 #ifdef DEBUG
1996 if (allow_stub_calls()) { // Check contains a stub call. 1996 if (allow_stub_calls()) { // Check contains a stub call.
1997 Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2)); 1997 Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
1998 Check(not_both_smis, "Both registers were smis in SelectNonSmi."); 1998 Check(not_both_smis, "Both registers were smis in SelectNonSmi.");
1999 } 1999 }
2000 #endif 2000 #endif
2001 ASSERT_EQ(0, kSmiTag); 2001 STATIC_ASSERT(kSmiTag == 0);
2002 ASSERT_EQ(0, Smi::FromInt(0)); 2002 ASSERT_EQ(0, Smi::FromInt(0));
2003 movl(kScratchRegister, Immediate(kSmiTagMask)); 2003 movl(kScratchRegister, Immediate(kSmiTagMask));
2004 and_(kScratchRegister, src1); 2004 and_(kScratchRegister, src1);
2005 testl(kScratchRegister, src2); 2005 testl(kScratchRegister, src2);
2006 // If non-zero then both are smis. 2006 // If non-zero then both are smis.
2007 j(not_zero, on_not_smis, near_jump); 2007 j(not_zero, on_not_smis, near_jump);
2008 2008
2009 // Exactly one operand is a smi. 2009 // Exactly one operand is a smi.
2010 ASSERT_EQ(1, static_cast<int>(kSmiTagMask)); 2010 ASSERT_EQ(1, static_cast<int>(kSmiTagMask));
2011 // kScratchRegister still holds src1 & kSmiTag, which is either zero or one. 2011 // kScratchRegister still holds src1 & kSmiTag, which is either zero or one.
(...skipping 680 matching lines...) Expand 10 before | Expand all | Expand 10 after
2692 Check(equal, message); 2692 Check(equal, message);
2693 } 2693 }
2694 2694
2695 2695
2696 2696
2697 Condition MacroAssembler::IsObjectStringType(Register heap_object, 2697 Condition MacroAssembler::IsObjectStringType(Register heap_object,
2698 Register map, 2698 Register map,
2699 Register instance_type) { 2699 Register instance_type) {
2700 movq(map, FieldOperand(heap_object, HeapObject::kMapOffset)); 2700 movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
2701 movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset)); 2701 movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
2702 ASSERT(kNotStringTag != 0); 2702 STATIC_ASSERT(kNotStringTag != 0);
2703 testb(instance_type, Immediate(kIsNotStringMask)); 2703 testb(instance_type, Immediate(kIsNotStringMask));
2704 return zero; 2704 return zero;
2705 } 2705 }
2706 2706
2707 2707
2708 void MacroAssembler::TryGetFunctionPrototype(Register function, 2708 void MacroAssembler::TryGetFunctionPrototype(Register function,
2709 Register result, 2709 Register result,
2710 Label* miss) { 2710 Label* miss) {
2711 // Check that the receiver isn't a smi. 2711 // Check that the receiver isn't a smi.
2712 testl(function, Immediate(kSmiTagMask)); 2712 testl(function, Immediate(kSmiTagMask));
(...skipping 1139 matching lines...) Expand 10 before | Expand all | Expand 10 after
3852 CPU::FlushICache(address_, size_); 3852 CPU::FlushICache(address_, size_);
3853 3853
3854 // Check that the code was patched as expected. 3854 // Check that the code was patched as expected.
3855 ASSERT(masm_.pc_ == address_ + size_); 3855 ASSERT(masm_.pc_ == address_ + size_);
3856 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); 3856 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
3857 } 3857 }
3858 3858
3859 } } // namespace v8::internal 3859 } } // namespace v8::internal
3860 3860
3861 #endif // V8_TARGET_ARCH_X64 3861 #endif // V8_TARGET_ARCH_X64
OLDNEW
« no previous file with comments | « src/x64/ic-x64.cc ('k') | src/x64/stub-cache-x64.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698