Chromium Code Reviews

Side by Side Diff: src/x64/macro-assembler-x64.cc

Issue 196118: X64: Abstract indexing by a smi to the macro assembler. (Closed)
Patch Set: Created 11 years, 3 months ago
// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
(...skipping 1074 matching lines...)
  // If src1 is a smi, then scratch register all 1s, else it is all 0s.
  movq(dst, src1);
  xor_(dst, src2);
  and_(dst, kScratchRegister);
  // If src1 is a smi, dst holds src1 ^ src2, else it is zero.
  xor_(dst, src1);
  // If src1 is a smi, dst is src2, else it is src1, i.e., a non-smi.
}

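The unchanged lines above are the tail of a branch-free select: with kScratchRegister holding all 1s when src1 is a smi and all 0s otherwise, ((src1 ^ src2) & mask) ^ src1 yields src2 in the smi case and src1 otherwise. A minimal standalone illustration of that idiom (plain C++, not V8 code; the names are invented for the example):

    #include <cassert>
    #include <cstdint>

    // Branch-free select: returns b when mask is all ones, a when mask is all zeros.
    uint64_t SelectByMask(uint64_t a, uint64_t b, uint64_t mask) {
      return ((a ^ b) & mask) ^ a;
    }

    int main() {
      assert(SelectByMask(0x1234, 0x5678, ~uint64_t{0}) == 0x5678);  // mask set: picks b.
      assert(SelectByMask(0x1234, 0x5678, uint64_t{0}) == 0x1234);   // mask clear: picks a.
      return 0;
    }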
SmiIndex MacroAssembler::SmiToIndex(Register dst, Register src, int shift) {
  ASSERT(is_uint6(shift));
  if (shift == 0) {  // times_1.
    SmiToInteger32(dst, src);
    return SmiIndex(dst, times_1);
  }
  if (shift <= 4) {  // 2 - 16 times multiplier.
    return SmiIndex(src, static_cast<ScaleFactor>(shift - kSmiTagSize));
William Hesse 2009/09/15 10:12:09 How are we making sure that src is zero-extended here?
Lasse Reichstein 2009/09/15 10:39:08 We should make sure. I'll add a movl(dst, src).
  }
  // Shift by shift-kSmiTagSize.
  if (!dst.is(src)) {
    movl(dst, src);
  }
  shl(dst, Immediate(shift - kSmiTagSize));
  return SmiIndex(dst, times_1);
}
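As a rough sketch of the fix Lasse proposes above (an illustration only, not necessarily the code that landed), the 2 - 16 times branch could copy the smi into dst with movl, which writes the low 32 bits and clears bits 32-63, and then hand back dst as the index register:

      if (shift <= 4) {  // 2 - 16 times multiplier.
        // movl zero-extends into the upper 32 bits, so the returned index
        // register holds only the 32-bit smi value even if src had garbage
        // in its high half.
        movl(dst, src);
        return SmiIndex(dst, static_cast<ScaleFactor>(shift - kSmiTagSize));
      }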

SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
                                            Register src,
                                            int shift) {
  // Register src holds a positive smi.
  ASSERT(is_uint6(shift));
  if (shift == 0) {  // times_1.
    SmiToInteger32(dst, src);
    neg(dst);
    return SmiIndex(dst, times_1);
  }
  if (shift <= 4) {  // 2 - 16 times multiplier is handled using ScaleFactor.
    if (!dst.is(src)) {
      movl(dst, src);
    }
    neg(dst);
    return SmiIndex(dst, static_cast<ScaleFactor>(shift - kSmiTagSize));
  }
  // Shift by shift-kSmiTagSize.
  if (!dst.is(src)) {
    movl(dst, src);
  }
  neg(dst);
  shl(dst, Immediate(shift - kSmiTagSize));
  return SmiIndex(dst, times_1);
}
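For context on how these helpers are intended to be used (an illustrative sketch, not part of this patch): the returned SmiIndex bundles a register and a ScaleFactor so callers can fold the smi untagging into an x64 addressing mode. Assuming SmiIndex exposes reg and scale fields and using the Operand(base, index, scale, displacement) form from assembler-x64.h, loading a FixedArray element at a smi index might look roughly like this, with masm a MacroAssembler* and the register choices purely illustrative:

    // Load the element of the FixedArray in rbx at the smi index in rax
    // into rcx. Elements are pointer-sized, so scale by kPointerSizeLog2.
    SmiIndex index = masm->SmiToIndex(rdx, rax, kPointerSizeLog2);
    masm->movq(rcx, Operand(rbx, index.reg, index.scale,
                            FixedArray::kHeaderSize - kHeapObjectTag));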

bool MacroAssembler::IsUnsafeSmi(Smi* value) {
  return false;
}

void MacroAssembler::LoadUnsafeSmi(Register dst, Smi* source) {
  UNIMPLEMENTED();
}

(...skipping 995 matching lines...)
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


} }  // namespace v8::internal
