OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_ARM64_MACRO_ASSEMBLER_ARM64_INL_H_ | 5 #ifndef V8_ARM64_MACRO_ASSEMBLER_ARM64_INL_H_ |
6 #define V8_ARM64_MACRO_ASSEMBLER_ARM64_INL_H_ | 6 #define V8_ARM64_MACRO_ASSEMBLER_ARM64_INL_H_ |
7 | 7 |
8 #include <ctype.h> | 8 #include <ctype.h> |
9 | 9 |
10 #include "src/globals.h" | 10 #include "src/globals.h" |
(...skipping 1290 matching lines...) |
1301 | 1301 |
1302 | 1302 |
1303 void MacroAssembler::InitializeRootRegister() { | 1303 void MacroAssembler::InitializeRootRegister() { |
1304 ExternalReference roots_array_start = | 1304 ExternalReference roots_array_start = |
1305 ExternalReference::roots_array_start(isolate()); | 1305 ExternalReference::roots_array_start(isolate()); |
1306 Mov(root, Operand(roots_array_start)); | 1306 Mov(root, Operand(roots_array_start)); |
1307 } | 1307 } |
1308 | 1308 |
1309 | 1309 |
1310 void MacroAssembler::SmiTag(Register dst, Register src) { | 1310 void MacroAssembler::SmiTag(Register dst, Register src) { |
| 1311 STATIC_ASSERT(kXRegSizeInBits == |
| 1312 static_cast<unsigned>(kSmiShift + kSmiValueSize)); |
1311 ASSERT(dst.Is64Bits() && src.Is64Bits()); | 1313 ASSERT(dst.Is64Bits() && src.Is64Bits()); |
1312 Lsl(dst, src, kSmiShift); | 1314 Lsl(dst, src, kSmiShift); |
1313 } | 1315 } |
1314 | 1316 |
1315 | 1317 |
1316 void MacroAssembler::SmiTag(Register smi) { SmiTag(smi, smi); } | 1318 void MacroAssembler::SmiTag(Register smi) { SmiTag(smi, smi); } |
1317 | 1319 |
1318 | 1320 |
1319 void MacroAssembler::SmiUntag(Register dst, Register src) { | 1321 void MacroAssembler::SmiUntag(Register dst, Register src) { |
| 1322 STATIC_ASSERT(kXRegSizeInBits == |
| 1323 static_cast<unsigned>(kSmiShift + kSmiValueSize)); |
1320 ASSERT(dst.Is64Bits() && src.Is64Bits()); | 1324 ASSERT(dst.Is64Bits() && src.Is64Bits()); |
1321 if (FLAG_enable_slow_asserts) { | 1325 if (FLAG_enable_slow_asserts) { |
1322 AssertSmi(src); | 1326 AssertSmi(src); |
1323 } | 1327 } |
1324 Asr(dst, src, kSmiShift); | 1328 Asr(dst, src, kSmiShift); |
1325 } | 1329 } |
1326 | 1330 |
1327 | 1331 |
1328 void MacroAssembler::SmiUntag(Register smi) { SmiUntag(smi, smi); } | 1332 void MacroAssembler::SmiUntag(Register smi) { SmiUntag(smi, smi); } |
1329 | 1333 |
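Note on the new STATIC_ASSERTs: they tie the smi layout to the X register width (shift amount plus payload width must equal 64 bits). As a minimal scalar sketch of what the Lsl/Asr by kSmiShift do to the bits (not V8 code; it assumes kSmiShift == 32 and kSmiTag == 0, as on this port):

// Hypothetical illustration only. Tagging places the 32-bit payload in the
// upper word and leaves the low word (the tag bits) all zero; untagging is
// an arithmetic right shift that recovers the signed payload.
#include <cassert>
#include <cstdint>

constexpr int kSmiShiftSketch = 32;  // assumed value of kSmiShift
constexpr int kSmiTagSketch = 0;     // assumed value of kSmiTag

uint64_t SmiTagScalar(int32_t value) {
  return static_cast<uint64_t>(static_cast<uint32_t>(value)) << kSmiShiftSketch;
}

int32_t SmiUntagScalar(uint64_t tagged) {
  return static_cast<int32_t>(static_cast<int64_t>(tagged) >> kSmiShiftSketch);
}

int main() {
  assert(SmiUntagScalar(SmiTagScalar(-42)) == -42);          // round-trip is lossless
  assert((SmiTagScalar(7) & 0xffffffffu) == kSmiTagSketch);  // low word holds only tag bits
  return 0;
}

The round-trip in main() is exactly the invariant the asserts protect.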
(...skipping 14 matching lines...) |
1344 UntagMode mode) { | 1348 UntagMode mode) { |
1345 ASSERT(dst.Is32Bits() && src.Is64Bits()); | 1349 ASSERT(dst.Is32Bits() && src.Is64Bits()); |
1346 if (FLAG_enable_slow_asserts && (mode == kNotSpeculativeUntag)) { | 1350 if (FLAG_enable_slow_asserts && (mode == kNotSpeculativeUntag)) { |
1347 AssertSmi(src); | 1351 AssertSmi(src); |
1348 } | 1352 } |
1349 Scvtf(dst, src, kSmiShift); | 1353 Scvtf(dst, src, kSmiShift); |
1350 } | 1354 } |
1351 | 1355 |
1352 | 1356 |
1353 void MacroAssembler::SmiTagAndPush(Register src) { | 1357 void MacroAssembler::SmiTagAndPush(Register src) { |
1354 STATIC_ASSERT((kSmiShift == 32) && (kSmiTag == 0)); | 1358 STATIC_ASSERT((static_cast<unsigned>(kSmiShift) == kWRegSizeInBits) && |
| 1359 (static_cast<unsigned>(kSmiValueSize) == kWRegSizeInBits) && |
| 1360 (kSmiTag == 0)); |
1355 Push(src.W(), wzr); | 1361 Push(src.W(), wzr); |
1356 } | 1362 } |
1357 | 1363 |
1358 | 1364 |
1359 void MacroAssembler::SmiTagAndPush(Register src1, Register src2) { | 1365 void MacroAssembler::SmiTagAndPush(Register src1, Register src2) { |
1360 STATIC_ASSERT((kSmiShift == 32) && (kSmiTag == 0)); | 1366 STATIC_ASSERT((static_cast<unsigned>(kSmiShift) == kWRegSizeInBits) && |
| 1367 (static_cast<unsigned>(kSmiValueSize) == kWRegSizeInBits) && |
| 1368 (kSmiTag == 0)); |
1361 Push(src1.W(), wzr, src2.W(), wzr); | 1369 Push(src1.W(), wzr, src2.W(), wzr); |
1362 } | 1370 } |
1363 | 1371 |
1364 | 1372 |
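The reworked asserts above make the SmiTagAndPush trick explicit: the helper can skip the Lsl because pairing the W payload with wzr writes the same 64-bit pattern into the stack slot that SmiTag followed by Push would. A standalone sketch of that layout (not V8 code; it assumes a little-endian stack slot, a 32-bit smi shift, and a zero tag):

// Hypothetical illustration only: zeros in the low half of an 8-byte slot
// (the role of wzr) plus the 32-bit payload in the high half (the role of
// src.W()) reads back as the tagged smi value << 32.
#include <cassert>
#include <cstdint>
#include <cstring>

int main() {
  const int32_t value = 1234;
  uint8_t slot[8];
  const uint32_t low = 0;                              // tag half, all zero
  const uint32_t high = static_cast<uint32_t>(value);  // payload half
  std::memcpy(slot, &low, 4);       // lower addresses: tag bits
  std::memcpy(slot + 4, &high, 4);  // upper addresses: smi payload
  uint64_t tagged;
  std::memcpy(&tagged, slot, 8);    // little-endian read of the whole slot
  assert(tagged == (static_cast<uint64_t>(static_cast<uint32_t>(value)) << 32));
  return 0;
}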
1365 void MacroAssembler::JumpIfSmi(Register value, | 1373 void MacroAssembler::JumpIfSmi(Register value, |
1366 Label* smi_label, | 1374 Label* smi_label, |
1367 Label* not_smi_label) { | 1375 Label* not_smi_label) { |
1368 STATIC_ASSERT((kSmiTagSize == 1) && (kSmiTag == 0)); | 1376 STATIC_ASSERT((kSmiTagSize == 1) && (kSmiTag == 0)); |
1369 // Check if the tag bit is set. | 1377 // Check if the tag bit is set. |
1370 if (smi_label) { | 1378 if (smi_label) { |
(...skipping 330 matching lines...) |
1701 // characters are reserved for controlling features of the instrumentation. | 1709 // characters are reserved for controlling features of the instrumentation. |
1702 ASSERT(isprint(marker_name[0]) && isprint(marker_name[1])); | 1710 ASSERT(isprint(marker_name[0]) && isprint(marker_name[1])); |
1703 | 1711 |
1704 InstructionAccurateScope scope(this, 1); | 1712 InstructionAccurateScope scope(this, 1); |
1705 movn(xzr, (marker_name[1] << 8) | marker_name[0]); | 1713 movn(xzr, (marker_name[1] << 8) | marker_name[0]); |
1706 } | 1714 } |
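For context on the movn marker above: the two printable characters are packed into the low 16 bits of the immediate, first character in the low byte, second in the next byte. A small standalone sketch of that packing (not V8 code):

// Hypothetical illustration only, mirroring (marker_name[1] << 8) | marker_name[0].
#include <cassert>
#include <cctype>
#include <cstdint>

uint16_t PackMarker(const char* marker_name) {
  assert(isprint(marker_name[0]) && isprint(marker_name[1]));
  return static_cast<uint16_t>((marker_name[1] << 8) | marker_name[0]);
}

int main() {
  assert(PackMarker("AB") == 0x4241);  // 'B' (0x42) in the high byte, 'A' (0x41) in the low byte
  return 0;
}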
1707 | 1715 |
1708 } } // namespace v8::internal | 1716 } } // namespace v8::internal |
1709 | 1717 |
1710 #endif // V8_ARM64_MACRO_ASSEMBLER_ARM64_INL_H_ | 1718 #endif // V8_ARM64_MACRO_ASSEMBLER_ARM64_INL_H_ |