Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(1264)

Side by Side Diff: src/x64/macro-assembler-x64.cc

Issue 236723004: Update CheckBothSmi, CheckInteger32ValidSmiValue, CheckUInteger32ValidSmiValue, SmiAddConstant, Smi… (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 6 years, 8 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « no previous file | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show/Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 1317 matching lines...) Expand 10 before | Expand all | Expand 10 after
1328 testb(kScratchRegister, Immediate(3)); 1328 testb(kScratchRegister, Immediate(3));
1329 return zero; 1329 return zero;
1330 } 1330 }
1331 1331
1332 1332
1333 Condition MacroAssembler::CheckBothSmi(Register first, Register second) { 1333 Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
1334 if (first.is(second)) { 1334 if (first.is(second)) {
1335 return CheckSmi(first); 1335 return CheckSmi(first);
1336 } 1336 }
1337 STATIC_ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3); 1337 STATIC_ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3);
1338 leal(kScratchRegister, Operand(first, second, times_1, 0)); 1338 if (SmiValuesAre32Bits()) {
1339 testb(kScratchRegister, Immediate(0x03)); 1339 leal(kScratchRegister, Operand(first, second, times_1, 0));
1340 testb(kScratchRegister, Immediate(0x03));
1341 } else {
1342 ASSERT(SmiValuesAre31Bits());
1343 movl(kScratchRegister, first);
1344 orl(kScratchRegister, second);
1345 testb(kScratchRegister, Immediate(kSmiTagMask));
1346 }
1340 return zero; 1347 return zero;
1341 } 1348 }
1342 1349
1343 1350
1344 Condition MacroAssembler::CheckBothNonNegativeSmi(Register first, 1351 Condition MacroAssembler::CheckBothNonNegativeSmi(Register first,
1345 Register second) { 1352 Register second) {
1346 if (first.is(second)) { 1353 if (first.is(second)) {
1347 return CheckNonNegativeSmi(first); 1354 return CheckNonNegativeSmi(first);
1348 } 1355 }
1349 movp(kScratchRegister, first); 1356 movp(kScratchRegister, first);
(...skipping 25 matching lines...) Expand all
1375 1382
1376 Condition MacroAssembler::CheckIsMinSmi(Register src) { 1383 Condition MacroAssembler::CheckIsMinSmi(Register src) {
1377 ASSERT(!src.is(kScratchRegister)); 1384 ASSERT(!src.is(kScratchRegister));
1378 // If we overflow by subtracting one, it's the minimal smi value. 1385 // If we overflow by subtracting one, it's the minimal smi value.
1379 cmpp(src, kSmiConstantRegister); 1386 cmpp(src, kSmiConstantRegister);
1380 return overflow; 1387 return overflow;
1381 } 1388 }
1382 1389
1383 1390
1384 Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) { 1391 Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {
1385 // A 32-bit integer value can always be converted to a smi. 1392 if (SmiValuesAre32Bits()) {
1386 return always; 1393 // A 32-bit integer value can always be converted to a smi.
1394 return always;
1395 } else {
1396 ASSERT(SmiValuesAre31Bits());
1397 cmpl(src, Immediate(0xc0000000));
1398 return positive;
1399 }
1387 } 1400 }
1388 1401
1389 1402
1390 Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) { 1403 Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {
1391 // An unsigned 32-bit integer value is valid as long as the high bit 1404 if (SmiValuesAre32Bits()) {
1392 // is not set. 1405 // An unsigned 32-bit integer value is valid as long as the high bit
1393 testl(src, src); 1406 // is not set.
1394 return positive; 1407 testl(src, src);
1408 return positive;
1409 } else {
1410 ASSERT(SmiValuesAre31Bits());
1411 testl(src, Immediate(0xc0000000));
1412 return zero;
1413 }
1395 } 1414 }
1396 1415
1397 1416
1398 void MacroAssembler::CheckSmiToIndicator(Register dst, Register src) { 1417 void MacroAssembler::CheckSmiToIndicator(Register dst, Register src) {
1399 if (dst.is(src)) { 1418 if (dst.is(src)) {
1400 andl(dst, Immediate(kSmiTagMask)); 1419 andl(dst, Immediate(kSmiTagMask));
1401 } else { 1420 } else {
1402 movl(dst, Immediate(kSmiTagMask)); 1421 movl(dst, Immediate(kSmiTagMask));
1403 andl(dst, src); 1422 andl(dst, src);
1404 } 1423 }
(...skipping 122 matching lines...) Expand 10 before | Expand all | Expand 10 after
1527 LoadSmiConstant(dst, constant); 1546 LoadSmiConstant(dst, constant);
1528 addp(dst, src); 1547 addp(dst, src);
1529 return; 1548 return;
1530 } 1549 }
1531 } 1550 }
1532 } 1551 }
1533 1552
1534 1553
1535 void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) { 1554 void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) {
1536 if (constant->value() != 0) { 1555 if (constant->value() != 0) {
1537 addl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(constant->value())); 1556 if (SmiValuesAre32Bits()) {
1557 addl(Operand(dst, kSmiShift / kBitsPerByte),
1558 Immediate(constant->value()));
1559 } else {
1560 ASSERT(SmiValuesAre31Bits());
1561 addp(dst, Immediate(constant));
1562 }
1538 } 1563 }
1539 } 1564 }
1540 1565
1541 1566
1542 void MacroAssembler::SmiAddConstant(Register dst, 1567 void MacroAssembler::SmiAddConstant(Register dst,
1543 Register src, 1568 Register src,
1544 Smi* constant, 1569 Smi* constant,
1545 SmiOperationExecutionMode mode, 1570 SmiOperationExecutionMode mode,
1546 Label* bailout_label, 1571 Label* bailout_label,
1547 Label::Distance near_jump) { 1572 Label::Distance near_jump) {
(...skipping 437 matching lines...) Expand 10 before | Expand all | Expand 10 after
1985 testp(src1, src1); 2010 testp(src1, src1);
1986 j(negative, on_not_smi_result, near_jump); 2011 j(negative, on_not_smi_result, near_jump);
1987 bind(&smi_result); 2012 bind(&smi_result);
1988 Integer32ToSmi(dst, rdx); 2013 Integer32ToSmi(dst, rdx);
1989 } 2014 }
1990 2015
1991 2016
1992 void MacroAssembler::SmiNot(Register dst, Register src) { 2017 void MacroAssembler::SmiNot(Register dst, Register src) {
1993 ASSERT(!dst.is(kScratchRegister)); 2018 ASSERT(!dst.is(kScratchRegister));
1994 ASSERT(!src.is(kScratchRegister)); 2019 ASSERT(!src.is(kScratchRegister));
1995 // Set tag and padding bits before negating, so that they are zero afterwards. 2020 if (SmiValuesAre32Bits()) {
1996 movl(kScratchRegister, Immediate(~0)); 2021 // Set tag and padding bits before negating, so that they are zero
2022 // afterwards.
2023 movl(kScratchRegister, Immediate(~0));
2024 } else {
2025 ASSERT(SmiValuesAre31Bits());
2026 movl(kScratchRegister, Immediate(1));
2027 }
1997 if (dst.is(src)) { 2028 if (dst.is(src)) {
1998 xorp(dst, kScratchRegister); 2029 xorp(dst, kScratchRegister);
1999 } else { 2030 } else {
2000 leap(dst, Operand(src, kScratchRegister, times_1, 0)); 2031 leap(dst, Operand(src, kScratchRegister, times_1, 0));
2001 } 2032 }
2002 notp(dst); 2033 notp(dst);
2003 } 2034 }
2004 2035
2005 2036
2006 void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) { 2037 void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
(...skipping 253 matching lines...) Expand 10 before | Expand all | Expand 10 after
2260 if (shift < kSmiShift) { 2291 if (shift < kSmiShift) {
2261 sarq(dst, Immediate(kSmiShift - shift)); 2292 sarq(dst, Immediate(kSmiShift - shift));
2262 } else { 2293 } else {
2263 shlq(dst, Immediate(shift - kSmiShift)); 2294 shlq(dst, Immediate(shift - kSmiShift));
2264 } 2295 }
2265 return SmiIndex(dst, times_1); 2296 return SmiIndex(dst, times_1);
2266 } 2297 }
2267 2298
2268 2299
2269 void MacroAssembler::AddSmiField(Register dst, const Operand& src) { 2300 void MacroAssembler::AddSmiField(Register dst, const Operand& src) {
2270 ASSERT_EQ(0, kSmiShift % kBitsPerByte); 2301 if (SmiValuesAre32Bits()) {
2271 addl(dst, Operand(src, kSmiShift / kBitsPerByte)); 2302 ASSERT_EQ(0, kSmiShift % kBitsPerByte);
2303 addl(dst, Operand(src, kSmiShift / kBitsPerByte));
2304 } else {
2305 ASSERT(SmiValuesAre31Bits());
2306 SmiToInteger32(kScratchRegister, src);
2307 addl(dst, kScratchRegister);
2308 }
2272 } 2309 }
2273 2310
2274 2311
2275 void MacroAssembler::Push(Smi* source) { 2312 void MacroAssembler::Push(Smi* source) {
2276 intptr_t smi = reinterpret_cast<intptr_t>(source); 2313 intptr_t smi = reinterpret_cast<intptr_t>(source);
2277 if (is_int32(smi)) { 2314 if (is_int32(smi)) {
2278 Push(Immediate(static_cast<int32_t>(smi))); 2315 Push(Immediate(static_cast<int32_t>(smi)));
2279 } else { 2316 } else {
2280 Register constant = GetSmiConstant(source); 2317 Register constant = GetSmiConstant(source);
2281 Push(constant); 2318 Push(constant);
(...skipping 21 matching lines...) Expand all
2303 shrp(scratch, Immediate(kSmiShift)); 2340 shrp(scratch, Immediate(kSmiShift));
2304 Pop(dst); 2341 Pop(dst);
2305 shrp(dst, Immediate(kSmiShift)); 2342 shrp(dst, Immediate(kSmiShift));
2306 // High bits. 2343 // High bits.
2307 shlp(dst, Immediate(kPointerSize * kBitsPerByte - kSmiShift)); 2344 shlp(dst, Immediate(kPointerSize * kBitsPerByte - kSmiShift));
2308 orp(dst, scratch); 2345 orp(dst, scratch);
2309 } 2346 }
2310 2347
2311 2348
2312 void MacroAssembler::Test(const Operand& src, Smi* source) { 2349 void MacroAssembler::Test(const Operand& src, Smi* source) {
2313 testl(Operand(src, kIntSize), Immediate(source->value())); 2350 if (SmiValuesAre32Bits()) {
2351 testl(Operand(src, kIntSize), Immediate(source->value()));
2352 } else {
2353 ASSERT(SmiValuesAre31Bits());
2354 testl(src, Immediate(source));
2355 }
2314 } 2356 }
2315 2357
2316 2358
2317 // ---------------------------------------------------------------------------- 2359 // ----------------------------------------------------------------------------
2318 2360
2319 2361
2320 void MacroAssembler::LookupNumberStringCache(Register object, 2362 void MacroAssembler::LookupNumberStringCache(Register object,
2321 Register result, 2363 Register result,
2322 Register scratch1, 2364 Register scratch1,
2323 Register scratch2, 2365 Register scratch2,
(...skipping 2831 matching lines...) Expand 10 before | Expand all | Expand 10 after
5155 if (ms.shift() > 0) sarl(rdx, Immediate(ms.shift())); 5197 if (ms.shift() > 0) sarl(rdx, Immediate(ms.shift()));
5156 movl(rax, dividend); 5198 movl(rax, dividend);
5157 shrl(rax, Immediate(31)); 5199 shrl(rax, Immediate(31));
5158 addl(rdx, rax); 5200 addl(rdx, rax);
5159 } 5201 }
5160 5202
5161 5203
5162 } } // namespace v8::internal 5204 } } // namespace v8::internal
5163 5205
5164 #endif // V8_TARGET_ARCH_X64 5206 #endif // V8_TARGET_ARCH_X64
OLDNEW
« no previous file with comments | « no previous file | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698