OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1429 matching lines...)
1440 | 1440 |
1441 void MacroAssembler::JumpUnlessBothNonNegativeSmi(Register src1, | 1441 void MacroAssembler::JumpUnlessBothNonNegativeSmi(Register src1, |
1442 Register src2, | 1442 Register src2, |
1443 Label* on_not_both_smi, | 1443 Label* on_not_both_smi, |
1444 Label::Distance near_jump) { | 1444 Label::Distance near_jump) { |
1445 Condition both_smi = CheckBothNonNegativeSmi(src1, src2); | 1445 Condition both_smi = CheckBothNonNegativeSmi(src1, src2); |
1446 j(NegateCondition(both_smi), on_not_both_smi, near_jump); | 1446 j(NegateCondition(both_smi), on_not_both_smi, near_jump); |
1447 } | 1447 } |
1448 | 1448 |
1449 | 1449 |
1450 void MacroAssembler::SmiTryAddConstant(Register dst, | |
1451 Register src, | |
1452 Smi* constant, | |
1453 Label* on_not_smi_result, | |
1454 Label::Distance near_jump) { | |
1455 // Does not assume that src is a smi. | |
1456 ASSERT_EQ(static_cast<int>(1), static_cast<int>(kSmiTagMask)); | |
1457 STATIC_ASSERT(kSmiTag == 0); | |
1458 ASSERT(!dst.is(kScratchRegister)); | |
1459 ASSERT(!src.is(kScratchRegister)); | |
1460 | |
1461 JumpIfNotSmi(src, on_not_smi_result, near_jump); | |
1462 Register tmp = (dst.is(src) ? kScratchRegister : dst); | |
1463 LoadSmiConstant(tmp, constant); | |
1464 addq(tmp, src); | |
1465 j(overflow, on_not_smi_result, near_jump); | |
1466 if (dst.is(src)) { | |
1467 movq(dst, tmp); | |
1468 } | |
1469 } | |
1470 | |
1471 | |
1472 void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) { | 1450 void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) { |
1473 if (constant->value() == 0) { | 1451 if (constant->value() == 0) { |
1474 if (!dst.is(src)) { | 1452 if (!dst.is(src)) { |
1475 movq(dst, src); | 1453 movq(dst, src); |
1476 } | 1454 } |
1477 return; | 1455 return; |
1478 } else if (dst.is(src)) { | 1456 } else if (dst.is(src)) { |
1479 ASSERT(!dst.is(kScratchRegister)); | 1457 ASSERT(!dst.is(kScratchRegister)); |
1480 switch (constant->value()) { | 1458 switch (constant->value()) { |
1481 case 1: | 1459 case 1: |
(...skipping 3445 matching lines...)
4927 j(greater, &no_memento_available); | 4905 j(greater, &no_memento_available); |
4928 CompareRoot(MemOperand(scratch_reg, -AllocationMemento::kSize), | 4906 CompareRoot(MemOperand(scratch_reg, -AllocationMemento::kSize), |
4929 Heap::kAllocationMementoMapRootIndex); | 4907 Heap::kAllocationMementoMapRootIndex); |
4930 bind(&no_memento_available); | 4908 bind(&no_memento_available); |
4931 } | 4909 } |
4932 | 4910 |
4933 | 4911 |
4934 } } // namespace v8::internal | 4912 } } // namespace v8::internal |
4935 | 4913 |
4936 #endif // V8_TARGET_ARCH_X64 | 4914 #endif // V8_TARGET_ARCH_X64 |