Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 1546 matching lines...) | |
| 1557 DoubleRegister reg = ToDoubleRegister(instr->InputAt(0)); | 1557 DoubleRegister reg = ToDoubleRegister(instr->InputAt(0)); |
| 1558 Register scratch = scratch0(); | 1558 Register scratch = scratch0(); |
| 1559 | 1559 |
| 1560 // Test the double value. Zero and NaN are false. | 1560 // Test the double value. Zero and NaN are false. |
| 1561 __ VFPCompareAndLoadFlags(reg, 0.0, scratch); | 1561 __ VFPCompareAndLoadFlags(reg, 0.0, scratch); |
| 1562 __ tst(scratch, Operand(kVFPZConditionFlagBit | kVFPVConditionFlagBit)); | 1562 __ tst(scratch, Operand(kVFPZConditionFlagBit | kVFPVConditionFlagBit)); |
| 1563 EmitBranch(true_block, false_block, eq); | 1563 EmitBranch(true_block, false_block, eq); |
| 1564 } else { | 1564 } else { |
| 1565 ASSERT(r.IsTagged()); | 1565 ASSERT(r.IsTagged()); |
| 1566 Register reg = ToRegister(instr->InputAt(0)); | 1566 Register reg = ToRegister(instr->InputAt(0)); |
| 1567 if (instr->hydrogen()->value()->type().IsBoolean()) { | 1567 HType type = instr->hydrogen()->value()->type(); |
| 1568 if (type.IsBoolean()) { | |
| 1568 __ LoadRoot(ip, Heap::kTrueValueRootIndex); | 1569 __ LoadRoot(ip, Heap::kTrueValueRootIndex); |
| 1569 __ cmp(reg, ip); | 1570 __ cmp(reg, ip); |
| 1570 EmitBranch(true_block, false_block, eq); | 1571 EmitBranch(true_block, false_block, eq); |
| 1572 } else if (type.IsSmi()) { | |
| 1573 __ cmp(reg, Operand(0)); | |
| 1574 EmitBranch(true_block, false_block, ne); | |
| 1571 } else { | 1575 } else { |
| 1572 Label* true_label = chunk_->GetAssemblyLabel(true_block); | 1576 Label* true_label = chunk_->GetAssemblyLabel(true_block); |
| 1573 Label* false_label = chunk_->GetAssemblyLabel(false_block); | 1577 Label* false_label = chunk_->GetAssemblyLabel(false_block); |
| 1574 | 1578 |
| 1575 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); | 1579 ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types(); |
| 1576 __ cmp(reg, ip); | 1580 // Avoid deopts in the case where we've never executed this path before. |
| 1577 __ b(eq, false_label); | 1581 if (expected.IsEmpty()) expected = ToBooleanStub::all_types(); |
| 1578 __ LoadRoot(ip, Heap::kTrueValueRootIndex); | |
| 1579 __ cmp(reg, ip); | |
| 1580 __ b(eq, true_label); | |
| 1581 __ LoadRoot(ip, Heap::kFalseValueRootIndex); | |
| 1582 __ cmp(reg, ip); | |
| 1583 __ b(eq, false_label); | |
| 1584 __ cmp(reg, Operand(0)); | |
| 1585 __ b(eq, false_label); | |
| 1586 __ JumpIfSmi(reg, true_label); | |
| 1587 | 1582 |
| 1588 // Test double values. Zero and NaN are false. | 1583 if (expected.Contains(ToBooleanStub::UNDEFINED)) { |
| 1589 Label call_stub; | 1584 // undefined -> false. |
| 1590 DoubleRegister dbl_scratch = double_scratch0(); | 1585 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); |
| 1591 Register scratch = scratch0(); | 1586 __ cmp(reg, ip); |
| 1592 __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset)); | 1587 __ b(eq, false_label); |
| 1593 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex); | 1588 } else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) { |
| 1594 __ cmp(scratch, Operand(ip)); | 1589 // We've seen undefined for the first time -> deopt. |
| | Erik Corry, 2011/08/05 12:50:46: We should just bail out if we have seen an interna… |
| | Sven Panne, 2011/08/09 07:58:21: See other comment regarding internal objects. |
| 1595 __ b(ne, &call_stub); | 1590 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); |
| 1596 __ sub(ip, reg, Operand(kHeapObjectTag)); | 1591 DeoptimizeIf(eq, instr->environment()); |
| 1597 __ vldr(dbl_scratch, ip, HeapNumber::kValueOffset); | 1592 } |
| 1598 __ VFPCompareAndLoadFlags(dbl_scratch, 0.0, scratch); | |
| 1599 __ tst(scratch, Operand(kVFPZConditionFlagBit | kVFPVConditionFlagBit)); | |
| 1600 __ b(ne, false_label); | |
| 1601 __ b(true_label); | |
| 1602 | 1593 |
| 1603 // The conversion stub doesn't cause garbage collections so it's | 1594 if (expected.Contains(ToBooleanStub::BOOLEAN)) { |
| | Erik Corry, 2011/08/05 12:50:46: This 'if' and the next can be merged. [a sketch of the merged block follows the diff] |
| | Sven Panne, 2011/08/09 07:58:21: This is a leftover from an attempt to do an "extra… |
| 1604 // safe to not record a safepoint after the call. | 1595 // true -> true. |
| 1605 __ bind(&call_stub); | 1596 __ LoadRoot(ip, Heap::kTrueValueRootIndex); |
| 1606 ToBooleanStub stub(reg); | 1597 __ cmp(reg, ip); |
| 1607 RegList saved_regs = kJSCallerSaved | kCalleeSaved; | 1598 __ b(eq, true_label); |
| 1608 __ stm(db_w, sp, saved_regs); | 1599 } else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) { |
| 1609 __ CallStub(&stub); | 1600 // We've seen a boolean for the first time -> deopt. |
| 1610 __ cmp(reg, Operand(0)); | 1601 __ LoadRoot(ip, Heap::kTrueValueRootIndex); |
| | Erik Corry, 2011/08/05 12:50:46: We have CompareRoot in the macro assembler. [a CompareRoot sketch follows the diff] |
| | Sven Panne, 2011/08/09 07:58:21: ... and I'm even using it below. :-P Using it in o… |
| 1611 __ ldm(ia_w, sp, saved_regs); | 1602 __ cmp(reg, ip); |
| 1612 EmitBranch(true_block, false_block, ne); | 1603 DeoptimizeIf(eq, instr->environment()); |
| 1604 } | |
| 1605 | |
| 1606 if (expected.Contains(ToBooleanStub::BOOLEAN)) { | |
| 1607 // false -> false. | |
| 1608 __ LoadRoot(ip, Heap::kFalseValueRootIndex); | |
| 1609 __ cmp(reg, ip); | |
| 1610 __ b(eq, false_label); | |
| 1611 } else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) { | |
| 1612 // We've seen a boolean for the first time -> deopt. | |
| 1613 __ LoadRoot(ip, Heap::kFalseValueRootIndex); | |
| 1614 __ cmp(reg, ip); | |
| 1615 DeoptimizeIf(eq, instr->environment()); | |
| 1616 } | |
| 1617 | |
| 1618 if (expected.Contains(ToBooleanStub::NULL_TYPE)) { | |
| 1619 // 'null' -> false. | |
| 1620 __ LoadRoot(ip, Heap::kNullValueRootIndex); | |
| 1621 __ cmp(reg, ip); | |
| 1622 __ b(eq, false_label); | |
| 1623 } else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) { | |
| 1624 // We've seen null for the first time -> deopt. | |
| 1625 __ LoadRoot(ip, Heap::kNullValueRootIndex); | |
| 1626 __ cmp(reg, ip); | |
| 1627 DeoptimizeIf(eq, instr->environment()); | |
| 1628 } | |
| 1629 | |
| 1630 if (expected.Contains(ToBooleanStub::SMI)) { | |
| 1631 // Smis: 0 -> false, all other -> true. | |
| 1632 __ cmp(reg, Operand(0)); | |
| 1633 __ b(eq, false_label); | |
| 1634 __ JumpIfSmi(reg, true_label); | |
| 1635 } else if (expected.NeedsMap()) { | |
| 1636 // If we need a map later and have a Smi -> deopt. | |
| 1637 __ tst(reg, Operand(kSmiTagMask)); | |
| 1638 DeoptimizeIf(eq, instr->environment()); | |
| 1639 } | |
| 1640 | |
| 1641 const Register map = scratch0(); | |
| 1642 if (expected.NeedsMap()) { | |
| 1643 __ ldr(map, FieldMemOperand(reg, HeapObject::kMapOffset)); | |
| 1644 // Everything with a map could be undetectable, so check this now. | |
| 1645 __ ldrb(ip, FieldMemOperand(map, Map::kBitFieldOffset)); | |
| 1646 __ tst(ip, Operand(1 << Map::kIsUndetectable)); | |
| 1647 __ b(ne, false_label); | |
| 1648 } | |
| 1649 | |
| 1650 if (expected.Contains(ToBooleanStub::SPEC_OBJECT)) { | |
| 1651 // spec object -> true. | |
| 1652 __ CompareInstanceType(map, ip, FIRST_SPEC_OBJECT_TYPE); | |
| 1653 __ b(ge, true_label); | |
| 1654 } else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) { | |
| 1655 // We've seen a spec object for the first time -> deopt. | |
| 1656 __ CompareInstanceType(map, ip, FIRST_SPEC_OBJECT_TYPE); | |
| 1657 DeoptimizeIf(ge, instr->environment()); | |
| 1658 } | |
| 1659 | |
| 1660 if (expected.Contains(ToBooleanStub::STRING)) { | |
| 1661 // String value -> false iff empty. | |
| 1662 Label not_string; | |
| 1663 __ CompareInstanceType(map, ip, FIRST_NONSTRING_TYPE); | |
| 1664 __ b(ge, ¬_string); | |
| 1665 __ ldr(ip, FieldMemOperand(reg, String::kLengthOffset)); | |
| 1666 __ cmp(ip, Operand(0)); | |
| 1667 __ b(ne, true_label); | |
| 1668 __ b(false_label); | |
| 1669 __ bind(¬_string); | |
| 1670 } else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) { | |
| 1671 // We've seen a string for the first time -> deopt | |
| 1672 __ CompareInstanceType(map, ip, FIRST_NONSTRING_TYPE); | |
| 1673 DeoptimizeIf(lt, instr->environment()); | |
| 1674 } | |
| 1675 | |
| 1676 if (expected.Contains(ToBooleanStub::HEAP_NUMBER)) { | |
| 1677 // heap number -> false iff +0, -0, or NaN. | |
| 1678 DoubleRegister dbl_scratch = double_scratch0(); | |
| 1679 Label not_heap_number; | |
| 1680 __ CompareRoot(map, Heap::kHeapNumberMapRootIndex); | |
| 1681 __ b(ne, ¬_heap_number); | |
| 1682 __ vldr(dbl_scratch, FieldMemOperand(reg, HeapNumber::kValueOffset)); | |
| 1683 __ VFPCompareAndSetFlags(dbl_scratch, 0.0); | |
| 1684 __ b(vs, false_label); // NaN -> false. | |
| 1685 __ b(eq, false_label); // +0, -0 -> false. | |
| 1686 __ b(true_label); | |
| 1687 __ bind(¬_heap_number); | |
| 1688 } else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) { | |
| 1689 // We've seen a heap number for the first time -> deopt. | |
| 1690 __ CompareRoot(map, Heap::kHeapNumberMapRootIndex); | |
| 1691 DeoptimizeIf(eq, instr->environment()); | |
| 1692 } | |
| 1693 | |
| 1694 if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) { | |
| 1695 // internal objects -> true | |
| 1696 __ b(true_label); | |
| 1697 } else { | |
| 1698 // We've seen something for the first time -> deopt. | |
| 1699 DeoptimizeIf(al, instr->environment()); | |
| 1700 } | |
| 1613 } | 1701 } |
| 1614 } | 1702 } |
| 1615 } | 1703 } |
| 1616 | 1704 |
| 1617 | 1705 |
| 1618 void LCodeGen::EmitGoto(int block) { | 1706 void LCodeGen::EmitGoto(int block) { |
| 1619 block = chunk_->LookupDestination(block); | 1707 block = chunk_->LookupDestination(block); |
| 1620 int next_block = GetNextEmittedBlock(current_block_); | 1708 int next_block = GetNextEmittedBlock(current_block_); |
| 1621 if (block != next_block) { | 1709 if (block != next_block) { |
| 1622 __ jmp(chunk_->GetAssemblyLabel(block)); | 1710 __ jmp(chunk_->GetAssemblyLabel(block)); |
| (...skipping 2862 matching lines...) | |
| 4485 ASSERT(osr_pc_offset_ == -1); | 4573 ASSERT(osr_pc_offset_ == -1); |
| 4486 osr_pc_offset_ = masm()->pc_offset(); | 4574 osr_pc_offset_ = masm()->pc_offset(); |
| 4487 } | 4575 } |
| 4488 | 4576 |
| 4489 | 4577 |
| 4490 | 4578 |
| 4491 | 4579 |
| 4492 #undef __ | 4580 #undef __ |
| 4493 | 4581 |
| 4494 } } // namespace v8::internal | 4582 } } // namespace v8::internal |
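
On Erik Corry's comment that this 'if' and the next can be merged: a minimal sketch of the folded block, keeping the LoadRoot/cmp idiom of the patch. This only illustrates the suggestion and is not the code that landed in the CL; `reg`, `true_label`, `false_label`, `expected`, and `instr` are the locals of the branch code in the new column above.

```cpp
// Sketch only: the two expected.Contains(ToBooleanStub::BOOLEAN) blocks
// (new lines 1594-1616) merged into a single check, as suggested in review.
if (expected.Contains(ToBooleanStub::BOOLEAN)) {
  // true -> true, false -> false.
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ cmp(reg, ip);
  __ b(eq, true_label);
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
  __ cmp(reg, ip);
  __ b(eq, false_label);
} else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) {
  // We've seen a boolean for the first time -> deopt.
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ cmp(reg, ip);
  DeoptimizeIf(eq, instr->environment());
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
  __ cmp(reg, ip);
  DeoptimizeIf(eq, instr->environment());
}
```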
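
On the CompareRoot remark: the macro assembler helper is already used in this patch for the heap-number map check (new line 1680), and it folds the LoadRoot-into-ip plus cmp pair into one call. Below is a sketch, not the committed code, of the undefined/boolean/null comparisons written with it; the matching INTERNAL_OBJECT deopt branches are elided for brevity.

```cpp
// Sketch only: root comparisons via CompareRoot instead of LoadRoot + cmp.
if (expected.Contains(ToBooleanStub::UNDEFINED)) {
  // undefined -> false.
  __ CompareRoot(reg, Heap::kUndefinedValueRootIndex);
  __ b(eq, false_label);
}
if (expected.Contains(ToBooleanStub::BOOLEAN)) {
  // true -> true, false -> false.
  __ CompareRoot(reg, Heap::kTrueValueRootIndex);
  __ b(eq, true_label);
  __ CompareRoot(reg, Heap::kFalseValueRootIndex);
  __ b(eq, false_label);
}
if (expected.Contains(ToBooleanStub::NULL_TYPE)) {
  // 'null' -> false.
  __ CompareRoot(reg, Heap::kNullValueRootIndex);
  __ b(eq, false_label);
}
```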