OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1600 matching lines...)
1611 return true; | 1611 return true; |
1612 } | 1612 } |
1613 | 1613 |
1614 return false; | 1614 return false; |
1615 } | 1615 } |
1616 | 1616 |
1617 | 1617 |
1618 void BackEdgeTable::Patch(Isolate* isolate, | 1618 void BackEdgeTable::Patch(Isolate* isolate, |
1619 Code* unoptimized) { | 1619 Code* unoptimized) { |
1620 DisallowHeapAllocation no_gc; | 1620 DisallowHeapAllocation no_gc; |
1621 Code* replacement_code = | 1621 Code* patch = isolate->builtins()->builtin(Builtins::kOnStackReplacement); |
1622 isolate->builtins()->builtin(Builtins::kOnStackReplacement); | |
1623 | 1622 |
1624 // Iterate over the back edge table and patch every interrupt | 1623 // Iterate over the back edge table and patch every interrupt |
1625 // call to an unconditional call to the replacement code. | 1624 // call to an unconditional call to the replacement code. |
1626 int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level(); | 1625 int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level(); |
1627 | 1626 |
1628 BackEdgeTable back_edges(unoptimized, &no_gc); | 1627 BackEdgeTable back_edges(unoptimized, &no_gc); |
1629 for (uint32_t i = 0; i < back_edges.length(); i++) { | 1628 for (uint32_t i = 0; i < back_edges.length(); i++) { |
1630 if (static_cast<int>(back_edges.loop_depth(i)) == loop_nesting_level) { | 1629 if (static_cast<int>(back_edges.loop_depth(i)) == loop_nesting_level) { |
1631 ASSERT_EQ(INTERRUPT, GetBackEdgeState(isolate, | 1630 ASSERT_EQ(INTERRUPT, GetBackEdgeState(isolate, |
1632 unoptimized, | 1631 unoptimized, |
1633 back_edges.pc(i))); | 1632 back_edges.pc(i))); |
1634 PatchAt(unoptimized, back_edges.pc(i), replacement_code); | 1633 PatchAt(unoptimized, back_edges.pc(i), ON_STACK_REPLACEMENT, patch); |
1635 } | 1634 } |
1636 } | 1635 } |
1637 | 1636 |
1638 unoptimized->set_back_edges_patched_for_osr(true); | 1637 unoptimized->set_back_edges_patched_for_osr(true); |
1639 ASSERT(Verify(isolate, unoptimized, loop_nesting_level)); | 1638 ASSERT(Verify(isolate, unoptimized, loop_nesting_level)); |
1640 } | 1639 } |
1641 | 1640 |
1642 | 1641 |
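The shape change in Patch() is that the old PatchAt(code, pc, replacement) / RevertAt(code, pc, interrupt) pair collapses into a single PatchAt that names the target state explicitly. Reading off only the call sites in this CL, the states pair with builtins as follows (a sketch: the enumerator names come from the diff, but their order and values here are illustrative, not taken from the real header):

// Back edge states and the builtin each call site in this CL installs
// for them. Names are from the diff; ordering/values are illustrative.
enum BackEdgeState {
  INTERRUPT,             // Builtins::kInterruptCheck (Revert)
  ON_STACK_REPLACEMENT,  // Builtins::kOnStackReplacement (Patch, RemoveStackCheck)
  OSR_AFTER_STACK_CHECK  // Builtins::kOsrAfterStackCheck (AddStackCheck)
};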
1643 void BackEdgeTable::Revert(Isolate* isolate, | 1642 void BackEdgeTable::Revert(Isolate* isolate, |
1644 Code* unoptimized) { | 1643 Code* unoptimized) { |
1645 DisallowHeapAllocation no_gc; | 1644 DisallowHeapAllocation no_gc; |
1646 Code* interrupt_code = | 1645 Code* patch = isolate->builtins()->builtin(Builtins::kInterruptCheck); |
1647 isolate->builtins()->builtin(Builtins::kInterruptCheck); | |
1648 | 1646 |
1649 // Iterate over the back edge table and revert the patched interrupt calls. | 1647 // Iterate over the back edge table and revert the patched interrupt calls. |
1650 ASSERT(unoptimized->back_edges_patched_for_osr()); | 1648 ASSERT(unoptimized->back_edges_patched_for_osr()); |
1651 int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level(); | 1649 int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level(); |
1652 | 1650 |
1653 BackEdgeTable back_edges(unoptimized, &no_gc); | 1651 BackEdgeTable back_edges(unoptimized, &no_gc); |
1654 for (uint32_t i = 0; i < back_edges.length(); i++) { | 1652 for (uint32_t i = 0; i < back_edges.length(); i++) { |
1655 if (static_cast<int>(back_edges.loop_depth(i)) <= loop_nesting_level) { | 1653 if (static_cast<int>(back_edges.loop_depth(i)) <= loop_nesting_level) { |
1656 ASSERT_EQ(ON_STACK_REPLACEMENT, GetBackEdgeState(isolate, | 1654 ASSERT_NE(INTERRUPT, GetBackEdgeState(isolate, |
1657 unoptimized, | 1655 unoptimized, |
1658 back_edges.pc(i))); | 1656 back_edges.pc(i))); |
1659 RevertAt(unoptimized, back_edges.pc(i), interrupt_code); | 1657 PatchAt(unoptimized, back_edges.pc(i), INTERRUPT, patch); |
1660 } | 1658 } |
1661 } | 1659 } |
1662 | 1660 |
1663 unoptimized->set_back_edges_patched_for_osr(false); | 1661 unoptimized->set_back_edges_patched_for_osr(false); |
1664 unoptimized->set_allow_osr_at_loop_nesting_level(0); | 1662 unoptimized->set_allow_osr_at_loop_nesting_level(0); |
1665 // Assert that none of the back edges are patched anymore. | 1663 // Assert that none of the back edges are patched anymore. |
1666 ASSERT(Verify(isolate, unoptimized, -1)); | 1664 ASSERT(Verify(isolate, unoptimized, -1)); |
1667 } | 1665 } |
1668 | 1666 |
1669 | 1667 |
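The assertion in Revert() deliberately loosens from the old ASSERT_EQ(ON_STACK_REPLACEMENT, ...) to ASSERT_NE(INTERRUPT, ...): with the OSR_AFTER_STACK_CHECK state introduced by the functions below, a patched back edge can be in either of two non-INTERRUPT states when it is reverted. The condition the new assert enforces, written out as a standalone predicate (a sketch, assuming the BackEdgeState enum sketched above):

// True iff the back edge has been patched away from its interrupt check,
// i.e. it is in ON_STACK_REPLACEMENT or OSR_AFTER_STACK_CHECK. Revert()
// asserts exactly this before rewriting the site back to INTERRUPT.
inline bool IsPatchedForOsr(BackEdgeState state) {
  return state != INTERRUPT;
}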
| 1668 void BackEdgeTable::AddStackCheck(CompilationInfo* info) { |
| 1669 DisallowHeapAllocation no_gc; |
| 1670 Isolate* isolate = info->isolate(); |
| 1671 Code* code = info->shared_info()->code(); |
| 1672 Address pc = code->instruction_start() + info->osr_pc_offset(); |
| 1673 ASSERT_EQ(ON_STACK_REPLACEMENT, GetBackEdgeState(isolate, code, pc)); |
| 1674 Code* patch = isolate->builtins()->builtin(Builtins::kOsrAfterStackCheck); |
| 1675 PatchAt(code, pc, OSR_AFTER_STACK_CHECK, patch); |
| 1676 } |
| 1677 |
| 1678 |
| 1679 void BackEdgeTable::RemoveStackCheck(CompilationInfo* info) { |
| 1680 DisallowHeapAllocation no_gc; |
| 1681 Isolate* isolate = info->isolate(); |
| 1682 Code* code = info->shared_info()->code(); |
| 1683 Address pc = code->instruction_start() + info->osr_pc_offset(); |
| 1684 if (GetBackEdgeState(isolate, code, pc) == OSR_AFTER_STACK_CHECK) { |
| 1685 Code* patch = isolate->builtins()->builtin(Builtins::kOnStackReplacement); |
| 1686 PatchAt(code, pc, ON_STACK_REPLACEMENT, patch); |
| 1687 } |
| 1688 } |
| 1689 |
| 1690 |
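Taken together, the four functions define a small state machine over each back edge site. Collecting the transitions that the asserts and guards above permit into one predicate (a sketch reusing the BackEdgeState enum from the first snippet; V8 itself encodes this only implicitly in the asserts, not as data):

// Transitions permitted by the asserts/guards in the functions above:
//   Patch:            INTERRUPT             -> ON_STACK_REPLACEMENT
//   Revert:           any non-INTERRUPT     -> INTERRUPT
//   AddStackCheck:    ON_STACK_REPLACEMENT  -> OSR_AFTER_STACK_CHECK
//   RemoveStackCheck: OSR_AFTER_STACK_CHECK -> ON_STACK_REPLACEMENT
inline bool IsAllowedTransition(BackEdgeState from, BackEdgeState to) {
  switch (to) {
    case INTERRUPT:             return from != INTERRUPT;              // Revert
    case ON_STACK_REPLACEMENT:  return from == INTERRUPT ||            // Patch
                                       from == OSR_AFTER_STACK_CHECK;  // RemoveStackCheck
    case OSR_AFTER_STACK_CHECK: return from == ON_STACK_REPLACEMENT;   // AddStackCheck
  }
  return false;
}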
1670 #ifdef DEBUG | 1691 #ifdef DEBUG |
1671 bool BackEdgeTable::Verify(Isolate* isolate, | 1692 bool BackEdgeTable::Verify(Isolate* isolate, |
1672 Code* unoptimized, | 1693 Code* unoptimized, |
1673 int loop_nesting_level) { | 1694 int loop_nesting_level) { |
1674 DisallowHeapAllocation no_gc; | 1695 DisallowHeapAllocation no_gc; |
1675 BackEdgeTable back_edges(unoptimized, &no_gc); | 1696 BackEdgeTable back_edges(unoptimized, &no_gc); |
1676 for (uint32_t i = 0; i < back_edges.length(); i++) { | 1697 for (uint32_t i = 0; i < back_edges.length(); i++) { |
1677 uint32_t loop_depth = back_edges.loop_depth(i); | 1698 uint32_t loop_depth = back_edges.loop_depth(i); |
1678 CHECK_LE(static_cast<int>(loop_depth), Code::kMaxLoopNestingMarker); | 1699 CHECK_LE(static_cast<int>(loop_depth), Code::kMaxLoopNestingMarker); |
1679 // Assert that all back edges for shallower loops (and only those) | 1700 // Assert that all back edges for shallower loops (and only those) |
1680 // have already been patched. | 1701 // have already been patched. |
1681 CHECK_EQ((static_cast<int>(loop_depth) <= loop_nesting_level), | 1702 CHECK_EQ((static_cast<int>(loop_depth) <= loop_nesting_level), |
1682 GetBackEdgeState(isolate, | 1703 GetBackEdgeState(isolate, |
1683 unoptimized, | 1704 unoptimized, |
1684 back_edges.pc(i)) != INTERRUPT); | 1705 back_edges.pc(i)) != INTERRUPT); |
1685 } | 1706 } |
1686 return true; | 1707 return true; |
1687 } | 1708 } |
1688 #endif // DEBUG | 1709 #endif // DEBUG |
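Verify()'s CHECK_EQ compares two booleans: "this back edge's loop is at or below the allowed nesting level" must coincide with "this back edge is not in the INTERRUPT state". Passing -1 as the level, as Revert() does, therefore demands that nothing is patched. A plain restatement of that invariant (a sketch with illustrative types; the real table is packed into the Code object, not a std::vector):

#include <cstdint>
#include <vector>

// Illustrative entry mirroring the accessors Verify() uses.
struct BackEdge {
  uint32_t loop_depth;
  bool patched;  // stands in for GetBackEdgeState(...) != INTERRUPT
};

// The per-entry invariant CHECK_EQ enforces: patched iff shallow enough.
// With level == -1 (as in Revert), this requires every entry unpatched.
bool BackEdgesConsistent(const std::vector<BackEdge>& table, int level) {
  for (const BackEdge& e : table) {
    if ((static_cast<int>(e.loop_depth) <= level) != e.patched) return false;
  }
  return true;
}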
1689 | 1710 |
1690 | 1711 |
1691 #undef __ | 1712 #undef __ |
1692 | 1713 |
1693 | 1714 |
1694 } } // namespace v8::internal | 1715 } } // namespace v8::internal |