OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1597 matching lines...)
1608 | 1608 |
1609 if (expr->IsLiteralCompareNull(&sub_expr)) { | 1609 if (expr->IsLiteralCompareNull(&sub_expr)) { |
1610 EmitLiteralCompareNil(expr, sub_expr, kNullValue); | 1610 EmitLiteralCompareNil(expr, sub_expr, kNullValue); |
1611 return true; | 1611 return true; |
1612 } | 1612 } |
1613 | 1613 |
1614 return false; | 1614 return false; |
1615 } | 1615 } |
1616 | 1616 |
1617 | 1617 |
| 1618 void BackEdgeTable::Patch(Isolate* isolate, |
| 1619 Code* unoptimized) { |
| 1620 DisallowHeapAllocation no_gc; |
| 1621 Code* replacement_code = |
| 1622 isolate->builtins()->builtin(Builtins::kOnStackReplacement); |
| 1623 |
| 1624 // Iterate over the back edge table and patch the interrupt calls |
| 1625 // at the current loop nesting level into unconditional calls to the replacement code. |
| 1626 int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level(); |
| 1627 |
| 1628 BackEdgeTable back_edges(unoptimized, &no_gc); |
| 1629 for (uint32_t i = 0; i < back_edges.length(); i++) { |
| 1630 if (static_cast<int>(back_edges.loop_depth(i)) == loop_nesting_level) { |
| 1631 ASSERT_EQ(INTERRUPT, GetBackEdgeState(isolate, |
| 1632 unoptimized, |
| 1633 back_edges.pc(i))); |
| 1634 PatchAt(unoptimized, back_edges.pc(i), replacement_code); |
| 1635 } |
| 1636 } |
| 1637 |
| 1638 unoptimized->set_back_edges_patched_for_osr(true); |
| 1639 ASSERT(Verify(isolate, unoptimized, loop_nesting_level)); |
| 1640 } |
| 1641 |
| 1642 |
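Note the asymmetry between the loops: Patch() rewrites only the back edges at exactly allow_osr_at_loop_nesting_level, while the DEBUG-only Verify() further down expects every edge at depth <= that level to already be patched. That invariant only holds if callers raise the level one step at a time and call Patch() after each step. A minimal sketch of such a caller, assuming the set_allow_osr_at_loop_nesting_level() setter used in Revert() below is how the level is raised (the function name and ratcheting policy here are illustrative, not this CL's actual call site):

  // Hypothetical driver: raise the OSR nesting level by one loop
  // depth, then patch the back edges that just became eligible.
  static void RatchetOsrPatchLevel(Isolate* isolate, Code* unoptimized) {
    int level = unoptimized->allow_osr_at_loop_nesting_level();
    if (level < Code::kMaxLoopNestingMarker) {
      unoptimized->set_allow_osr_at_loop_nesting_level(level + 1);
      BackEdgeTable::Patch(isolate, unoptimized);
    }
  }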
| 1643 void BackEdgeTable::Revert(Isolate* isolate, |
| 1644 Code* unoptimized) { |
| 1645 DisallowHeapAllocation no_gc; |
| 1646 Code* interrupt_code = |
| 1647 isolate->builtins()->builtin(Builtins::kInterruptCheck); |
| 1648 |
| 1649 // Iterate over the back edge table and revert the patched interrupt calls. |
| 1650 ASSERT(unoptimized->back_edges_patched_for_osr()); |
| 1651 int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level(); |
| 1652 |
| 1653 BackEdgeTable back_edges(unoptimized, &no_gc); |
| 1654 for (uint32_t i = 0; i < back_edges.length(); i++) { |
| 1655 if (static_cast<int>(back_edges.loop_depth(i)) <= loop_nesting_level) { |
| 1656 ASSERT_EQ(ON_STACK_REPLACEMENT, GetBackEdgeState(isolate, |
| 1657 unoptimized, |
| 1658 back_edges.pc(i))); |
| 1659 RevertAt(unoptimized, back_edges.pc(i), interrupt_code); |
| 1660 } |
| 1661 } |
| 1662 |
| 1663 unoptimized->set_back_edges_patched_for_osr(false); |
| 1664 unoptimized->set_allow_osr_at_loop_nesting_level(0); |
| 1665 // Assert that none of the back edges are patched anymore. |
| 1666 ASSERT(Verify(isolate, unoptimized, -1)); |
| 1667 } |
| 1668 |
| 1669 |
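Revert() restores every patched depth in a single pass and clears the level, so a caller abandoning OSR does not need to unwind step by step. A hedged usage sketch (the function name and the trigger are illustrative assumptions; the CL does not show its callers):

  // Hypothetical: undo all back edge patching in one call, e.g. when
  // the function is no longer a candidate for optimization.
  static void AbandonOsrPatching(Isolate* isolate, Code* unoptimized) {
    if (unoptimized->back_edges_patched_for_osr()) {
      BackEdgeTable::Revert(isolate, unoptimized);
    }
  }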
| 1670 #ifdef DEBUG |
| 1671 bool BackEdgeTable::Verify(Isolate* isolate, |
| 1672 Code* unoptimized, |
| 1673 int loop_nesting_level) { |
| 1674 DisallowHeapAllocation no_gc; |
| 1675 BackEdgeTable back_edges(unoptimized, &no_gc); |
| 1676 for (uint32_t i = 0; i < back_edges.length(); i++) { |
| 1677 uint32_t loop_depth = back_edges.loop_depth(i); |
| 1678 CHECK_LE(static_cast<int>(loop_depth), Code::kMaxLoopNestingMarker); |
| 1679 // Assert that all back edges for shallower loops (and only those) |
| 1680 // have already been patched. |
| 1681 CHECK_EQ((static_cast<int>(loop_depth) <= loop_nesting_level), |
| 1682 GetBackEdgeState(isolate, |
| 1683 unoptimized, |
| 1684 back_edges.pc(i)) != INTERRUPT); |
| 1685 } |
| 1686 return true; |
| 1687 } |
| 1688 #endif // DEBUG |
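Because Verify() compiles away outside DEBUG builds, the invariant it checks is easy to miss when reading Patch() and Revert() in isolation. Here is a self-contained toy model of the same state machine (plain C++, no V8 types; the names and the main() scenario are purely illustrative) that can be compiled and run to watch the ratcheting behave:

  #include <cassert>
  #include <cstddef>
  #include <vector>

  enum State { INTERRUPT, ON_STACK_REPLACEMENT };

  struct Table {
    std::vector<int> depth;    // loop depth of each back edge
    std::vector<State> state;  // current patch state of each edge
    int level;                 // allow_osr_at_loop_nesting_level
  };

  // Mirrors BackEdgeTable::Patch(): only edges at exactly the
  // current level are rewritten.
  void Patch(Table* t) {
    for (std::size_t i = 0; i < t->depth.size(); i++) {
      if (t->depth[i] == t->level) t->state[i] = ON_STACK_REPLACEMENT;
    }
  }

  // Mirrors BackEdgeTable::Revert(): every depth up to the current
  // level is restored in one pass, then the level resets to zero.
  void Revert(Table* t) {
    for (std::size_t i = 0; i < t->depth.size(); i++) {
      if (t->depth[i] <= t->level) t->state[i] = INTERRUPT;
    }
    t->level = 0;
  }

  // Mirrors Verify(): exactly the edges at depth <= level, and no
  // others, must have been patched.
  bool Verify(const Table& t, int level) {
    for (std::size_t i = 0; i < t.depth.size(); i++) {
      if ((t.depth[i] <= level) != (t.state[i] != INTERRUPT)) return false;
    }
    return true;
  }

  int main() {
    Table t = {{1, 2, 2, 3}, {INTERRUPT, INTERRUPT, INTERRUPT, INTERRUPT}, 0};
    for (int level = 1; level <= 3; level++) {  // ratchet upward
      t.level = level;
      Patch(&t);
      assert(Verify(t, level));
    }
    Revert(&t);
    assert(Verify(t, -1));  // nothing is patched anymore
    return 0;
  }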
| 1689 |
| 1690 |
1618 #undef __ | 1691 #undef __ |
1619 | 1692 |
1620 | 1693 |
1621 } } // namespace v8::internal | 1694 } } // namespace v8::internal |