OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 294 matching lines...)
305 | 305 |
306 | 306 |
307 void BreakableStatementChecker::VisitThisFunction(ThisFunction* expr) { | 307 void BreakableStatementChecker::VisitThisFunction(ThisFunction* expr) { |
308 } | 308 } |
309 | 309 |
310 | 310 |
311 #define __ ACCESS_MASM(masm()) | 311 #define __ ACCESS_MASM(masm()) |
312 | 312 |
313 bool FullCodeGenerator::MakeCode(CompilationInfo* info) { | 313 bool FullCodeGenerator::MakeCode(CompilationInfo* info) { |
314 Isolate* isolate = info->isolate(); | 314 Isolate* isolate = info->isolate(); |
| 315 |
| 316 Logger::TimerEventScope timer( |
| 317 isolate, Logger::TimerEventScope::v8_compile_full_code); |
| 318 |
315 Handle<Script> script = info->script(); | 319 Handle<Script> script = info->script(); |
316 if (!script->IsUndefined() && !script->source()->IsUndefined()) { | 320 if (!script->IsUndefined() && !script->source()->IsUndefined()) { |
317 int len = String::cast(script->source())->length(); | 321 int len = String::cast(script->source())->length(); |
318 isolate->counters()->total_full_codegen_source_size()->Increment(len); | 322 isolate->counters()->total_full_codegen_source_size()->Increment(len); |
319 } | 323 } |
320 CodeGenerator::MakeCodePrologue(info, "full"); | 324 CodeGenerator::MakeCodePrologue(info, "full"); |
321 const int kInitialBufferSize = 4 * KB; | 325 const int kInitialBufferSize = 4 * KB; |
322 MacroAssembler masm(info->isolate(), NULL, kInitialBufferSize); | 326 MacroAssembler masm(info->isolate(), NULL, kInitialBufferSize); |
323 #ifdef ENABLE_GDB_JIT_INTERFACE | 327 #ifdef ENABLE_GDB_JIT_INTERFACE |
324 masm.positions_recorder()->StartGDBJITLineInfoRecording(); | 328 masm.positions_recorder()->StartGDBJITLineInfoRecording(); |
(...skipping 1312 matching lines...)
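The only functional change in MakeCode above is the new timer scope at lines 316-317, which times full-codegen compilation for the duration of the enclosing scope. For readers unfamiliar with the pattern, a minimal sketch of such an RAII timer scope follows; the class name and the logging are hypothetical stand-ins, not V8's Logger API:

    #include <chrono>
    #include <cstdio>

    // Minimal RAII timer scope in the spirit of Logger::TimerEventScope:
    // construction marks the start of the event, destruction logs the
    // elapsed time, so the timed region is exactly the enclosing C++ scope.
    class ScopedTimer {
     public:
      explicit ScopedTimer(const char* event_name)
          : name_(event_name), start_(std::chrono::steady_clock::now()) {}
      ~ScopedTimer() {
        auto elapsed_us = std::chrono::duration_cast<std::chrono::microseconds>(
            std::chrono::steady_clock::now() - start_).count();
        std::printf("%s took %lld us\n", name_,
                    static_cast<long long>(elapsed_us));
      }
     private:
      const char* name_;
      std::chrono::steady_clock::time_point start_;
    };

    int main() {
      ScopedTimer timer("v8.compile_full_code");  // cf. new lines 316-317
      // ... compilation work happens here ...
      return 0;  // destructor fires on scope exit and logs the duration
    }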
1637 | 1641 |
1638 if (expr->IsLiteralCompareNull(&sub_expr)) { | 1642 if (expr->IsLiteralCompareNull(&sub_expr)) { |
1639 EmitLiteralCompareNil(expr, sub_expr, kNullValue); | 1643 EmitLiteralCompareNil(expr, sub_expr, kNullValue); |
1640 return true; | 1644 return true; |
1641 } | 1645 } |
1642 | 1646 |
1643 return false; | 1647 return false; |
1644 } | 1648 } |
1645 | 1649 |
1646 | 1650 |
1647 void BackEdgeTable::Patch(Isolate* isolate, | 1651 void BackEdgeTable::Patch(Isolate* isolate, Code* unoptimized) { |
1648 Code* unoptimized) { | |
1649 DisallowHeapAllocation no_gc; | 1652 DisallowHeapAllocation no_gc; |
1650 Code* patch = isolate->builtins()->builtin(Builtins::kOnStackReplacement); | 1653 Code* patch = isolate->builtins()->builtin(Builtins::kOnStackReplacement); |
1651 | 1654 |
1652 // Iterate over the back edge table and patch every interrupt | 1655 // Iterate over the back edge table and patch every interrupt |
1653 // call to an unconditional call to the replacement code. | 1656 // call to an unconditional call to the replacement code. |
1654 int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level(); | 1657 int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level(); |
1655 | 1658 |
1656 BackEdgeTable back_edges(unoptimized, &no_gc); | 1659 BackEdgeTable back_edges(unoptimized, &no_gc); |
1657 for (uint32_t i = 0; i < back_edges.length(); i++) { | 1660 for (uint32_t i = 0; i < back_edges.length(); i++) { |
1658 if (static_cast<int>(back_edges.loop_depth(i)) == loop_nesting_level) { | 1661 if (static_cast<int>(back_edges.loop_depth(i)) == loop_nesting_level) { |
1659 ASSERT_EQ(INTERRUPT, GetBackEdgeState(isolate, | 1662 ASSERT_EQ(INTERRUPT, GetBackEdgeState(isolate, |
1660 unoptimized, | 1663 unoptimized, |
1661 back_edges.pc(i))); | 1664 back_edges.pc(i))); |
1662 PatchAt(unoptimized, back_edges.pc(i), ON_STACK_REPLACEMENT, patch); | 1665 PatchAt(unoptimized, back_edges.pc(i), ON_STACK_REPLACEMENT, patch); |
1663 } | 1666 } |
1664 } | 1667 } |
1665 | 1668 |
1666 unoptimized->set_back_edges_patched_for_osr(true); | 1669 unoptimized->set_back_edges_patched_for_osr(true); |
1667 ASSERT(Verify(isolate, unoptimized, loop_nesting_level)); | 1670 ASSERT(Verify(isolate, unoptimized, loop_nesting_level)); |
1668 } | 1671 } |
1669 | 1672 |
1670 | 1673 |
1671 void BackEdgeTable::Revert(Isolate* isolate, | 1674 void BackEdgeTable::Revert(Isolate* isolate, Code* unoptimized) { |
1672 Code* unoptimized) { | |
1673 DisallowHeapAllocation no_gc; | 1675 DisallowHeapAllocation no_gc; |
1674 Code* patch = isolate->builtins()->builtin(Builtins::kInterruptCheck); | 1676 Code* patch = isolate->builtins()->builtin(Builtins::kInterruptCheck); |
1675 | 1677 |
1676 // Iterate over the back edge table and revert the patched interrupt calls. | 1678 // Iterate over the back edge table and revert the patched interrupt calls. |
1677 ASSERT(unoptimized->back_edges_patched_for_osr()); | 1679 ASSERT(unoptimized->back_edges_patched_for_osr()); |
1678 int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level(); | 1680 int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level(); |
1679 | 1681 |
1680 BackEdgeTable back_edges(unoptimized, &no_gc); | 1682 BackEdgeTable back_edges(unoptimized, &no_gc); |
1681 for (uint32_t i = 0; i < back_edges.length(); i++) { | 1683 for (uint32_t i = 0; i < back_edges.length(); i++) { |
1682 if (static_cast<int>(back_edges.loop_depth(i)) <= loop_nesting_level) { | 1684 if (static_cast<int>(back_edges.loop_depth(i)) <= loop_nesting_level) { |
1683 ASSERT_NE(INTERRUPT, GetBackEdgeState(isolate, | 1685 ASSERT_NE(INTERRUPT, GetBackEdgeState(isolate, |
1684 unoptimized, | 1686 unoptimized, |
1685 back_edges.pc(i))); | 1687 back_edges.pc(i))); |
1686 PatchAt(unoptimized, back_edges.pc(i), INTERRUPT, patch); | 1688 PatchAt(unoptimized, back_edges.pc(i), INTERRUPT, patch); |
1687 } | 1689 } |
1688 } | 1690 } |
1689 | 1691 |
1690 unoptimized->set_back_edges_patched_for_osr(false); | 1692 unoptimized->set_back_edges_patched_for_osr(false); |
1691 unoptimized->set_allow_osr_at_loop_nesting_level(0); | 1693 unoptimized->set_allow_osr_at_loop_nesting_level(0); |
1692 // Assert that none of the back edges are patched anymore. | 1694 // Assert that none of the back edges are patched anymore. |
1693 ASSERT(Verify(isolate, unoptimized, -1)); | 1695 ASSERT(Verify(isolate, unoptimized, -1)); |
1694 } | 1696 } |
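Note that Patch and Revert are deliberately asymmetric: Patch only touches back edges whose loop depth equals the current allowed nesting level (the ASSERT_EQ at lines 1662-1664 requires those edges to still hold the original interrupt check), so repeated calls at increasing levels arm deeper loops incrementally, while Revert restores every depth up to the recorded level in one pass. That is exactly the invariant Verify checks below. A self-contained sketch of the pair over a simplified table; BackEdge and its fields are illustrative stand-ins for the real pc-based call patching:

    #include <cstdint>
    #include <vector>

    enum BackEdgeState { INTERRUPT, ON_STACK_REPLACEMENT };

    // Stand-in for one back-edge table entry; the real table stores a pc
    // into the unoptimized code and patches the call sequence found there.
    struct BackEdge {
      uint32_t loop_depth;
      BackEdgeState state;
    };

    // Patch arms OSR only at the depth that just became eligible (==),
    // leaving shallower, already-patched edges alone.
    void Patch(std::vector<BackEdge>& table, int loop_nesting_level) {
      for (BackEdge& edge : table) {
        if (static_cast<int>(edge.loop_depth) == loop_nesting_level) {
          edge.state = ON_STACK_REPLACEMENT;
        }
      }
    }

    // Revert restores the interrupt check for every depth up to the
    // recorded level (<=), undoing all prior Patch calls at once.
    void Revert(std::vector<BackEdge>& table, int loop_nesting_level) {
      for (BackEdge& edge : table) {
        if (static_cast<int>(edge.loop_depth) <= loop_nesting_level) {
          edge.state = INTERRUPT;
        }
      }
    }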
1695 | 1697 |
1696 | 1698 |
1697 void BackEdgeTable::AddStackCheck(CompilationInfo* info) { | 1699 void BackEdgeTable::AddStackCheck(Handle<Code> code, uint32_t pc_offset) { |
1698 DisallowHeapAllocation no_gc; | 1700 DisallowHeapAllocation no_gc; |
1699 Isolate* isolate = info->isolate(); | 1701 Isolate* isolate = code->GetIsolate(); |
1700 Code* code = *info->osr_patched_code(); | 1702 Address pc = code->instruction_start() + pc_offset; |
1701 Address pc = code->instruction_start() + info->osr_pc_offset(); | |
1702 ASSERT_EQ(info->osr_ast_id().ToInt(), | |
1703 code->TranslatePcOffsetToAstId(info->osr_pc_offset()).ToInt()); | |
1704 ASSERT_NE(INTERRUPT, GetBackEdgeState(isolate, code, pc)); | |
1705 Code* patch = isolate->builtins()->builtin(Builtins::kOsrAfterStackCheck); | 1703 Code* patch = isolate->builtins()->builtin(Builtins::kOsrAfterStackCheck); |
1706 PatchAt(code, pc, OSR_AFTER_STACK_CHECK, patch); | 1704 PatchAt(*code, pc, OSR_AFTER_STACK_CHECK, patch); |
1707 } | 1705 } |
1708 | 1706 |
1709 | 1707 |
1710 void BackEdgeTable::RemoveStackCheck(CompilationInfo* info) { | 1708 void BackEdgeTable::RemoveStackCheck(Handle<Code> code, uint32_t pc_offset) { |
1711 DisallowHeapAllocation no_gc; | 1709 DisallowHeapAllocation no_gc; |
1712 Isolate* isolate = info->isolate(); | 1710 Isolate* isolate = code->GetIsolate(); |
1713 Code* code = *info->osr_patched_code(); | 1711 Address pc = code->instruction_start() + pc_offset; |
1714 Address pc = code->instruction_start() + info->osr_pc_offset(); | 1712 |
1715 ASSERT_EQ(info->osr_ast_id().ToInt(), | 1713 if (OSR_AFTER_STACK_CHECK == GetBackEdgeState(isolate, *code, pc)) { |
1716 code->TranslatePcOffsetToAstId(info->osr_pc_offset()).ToInt()); | |
1717 if (GetBackEdgeState(isolate, code, pc) == OSR_AFTER_STACK_CHECK) { | |
1718 Code* patch = isolate->builtins()->builtin(Builtins::kOnStackReplacement); | 1714 Code* patch = isolate->builtins()->builtin(Builtins::kOnStackReplacement); |
1719 PatchAt(code, pc, ON_STACK_REPLACEMENT, patch); | 1715 PatchAt(*code, pc, ON_STACK_REPLACEMENT, patch); |
1720 } | 1716 } |
| 1717 ASSERT(Verify(isolate, *code, code->allow_osr_at_loop_nesting_level())); |
1721 } | 1718 } |
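The change to AddStackCheck and RemoveStackCheck replaces the CompilationInfo* parameter with an explicit Handle<Code> plus pc offset, so the helpers no longer depend on CompilationInfo's OSR bookkeeping (the removed osr_patched_code()/osr_ast_id() asserts). Functionally each helper moves one patch site through a small state machine over the three states named in this file; a self-contained model of just that transition logic, with everything except the state names being illustrative:

    #include <cassert>

    enum BackEdgeState { INTERRUPT, ON_STACK_REPLACEMENT, OSR_AFTER_STACK_CHECK };

    // AddStackCheck (new lines 1699-1705) patches the site unconditionally
    // to the "OSR after stack check" builtin.
    BackEdgeState AddStackCheck(BackEdgeState /*current*/) {
      return OSR_AFTER_STACK_CHECK;
    }

    // RemoveStackCheck (new lines 1708-1718) downgrades only a site that is
    // currently in the OSR_AFTER_STACK_CHECK state back to a plain
    // on-stack-replacement call; any other state is left untouched.
    BackEdgeState RemoveStackCheck(BackEdgeState current) {
      return current == OSR_AFTER_STACK_CHECK ? ON_STACK_REPLACEMENT : current;
    }

    int main() {
      assert(RemoveStackCheck(AddStackCheck(ON_STACK_REPLACEMENT)) ==
             ON_STACK_REPLACEMENT);
      assert(RemoveStackCheck(INTERRUPT) == INTERRUPT);  // guard: no-op
      return 0;
    }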
1722 | 1719 |
1723 | 1720 |
1724 #ifdef DEBUG | 1721 #ifdef DEBUG |
1725 bool BackEdgeTable::Verify(Isolate* isolate, | 1722 bool BackEdgeTable::Verify(Isolate* isolate, |
1726 Code* unoptimized, | 1723 Code* unoptimized, |
1727 int loop_nesting_level) { | 1724 int loop_nesting_level) { |
1728 DisallowHeapAllocation no_gc; | 1725 DisallowHeapAllocation no_gc; |
1729 BackEdgeTable back_edges(unoptimized, &no_gc); | 1726 BackEdgeTable back_edges(unoptimized, &no_gc); |
1730 for (uint32_t i = 0; i < back_edges.length(); i++) { | 1727 for (uint32_t i = 0; i < back_edges.length(); i++) { |
1731 uint32_t loop_depth = back_edges.loop_depth(i); | 1728 uint32_t loop_depth = back_edges.loop_depth(i); |
1732 CHECK_LE(static_cast<int>(loop_depth), Code::kMaxLoopNestingMarker); | 1729 CHECK_LE(static_cast<int>(loop_depth), Code::kMaxLoopNestingMarker); |
1733 // Assert that all back edges for shallower loops (and only those) | 1730 // Assert that all back edges for shallower loops (and only those) |
1734 // have already been patched. | 1731 // have already been patched. |
1735 CHECK_EQ((static_cast<int>(loop_depth) <= loop_nesting_level), | 1732 CHECK_EQ((static_cast<int>(loop_depth) <= loop_nesting_level), |
1736 GetBackEdgeState(isolate, | 1733 GetBackEdgeState(isolate, |
1737 unoptimized, | 1734 unoptimized, |
1738 back_edges.pc(i)) != INTERRUPT); | 1735 back_edges.pc(i)) != INTERRUPT); |
1739 } | 1736 } |
1740 return true; | 1737 return true; |
1741 } | 1738 } |
1742 #endif // DEBUG | 1739 #endif // DEBUG |
1743 | 1740 |
1744 | 1741 |
1745 #undef __ | 1742 #undef __ |
1746 | 1743 |
1747 | 1744 |
1748 } } // namespace v8::internal | 1745 } } // namespace v8::internal |