Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 1709 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 1720 } | 1720 } |
| 1721 | 1721 |
| 1722 | 1722 |
| 1723 void Debug::ClearStepNext() { | 1723 void Debug::ClearStepNext() { |
| 1724 thread_local_.last_step_action_ = StepNone; | 1724 thread_local_.last_step_action_ = StepNone; |
| 1725 thread_local_.last_statement_position_ = RelocInfo::kNoPosition; | 1725 thread_local_.last_statement_position_ = RelocInfo::kNoPosition; |
| 1726 thread_local_.last_fp_ = 0; | 1726 thread_local_.last_fp_ = 0; |
| 1727 } | 1727 } |
| 1728 | 1728 |
| 1729 | 1729 |
| 1730 // Compare function to compare the object pointer value of two | |
| 1731 // handlified objects. The handles are passed as pointers to the | |
| 1732 // handles. | |
| 1733 template <typename T> | |
| 1734 static int HandleObjectPointerCompare(const Handle<T>* a, const Handle<T>* b) { | |
|
Kevin Millikin (Chromium)
2011/09/29 10:47:35
Hmm. There are a couple of other Compare function
Søren Thygesen Gjesse
2011/09/30 08:33:22
Good point. I moved it to utils.h together with a
| |
| 1735 return Compare<T*>(*(*a), *(*b)); | |
| 1736 } | |
| 1737 | |
| 1738 | |
| 1739 // Helper function to compile full code for debugging. This code will | |
| 1740 // have debug break slots and deoptimization | |
| 1741 // information. Deoptimization information is required in case that an | |
| 1742 // optimized version of this function is still activated on the | |
| 1743 // stack. It will also make sure that the full code is compiled with | |
| 1744 // the same flags as the previous version - that is flags which can | |
| 1745 // change the code generated. The current method of mapping from | |
| 1746 // already compiled full code without debug break slots to full code | |
| 1747 // with debug break slots depends on the generated code being otherwise | |
| 1748 // exactly the same. | |
| 1749 static bool CompileFullCodeForDebugging(Handle<SharedFunctionInfo> shared, | |
| 1750 Handle<Code> current_code) { | |
| 1751 ASSERT(!current_code->has_debug_break_slots()); | |
| 1752 | |
| 1753 CompilationInfo info(shared); | |
| 1754 info.MarkCompilingForDebugging(current_code); | |
| 1755 ASSERT(!info.shared_info()->is_compiled()); | |
| 1756 ASSERT(!info.isolate()->has_pending_exception()); | |
| 1757 | |
| 1758 // Use compile lazy which will end up compiling the full code in the | |
| 1759 // configuration configured above. | |
| 1760 bool result = Compiler::CompileLazy(&info); | |
| 1761 ASSERT(result != Isolate::Current()->has_pending_exception()); | |
| 1762 info.isolate()->clear_pending_exception(); | |
| 1763 #if DEBUG | |
| 1764 if (result) { | |
| 1765 Handle<Code> new_code(shared->code()); | |
| 1766 ASSERT(new_code->has_debug_break_slots()); | |
| 1767 ASSERT(current_code->is_compiled_optimizable() == | |
| 1768 new_code->is_compiled_optimizable()); | |
| 1769 ASSERT(current_code->instruction_size() <= new_code->instruction_size()); | |
| 1770 } | |
| 1771 #endif | |
| 1772 return result; | |
| 1773 } | |
| 1774 | |
| 1775 | |
| 1730 void Debug::PrepareForBreakPoints() { | 1776 void Debug::PrepareForBreakPoints() { |
| 1731 // If preparing for the first break point make sure to deoptimize all | 1777 // If preparing for the first break point make sure to deoptimize all |
| 1732 // functions as debugging does not work with optimized code. | 1778 // functions as debugging does not work with optimized code. |
| 1733 if (!has_break_points_) { | 1779 if (!has_break_points_) { |
| 1734 Deoptimizer::DeoptimizeAll(); | 1780 Deoptimizer::DeoptimizeAll(); |
| 1735 | 1781 |
| 1736 // We are going to iterate heap to find all functions without | 1782 Handle<Code> lazy_compile( |
|
Kevin Millikin (Chromium)
2011/09/29 10:47:35
Handle<Code> lazy_compile = isolate_->builtins()->
Søren Thygesen Gjesse
2011/09/30 08:33:22
Done, actually
Handle<Code> lazy_compile = Handle
| |
| 1737 // debug break slots. | 1783 isolate_->builtins()->builtin(Builtins::kLazyCompile)); |
| 1738 isolate_->heap()->CollectAllGarbage(Heap::kMakeHeapIterableMask); | 1784 |
| 1739 | 1785 // Keep the list of activated functions in a handlified list as it |
| 1740 AssertNoAllocation no_allocation; | 1786 // is used both in GC and non-GC code. |
| 1741 Builtins* builtins = isolate_->builtins(); | 1787 List<Handle<JSFunction> > active_functions(100); |
| 1742 Code* lazy_compile = builtins->builtin(Builtins::kLazyCompile); | 1788 |
| 1743 | 1789 { |
| 1744 // Find all non-optimized code functions with activation frames on | 1790 // Ensure no GC in this scope as we are comparing raw pointer |
| 1745 // the stack. | 1791 // values and performing a heap iteration. |
| 1746 List<JSFunction*> active_functions(100); | 1792 AssertNoAllocation no_allocation; |
| 1747 for (JavaScriptFrameIterator it(isolate_); !it.done(); it.Advance()) { | 1793 |
| 1748 JavaScriptFrame* frame = it.frame(); | 1794 // Find all non-optimized code functions with activation frames on |
| 1749 if (frame->function()->IsJSFunction()) { | 1795 // the stack. |
| 1750 JSFunction* function = JSFunction::cast(frame->function()); | 1796 for (JavaScriptFrameIterator it(isolate_); !it.done(); it.Advance()) { |
| 1751 if (function->code()->kind() == Code::FUNCTION) | 1797 JavaScriptFrame* frame = it.frame(); |
| 1752 active_functions.Add(function); | 1798 if (frame->function()->IsJSFunction()) { |
| 1799 JSFunction* function = JSFunction::cast(frame->function()); | |
| 1800 if (function->code()->kind() == Code::FUNCTION && | |
| 1801 !function->code()->has_debug_break_slots()) | |
| 1802 active_functions.Add(Handle<JSFunction>(function)); | |
| 1803 } | |
| 1804 } | |
| 1805 // Sort the functions on the object pointer value to prepare for | |
| 1806 // the binary search below. | |
| 1807 active_functions.Sort(HandleObjectPointerCompare<JSFunction>); | |
| 1808 | |
| 1809 // Scan the heap for all non-optimized functions which have no | |
| 1810 // debug break slots. | |
| 1811 HeapIterator iterator; | |
| 1812 HeapObject* obj = NULL; | |
| 1813 while (((obj = iterator.next()) != NULL)) { | |
|
Kevin Millikin (Chromium)
2011/09/29 10:47:35
Maybe it doesn't matter, but should you avoid scan
Søren Thygesen Gjesse
2011/09/30 08:33:22
Currently there is no space filtering in the heap
| |
| 1814 if (obj->IsJSFunction()) { | |
| 1815 JSFunction* function = JSFunction::cast(obj); | |
| 1816 if (function->shared()->allows_lazy_compilation() && | |
| 1817 function->shared()->script()->IsScript() && | |
| 1818 function->code()->kind() == Code::FUNCTION && | |
| 1819 !function->code()->has_debug_break_slots()) { | |
| 1820 bool has_activation = | |
| 1821 SortedListBSearch<Handle<JSFunction> >( | |
| 1822 active_functions, | |
| 1823 Handle<JSFunction>(function), | |
| 1824 HandleObjectPointerCompare<JSFunction>) != -1; | |
| 1825 if (!has_activation) { | |
| 1826 function->set_code(*lazy_compile); | |
| 1827 function->shared()->set_code(*lazy_compile); | |
| 1828 } | |
| 1829 } | |
| 1830 } | |
| 1753 } | 1831 } |
| 1754 } | 1832 } |
| 1755 active_functions.Sort(); | 1833 |
| 1756 | 1834 // Now the non-GC scope is left, and the sorting of the functions |
| 1757 // Scan the heap for all non-optimized functions which has no | 1835 // in active_function is not ensured any more. The code below does |
| 1758 // debug break slots. | 1836 // not rely on it. |
| 1759 HeapIterator iterator; | 1837 |
| 1760 HeapObject* obj = NULL; | 1838 // Now recompile all functions with activation frames and and |
| 1761 while (((obj = iterator.next()) != NULL)) { | 1839 // patch the return address to run in the new compiled code. |
| 1762 if (obj->IsJSFunction()) { | 1840 for (int i = 0; i < active_functions.length(); i++) { |
| 1763 JSFunction* function = JSFunction::cast(obj); | 1841 Handle<JSFunction> function = active_functions[i]; |
| 1764 if (function->shared()->allows_lazy_compilation() && | 1842 Handle<SharedFunctionInfo> shared(function->shared()); |
| 1765 function->shared()->script()->IsScript() && | 1843 // If recompilation is not possible just skip it. |
| 1766 function->code()->kind() == Code::FUNCTION && | 1844 if (shared->is_toplevel() || |
| 1767 !function->code()->has_debug_break_slots()) { | 1845 !shared->allows_lazy_compilation() || |
| 1768 bool has_activation = | 1846 shared->code()->kind() == Code::BUILTIN) { |
| 1769 SortedListBSearch<JSFunction*>(active_functions, function) != -1; | 1847 continue; |
| 1770 if (!has_activation) { | 1848 } |
| 1771 function->set_code(lazy_compile); | 1849 |
| 1772 function->shared()->set_code(lazy_compile); | 1850 // Make sure that the shared full code is compiled with debug |
| 1851 // break slots. | |
| 1852 Handle<Code> current_code(function->code()); | |
| 1853 if (shared->code()->has_debug_break_slots()) { | |
| 1854 // If the code is already recompiled to have break slots skip | |
| 1855 // recompilation. | |
| 1856 ASSERT(!function->code()->has_debug_break_slots()); | |
| 1857 } else { | |
| 1858 // Try to compile the full code with debug break slots. If it | |
| 1859 // fails just keep the current code. | |
| 1860 ASSERT(shared->code() == *current_code); | |
| 1861 ZoneScope zone_scope(isolate_, DELETE_ON_EXIT); | |
| 1862 shared->set_code(*lazy_compile); | |
| 1863 bool prev_force_debugger_active = | |
| 1864 isolate_->debugger()->force_debugger_active(); | |
| 1865 isolate_->debugger()->set_force_debugger_active(true); | |
| 1866 CompileFullCodeForDebugging(shared, current_code); | |
| 1867 isolate_->debugger()->set_force_debugger_active( | |
| 1868 prev_force_debugger_active); | |
| 1869 if (!shared->is_compiled()) { | |
| 1870 shared->set_code(*current_code); | |
| 1871 continue; | |
| 1872 } | |
| 1873 } | |
| 1874 Handle<Code> new_code(shared->code()); | |
| 1875 | |
| 1876 // Find the function and patch return address. | |
| 1877 for (JavaScriptFrameIterator it(isolate_); !it.done(); it.Advance()) { | |
| 1878 JavaScriptFrame* frame = it.frame(); | |
| 1879 // If the current frame is for this function in its | |
| 1880 // non-optimized form rewrite the return address to continue | |
| 1881 // in the newly compiled full code with debug break slots. | |
| 1882 if (frame->function()->IsJSFunction() && | |
| 1883 frame->function() == *function && | |
| 1884 frame->LookupCode()->kind() == Code::FUNCTION) { | |
| 1885 int delta = frame->pc() - current_code->instruction_start(); | |
| 1886 int debug_break_slot_count = 0; | |
| 1887 int mask = RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT); | |
| 1888 for (RelocIterator it(*new_code, mask); !it.done(); it.next()) { | |
| 1889 // Check if the pc in the new code with debug break | |
| 1890 // slots is before this slot. | |
| 1891 RelocInfo* info = it.rinfo(); | |
| 1892 int debug_break_slot_bytes = | |
| 1893 debug_break_slot_count * Assembler::kDebugBreakSlotLength; | |
| 1894 int new_delta = | |
| 1895 info->pc() - | |
| 1896 new_code->instruction_start() - | |
| 1897 debug_break_slot_bytes; | |
| 1898 if (new_delta > delta) { | |
| 1899 break; | |
| 1900 } | |
| 1901 | |
| 1902 // Passed a debug break slot in the full code with debug | |
| 1903 // break slots. | |
| 1904 debug_break_slot_count++; | |
| 1773 } | 1905 } |
| 1906 int debug_break_slot_bytes = | |
| 1907 debug_break_slot_count * Assembler::kDebugBreakSlotLength; | |
| 1908 if (FLAG_trace_deopt) { | |
| 1909 PrintF("Replacing code %08" V8PRIxPTR " - %08" V8PRIxPTR " (%d) " | |
| 1910 "with %08" V8PRIxPTR " - %08" V8PRIxPTR " (%d) " | |
| 1911 "for debugging, " | |
| 1912 "changing pc from %08" V8PRIxPTR " to %08" V8PRIxPTR "\n", | |
| 1913 reinterpret_cast<intptr_t>( | |
| 1914 current_code->instruction_start()), | |
| 1915 reinterpret_cast<intptr_t>( | |
| 1916 current_code->instruction_start()) + | |
| 1917 current_code->instruction_size(), | |
| 1918 current_code->instruction_size(), | |
| 1919 reinterpret_cast<intptr_t>(new_code->instruction_start()), | |
| 1920 reinterpret_cast<intptr_t>(new_code->instruction_start()) + | |
| 1921 new_code->instruction_size(), | |
| 1922 new_code->instruction_size(), | |
| 1923 reinterpret_cast<intptr_t>(frame->pc()), | |
| 1924 reinterpret_cast<intptr_t>(new_code->instruction_start()) + | |
| 1925 delta + debug_break_slot_bytes); | |
| 1926 } | |
| 1927 | |
| 1928 // Patch the return address to return into the code with | |
| 1929 // debug break slots. | |
| 1930 frame->set_pc( | |
| 1931 new_code->instruction_start() + delta + debug_break_slot_bytes); | |
| 1774 } | 1932 } |
| 1775 } | 1933 } |
| 1776 } | 1934 } |
| 1777 } | 1935 } |
| 1778 } | 1936 } |
| 1779 | 1937 |
| 1780 | 1938 |
| 1781 // Ensures the debug information is present for shared. | 1939 // Ensures the debug information is present for shared. |
| 1782 bool Debug::EnsureDebugInfo(Handle<SharedFunctionInfo> shared) { | 1940 bool Debug::EnsureDebugInfo(Handle<SharedFunctionInfo> shared) { |
| 1783 // Return if we already have the debug info for shared. | 1941 // Return if we already have the debug info for shared. |
| (...skipping 1050 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 2834 // Set the debug command break flag to have the command processed. | 2992 // Set the debug command break flag to have the command processed. |
| 2835 if (!isolate_->debug()->InDebugger()) { | 2993 if (!isolate_->debug()->InDebugger()) { |
| 2836 isolate_->stack_guard()->DebugCommand(); | 2994 isolate_->stack_guard()->DebugCommand(); |
| 2837 } | 2995 } |
| 2838 } | 2996 } |
| 2839 | 2997 |
| 2840 | 2998 |
| 2841 bool Debugger::IsDebuggerActive() { | 2999 bool Debugger::IsDebuggerActive() { |
| 2842 ScopedLock with(debugger_access_); | 3000 ScopedLock with(debugger_access_); |
| 2843 | 3001 |
| 2844 return message_handler_ != NULL || !event_listener_.is_null(); | 3002 return message_handler_ != NULL || |
| 3003 !event_listener_.is_null() || | |
| 3004 force_debugger_active_; | |
| 2845 } | 3005 } |
| 2846 | 3006 |
| 2847 | 3007 |
| 2848 Handle<Object> Debugger::Call(Handle<JSFunction> fun, | 3008 Handle<Object> Debugger::Call(Handle<JSFunction> fun, |
| 2849 Handle<Object> data, | 3009 Handle<Object> data, |
| 2850 bool* pending_exception) { | 3010 bool* pending_exception) { |
| 2851 // When calling functions in the debugger prevent it from being unloaded. | 3011 // When calling functions in the debugger prevent it from being unloaded. |
| 2852 Debugger::never_unload_debugger_ = true; | 3012 Debugger::never_unload_debugger_ = true; |
| 2853 | 3013 |
| 2854 // Enter the debugger. | 3014 // Enter the debugger. |
| (...skipping 377 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 3232 { | 3392 { |
| 3233 Locker locker; | 3393 Locker locker; |
| 3234 Isolate::Current()->debugger()->CallMessageDispatchHandler(); | 3394 Isolate::Current()->debugger()->CallMessageDispatchHandler(); |
| 3235 } | 3395 } |
| 3236 } | 3396 } |
| 3237 } | 3397 } |
| 3238 | 3398 |
| 3239 #endif // ENABLE_DEBUGGER_SUPPORT | 3399 #endif // ENABLE_DEBUGGER_SUPPORT |
| 3240 | 3400 |
| 3241 } } // namespace v8::internal | 3401 } } // namespace v8::internal |
| OLD | NEW |