OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "v8.h" | 5 #include "v8.h" |
6 | 6 |
7 #include "api.h" | 7 #include "api.h" |
8 #include "arguments.h" | 8 #include "arguments.h" |
9 #include "bootstrapper.h" | 9 #include "bootstrapper.h" |
10 #include "code-stubs.h" | 10 #include "code-stubs.h" |
(...skipping 1829 matching lines...) |
1840 } else if (frame->function()->IsJSFunction()) { | 1840 } else if (frame->function()->IsJSFunction()) { |
1841 JSFunction* function = frame->function(); | 1841 JSFunction* function = frame->function(); |
1842 ASSERT(frame->LookupCode()->kind() == Code::FUNCTION); | 1842 ASSERT(frame->LookupCode()->kind() == Code::FUNCTION); |
1843 active_functions->Add(Handle<JSFunction>(function)); | 1843 active_functions->Add(Handle<JSFunction>(function)); |
1844 function->shared()->code()->set_gc_metadata(active_code_marker); | 1844 function->shared()->code()->set_gc_metadata(active_code_marker); |
1845 } | 1845 } |
1846 } | 1846 } |
1847 } | 1847 } |
1848 | 1848 |
1849 | 1849 |
| 1850 // Figure out how many bytes of "pc_offset" correspond to actual code by |
| 1851 // subtracting off the bytes that correspond to constant/veneer pools. See |
| 1852 // Assembler::CheckConstPool() and Assembler::CheckVeneerPool(). Note that this |
| 1853 // is only useful for architectures using constant pools or veneer pools. |
| 1854 static int ComputeCodeOffsetFromPcOffset(Code* code, int pc_offset) {
| 1855 ASSERT_EQ(code->kind(), Code::FUNCTION); |
| 1856 ASSERT(!code->has_debug_break_slots()); |
| 1857 ASSERT_LE(0, pc_offset); |
| 1858 ASSERT_LT(pc_offset, code->instruction_end() - code->instruction_start()); |
| 1859 |
| 1860 int mask = RelocInfo::ModeMask(RelocInfo::CONST_POOL) | |
| 1861 RelocInfo::ModeMask(RelocInfo::VENEER_POOL); |
| 1862 byte* pc = code->instruction_start() + pc_offset;
| 1863 int code_offset = pc_offset; |
| 1864 for (RelocIterator it(code, mask); !it.done(); it.next()) { |
| 1865 RelocInfo* info = it.rinfo(); |
| 1866 if (info->pc() >= pc) break; |
| 1867 ASSERT(RelocInfo::IsConstPool(info->rmode()) || RelocInfo::IsVeneerPool(info->rmode()));
| 1868 code_offset -= static_cast<int>(info->data()); |
| 1869 ASSERT_LE(0, code_offset); |
| 1870 } |
| 1871 |
| 1872 return code_offset; |
| 1873 } |
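
A toy model may help picture the arithmetic above: each CONST_POOL/VENEER_POOL entry records the pc at which a pool was emitted and, in its data(), the pool's byte size, so the logical code offset is just the pc offset minus all pool bytes emitted before it. A minimal standalone sketch of that subtraction (hypothetical PoolEntry type, with entries assumed sorted by pc as a RelocIterator would yield them; not the V8 RelocInfo API):

    #include <cassert>
    #include <vector>

    // Hypothetical stand-in for one CONST_POOL/VENEER_POOL entry: 'pc' is the
    // offset at which the pool was emitted, 'size' its byte length (what V8
    // stores in RelocInfo::data()).
    struct PoolEntry { int pc; int size; };

    // Mirrors ComputeCodeOffsetFromPcOffset: drop pool bytes before pc_offset.
    int CodeOffsetFromPcOffset(const std::vector<PoolEntry>& pools,
                               int pc_offset) {
      int code_offset = pc_offset;
      for (const PoolEntry& pool : pools) {
        if (pool.pc >= pc_offset) break;
        code_offset -= pool.size;
        assert(code_offset >= 0);
      }
      return code_offset;
    }

    int main() {
      // One 8-byte pool emitted at pc offset 16: pc offset 32 corresponds to
      // 24 bytes of actual code, while a pc before the pool is unaffected.
      std::vector<PoolEntry> pools = {{16, 8}};
      assert(CodeOffsetFromPcOffset(pools, 32) == 24);
      assert(CodeOffsetFromPcOffset(pools, 10) == 10);
    }
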
| 1874 |
| 1875 |
| 1876 // The inverse of ComputeCodeOffsetFromPcOffset. |
| 1877 static int ComputePcOffsetFromCodeOffset(Code* code, int code_offset) {
| 1878 ASSERT_EQ(code->kind(), Code::FUNCTION); |
| 1879 |
| 1880 int mask = RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) | |
| 1881 RelocInfo::ModeMask(RelocInfo::CONST_POOL) | |
| 1882 RelocInfo::ModeMask(RelocInfo::VENEER_POOL); |
| 1883 int reloc = 0; |
| 1884 for (RelocIterator it(code, mask); !it.done(); it.next()) { |
| 1885 RelocInfo* info = it.rinfo(); |
| 1886 if (info->pc() - code->instruction_start() - reloc >= code_offset) break; |
| 1887 if (RelocInfo::IsDebugBreakSlot(info->rmode())) { |
| 1888 reloc += Assembler::kDebugBreakSlotLength; |
| 1889 } else { |
| 1890 ASSERT(RelocInfo::IsConstPool(info->rmode()) || RelocInfo::IsVeneerPool(info->rmode()));
| 1891 reloc += static_cast<int>(info->data()); |
| 1892 } |
| 1893 } |
| 1894 |
| 1895 int pc_offset = code_offset + reloc; |
| 1896 |
| 1897 ASSERT_LT(code->instruction_start() + pc_offset, code->instruction_end()); |
| 1898 |
| 1899 return pc_offset; |
| 1900 } |
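
Since the two functions walk the same kind of layout, converting a pc offset to a code offset and back should round-trip, provided the pc does not sit exactly on a pool boundary (a real return address or continuation never does). A self-contained sketch of the inverse and the round-trip property, continuing the hypothetical PoolEntry model from above:

    #include <cassert>
    #include <vector>

    struct PoolEntry { int pc; int size; };  // hypothetical, as sketched above

    int CodeOffsetFromPcOffset(const std::vector<PoolEntry>& pools,
                               int pc_offset) {
      int code_offset = pc_offset;
      for (const PoolEntry& pool : pools) {
        if (pool.pc >= pc_offset) break;
        code_offset -= pool.size;
      }
      return code_offset;
    }

    // Mirrors ComputePcOffsetFromCodeOffset: re-add the bytes of every pool
    // lying before the target, tracking them in 'reloc' while walking forward.
    int PcOffsetFromCodeOffset(const std::vector<PoolEntry>& pools,
                               int code_offset) {
      int reloc = 0;
      for (const PoolEntry& pool : pools) {
        if (pool.pc - reloc >= code_offset) break;
        reloc += pool.size;
      }
      return code_offset + reloc;
    }

    int main() {
      // Two pools; pick pcs that do not touch a pool boundary.
      std::vector<PoolEntry> pools = {{16, 8}, {40, 4}};
      for (int pc : {0, 15, 30, 39, 50}) {
        assert(PcOffsetFromCodeOffset(pools,
                                      CodeOffsetFromPcOffset(pools, pc)) == pc);
      }
    }
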
| 1901 |
| 1902 |
1850 static void RedirectActivationsToRecompiledCodeOnThread( | 1903 static void RedirectActivationsToRecompiledCodeOnThread( |
1851 Isolate* isolate, | 1904 Isolate* isolate, |
1852 ThreadLocalTop* top) { | 1905 ThreadLocalTop* top) { |
1853 for (JavaScriptFrameIterator it(isolate, top); !it.done(); it.Advance()) { | 1906 for (JavaScriptFrameIterator it(isolate, top); !it.done(); it.Advance()) { |
1854 JavaScriptFrame* frame = it.frame(); | 1907 JavaScriptFrame* frame = it.frame(); |
1855 | 1908 |
1856 if (frame->is_optimized() || !frame->function()->IsJSFunction()) continue; | 1909 if (frame->is_optimized() || !frame->function()->IsJSFunction()) continue; |
1857 | 1910 |
1858 JSFunction* function = frame->function(); | 1911 JSFunction* function = frame->function(); |
1859 | 1912 |
1860 ASSERT(frame->LookupCode()->kind() == Code::FUNCTION); | 1913 ASSERT(frame->LookupCode()->kind() == Code::FUNCTION); |
1861 | 1914 |
1862 Handle<Code> frame_code(frame->LookupCode()); | 1915 Handle<Code> frame_code(frame->LookupCode()); |
1863 if (frame_code->has_debug_break_slots()) continue; | 1916 if (frame_code->has_debug_break_slots()) continue; |
1864 | 1917 |
1865 Handle<Code> new_code(function->shared()->code()); | 1918 Handle<Code> new_code(function->shared()->code()); |
1866 if (new_code->kind() != Code::FUNCTION || | 1919 if (new_code->kind() != Code::FUNCTION || |
1867 !new_code->has_debug_break_slots()) { | 1920 !new_code->has_debug_break_slots()) { |
1868 continue; | 1921 continue; |
1869 } | 1922 } |
1870 | 1923 |
1871 // Iterate over the RelocInfo in the original code to compute the sum of the | 1924 int old_pc_offset = static_cast<int>(frame->pc() - frame_code->instruction_start());
1872 // constant pools and veneer pools sizes. (See Assembler::CheckConstPool() | 1925 int code_offset = ComputeCodeOffsetFromPcOffset(*frame_code, old_pc_offset); |
1873 // and Assembler::CheckVeneerPool()) | 1926 int new_pc_offset = ComputePcOffsetFromCodeOffset(*new_code, code_offset); |
1874 // Note that this is only useful for architectures using constant pools or | |
1875 // veneer pools. | |
1876 int pool_mask = RelocInfo::ModeMask(RelocInfo::CONST_POOL) | | |
1877 RelocInfo::ModeMask(RelocInfo::VENEER_POOL); | |
1878 int frame_pool_size = 0; | |
1879 for (RelocIterator it(*frame_code, pool_mask); !it.done(); it.next()) { | |
1880 RelocInfo* info = it.rinfo(); | |
1881 if (info->pc() >= frame->pc()) break; | |
1882 frame_pool_size += static_cast<int>(info->data()); | |
1883 } | |
1884 intptr_t frame_offset = | |
1885 frame->pc() - frame_code->instruction_start() - frame_pool_size; | |
1886 | |
1887 // Iterate over the RelocInfo for new code to find the number of bytes | |
1888 // generated for debug slots and constant pools. | |
1889 int debug_break_slot_bytes = 0; | |
1890 int new_code_pool_size = 0; | |
1891 int mask = RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) | | |
1892 RelocInfo::ModeMask(RelocInfo::CONST_POOL) | | |
1893 RelocInfo::ModeMask(RelocInfo::VENEER_POOL); | |
1894 for (RelocIterator it(*new_code, mask); !it.done(); it.next()) { | |
1895 // Check if the pc in the new code with debug break | |
1896 // slots is before this slot. | |
1897 RelocInfo* info = it.rinfo(); | |
1898 intptr_t new_offset = info->pc() - new_code->instruction_start() - | |
1899 new_code_pool_size - debug_break_slot_bytes; | |
1900 if (new_offset >= frame_offset) { | |
1901 break; | |
1902 } | |
1903 | |
1904 if (RelocInfo::IsDebugBreakSlot(info->rmode())) { | |
1905 debug_break_slot_bytes += Assembler::kDebugBreakSlotLength; | |
1906 } else { | |
1907 ASSERT(RelocInfo::IsConstPool(info->rmode())); | |
1908 // The size of the pools is encoded in the data. | |
1909 new_code_pool_size += static_cast<int>(info->data()); | |
1910 } | |
1911 } | |
1912 | 1927 |
1913 // Compute the equivalent pc in the new code. | 1928 // Compute the equivalent pc in the new code. |
1914 byte* new_pc = new_code->instruction_start() + frame_offset + | 1929 byte* new_pc = new_code->instruction_start() + new_pc_offset; |
1915 debug_break_slot_bytes + new_code_pool_size; | |
1916 | 1930 |
1917 if (FLAG_trace_deopt) { | 1931 if (FLAG_trace_deopt) { |
1918 PrintF("Replacing code %08" V8PRIxPTR " - %08" V8PRIxPTR " (%d) " | 1932 PrintF("Replacing code %08" V8PRIxPTR " - %08" V8PRIxPTR " (%d) " |
1919 "with %08" V8PRIxPTR " - %08" V8PRIxPTR " (%d) " | 1933 "with %08" V8PRIxPTR " - %08" V8PRIxPTR " (%d) " |
1920 "for debugging, " | 1934 "for debugging, " |
1921 "changing pc from %08" V8PRIxPTR " to %08" V8PRIxPTR "\n", | 1935 "changing pc from %08" V8PRIxPTR " to %08" V8PRIxPTR "\n", |
1922 reinterpret_cast<intptr_t>( | 1936 reinterpret_cast<intptr_t>( |
1923 frame_code->instruction_start()), | 1937 frame_code->instruction_start()), |
1924 reinterpret_cast<intptr_t>( | 1938 reinterpret_cast<intptr_t>( |
1925 frame_code->instruction_start()) + | 1939 frame_code->instruction_start()) + |
(...skipping 35 matching lines...) |
1961 | 1975 |
1962 | 1976 |
1963 class ActiveFunctionsRedirector : public ThreadVisitor { | 1977 class ActiveFunctionsRedirector : public ThreadVisitor { |
1964 public: | 1978 public: |
1965 void VisitThread(Isolate* isolate, ThreadLocalTop* top) { | 1979 void VisitThread(Isolate* isolate, ThreadLocalTop* top) { |
1966 RedirectActivationsToRecompiledCodeOnThread(isolate, top); | 1980 RedirectActivationsToRecompiledCodeOnThread(isolate, top); |
1967 } | 1981 } |
1968 }; | 1982 }; |
1969 | 1983 |
1970 | 1984 |
| 1985 class ForceDebuggerActive { |
| 1986 public: |
| 1987 explicit ForceDebuggerActive(Isolate* isolate) {
| 1988 isolate_ = isolate;
| 1989 old_state_ = isolate_->debugger()->force_debugger_active();
| 1990 isolate_->debugger()->set_force_debugger_active(true); |
| 1991 } |
| 1992 |
| 1993 ~ForceDebuggerActive() { |
| 1994 isolate_->debugger()->set_force_debugger_active(old_state_); |
| 1995 } |
| 1996 |
| 1997 private: |
| 1998 Isolate* isolate_;
| 1999 bool old_state_; |
| 2000 |
| 2001 DISALLOW_COPY_AND_ASSIGN(ForceDebuggerActive); |
| 2002 }; |
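
This class is the standard RAII save-set-restore idiom, so the flag is restored on every exit path from the scope in which it is used (see MaybeRecompileFunctionForDebugging below). A standalone model of the pattern, with a hypothetical Debugger stand-in rather than the V8 classes:

    #include <cassert>

    // Hypothetical stand-in for the debugger's force_debugger_active flag.
    class Debugger {
     public:
      bool force_debugger_active() const { return force_debugger_active_; }
      void set_force_debugger_active(bool v) { force_debugger_active_ = v; }
     private:
      bool force_debugger_active_ = false;
    };

    // Save the old flag, force it on, and restore it on scope exit.
    class ForceActiveScope {
     public:
      explicit ForceActiveScope(Debugger* debugger)
          : debugger_(debugger),
            old_state_(debugger->force_debugger_active()) {
        debugger_->set_force_debugger_active(true);
      }
      ~ForceActiveScope() { debugger_->set_force_debugger_active(old_state_); }
      ForceActiveScope(const ForceActiveScope&) = delete;
      ForceActiveScope& operator=(const ForceActiveScope&) = delete;
     private:
      Debugger* debugger_;
      bool old_state_;
    };

    int main() {
      Debugger debugger;
      {
        ForceActiveScope scope(&debugger);
        assert(debugger.force_debugger_active());  // forced on inside the scope
      }
      assert(!debugger.force_debugger_active());   // restored on scope exit
    }
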
| 2003 |
| 2004 |
| 2005 void Debug::MaybeRecompileFunctionForDebugging(Handle<JSFunction> function) { |
| 2006 ASSERT_EQ(Code::FUNCTION, function->code()->kind()); |
| 2007 ASSERT_EQ(function->code(), function->shared()->code()); |
| 2008 |
| 2009 if (function->code()->has_debug_break_slots()) return; |
| 2010 |
| 2011 ForceDebuggerActive force_debugger_active(isolate_); |
| 2012 MaybeHandle<Code> code = Compiler::GetCodeForDebugging(function); |
| 2013 // Recompilation can fail. In that case leave the code as it was. |
| 2014 if (!code.is_null()) |
| 2015 function->ReplaceCode(*code.ToHandleChecked()); |
| 2016 ASSERT_EQ(function->code(), function->shared()->code()); |
| 2017 } |
| 2018 |
| 2019 |
| 2020 void Debug::RecompileAndRelocateSuspendedGenerators( |
| 2021 const List<Handle<JSGeneratorObject> >& generators) {
| 2022 for (int i = 0; i < generators.length(); i++) { |
| 2023 Handle<JSFunction> fun(generators[i]->function()); |
| 2024 |
| 2025 MaybeRecompileFunctionForDebugging(fun); |
| 2026 |
| 2027 int code_offset = generators[i]->continuation(); |
| 2028 int pc_offset = ComputePcOffsetFromCodeOffset(fun->code(), code_offset); |
| 2029 generators[i]->set_continuation(pc_offset); |
| 2030 } |
| 2031 } |
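
Suspended-generator relocation is therefore two-phase: the heap walk in PrepareForBreakPoints (below) first rewrites each stored continuation from a pc offset in the old, slot-free code into a layout-independent code offset, and the loop above then maps it back into the recompiled code, which now contains debug break slots as well as pools. A toy walk-through of both phases under hypothetical old/new layouts (reusing the offset arithmetic sketched earlier; the 5-byte slot size is made up, not Assembler::kDebugBreakSlotLength on any particular architecture):

    #include <cassert>
    #include <vector>

    // Hypothetical run of non-code bytes (a pool or a debug break slot):
    // 'pc' is where it starts in a given layout, 'size' its byte length.
    struct Gap { int pc; int size; };

    int CodeOffsetFromPcOffset(const std::vector<Gap>& gaps, int pc_offset) {
      int code_offset = pc_offset;
      for (const Gap& gap : gaps) {
        if (gap.pc >= pc_offset) break;
        code_offset -= gap.size;
      }
      return code_offset;
    }

    int PcOffsetFromCodeOffset(const std::vector<Gap>& gaps, int code_offset) {
      int reloc = 0;
      for (const Gap& gap : gaps) {
        if (gap.pc - reloc >= code_offset) break;
        reloc += gap.size;
      }
      return code_offset + reloc;
    }

    int main() {
      // Phase 1: the old code has one 8-byte pool at pc 16; a generator
      // suspended at pc offset 32 is at logical code offset 24.
      std::vector<Gap> old_layout = {{16, 8}};
      int code_offset = CodeOffsetFromPcOffset(old_layout, 32);
      assert(code_offset == 24);

      // Phase 2: the recompiled code adds a 5-byte debug break slot at pc 8,
      // which shifts the pool to pc 21; logical offset 24 now lands at pc 37.
      std::vector<Gap> new_layout = {{8, 5}, {21, 8}};
      assert(PcOffsetFromCodeOffset(new_layout, code_offset) == 37);
    }
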
| 2032 |
| 2033 |
1971 void Debug::PrepareForBreakPoints() { | 2034 void Debug::PrepareForBreakPoints() { |
1972 // If preparing for the first break point make sure to deoptimize all | 2035 // If preparing for the first break point make sure to deoptimize all |
1973 // functions as debugging does not work with optimized code. | 2036 // functions as debugging does not work with optimized code. |
1974 if (!has_break_points_) { | 2037 if (!has_break_points_) { |
1975 if (isolate_->concurrent_recompilation_enabled()) { | 2038 if (isolate_->concurrent_recompilation_enabled()) { |
1976 isolate_->optimizing_compiler_thread()->Flush(); | 2039 isolate_->optimizing_compiler_thread()->Flush(); |
1977 } | 2040 } |
1978 | 2041 |
1979 Deoptimizer::DeoptimizeAll(isolate_); | 2042 Deoptimizer::DeoptimizeAll(isolate_); |
1980 | 2043 |
1981 Handle<Code> lazy_compile = isolate_->builtins()->CompileUnoptimized(); | 2044 Handle<Code> lazy_compile = isolate_->builtins()->CompileUnoptimized(); |
1982 | 2045 |
1983 // There will be at least one break point when we are done. | 2046 // There will be at least one break point when we are done. |
1984 has_break_points_ = true; | 2047 has_break_points_ = true; |
1985 | 2048 |
1986 // Keep the list of activated functions in a handlified list as it | 2049 // Keep the list of activated functions in a handlified list as it |
1987 // is used both in GC and non-GC code. | 2050 // is used both in GC and non-GC code. |
1988 List<Handle<JSFunction> > active_functions(100); | 2051 List<Handle<JSFunction> > active_functions(100); |
1989 | 2052 |
| 2053 // A list of all suspended generators. |
| 2054 List<Handle<JSGeneratorObject> > suspended_generators; |
| 2055 |
| 2056 // A list of all generator functions. We need to recompile all functions, |
| 2057 // but we don't know until after visiting the whole heap which generator |
| 2058 // functions have suspended activations and which do not. As in the case of |
| 2059 // functions with activations on the stack, we need to be careful with |
| 2060 // generator functions with suspended activations because although they |
| 2061 // should be recompiled, recompilation can fail, and we need to avoid |
| 2062 // leaving the heap in an inconsistent state. |
| 2063 // |
| 2064 // We could perhaps avoid this list and instead re-use the GC metadata |
| 2065 // links. |
| 2066 List<Handle<JSFunction> > generator_functions; |
| 2067 |
1990 { | 2068 { |
1991 // We are going to iterate heap to find all functions without | 2069 // We are going to iterate heap to find all functions without |
1992 // debug break slots. | 2070 // debug break slots. |
1993 Heap* heap = isolate_->heap(); | 2071 Heap* heap = isolate_->heap(); |
1994 heap->CollectAllGarbage(Heap::kMakeHeapIterableMask, | 2072 heap->CollectAllGarbage(Heap::kMakeHeapIterableMask, |
1995 "preparing for breakpoints"); | 2073 "preparing for breakpoints"); |
1996 | 2074 |
| 2075 // Collecting the generators should not alter iterability of the heap. |
| 2076 ASSERT(heap->IsHeapIterable()); |
| 2077 |
1997 // Ensure no GC in this scope as we are going to use gc_metadata | 2078 // Ensure no GC in this scope as we are going to use gc_metadata |
1998 // field in the Code object to mark active functions. | 2079 // field in the Code object to mark active functions. |
1999 DisallowHeapAllocation no_allocation; | 2080 DisallowHeapAllocation no_allocation; |
2000 | 2081 |
2001 Object* active_code_marker = heap->the_hole_value(); | 2082 Object* active_code_marker = heap->the_hole_value(); |
2002 | 2083 |
2003 CollectActiveFunctionsFromThread(isolate_, | 2084 CollectActiveFunctionsFromThread(isolate_, |
2004 isolate_->thread_local_top(), | 2085 isolate_->thread_local_top(), |
2005 &active_functions, | 2086 &active_functions, |
2006 active_code_marker); | 2087 active_code_marker); |
(...skipping 10 matching lines...) |
2017 while (((obj = iterator.next()) != NULL)) { | 2098 while (((obj = iterator.next()) != NULL)) { |
2018 if (obj->IsJSFunction()) { | 2099 if (obj->IsJSFunction()) { |
2019 JSFunction* function = JSFunction::cast(obj); | 2100 JSFunction* function = JSFunction::cast(obj); |
2020 SharedFunctionInfo* shared = function->shared(); | 2101 SharedFunctionInfo* shared = function->shared(); |
2021 | 2102 |
2022 if (!shared->allows_lazy_compilation()) continue; | 2103 if (!shared->allows_lazy_compilation()) continue; |
2023 if (!shared->script()->IsScript()) continue; | 2104 if (!shared->script()->IsScript()) continue; |
2024 if (function->IsBuiltin()) continue; | 2105 if (function->IsBuiltin()) continue; |
2025 if (shared->code()->gc_metadata() == active_code_marker) continue; | 2106 if (shared->code()->gc_metadata() == active_code_marker) continue; |
2026 | 2107 |
| 2108 if (shared->is_generator()) { |
| 2109 generator_functions.Add(Handle<JSFunction>(function, isolate_)); |
| 2110 continue; |
| 2111 } |
| 2112 |
2027 Code::Kind kind = function->code()->kind(); | 2113 Code::Kind kind = function->code()->kind(); |
2028 if (kind == Code::FUNCTION && | 2114 if (kind == Code::FUNCTION && |
2029 !function->code()->has_debug_break_slots()) { | 2115 !function->code()->has_debug_break_slots()) { |
2030 function->set_code(*lazy_compile); | 2116 function->set_code(*lazy_compile); |
2031 function->shared()->set_code(*lazy_compile); | 2117 function->shared()->set_code(*lazy_compile); |
2032 } else if (kind == Code::BUILTIN && | 2118 } else if (kind == Code::BUILTIN && |
2033 (function->IsInOptimizationQueue() || | 2119 (function->IsInOptimizationQueue() || |
2034 function->IsMarkedForOptimization() || | 2120 function->IsMarkedForOptimization() || |
2035 function->IsMarkedForConcurrentOptimization())) { | 2121 function->IsMarkedForConcurrentOptimization())) { |
2036 // Abort in-flight compilation. | 2122 // Abort in-flight compilation. |
2037 Code* shared_code = function->shared()->code(); | 2123 Code* shared_code = function->shared()->code(); |
2038 if (shared_code->kind() == Code::FUNCTION && | 2124 if (shared_code->kind() == Code::FUNCTION && |
2039 shared_code->has_debug_break_slots()) { | 2125 shared_code->has_debug_break_slots()) { |
2040 function->set_code(shared_code); | 2126 function->set_code(shared_code); |
2041 } else { | 2127 } else { |
2042 function->set_code(*lazy_compile); | 2128 function->set_code(*lazy_compile); |
2043 function->shared()->set_code(*lazy_compile); | 2129 function->shared()->set_code(*lazy_compile); |
2044 } | 2130 } |
2045 } | 2131 } |
| 2132 } else if (obj->IsJSGeneratorObject()) { |
| 2133 JSGeneratorObject* gen = JSGeneratorObject::cast(obj); |
| 2134 if (!gen->is_suspended()) continue; |
| 2135 |
| 2136 JSFunction* fun = gen->function(); |
| 2137 ASSERT_EQ(fun->code()->kind(), Code::FUNCTION); |
| 2138 if (fun->code()->has_debug_break_slots()) continue; |
| 2139 |
| 2140 int pc_offset = gen->continuation(); |
| 2141 ASSERT_LT(0, pc_offset); |
| 2142 |
| 2143 int code_offset = |
| 2144 ComputeCodeOffsetFromPcOffset(fun->code(), pc_offset); |
| 2145 |
| 2146 // This will be fixed after we recompile the functions. |
| 2147 gen->set_continuation(code_offset); |
| 2148 |
| 2149 suspended_generators.Add(Handle<JSGeneratorObject>(gen, isolate_)); |
2046 } | 2150 } |
2047 } | 2151 } |
2048 | 2152 |
2049 // Clear gc_metadata field. | 2153 // Clear gc_metadata field. |
2050 for (int i = 0; i < active_functions.length(); i++) { | 2154 for (int i = 0; i < active_functions.length(); i++) { |
2051 Handle<JSFunction> function = active_functions[i]; | 2155 Handle<JSFunction> function = active_functions[i]; |
2052 function->shared()->code()->set_gc_metadata(Smi::FromInt(0)); | 2156 function->shared()->code()->set_gc_metadata(Smi::FromInt(0)); |
2053 } | 2157 } |
2054 } | 2158 } |
2055 | 2159 |
| 2160 // Recompile generator functions that have suspended activations, and |
| 2161 // relocate those activations. |
| 2162 RecompileAndRelocateSuspendedGenerators(suspended_generators); |
| 2163 |
| 2164 // Mark generator functions that didn't have suspended activations for lazy |
| 2165 // recompilation. Note that this set does not include any active functions. |
| 2166 for (int i = 0; i < generator_functions.length(); i++) { |
| 2167 Handle<JSFunction>& function = generator_functions[i];
| 2168 if (function->code()->kind() != Code::FUNCTION) continue; |
| 2169 if (function->code()->has_debug_break_slots()) continue; |
| 2170 function->set_code(*lazy_compile); |
| 2171 function->shared()->set_code(*lazy_compile); |
| 2172 } |
| 2173 |
2056 // Now recompile all functions with activation frames and and | 2174 // Now recompile all functions with activation frames and
2057 // patch the return address to run in the new compiled code. | 2175 // patch the return address to run in the new compiled code. It could be |
| 2176 // that some active functions were recompiled already by the suspended |
| 2177 // generator recompilation pass above; a generator with suspended |
| 2178 // activations could also have active activations. That's fine. |
2058 for (int i = 0; i < active_functions.length(); i++) { | 2179 for (int i = 0; i < active_functions.length(); i++) { |
2059 Handle<JSFunction> function = active_functions[i]; | 2180 Handle<JSFunction> function = active_functions[i]; |
2060 Handle<SharedFunctionInfo> shared(function->shared()); | 2181 Handle<SharedFunctionInfo> shared(function->shared()); |
2061 | 2182 |
2062 if (function->code()->kind() == Code::FUNCTION && | 2183 // If recompilation is not possible just skip it. |
2063 function->code()->has_debug_break_slots()) { | 2184 if (shared->is_toplevel()) continue; |
2064 // Nothing to do. Function code already had debug break slots. | 2185 if (!shared->allows_lazy_compilation()) continue; |
2065 continue; | 2186 if (shared->code()->kind() == Code::BUILTIN) continue; |
2066 } | |
2067 | 2187 |
2068 // If recompilation is not possible just skip it. | 2188 MaybeRecompileFunctionForDebugging(function); |
2069 if (shared->is_toplevel() || | |
2070 !shared->allows_lazy_compilation() || | |
2071 shared->code()->kind() == Code::BUILTIN) { | |
2072 continue; | |
2073 } | |
2074 | |
2075 // Make sure that the shared full code is compiled with debug | |
2076 // break slots. | |
2077 if (!shared->code()->has_debug_break_slots()) { | |
2078 // Try to compile the full code with debug break slots. If it | |
2079 // fails just keep the current code. | |
2080 bool prev_force_debugger_active = | |
2081 isolate_->debugger()->force_debugger_active(); | |
2082 isolate_->debugger()->set_force_debugger_active(true); | |
2083 Handle<Code> code = Compiler::GetCodeForDebugging( | |
2084 function).ToHandleChecked(); | |
2085 function->ReplaceCode(*code); | |
2086 isolate_->debugger()->set_force_debugger_active( | |
2087 prev_force_debugger_active); | |
2088 } | |
2089 | |
2090 // Keep function code in sync with shared function info. | |
2091 function->set_code(shared->code()); | |
2092 } | 2189 } |
2093 | 2190 |
2094 RedirectActivationsToRecompiledCodeOnThread(isolate_, | 2191 RedirectActivationsToRecompiledCodeOnThread(isolate_, |
2095 isolate_->thread_local_top()); | 2192 isolate_->thread_local_top()); |
2096 | 2193 |
2097 ActiveFunctionsRedirector active_functions_redirector; | 2194 ActiveFunctionsRedirector active_functions_redirector; |
2098 isolate_->thread_manager()->IterateArchivedThreads( | 2195 isolate_->thread_manager()->IterateArchivedThreads( |
2099 &active_functions_redirector); | 2196 &active_functions_redirector); |
2100 } | 2197 } |
2101 } | 2198 } |
(...skipping 1618 matching lines...) |
3720 already_signalled_ = false; | 3817 already_signalled_ = false; |
3721 } | 3818 } |
3722 { | 3819 { |
3723 Locker locker(reinterpret_cast<v8::Isolate*>(isolate_)); | 3820 Locker locker(reinterpret_cast<v8::Isolate*>(isolate_)); |
3724 isolate_->debugger()->CallMessageDispatchHandler(); | 3821 isolate_->debugger()->CallMessageDispatchHandler(); |
3725 } | 3822 } |
3726 } | 3823 } |
3727 } | 3824 } |
3728 | 3825 |
3729 } } // namespace v8::internal | 3826 } } // namespace v8::internal |