Chromium Code Reviews

Unified Diff: src/debug.cc

Issue 260423002: Relocate suspended generator activations when enabling debug mode (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Added test case, fixed bugs (created 6 years, 7 months ago)
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "v8.h"

 #include "api.h"
 #include "arguments.h"
 #include "bootstrapper.h"
 #include "code-stubs.h"
(...skipping 1829 matching lines...)
     } else if (frame->function()->IsJSFunction()) {
       JSFunction* function = frame->function();
       ASSERT(frame->LookupCode()->kind() == Code::FUNCTION);
       active_functions->Add(Handle<JSFunction>(function));
       function->shared()->code()->set_gc_metadata(active_code_marker);
     }
   }
 }


+// Figure out how many bytes of "pc_offset" correspond to actual code by
+// subtracting off the bytes that correspond to constant/veneer pools. See
+// Assembler::CheckConstPool() and Assembler::CheckVeneerPool(). Note that this
+// is only useful for architectures using constant pools or veneer pools.
+static int ComputeCodeOffsetFromPcOffset(Code *code, int pc_offset) {
+  ASSERT_EQ(code->kind(), Code::FUNCTION);
+  ASSERT(!code->has_debug_break_slots());
+  ASSERT_LE(0, pc_offset);
+  ASSERT_LT(pc_offset, code->instruction_end() - code->instruction_start());
+
+  int mask = RelocInfo::ModeMask(RelocInfo::CONST_POOL) |
+             RelocInfo::ModeMask(RelocInfo::VENEER_POOL);
+  byte *pc = code->instruction_start() + pc_offset;
+  int code_offset = pc_offset;
+  for (RelocIterator it(code, mask); !it.done(); it.next()) {
+    RelocInfo* info = it.rinfo();
+    if (info->pc() >= pc) break;
+    ASSERT(RelocInfo::IsConstPool(info->rmode()));
+    code_offset -= static_cast<int>(info->data());
+    ASSERT_LE(0, code_offset);
+  }
+
+  return code_offset;
+}
+
+
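To make the mapping concrete, here is a minimal, self-contained sketch of the idea, with toy types and a hypothetical pool layout rather than the V8 API: pools embedded in the instruction stream are subtracted when translating a raw pc offset into a pool-independent code offset.

```cpp
// Toy model of ComputeCodeOffsetFromPcOffset: `pools` stands in for what
// RelocIterator yields from CONST_POOL/VENEER_POOL entries (start offset
// and byte size of each pool, in address order).
#include <cassert>
#include <vector>

struct Pool { int pc_offset; int size; };

int CodeOffsetFromPcOffset(const std::vector<Pool>& pools, int pc_offset) {
  int code_offset = pc_offset;
  for (const Pool& pool : pools) {
    if (pool.pc_offset >= pc_offset) break;  // pool lies at or after the pc
    code_offset -= pool.size;                // pc has skipped over this pool
    assert(code_offset >= 0);
  }
  return code_offset;
}

int main() {
  // 8 bytes of code, then a 16-byte constant pool, then more code.
  std::vector<Pool> pools = {{8, 16}};
  assert(CodeOffsetFromPcOffset(pools, 4) == 4);    // before the pool
  assert(CodeOffsetFromPcOffset(pools, 28) == 12);  // 16 pool bytes removed
  return 0;
}
```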
+// The inverse of ComputeCodeOffsetFromPcOffset.
+static int ComputePcOffsetFromCodeOffset(Code *code, int code_offset) {
+  ASSERT_EQ(code->kind(), Code::FUNCTION);
+
+  int mask = RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
+             RelocInfo::ModeMask(RelocInfo::CONST_POOL) |
+             RelocInfo::ModeMask(RelocInfo::VENEER_POOL);
+  int reloc = 0;
+  for (RelocIterator it(code, mask); !it.done(); it.next()) {
+    RelocInfo* info = it.rinfo();
+    if (info->pc() - code->instruction_start() - reloc >= code_offset) break;
+    if (RelocInfo::IsDebugBreakSlot(info->rmode())) {
+      reloc += Assembler::kDebugBreakSlotLength;
+    } else {
+      ASSERT(RelocInfo::IsConstPool(info->rmode()));
+      reloc += static_cast<int>(info->data());
+    }
+  }
+
+  int pc_offset = code_offset + reloc;
+
+  ASSERT_LT(code->instruction_start() + pc_offset, code->instruction_end());
+
+  return pc_offset;
+}
+
+
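The inverse direction, as the same kind of self-contained toy sketch (again not the V8 API; kSlotLength is a hypothetical stand-in for Assembler::kDebugBreakSlotLength): walk the pools and debug break slots of the new code, adding back the bytes of every insert that precedes the requested code offset.

```cpp
// Toy model of ComputePcOffsetFromCodeOffset.
#include <cassert>
#include <vector>

const int kSlotLength = 4;  // hypothetical debug-break-slot size

// Each insert is a pool or a debug break slot at a raw offset in the new code.
struct Insert { int pc_offset; int size; };

int PcOffsetFromCodeOffset(const std::vector<Insert>& inserts, int code_offset) {
  int reloc = 0;  // bytes of pools/slots seen so far
  for (const Insert& ins : inserts) {
    // ins.pc_offset - reloc is the code offset at which this insert sits.
    if (ins.pc_offset - reloc >= code_offset) break;
    reloc += ins.size;
  }
  return code_offset + reloc;
}

int main() {
  // One debug break slot at raw offset 8 of the new code.
  std::vector<Insert> inserts = {{8, kSlotLength}};
  assert(PcOffsetFromCodeOffset(inserts, 4) == 4);    // before the slot
  assert(PcOffsetFromCodeOffset(inserts, 12) == 16);  // 4 slot bytes added back
  return 0;
}
```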
 static void RedirectActivationsToRecompiledCodeOnThread(
     Isolate* isolate,
     ThreadLocalTop* top) {
   for (JavaScriptFrameIterator it(isolate, top); !it.done(); it.Advance()) {
     JavaScriptFrame* frame = it.frame();

     if (frame->is_optimized() || !frame->function()->IsJSFunction()) continue;

     JSFunction* function = frame->function();

     ASSERT(frame->LookupCode()->kind() == Code::FUNCTION);

     Handle<Code> frame_code(frame->LookupCode());
     if (frame_code->has_debug_break_slots()) continue;

     Handle<Code> new_code(function->shared()->code());
     if (new_code->kind() != Code::FUNCTION ||
         !new_code->has_debug_break_slots()) {
       continue;
     }

-    // Iterate over the RelocInfo in the original code to compute the sum of the
-    // constant pools and veneer pools sizes. (See Assembler::CheckConstPool()
-    // and Assembler::CheckVeneerPool())
-    // Note that this is only useful for architectures using constant pools or
-    // veneer pools.
-    int pool_mask = RelocInfo::ModeMask(RelocInfo::CONST_POOL) |
-        RelocInfo::ModeMask(RelocInfo::VENEER_POOL);
-    int frame_pool_size = 0;
-    for (RelocIterator it(*frame_code, pool_mask); !it.done(); it.next()) {
-      RelocInfo* info = it.rinfo();
-      if (info->pc() >= frame->pc()) break;
-      frame_pool_size += static_cast<int>(info->data());
-    }
-    intptr_t frame_offset =
-        frame->pc() - frame_code->instruction_start() - frame_pool_size;
-
-    // Iterate over the RelocInfo for new code to find the number of bytes
-    // generated for debug slots and constant pools.
-    int debug_break_slot_bytes = 0;
-    int new_code_pool_size = 0;
-    int mask = RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
-        RelocInfo::ModeMask(RelocInfo::CONST_POOL) |
-        RelocInfo::ModeMask(RelocInfo::VENEER_POOL);
-    for (RelocIterator it(*new_code, mask); !it.done(); it.next()) {
-      // Check if the pc in the new code with debug break
-      // slots is before this slot.
-      RelocInfo* info = it.rinfo();
-      intptr_t new_offset = info->pc() - new_code->instruction_start() -
-          new_code_pool_size - debug_break_slot_bytes;
-      if (new_offset >= frame_offset) {
-        break;
-      }
-
-      if (RelocInfo::IsDebugBreakSlot(info->rmode())) {
-        debug_break_slot_bytes += Assembler::kDebugBreakSlotLength;
-      } else {
-        ASSERT(RelocInfo::IsConstPool(info->rmode()));
-        // The size of the pools is encoded in the data.
-        new_code_pool_size += static_cast<int>(info->data());
-      }
-    }
+    int old_pc_offset = frame->pc() - frame_code->instruction_start();
+    int code_offset = ComputeCodeOffsetFromPcOffset(*frame_code, old_pc_offset);
+    int new_pc_offset = ComputePcOffsetFromCodeOffset(*new_code, code_offset);

     // Compute the equivalent pc in the new code.
-    byte* new_pc = new_code->instruction_start() + frame_offset +
-        debug_break_slot_bytes + new_code_pool_size;
+    byte* new_pc = new_code->instruction_start() + new_pc_offset;

     if (FLAG_trace_deopt) {
       PrintF("Replacing code %08" V8PRIxPTR " - %08" V8PRIxPTR " (%d) "
              "with %08" V8PRIxPTR " - %08" V8PRIxPTR " (%d) "
              "for debugging, "
              "changing pc from %08" V8PRIxPTR " to %08" V8PRIxPTR "\n",
              reinterpret_cast<intptr_t>(
                  frame_code->instruction_start()),
              reinterpret_cast<intptr_t>(
                  frame_code->instruction_start()) +
(...skipping 35 matching lines...)
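Taken together, the redirect above is a round trip through the pool-independent offset: the raw pc in the old code, minus pool bytes, plus the debug-break-slot and pool bytes of the new code. A worked numeric example under assumed layouts (a 16-byte pool at offset 8 in the old code, a 4-byte debug break slot at code offset 8 in the new code; both values hypothetical):

```cpp
#include <cassert>

int main() {
  int old_pc_offset = 28;                // frame->pc() relative to old code
  int code_offset = old_pc_offset - 16;  // subtract pool bytes before the pc
  assert(code_offset == 12);
  int new_pc_offset = code_offset + 4;   // add slot bytes before the offset
  assert(new_pc_offset == 16);           // where the frame resumes in new code
  return 0;
}
```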


 class ActiveFunctionsRedirector : public ThreadVisitor {
  public:
   void VisitThread(Isolate* isolate, ThreadLocalTop* top) {
     RedirectActivationsToRecompiledCodeOnThread(isolate, top);
   }
 };


+void Debug::CollectSuspendedGenerators(List<Handle<JSGeneratorObject> > *acc) {
+  Heap* heap = isolate_->heap();
+
+  ASSERT(heap->IsHeapIterable());
+  HeapIterator iterator(heap);
+  HeapObject* obj = NULL;
+  while (((obj = iterator.next()) != NULL)) {
+    if (!obj->IsJSGeneratorObject()) continue;
+
+    JSGeneratorObject* gen = JSGeneratorObject::cast(obj);
+    if (!gen->is_suspended()) continue;
+
+    JSFunction* fun = gen->function();
+    ASSERT_EQ(fun->code()->kind(), Code::FUNCTION);
+    if (fun->code()->has_debug_break_slots()) continue;
+
+    int pc_offset = gen->continuation();
+    ASSERT_LT(0, pc_offset);
+
+    int code_offset = ComputeCodeOffsetFromPcOffset(fun->code(), pc_offset);
+
+    // This will be fixed after we recompile the functions.
+    gen->set_continuation(code_offset);
+
+    acc->Add(Handle<JSGeneratorObject>(gen, isolate_));
+  }
+}
+
+
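A sketch of the two-phase protocol this function starts (toy types, not the V8 API): while functions are being recompiled, the generator's continuation field temporarily holds a pool-independent code offset rather than a pc offset, and the relocation pass later restores a pc offset that is valid in the new code. The numbers reuse the hypothetical layout from the earlier sketches.

```cpp
#include <cassert>

struct Generator {
  int continuation;  // pc offset when suspended; code offset mid-transition
};

int main() {
  // Assumed layout: old code has a 16-byte pool before the suspension
  // point; new code has 4 bytes of debug break slots before it instead.
  Generator gen = {28};    // suspended at raw pc offset 28 in the old code

  gen.continuation -= 16;  // phase 1 (collect): stash the code offset
  assert(gen.continuation == 12);

  // ... generator functions are recompiled with debug break slots here;
  // the continuation must not be interpreted as a pc in this window ...

  gen.continuation += 4;   // phase 2 (relocate): pc offset in the new code
  assert(gen.continuation == 16);
  return 0;
}
```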
+class ForceDebuggerActive {
+ public:
+  explicit ForceDebuggerActive(Isolate *isolate) {
+    isolate_ = isolate;
+    old_state_ = isolate->debugger()->force_debugger_active();
+    isolate_->debugger()->set_force_debugger_active(true);
+  }
+
+  ~ForceDebuggerActive() {
+    isolate_->debugger()->set_force_debugger_active(old_state_);
+  }
+
+ private:
+  Isolate *isolate_;
+  bool old_state_;
+
+  DISALLOW_COPY_AND_ASSIGN(ForceDebuggerActive);
+};
+
+
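ForceDebuggerActive is a standard RAII scope guard. A self-contained toy of the same pattern (plain structs, not the V8 types), showing why the flag is restored on every exit path of the scope:

```cpp
#include <cassert>

struct Debugger { bool force_active; };

class ScopedForceActive {
 public:
  explicit ScopedForceActive(Debugger* debugger)
      : debugger_(debugger), old_state_(debugger->force_active) {
    debugger_->force_active = true;  // force the flag for this scope
  }
  ~ScopedForceActive() { debugger_->force_active = old_state_; }

 private:
  Debugger* debugger_;
  bool old_state_;
};

int main() {
  Debugger dbg = {false};
  {
    ScopedForceActive guard(&dbg);
    assert(dbg.force_active);  // forced inside the scope
  }  // destructor runs here, even on early return
  assert(!dbg.force_active);   // previous state restored
  return 0;
}
```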
+void Debug::MaybeRecompileFunctionForDebugging(Handle<JSFunction> function) {
+  ASSERT_EQ(Code::FUNCTION, function->code()->kind());
+  ASSERT_EQ(function->code(), function->shared()->code());
+
+  if (function->code()->has_debug_break_slots()) return;
+
+  ForceDebuggerActive force_debugger_active(isolate_);
+  MaybeHandle<Code> code = Compiler::GetCodeForDebugging(function);
+  // Recompilation can fail. In that case leave the code as it was.
+  if (!code.is_null())
+    function->ReplaceCode(*code.ToHandleChecked());
+  ASSERT_EQ(function->code(), function->shared()->code());
+}
+
+
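The contract here, that recompilation may fail and the old code must then stay installed, can be illustrated with a self-contained toy (hypothetical names, not the V8 API):

```cpp
#include <cassert>

struct Code { bool has_debug_break_slots; };
struct Function { Code* code; };

// Stand-in for a compiler entry point that can fail; returns nullptr then.
Code* CompileForDebugging(bool succeed) {
  static Code with_slots = {true};
  return succeed ? &with_slots : nullptr;
}

void MaybeRecompile(Function* fun, bool compiler_succeeds) {
  if (fun->code->has_debug_break_slots) return;     // already debuggable
  Code* new_code = CompileForDebugging(compiler_succeeds);
  if (new_code != nullptr) fun->code = new_code;    // swap only on success
}

int main() {
  Code old_code = {false};
  Function fun = {&old_code};
  MaybeRecompile(&fun, /*compiler_succeeds=*/false);
  assert(fun.code == &old_code);            // failure: old code stays installed
  MaybeRecompile(&fun, /*compiler_succeeds=*/true);
  assert(fun.code->has_debug_break_slots);  // success: new code installed
  return 0;
}
```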
+void Debug::RecompileAndRelocateSuspendedGenerators(
+    const List<Handle<JSGeneratorObject> > &generators) {
+  for (int i = 0; i < generators.length(); i++) {
+    Handle<JSFunction> fun(generators[i]->function());
+
+    MaybeRecompileFunctionForDebugging(fun);
+
+    int code_offset = generators[i]->continuation();
+    int pc_offset = ComputePcOffsetFromCodeOffset(fun->code(), code_offset);
+    generators[i]->set_continuation(pc_offset);
+  }
+}
+
+
 void Debug::PrepareForBreakPoints() {
   // If preparing for the first break point make sure to deoptimize all
   // functions as debugging does not work with optimized code.
   if (!has_break_points_) {
     if (isolate_->concurrent_recompilation_enabled()) {
       isolate_->optimizing_compiler_thread()->Flush();
     }

     Deoptimizer::DeoptimizeAll(isolate_);

     Handle<Code> lazy_compile = isolate_->builtins()->CompileUnoptimized();

     // There will be at least one break point when we are done.
     has_break_points_ = true;

     // Keep the list of activated functions in a handlified list as it
     // is used both in GC and non-GC code.
     List<Handle<JSFunction> > active_functions(100);

+    // A list of all suspended generators.
+    List<Handle<JSGeneratorObject> > suspended_generators;
+
+    // A list of all generator functions. We need to recompile all functions,
+    // but we don't know until after visiting the whole heap which generator
+    // functions have suspended activations and which do not. As in the case of
+    // functions with activations on the stack, we need to be careful with
+    // generator functions with suspended activations because although they
+    // should be recompiled, recompilation can fail, and we need to avoid
+    // leaving the heap in an inconsistent state.
+    //
+    // We could perhaps avoid this list and instead re-use the GC metadata
+    // links.
+    List<Handle<JSFunction> > generator_functions;
+
     {
       // We are going to iterate heap to find all functions without
       // debug break slots.
       Heap* heap = isolate_->heap();
       heap->CollectAllGarbage(Heap::kMakeHeapIterableMask,
                               "preparing for breakpoints");

+      CollectSuspendedGenerators(&suspended_generators);
Yang 2014/05/05 09:40:15 Do we really need to iterate the heap twice? Can t
wingo 2014/05/05 11:08:43 Hum, I think you are right. The double iteration
+
+      // Collecting the generators should not alter iterability of the heap.
+      ASSERT(heap->IsHeapIterable());
+
       // Ensure no GC in this scope as we are going to use gc_metadata
       // field in the Code object to mark active functions.
       DisallowHeapAllocation no_allocation;

       Object* active_code_marker = heap->the_hole_value();

       CollectActiveFunctionsFromThread(isolate_,
                                        isolate_->thread_local_top(),
                                        &active_functions,
                                        active_code_marker);
(...skipping 10 matching lines...)
       while (((obj = iterator.next()) != NULL)) {
         if (obj->IsJSFunction()) {
           JSFunction* function = JSFunction::cast(obj);
           SharedFunctionInfo* shared = function->shared();

           if (!shared->allows_lazy_compilation()) continue;
           if (!shared->script()->IsScript()) continue;
           if (function->IsBuiltin()) continue;
           if (shared->code()->gc_metadata() == active_code_marker) continue;

+          if (shared->is_generator()) {
+            generator_functions.Add(Handle<JSFunction>(function, isolate_));
+            continue;
+          }
+
           Code::Kind kind = function->code()->kind();
           if (kind == Code::FUNCTION &&
               !function->code()->has_debug_break_slots()) {
             function->set_code(*lazy_compile);
             function->shared()->set_code(*lazy_compile);
           } else if (kind == Code::BUILTIN &&
                      (function->IsInOptimizationQueue() ||
                       function->IsMarkedForOptimization() ||
                       function->IsMarkedForConcurrentOptimization())) {
             // Abort in-flight compilation.
             Code* shared_code = function->shared()->code();
             if (shared_code->kind() == Code::FUNCTION &&
                 shared_code->has_debug_break_slots()) {
               function->set_code(shared_code);
             } else {
               function->set_code(*lazy_compile);
               function->shared()->set_code(*lazy_compile);
             }
           }
         }
       }

       // Clear gc_metadata field.
       for (int i = 0; i < active_functions.length(); i++) {
         Handle<JSFunction> function = active_functions[i];
         function->shared()->code()->set_gc_metadata(Smi::FromInt(0));
       }
     }

+    // Recompile generator functions that have suspended activations, and
+    // relocate those activations.
+    RecompileAndRelocateSuspendedGenerators(suspended_generators);
+
+    // Mark generator functions that didn't have suspended activations for lazy
+    // recompilation. Note that this set does not include any active functions.
+    for (int i = 0; i < generator_functions.length(); i++) {
+      Handle<JSFunction> &function = generator_functions[i];
+      if (function->code()->kind() != Code::FUNCTION) continue;
+      if (function->code()->has_debug_break_slots()) continue;
+      function->set_code(*lazy_compile);
+      function->shared()->set_code(*lazy_compile);
+    }
+
     // Now recompile all functions with activation frames and and
-    // patch the return address to run in the new compiled code.
+    // patch the return address to run in the new compiled code. It could be
+    // that some active functions were recompiled already by the suspended
+    // generator recompilation pass above; a generator with suspended
+    // activations could also have active activations. That's fine.
     for (int i = 0; i < active_functions.length(); i++) {
       Handle<JSFunction> function = active_functions[i];
       Handle<SharedFunctionInfo> shared(function->shared());

-      if (function->code()->kind() == Code::FUNCTION &&
-          function->code()->has_debug_break_slots()) {
-        // Nothing to do. Function code already had debug break slots.
-        continue;
-      }
-
-      // If recompilation is not possible just skip it.
-      if (shared->is_toplevel() ||
-          !shared->allows_lazy_compilation() ||
-          shared->code()->kind() == Code::BUILTIN) {
-        continue;
-      }
-
-      // Make sure that the shared full code is compiled with debug
-      // break slots.
-      if (!shared->code()->has_debug_break_slots()) {
-        // Try to compile the full code with debug break slots. If it
-        // fails just keep the current code.
-        bool prev_force_debugger_active =
-            isolate_->debugger()->force_debugger_active();
-        isolate_->debugger()->set_force_debugger_active(true);
-        Handle<Code> code = Compiler::GetCodeForDebugging(
-            function).ToHandleChecked();
-        function->ReplaceCode(*code);
-        isolate_->debugger()->set_force_debugger_active(
-            prev_force_debugger_active);
-      }
-
-      // Keep function code in sync with shared function info.
-      function->set_code(shared->code());
+      // If recompilation is not possible just skip it.
+      if (shared->is_toplevel()) continue;
+      if (!shared->allows_lazy_compilation()) continue;
+      if (shared->code()->kind() == Code::BUILTIN) continue;
+
+      MaybeRecompileFunctionForDebugging(function);
     }

     RedirectActivationsToRecompiledCodeOnThread(isolate_,
                                                 isolate_->thread_local_top());

     ActiveFunctionsRedirector active_functions_redirector;
     isolate_->thread_manager()->IterateArchivedThreads(
         &active_functions_redirector);
   }
 }
(...skipping 1618 matching lines...)
       already_signalled_ = false;
     }
     {
       Locker locker(reinterpret_cast<v8::Isolate*>(isolate_));
       isolate_->debugger()->CallMessageDispatchHandler();
     }
   }
 }

 } }  // namespace v8::internal
