Chromium Code Reviews

Unified Diff: src/debug.cc

Issue 264973014: Relocate suspended generator activations when enabling debug mode (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 6 years, 7 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
 #include "v8.h"
 
 #include "api.h"
 #include "arguments.h"
 #include "bootstrapper.h"
 #include "code-stubs.h"
(...skipping 1863 matching lines...)
     } else if (frame->function()->IsJSFunction()) {
       JSFunction* function = frame->function();
       ASSERT(frame->LookupCode()->kind() == Code::FUNCTION);
       active_functions->Add(Handle<JSFunction>(function));
       function->shared()->code()->set_gc_metadata(active_code_marker);
     }
   }
 }
 
 
+// Figure out how many bytes of "pc_offset" correspond to actual code by
+// subtracting off the bytes that correspond to constant/veneer pools. See
+// Assembler::CheckConstPool() and Assembler::CheckVeneerPool(). Note that this
+// is only useful for architectures using constant pools or veneer pools.
+static int ComputeCodeOffsetFromPcOffset(Code *code, int pc_offset) {
+  ASSERT_EQ(code->kind(), Code::FUNCTION);
+  ASSERT(!code->has_debug_break_slots());
+  ASSERT_LE(0, pc_offset);
+  ASSERT_LT(pc_offset, code->instruction_end() - code->instruction_start());
+
+  int mask = RelocInfo::ModeMask(RelocInfo::CONST_POOL) |
+             RelocInfo::ModeMask(RelocInfo::VENEER_POOL);
+  byte *pc = code->instruction_start() + pc_offset;
+  int code_offset = pc_offset;
+  for (RelocIterator it(code, mask); !it.done(); it.next()) {
+    RelocInfo* info = it.rinfo();
+    if (info->pc() >= pc) break;
+    ASSERT(RelocInfo::IsConstPool(info->rmode()));
+    code_offset -= static_cast<int>(info->data());
+    ASSERT_LE(0, code_offset);
+  }
+
+  return code_offset;
+}
+
+
+// The inverse of ComputeCodeOffsetFromPcOffset.
+static int ComputePcOffsetFromCodeOffset(Code *code, int code_offset) {
+  ASSERT_EQ(code->kind(), Code::FUNCTION);
+
+  int mask = RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
+             RelocInfo::ModeMask(RelocInfo::CONST_POOL) |
+             RelocInfo::ModeMask(RelocInfo::VENEER_POOL);
+  int reloc = 0;
+  for (RelocIterator it(code, mask); !it.done(); it.next()) {
+    RelocInfo* info = it.rinfo();
+    if (info->pc() - code->instruction_start() - reloc >= code_offset) break;
+    if (RelocInfo::IsDebugBreakSlot(info->rmode())) {
+      reloc += Assembler::kDebugBreakSlotLength;
+    } else {
+      ASSERT(RelocInfo::IsConstPool(info->rmode()));
+      reloc += static_cast<int>(info->data());
+    }
+  }
+
+  int pc_offset = code_offset + reloc;
+
+  ASSERT_LT(code->instruction_start() + pc_offset, code->instruction_end());
+
+  return pc_offset;
+}
+
+
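The two helpers are inverses of each other on any offset that does not point into a pool, given one fixed pool layout. A minimal standalone sketch of the same bookkeeping, with a plain (offset, size) list standing in for V8's RelocIterator — every name below is illustrative, not V8 API:

  #include <cassert>
  #include <cstddef>
  #include <utility>
  #include <vector>

  // Each entry: (pc offset at which a pool starts, pool size in bytes).
  typedef std::vector<std::pair<int, int> > Pools;

  // Discount pool bytes that precede pc_offset, mirroring
  // ComputeCodeOffsetFromPcOffset() above.
  static int CodeOffsetFromPcOffset(const Pools& pools, int pc_offset) {
    int code_offset = pc_offset;
    for (size_t i = 0; i < pools.size(); i++) {
      if (pools[i].first >= pc_offset) break;  // pool is at or after this pc
      code_offset -= pools[i].second;
    }
    return code_offset;
  }

  // Re-add pool bytes that precede the target, mirroring
  // ComputePcOffsetFromCodeOffset() above (debug break slots would be
  // handled analogously, as extra bytes to re-add).
  static int PcOffsetFromCodeOffset(const Pools& pools, int code_offset) {
    int reloc = 0;
    for (size_t i = 0; i < pools.size(); i++) {
      if (pools[i].first - reloc >= code_offset) break;
      reloc += pools[i].second;
    }
    return code_offset + reloc;
  }

  int main() {
    Pools pools;
    pools.push_back(std::make_pair(24, 16));  // a 16-byte pool at pc offset 24
    int pc_offset = 44;                       // an instruction past the pool
    int code_offset = CodeOffsetFromPcOffset(pools, pc_offset);
    assert(code_offset == 28);                // 44 minus the 16 pool bytes
    assert(PcOffsetFromCodeOffset(pools, code_offset) == 44);  // round trip
    return 0;
  }
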
 static void RedirectActivationsToRecompiledCodeOnThread(
     Isolate* isolate,
     ThreadLocalTop* top) {
   for (JavaScriptFrameIterator it(isolate, top); !it.done(); it.Advance()) {
     JavaScriptFrame* frame = it.frame();
 
     if (frame->is_optimized() || !frame->function()->IsJSFunction()) continue;
 
     JSFunction* function = frame->function();
 
     ASSERT(frame->LookupCode()->kind() == Code::FUNCTION);
 
     Handle<Code> frame_code(frame->LookupCode());
     if (frame_code->has_debug_break_slots()) continue;
 
     Handle<Code> new_code(function->shared()->code());
     if (new_code->kind() != Code::FUNCTION ||
         !new_code->has_debug_break_slots()) {
       continue;
     }
 
-    // Iterate over the RelocInfo in the original code to compute the sum of the
-    // constant pools and veneer pools sizes. (See Assembler::CheckConstPool()
-    // and Assembler::CheckVeneerPool())
-    // Note that this is only useful for architectures using constant pools or
-    // veneer pools.
-    int pool_mask = RelocInfo::ModeMask(RelocInfo::CONST_POOL) |
-        RelocInfo::ModeMask(RelocInfo::VENEER_POOL);
-    int frame_pool_size = 0;
-    for (RelocIterator it(*frame_code, pool_mask); !it.done(); it.next()) {
-      RelocInfo* info = it.rinfo();
-      if (info->pc() >= frame->pc()) break;
-      frame_pool_size += static_cast<int>(info->data());
-    }
-    intptr_t frame_offset =
-        frame->pc() - frame_code->instruction_start() - frame_pool_size;
-
-    // Iterate over the RelocInfo for new code to find the number of bytes
-    // generated for debug slots and constant pools.
-    int debug_break_slot_bytes = 0;
-    int new_code_pool_size = 0;
-    int mask = RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
-        RelocInfo::ModeMask(RelocInfo::CONST_POOL) |
-        RelocInfo::ModeMask(RelocInfo::VENEER_POOL);
-    for (RelocIterator it(*new_code, mask); !it.done(); it.next()) {
-      // Check if the pc in the new code with debug break
-      // slots is before this slot.
-      RelocInfo* info = it.rinfo();
-      intptr_t new_offset = info->pc() - new_code->instruction_start() -
-          new_code_pool_size - debug_break_slot_bytes;
-      if (new_offset >= frame_offset) {
-        break;
-      }
-
-      if (RelocInfo::IsDebugBreakSlot(info->rmode())) {
-        debug_break_slot_bytes += Assembler::kDebugBreakSlotLength;
-      } else {
-        ASSERT(RelocInfo::IsConstPool(info->rmode()));
-        // The size of the pools is encoded in the data.
-        new_code_pool_size += static_cast<int>(info->data());
-      }
-    }
+    int old_pc_offset =
+        static_cast<int>(frame->pc() - frame_code->instruction_start());
+    int code_offset = ComputeCodeOffsetFromPcOffset(*frame_code, old_pc_offset);
+    int new_pc_offset = ComputePcOffsetFromCodeOffset(*new_code, code_offset);
 
     // Compute the equivalent pc in the new code.
-    byte* new_pc = new_code->instruction_start() + frame_offset +
-        debug_break_slot_bytes + new_code_pool_size;
+    byte* new_pc = new_code->instruction_start() + new_pc_offset;
 
     if (FLAG_trace_deopt) {
       PrintF("Replacing code %08" V8PRIxPTR " - %08" V8PRIxPTR " (%d) "
              "with %08" V8PRIxPTR " - %08" V8PRIxPTR " (%d) "
              "for debugging, "
              "changing pc from %08" V8PRIxPTR " to %08" V8PRIxPTR "\n",
              reinterpret_cast<intptr_t>(
                  frame_code->instruction_start()),
              reinterpret_cast<intptr_t>(
                  frame_code->instruction_start()) +
(...skipping 35 matching lines...)
 
 
 class ActiveFunctionsRedirector : public ThreadVisitor {
  public:
   void VisitThread(Isolate* isolate, ThreadLocalTop* top) {
     RedirectActivationsToRecompiledCodeOnThread(isolate, top);
   }
 };
 
 
+class ForceDebuggerActive {
+ public:
+  explicit ForceDebuggerActive(Isolate *isolate) {
+    isolate_ = isolate;
+    old_state_ = isolate->debugger()->force_debugger_active();
+    isolate_->debugger()->set_force_debugger_active(true);
+  }
+
+  ~ForceDebuggerActive() {
+    isolate_->debugger()->set_force_debugger_active(old_state_);
+  }
+
+ private:
+  Isolate *isolate_;
+  bool old_state_;
+
+  DISALLOW_COPY_AND_ASSIGN(ForceDebuggerActive);
+};
+
+
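ForceDebuggerActive is a scope-guard (RAII) replacement for the manual save/set/restore sequence deleted from PrepareForBreakPoints() further down; the destructor restores the previous value even on an early return. A minimal standalone sketch of the pattern, with a plain global flag standing in for the debugger state (names here are illustrative):

  #include <cassert>

  static bool g_flag = false;  // stands in for force_debugger_active()

  class ScopedFlag {
   public:
    ScopedFlag() : old_state_(g_flag) { g_flag = true; }  // save, force on
    ~ScopedFlag() { g_flag = old_state_; }                // restore on exit
   private:
    bool old_state_;
    ScopedFlag(const ScopedFlag&);      // not copyable: a copy would
    void operator=(const ScopedFlag&);  // restore the saved state twice
  };

  int main() {
    assert(!g_flag);
    {
      ScopedFlag force;  // flag is forced on for the rest of this scope
      assert(g_flag);
    }                    // destructor runs here, even on early return
    assert(!g_flag);
    return 0;
  }
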
+void Debug::MaybeRecompileFunctionForDebugging(Handle<JSFunction> function) {
+  ASSERT_EQ(Code::FUNCTION, function->code()->kind());
+  ASSERT_EQ(function->code(), function->shared()->code());
+
+  if (function->code()->has_debug_break_slots()) return;
+
+  ForceDebuggerActive force_debugger_active(isolate_);
+  MaybeHandle<Code> code = Compiler::GetCodeForDebugging(function);
+  // Recompilation can fail. In that case leave the code as it was.
+  if (!code.is_null())
+    function->ReplaceCode(*code.ToHandleChecked());
+  ASSERT_EQ(function->code(), function->shared()->code());
+}
+
+
+void Debug::RecompileAndRelocateSuspendedGenerators(
+    const List<Handle<JSGeneratorObject> > &generators) {
+  for (int i = 0; i < generators.length(); i++) {
+    Handle<JSFunction> fun(generators[i]->function());
+
+    MaybeRecompileFunctionForDebugging(fun);
+
+    int code_offset = generators[i]->continuation();
+    int pc_offset = ComputePcOffsetFromCodeOffset(fun->code(), code_offset);
+    generators[i]->set_continuation(pc_offset);
+  }
+}
+
+
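To make the round trip concrete, a worked example with made-up numbers: suppose a generator is suspended at pc offset 72 of code whose only constant pool is 16 bytes long and precedes that point. The heap walk in PrepareForBreakPoints() below stores

  continuation = 72 - 16 = 56   (a pure code offset)

and, if recompilation succeeds and the new code keeps an equal pool but adds two 8-byte debug break slots before the resume point, this pass stores

  continuation = 56 + 16 + 2 * 8 = 88   (the pc offset in the new code)

If recompilation fails, the function's code still has no break slots and the same pool, so the offset simply maps back to 72 unchanged.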
 void Debug::PrepareForBreakPoints() {
   // If preparing for the first break point make sure to deoptimize all
   // functions as debugging does not work with optimized code.
   if (!has_break_points_) {
     if (isolate_->concurrent_recompilation_enabled()) {
       isolate_->optimizing_compiler_thread()->Flush();
     }
 
     Deoptimizer::DeoptimizeAll(isolate_);
 
     Handle<Code> lazy_compile = isolate_->builtins()->CompileUnoptimized();
 
     // There will be at least one break point when we are done.
     has_break_points_ = true;
 
     // Keep the list of activated functions in a handlified list as it
     // is used both in GC and non-GC code.
     List<Handle<JSFunction> > active_functions(100);
 
+    // A list of all suspended generators.
+    List<Handle<JSGeneratorObject> > suspended_generators;
+
+    // A list of all generator functions. We need to recompile all functions,
+    // but we don't know until after visiting the whole heap which generator
+    // functions have suspended activations and which do not. As in the case of
+    // functions with activations on the stack, we need to be careful with
+    // generator functions with suspended activations because although they
+    // should be recompiled, recompilation can fail, and we need to avoid
+    // leaving the heap in an inconsistent state.
+    //
+    // We could perhaps avoid this list and instead re-use the GC metadata
+    // links.
+    List<Handle<JSFunction> > generator_functions;
+
     {
       // We are going to iterate heap to find all functions without
       // debug break slots.
       Heap* heap = isolate_->heap();
       heap->CollectAllGarbage(Heap::kMakeHeapIterableMask,
                               "preparing for breakpoints");
 
+      // Collecting the generators should not alter iterability of the heap.
+      ASSERT(heap->IsHeapIterable());
+
       // Ensure no GC in this scope as we are going to use gc_metadata
       // field in the Code object to mark active functions.
       DisallowHeapAllocation no_allocation;
 
       Object* active_code_marker = heap->the_hole_value();
 
       CollectActiveFunctionsFromThread(isolate_,
                                        isolate_->thread_local_top(),
                                        &active_functions,
                                        active_code_marker);
(...skipping 10 matching lines...)
       while (((obj = iterator.next()) != NULL)) {
         if (obj->IsJSFunction()) {
           JSFunction* function = JSFunction::cast(obj);
           SharedFunctionInfo* shared = function->shared();
 
           if (!shared->allows_lazy_compilation()) continue;
           if (!shared->script()->IsScript()) continue;
           if (function->IsBuiltin()) continue;
           if (shared->code()->gc_metadata() == active_code_marker) continue;
 
+          if (shared->is_generator()) {
+            generator_functions.Add(Handle<JSFunction>(function, isolate_));
+            continue;
+          }
+
           Code::Kind kind = function->code()->kind();
           if (kind == Code::FUNCTION &&
               !function->code()->has_debug_break_slots()) {
             function->set_code(*lazy_compile);
             function->shared()->set_code(*lazy_compile);
           } else if (kind == Code::BUILTIN &&
                      (function->IsInOptimizationQueue() ||
                       function->IsMarkedForOptimization() ||
                       function->IsMarkedForConcurrentOptimization())) {
             // Abort in-flight compilation.
             Code* shared_code = function->shared()->code();
             if (shared_code->kind() == Code::FUNCTION &&
                 shared_code->has_debug_break_slots()) {
               function->set_code(shared_code);
             } else {
               function->set_code(*lazy_compile);
               function->shared()->set_code(*lazy_compile);
             }
           }
+        } else if (obj->IsJSGeneratorObject()) {
+          JSGeneratorObject* gen = JSGeneratorObject::cast(obj);
+          if (!gen->is_suspended()) continue;
+
+          JSFunction* fun = gen->function();
+          ASSERT_EQ(fun->code()->kind(), Code::FUNCTION);
+          if (fun->code()->has_debug_break_slots()) continue;
+
+          int pc_offset = gen->continuation();
+          ASSERT_LT(0, pc_offset);
+
+          int code_offset =
+              ComputeCodeOffsetFromPcOffset(fun->code(), pc_offset);
+
+          // This will be fixed after we recompile the functions.
+          gen->set_continuation(code_offset);
+
+          suspended_generators.Add(Handle<JSGeneratorObject>(gen, isolate_));
         }
       }
 
       // Clear gc_metadata field.
       for (int i = 0; i < active_functions.length(); i++) {
         Handle<JSFunction> function = active_functions[i];
         function->shared()->code()->set_gc_metadata(Smi::FromInt(0));
       }
     }
 
+    // Recompile generator functions that have suspended activations, and
+    // relocate those activations.
+    RecompileAndRelocateSuspendedGenerators(suspended_generators);
+
+    // Mark generator functions that didn't have suspended activations for lazy
+    // recompilation. Note that this set does not include any active functions.
+    for (int i = 0; i < generator_functions.length(); i++) {
+      Handle<JSFunction> &function = generator_functions[i];
+      if (function->code()->kind() != Code::FUNCTION) continue;
+      if (function->code()->has_debug_break_slots()) continue;
+      function->set_code(*lazy_compile);
+      function->shared()->set_code(*lazy_compile);
+    }
+
     // Now recompile all functions with activation frames and
-    // patch the return address to run in the new compiled code.
+    // patch the return address to run in the new compiled code. It could be
+    // that some active functions were recompiled already by the suspended
+    // generator recompilation pass above; a generator with suspended
+    // activations could also have active activations. That's fine.
     for (int i = 0; i < active_functions.length(); i++) {
       Handle<JSFunction> function = active_functions[i];
       Handle<SharedFunctionInfo> shared(function->shared());
 
-      if (function->code()->kind() == Code::FUNCTION &&
-          function->code()->has_debug_break_slots()) {
-        // Nothing to do. Function code already had debug break slots.
-        continue;
-      }
-
-      // If recompilation is not possible just skip it.
-      if (shared->is_toplevel() ||
-          !shared->allows_lazy_compilation() ||
-          shared->code()->kind() == Code::BUILTIN) {
-        continue;
-      }
-
-      // Make sure that the shared full code is compiled with debug
-      // break slots.
-      if (!shared->code()->has_debug_break_slots()) {
-        // Try to compile the full code with debug break slots. If it
-        // fails just keep the current code.
-        bool prev_force_debugger_active =
-            isolate_->debugger()->force_debugger_active();
-        isolate_->debugger()->set_force_debugger_active(true);
-        Handle<Code> code = Compiler::GetCodeForDebugging(
-            function).ToHandleChecked();
-        function->ReplaceCode(*code);
-        isolate_->debugger()->set_force_debugger_active(
-            prev_force_debugger_active);
-      }
-
-      // Keep function code in sync with shared function info.
-      function->set_code(shared->code());
+      // If recompilation is not possible just skip it.
+      if (shared->is_toplevel()) continue;
+      if (!shared->allows_lazy_compilation()) continue;
+      if (shared->code()->kind() == Code::BUILTIN) continue;
+
+      MaybeRecompileFunctionForDebugging(function);
     }
 
     RedirectActivationsToRecompiledCodeOnThread(isolate_,
                                                 isolate_->thread_local_top());
 
     ActiveFunctionsRedirector active_functions_redirector;
     isolate_->thread_manager()->IterateArchivedThreads(
         &active_functions_redirector);
   }
 }
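Taken together, the patched PrepareForBreakPoints() now runs in roughly this order (a paraphrase of the diff above, with error handling and details elided):

  flush the concurrent recompiler and deoptimize everything
  GC so that the heap is iterable
  walk the stacks, marking active functions via gc_metadata
  walk the heap:
    non-generator functions without break slots -> lazy-compile stub
    generator functions                         -> set aside in a list
    suspended generator objects                 -> continuation: pc offset to code offset
  recompile the suspended generators' functions; continuation: code offset back to pc offset
  mark the remaining generator functions for lazy recompilation
  recompile active functions with break slots and redirect frame pcs to the new code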
(...skipping 1616 matching lines...)
       already_signalled_ = false;
     }
     {
       Locker locker(reinterpret_cast<v8::Isolate*>(isolate_));
       isolate_->debugger()->CallMessageDispatchHandler();
     }
   }
 }
 
 } }  // namespace v8::internal
