Unified Diff: src/debug.cc

Issue 262193003: Revert r21141. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 6 years, 7 months ago
Diff legend: unmarked lines are unchanged context; lines prefixed '-' are removed by this revert; lines prefixed '+' are the pre-r21141 code being restored.
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "v8.h"

 #include "api.h"
 #include "arguments.h"
 #include "bootstrapper.h"
 #include "code-stubs.h"
(...skipping 1863 matching lines...)
     } else if (frame->function()->IsJSFunction()) {
       JSFunction* function = frame->function();
       ASSERT(frame->LookupCode()->kind() == Code::FUNCTION);
       active_functions->Add(Handle<JSFunction>(function));
       function->shared()->code()->set_gc_metadata(active_code_marker);
     }
   }
 }


-// Figure out how many bytes of "pc_offset" correspond to actual code by
-// subtracting off the bytes that correspond to constant/veneer pools. See
-// Assembler::CheckConstPool() and Assembler::CheckVeneerPool(). Note that this
-// is only useful for architectures using constant pools or veneer pools.
-static int ComputeCodeOffsetFromPcOffset(Code *code, int pc_offset) {
-  ASSERT_EQ(code->kind(), Code::FUNCTION);
-  ASSERT(!code->has_debug_break_slots());
-  ASSERT_LE(0, pc_offset);
-  ASSERT_LT(pc_offset, code->instruction_end() - code->instruction_start());
-
-  int mask = RelocInfo::ModeMask(RelocInfo::CONST_POOL) |
-             RelocInfo::ModeMask(RelocInfo::VENEER_POOL);
-  byte *pc = code->instruction_start() + pc_offset;
-  int code_offset = pc_offset;
-  for (RelocIterator it(code, mask); !it.done(); it.next()) {
-    RelocInfo* info = it.rinfo();
-    if (info->pc() >= pc) break;
-    ASSERT(RelocInfo::IsConstPool(info->rmode()));
-    code_offset -= static_cast<int>(info->data());
-    ASSERT_LE(0, code_offset);
-  }
-
-  return code_offset;
-}
-
-
-// The inverse of ComputeCodeOffsetFromPcOffset.
-static int ComputePcOffsetFromCodeOffset(Code *code, int code_offset) {
-  ASSERT_EQ(code->kind(), Code::FUNCTION);
-
-  int mask = RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
-             RelocInfo::ModeMask(RelocInfo::CONST_POOL) |
-             RelocInfo::ModeMask(RelocInfo::VENEER_POOL);
-  int reloc = 0;
-  for (RelocIterator it(code, mask); !it.done(); it.next()) {
-    RelocInfo* info = it.rinfo();
-    if (info->pc() - code->instruction_start() - reloc >= code_offset) break;
-    if (RelocInfo::IsDebugBreakSlot(info->rmode())) {
-      reloc += Assembler::kDebugBreakSlotLength;
-    } else {
-      ASSERT(RelocInfo::IsConstPool(info->rmode()));
-      reloc += static_cast<int>(info->data());
-    }
-  }
-
-  int pc_offset = code_offset + reloc;
-
-  ASSERT_LT(code->instruction_start() + pc_offset, code->instruction_end());
-
-  return pc_offset;
-}
-
-
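The two helpers removed above form a pair of inverse mappings between a raw pc offset, which counts the constant/veneer pool bytes embedded in the instruction stream (and, in the inverse direction, debug break slots), and a "pure code" offset that skips them. The following standalone sketch illustrates the idea; the Pool type, the sample data, and the function names are hypothetical simplifications, not V8 API:

#include <cassert>
#include <vector>

// Hypothetical stand-in for one constant/veneer pool: `size` bytes of
// non-code data embedded in the instruction stream at `pc_offset`.
struct Pool {
  int pc_offset;
  int size;
};

// Subtract the pool bytes that precede `pc_offset` (pools sorted by pc).
// Mirrors the shape of ComputeCodeOffsetFromPcOffset above.
int CodeOffsetFromPcOffset(const std::vector<Pool>& pools, int pc_offset) {
  int code_offset = pc_offset;
  for (const Pool& pool : pools) {
    if (pool.pc_offset >= pc_offset) break;
    code_offset -= pool.size;
  }
  assert(code_offset >= 0);
  return code_offset;
}

// The inverse: add back the pool bytes that land before the code offset.
// Mirrors ComputePcOffsetFromCodeOffset, minus the debug-break-slot case.
int PcOffsetFromCodeOffset(const std::vector<Pool>& pools, int code_offset) {
  int reloc = 0;
  for (const Pool& pool : pools) {
    if (pool.pc_offset - reloc >= code_offset) break;
    reloc += pool.size;
  }
  return code_offset + reloc;
}

int main() {
  // One 8-byte pool at pc offset 16: every pc past it is 8 bytes "ahead"
  // of the corresponding pure-code offset.
  std::vector<Pool> pools = {{16, 8}};
  assert(CodeOffsetFromPcOffset(pools, 32) == 24);
  assert(PcOffsetFromCodeOffset(pools, 24) == 32);  // round-trips
  return 0;
}

This round trip is what let a suspended generator survive recompilation in the reverted scheme: the continuation is first rewritten as a pool-independent code offset, the function is recompiled with debug break slots, and the offset is then mapped back into the new code's pc space.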
 static void RedirectActivationsToRecompiledCodeOnThread(
     Isolate* isolate,
     ThreadLocalTop* top) {
   for (JavaScriptFrameIterator it(isolate, top); !it.done(); it.Advance()) {
     JavaScriptFrame* frame = it.frame();

     if (frame->is_optimized() || !frame->function()->IsJSFunction()) continue;

     JSFunction* function = frame->function();

     ASSERT(frame->LookupCode()->kind() == Code::FUNCTION);

     Handle<Code> frame_code(frame->LookupCode());
     if (frame_code->has_debug_break_slots()) continue;

     Handle<Code> new_code(function->shared()->code());
     if (new_code->kind() != Code::FUNCTION ||
         !new_code->has_debug_break_slots()) {
       continue;
     }

-    int old_pc_offset = frame->pc() - frame_code->instruction_start();
-    int code_offset = ComputeCodeOffsetFromPcOffset(*frame_code, old_pc_offset);
-    int new_pc_offset = ComputePcOffsetFromCodeOffset(*new_code, code_offset);
+    // Iterate over the RelocInfo in the original code to compute the sum of the
+    // constant pools and veneer pools sizes. (See Assembler::CheckConstPool()
+    // and Assembler::CheckVeneerPool())
+    // Note that this is only useful for architectures using constant pools or
+    // veneer pools.
+    int pool_mask = RelocInfo::ModeMask(RelocInfo::CONST_POOL) |
+                    RelocInfo::ModeMask(RelocInfo::VENEER_POOL);
+    int frame_pool_size = 0;
+    for (RelocIterator it(*frame_code, pool_mask); !it.done(); it.next()) {
+      RelocInfo* info = it.rinfo();
+      if (info->pc() >= frame->pc()) break;
+      frame_pool_size += static_cast<int>(info->data());
+    }
+    intptr_t frame_offset =
+        frame->pc() - frame_code->instruction_start() - frame_pool_size;
+
+    // Iterate over the RelocInfo for new code to find the number of bytes
+    // generated for debug slots and constant pools.
+    int debug_break_slot_bytes = 0;
+    int new_code_pool_size = 0;
+    int mask = RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
+               RelocInfo::ModeMask(RelocInfo::CONST_POOL) |
+               RelocInfo::ModeMask(RelocInfo::VENEER_POOL);
+    for (RelocIterator it(*new_code, mask); !it.done(); it.next()) {
+      // Check if the pc in the new code with debug break
+      // slots is before this slot.
+      RelocInfo* info = it.rinfo();
+      intptr_t new_offset = info->pc() - new_code->instruction_start() -
+                            new_code_pool_size - debug_break_slot_bytes;
+      if (new_offset >= frame_offset) {
+        break;
+      }
+
+      if (RelocInfo::IsDebugBreakSlot(info->rmode())) {
+        debug_break_slot_bytes += Assembler::kDebugBreakSlotLength;
+      } else {
+        ASSERT(RelocInfo::IsConstPool(info->rmode()));
+        // The size of the pools is encoded in the data.
+        new_code_pool_size += static_cast<int>(info->data());
+      }
+    }

     // Compute the equivalent pc in the new code.
-    byte* new_pc = new_code->instruction_start() + new_pc_offset;
+    byte* new_pc = new_code->instruction_start() + frame_offset +
+                   debug_break_slot_bytes + new_code_pool_size;

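As a concrete illustration of the pc translation above, here is a worked example with made-up byte counts (the numbers are illustrative only, not taken from any real architecture):

#include <cassert>

int main() {
  // Hypothetical byte counts, mirroring the variables used above.
  int old_pc_offset = 100;   // frame->pc() - frame_code->instruction_start()
  int frame_pool_size = 8;   // pool bytes found before frame->pc()
  int frame_offset = old_pc_offset - frame_pool_size;  // 92 pure-code bytes

  // In the recompiled code, the same 92 pure-code bytes are preceded by
  // one 6-byte debug break slot and a 16-byte constant pool.
  int debug_break_slot_bytes = 6;
  int new_code_pool_size = 16;
  int new_pc_offset = frame_offset + debug_break_slot_bytes + new_code_pool_size;
  assert(new_pc_offset == 114);  // new_pc = instruction_start() + 114
  return 0;
}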
     if (FLAG_trace_deopt) {
       PrintF("Replacing code %08" V8PRIxPTR " - %08" V8PRIxPTR " (%d) "
              "with %08" V8PRIxPTR " - %08" V8PRIxPTR " (%d) "
              "for debugging, "
              "changing pc from %08" V8PRIxPTR " to %08" V8PRIxPTR "\n",
              reinterpret_cast<intptr_t>(
                  frame_code->instruction_start()),
              reinterpret_cast<intptr_t>(
                  frame_code->instruction_start()) +
(...skipping 35 matching lines...)


 class ActiveFunctionsRedirector : public ThreadVisitor {
  public:
   void VisitThread(Isolate* isolate, ThreadLocalTop* top) {
     RedirectActivationsToRecompiledCodeOnThread(isolate, top);
   }
 };


-class ForceDebuggerActive {
- public:
-  explicit ForceDebuggerActive(Isolate *isolate) {
-    isolate_ = isolate;
-    old_state_ = isolate->debugger()->force_debugger_active();
-    isolate_->debugger()->set_force_debugger_active(true);
-  }
-
-  ~ForceDebuggerActive() {
-    isolate_->debugger()->set_force_debugger_active(old_state_);
-  }
-
- private:
-  Isolate *isolate_;
-  bool old_state_;
-
-  DISALLOW_COPY_AND_ASSIGN(ForceDebuggerActive);
-};
-
-
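The class removed above is a standard RAII scope guard: it flips a flag in its constructor and restores the previous value in its destructor, so every exit path (early return, exception) restores state. A generic sketch of the same pattern; FlagGuard, g_debugger_active, and RecompileForDebugging are hypothetical names, not V8 code:

// Minimal RAII flag guard in the style of the removed ForceDebuggerActive.
class FlagGuard {
 public:
  explicit FlagGuard(bool* flag) : flag_(flag), old_state_(*flag) {
    *flag_ = true;
  }
  ~FlagGuard() { *flag_ = old_state_; }

  FlagGuard(const FlagGuard&) = delete;
  FlagGuard& operator=(const FlagGuard&) = delete;

 private:
  bool* const flag_;
  const bool old_state_;
};

bool g_debugger_active = false;

void RecompileForDebugging() {
  FlagGuard guard(&g_debugger_active);  // flag is true for this whole scope
  // ... compile with debug break slots; may return early ...
}  // previous value restored here, on every exit path

int main() {
  RecompileForDebugging();
  return g_debugger_active ? 1 : 0;  // 0: the flag was restored to false
}

Note that the restored PrepareForBreakPoints below goes back to saving and restoring force_debugger_active by hand, which is equivalent but must be kept balanced manually.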
-void Debug::MaybeRecompileFunctionForDebugging(Handle<JSFunction> function) {
-  ASSERT_EQ(Code::FUNCTION, function->code()->kind());
-  ASSERT_EQ(function->code(), function->shared()->code());
-
-  if (function->code()->has_debug_break_slots()) return;
-
-  ForceDebuggerActive force_debugger_active(isolate_);
-  MaybeHandle<Code> code = Compiler::GetCodeForDebugging(function);
-  // Recompilation can fail. In that case leave the code as it was.
-  if (!code.is_null())
-    function->ReplaceCode(*code.ToHandleChecked());
-  ASSERT_EQ(function->code(), function->shared()->code());
-}
-
-
-void Debug::RecompileAndRelocateSuspendedGenerators(
-    const List<Handle<JSGeneratorObject> > &generators) {
-  for (int i = 0; i < generators.length(); i++) {
-    Handle<JSFunction> fun(generators[i]->function());
-
-    MaybeRecompileFunctionForDebugging(fun);
-
-    int code_offset = generators[i]->continuation();
-    int pc_offset = ComputePcOffsetFromCodeOffset(fun->code(), code_offset);
-    generators[i]->set_continuation(pc_offset);
-  }
-}
-
-
 void Debug::PrepareForBreakPoints() {
   // If preparing for the first break point make sure to deoptimize all
   // functions as debugging does not work with optimized code.
   if (!has_break_points_) {
     if (isolate_->concurrent_recompilation_enabled()) {
       isolate_->optimizing_compiler_thread()->Flush();
     }

     Deoptimizer::DeoptimizeAll(isolate_);

     Handle<Code> lazy_compile = isolate_->builtins()->CompileUnoptimized();

     // There will be at least one break point when we are done.
     has_break_points_ = true;

     // Keep the list of activated functions in a handlified list as it
     // is used both in GC and non-GC code.
     List<Handle<JSFunction> > active_functions(100);

-    // A list of all suspended generators.
-    List<Handle<JSGeneratorObject> > suspended_generators;
-
-    // A list of all generator functions. We need to recompile all functions,
-    // but we don't know until after visiting the whole heap which generator
-    // functions have suspended activations and which do not. As in the case of
-    // functions with activations on the stack, we need to be careful with
-    // generator functions with suspended activations because although they
-    // should be recompiled, recompilation can fail, and we need to avoid
-    // leaving the heap in an inconsistent state.
-    //
-    // We could perhaps avoid this list and instead re-use the GC metadata
-    // links.
-    List<Handle<JSFunction> > generator_functions;
-
     {
       // We are going to iterate heap to find all functions without
       // debug break slots.
       Heap* heap = isolate_->heap();
       heap->CollectAllGarbage(Heap::kMakeHeapIterableMask,
                               "preparing for breakpoints");

-      // Collecting the generators should not alter iterability of the heap.
-      ASSERT(heap->IsHeapIterable());
-
       // Ensure no GC in this scope as we are going to use gc_metadata
       // field in the Code object to mark active functions.
       DisallowHeapAllocation no_allocation;

       Object* active_code_marker = heap->the_hole_value();

       CollectActiveFunctionsFromThread(isolate_,
                                        isolate_->thread_local_top(),
                                        &active_functions,
                                        active_code_marker);
(...skipping 10 matching lines...)
       while (((obj = iterator.next()) != NULL)) {
         if (obj->IsJSFunction()) {
           JSFunction* function = JSFunction::cast(obj);
           SharedFunctionInfo* shared = function->shared();

           if (!shared->allows_lazy_compilation()) continue;
           if (!shared->script()->IsScript()) continue;
           if (function->IsBuiltin()) continue;
           if (shared->code()->gc_metadata() == active_code_marker) continue;

-          if (shared->is_generator()) {
-            generator_functions.Add(Handle<JSFunction>(function, isolate_));
-            continue;
-          }
-
           Code::Kind kind = function->code()->kind();
           if (kind == Code::FUNCTION &&
               !function->code()->has_debug_break_slots()) {
             function->set_code(*lazy_compile);
             function->shared()->set_code(*lazy_compile);
           } else if (kind == Code::BUILTIN &&
                      (function->IsInOptimizationQueue() ||
                       function->IsMarkedForOptimization() ||
                       function->IsMarkedForConcurrentOptimization())) {
             // Abort in-flight compilation.
             Code* shared_code = function->shared()->code();
             if (shared_code->kind() == Code::FUNCTION &&
                 shared_code->has_debug_break_slots()) {
               function->set_code(shared_code);
             } else {
               function->set_code(*lazy_compile);
               function->shared()->set_code(*lazy_compile);
             }
           }
-        } else if (obj->IsJSGeneratorObject()) {
-          JSGeneratorObject* gen = JSGeneratorObject::cast(obj);
-          if (!gen->is_suspended()) continue;
-
-          JSFunction* fun = gen->function();
-          ASSERT_EQ(fun->code()->kind(), Code::FUNCTION);
-          if (fun->code()->has_debug_break_slots()) continue;
-
-          int pc_offset = gen->continuation();
-          ASSERT_LT(0, pc_offset);
-
-          int code_offset =
-              ComputeCodeOffsetFromPcOffset(fun->code(), pc_offset);
-
-          // This will be fixed after we recompile the functions.
-          gen->set_continuation(code_offset);
-
-          suspended_generators.Add(Handle<JSGeneratorObject>(gen, isolate_));
         }
       }

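The continuation rewrite removed above is half of a round trip: the suspended generator's pc-based continuation is first turned into a pool-independent code offset here, and RecompileAndRelocateSuspendedGenerators (removed further up) later maps it back into the recompiled code. A worked example with made-up byte counts, for illustration only:

#include <cassert>

int main() {
  // Hypothetical byte counts for one suspended generator.
  int old_continuation = 40;  // pc offset where the generator suspended
  int old_pool_bytes = 8;     // pool bytes before that pc in the old code
  int code_offset = old_continuation - old_pool_bytes;  // stored: 32

  // After recompilation with debug break slots, the same code offset is
  // preceded by 4 pool bytes and 6 bytes of debug break slots.
  int new_pool_bytes = 4;
  int debug_break_slot_bytes = 6;
  int new_continuation = code_offset + new_pool_bytes + debug_break_slot_bytes;
  assert(new_continuation == 42);  // the value written back after recompiling
  return 0;
}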
       // Clear gc_metadata field.
       for (int i = 0; i < active_functions.length(); i++) {
         Handle<JSFunction> function = active_functions[i];
         function->shared()->code()->set_gc_metadata(Smi::FromInt(0));
       }
     }

-    // Recompile generator functions that have suspended activations, and
-    // relocate those activations.
-    RecompileAndRelocateSuspendedGenerators(suspended_generators);
-
-    // Mark generator functions that didn't have suspended activations for lazy
-    // recompilation. Note that this set does not include any active functions.
-    for (int i = 0; i < generator_functions.length(); i++) {
-      Handle<JSFunction> &function = generator_functions[i];
-      if (function->code()->kind() != Code::FUNCTION) continue;
-      if (function->code()->has_debug_break_slots()) continue;
-      function->set_code(*lazy_compile);
-      function->shared()->set_code(*lazy_compile);
-    }
-
     // Now recompile all functions with activation frames and
     // patch the return address to run in the new compiled code.
-    // It could be that some active functions were recompiled already by the
-    // suspended generator recompilation pass above; a generator with suspended
-    // activations could also have active activations. That's fine.
     for (int i = 0; i < active_functions.length(); i++) {
       Handle<JSFunction> function = active_functions[i];
       Handle<SharedFunctionInfo> shared(function->shared());

+      if (function->code()->kind() == Code::FUNCTION &&
+          function->code()->has_debug_break_slots()) {
+        // Nothing to do. Function code already had debug break slots.
+        continue;
+      }
+
       // If recompilation is not possible just skip it.
-      if (shared->is_toplevel()) continue;
-      if (!shared->allows_lazy_compilation()) continue;
-      if (shared->code()->kind() == Code::BUILTIN) continue;
+      if (shared->is_toplevel() ||
+          !shared->allows_lazy_compilation() ||
+          shared->code()->kind() == Code::BUILTIN) {
+        continue;
+      }

-      MaybeRecompileFunctionForDebugging(function);
+      // Make sure that the shared full code is compiled with debug
+      // break slots.
+      if (!shared->code()->has_debug_break_slots()) {
+        // Try to compile the full code with debug break slots. If it
+        // fails just keep the current code.
+        bool prev_force_debugger_active =
+            isolate_->debugger()->force_debugger_active();
+        isolate_->debugger()->set_force_debugger_active(true);
+        Handle<Code> code = Compiler::GetCodeForDebugging(
+            function).ToHandleChecked();
+        function->ReplaceCode(*code);
+        isolate_->debugger()->set_force_debugger_active(
+            prev_force_debugger_active);
+      }
+
+      // Keep function code in sync with shared function info.
+      function->set_code(shared->code());
     }

     RedirectActivationsToRecompiledCodeOnThread(isolate_,
                                                 isolate_->thread_local_top());

     ActiveFunctionsRedirector active_functions_redirector;
     isolate_->thread_manager()->IterateArchivedThreads(
         &active_functions_redirector);
   }
 }
(...skipping 1616 matching lines...)
       already_signalled_ = false;
     }
     {
       Locker locker(reinterpret_cast<v8::Isolate*>(isolate_));
       isolate_->debugger()->CallMessageDispatchHandler();
     }
   }
 }

 } }  // namespace v8::internal