| OLD | NEW |
| 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #include "vm/code_generator.h" | 5 #include "vm/code_generator.h" |
| 6 | 6 |
| 7 #include "vm/assembler.h" | 7 #include "vm/assembler.h" |
| 8 #include "vm/ast.h" | 8 #include "vm/ast.h" |
| 9 #include "vm/code_patcher.h" | 9 #include "vm/code_patcher.h" |
| 10 #include "vm/compiler.h" | 10 #include "vm/compiler.h" |
| (...skipping 1978 matching lines...) |
| 1989 #define DEOPT_REASON_TO_TEXT(name) case ICData::kDeopt##name: return #name; | 1989 #define DEOPT_REASON_TO_TEXT(name) case ICData::kDeopt##name: return #name; |
| 1990 DEOPT_REASONS(DEOPT_REASON_TO_TEXT) | 1990 DEOPT_REASONS(DEOPT_REASON_TO_TEXT) |
| 1991 #undef DEOPT_REASON_TO_TEXT | 1991 #undef DEOPT_REASON_TO_TEXT |
| 1992 default: | 1992 default: |
| 1993 UNREACHABLE(); | 1993 UNREACHABLE(); |
| 1994 return ""; | 1994 return ""; |
| 1995 } | 1995 } |
| 1996 } | 1996 } |
| 1997 | 1997 |
| 1998 | 1998 |
| 1999 void DeoptimizeAt(const Code& optimized_code, StackFrame* frame) { | 1999 void DeoptimizeAt(const Code& optimized_code, uword pc) { |
| 2000 ASSERT(optimized_code.is_optimized()); | 2000 ASSERT(optimized_code.is_optimized()); |
| 2001 Thread* thread = Thread::Current(); | 2001 Thread* thread = Thread::Current(); |
| 2002 Zone* zone = thread->zone(); | 2002 Zone* zone = thread->zone(); |
| 2003 ICData::DeoptReasonId deopt_reason = ICData::kDeoptUnknown; |
| 2004 uint32_t deopt_flags = 0; |
| 2005 const TypedData& deopt_info = TypedData::Handle(zone, |
| 2006 optimized_code.GetDeoptInfoAtPc(pc, &deopt_reason, &deopt_flags)); |
| 2007 ASSERT(!deopt_info.IsNull()); |
| 2003 const Function& function = Function::Handle(zone, optimized_code.function()); | 2008 const Function& function = Function::Handle(zone, optimized_code.function()); |
| 2004 const Error& error = | 2009 const Error& error = |
| 2005 Error::Handle(zone, Compiler::EnsureUnoptimizedCode(thread, function)); | 2010 Error::Handle(zone, Compiler::EnsureUnoptimizedCode(thread, function)); |
| 2006 if (!error.IsNull()) { | 2011 if (!error.IsNull()) { |
| 2007 Exceptions::PropagateError(error); | 2012 Exceptions::PropagateError(error); |
| 2008 } | 2013 } |
| 2009 const Code& unoptimized_code = | 2014 const Code& unoptimized_code = |
| 2010 Code::Handle(zone, function.unoptimized_code()); | 2015 Code::Handle(zone, function.unoptimized_code()); |
| 2011 ASSERT(!unoptimized_code.IsNull()); | 2016 ASSERT(!unoptimized_code.IsNull()); |
| 2012 // The switch to unoptimized code may have already occurred. | 2017 // The switch to unoptimized code may have already occurred. |
| 2013 if (function.HasOptimizedCode()) { | 2018 if (function.HasOptimizedCode()) { |
| 2014 function.SwitchToUnoptimizedCode(); | 2019 function.SwitchToUnoptimizedCode(); |
| 2015 } | 2020 } |
| 2016 | 2021 // Patch call site (lazy deoptimization is quite rare, patching it twice |
| 2017 #if defined(TARGET_ARCH_DBC) | 2022 // is not a performance issue). |
| 2023 uword lazy_deopt_jump_return = optimized_code.GetLazyDeoptReturnPc(); |
| 2024 uword lazy_deopt_jump_throw = optimized_code.GetLazyDeoptThrowPc(); |
| 2025 #if !defined(TARGET_ARCH_DBC) |
| 2026 ASSERT(lazy_deopt_jump_return != 0); |
| 2027 ASSERT(lazy_deopt_jump_throw != 0); |
| 2028 #endif |
| 2018 const Instructions& instrs = | 2029 const Instructions& instrs = |
| 2019 Instructions::Handle(zone, optimized_code.instructions()); | 2030 Instructions::Handle(zone, optimized_code.instructions()); |
| 2020 { | 2031 { |
| 2021 WritableInstructionsScope writable(instrs.PayloadStart(), instrs.size()); | 2032 WritableInstructionsScope writable(instrs.PayloadStart(), instrs.size()); |
| 2022 CodePatcher::InsertDeoptimizationCallAt(frame->pc()); | 2033 CodePatcher::InsertDeoptimizationCallAt(pc, lazy_deopt_jump_return); |
| 2023 if (FLAG_trace_patching) { | 2034 if (FLAG_trace_patching) { |
| 2024 const String& name = String::Handle(function.name()); | 2035 const String& name = String::Handle(function.name()); |
| 2025 OS::PrintErr( | 2036 OS::PrintErr( |
| 2026 "InsertDeoptimizationCallAt: 0x%" Px " for %s\n", | 2037 "InsertDeoptimizationCallAt: 0x%" Px " to 0x%" Px " for %s\n", |
| 2027 frame->pc(), name.ToCString()); | 2038 pc, lazy_deopt_jump_return, name.ToCString()); |
| 2028 } | 2039 } |
| 2029 const ExceptionHandlers& handlers = | 2040 const ExceptionHandlers& handlers = |
| 2030 ExceptionHandlers::Handle(zone, optimized_code.exception_handlers()); | 2041 ExceptionHandlers::Handle(zone, optimized_code.exception_handlers()); |
| 2031 RawExceptionHandlers::HandlerInfo info; | 2042 RawExceptionHandlers::HandlerInfo info; |
| 2032 for (intptr_t i = 0; i < handlers.num_entries(); ++i) { | 2043 for (intptr_t i = 0; i < handlers.num_entries(); ++i) { |
| 2033 handlers.GetHandlerInfo(i, &info); | 2044 handlers.GetHandlerInfo(i, &info); |
| 2034 const uword patch_pc = instrs.PayloadStart() + info.handler_pc_offset; | 2045 const uword patch_pc = instrs.PayloadStart() + info.handler_pc_offset; |
| 2035 CodePatcher::InsertDeoptimizationCallAt(patch_pc); | 2046 CodePatcher::InsertDeoptimizationCallAt(patch_pc, lazy_deopt_jump_throw); |
| 2036 if (FLAG_trace_patching) { | 2047 if (FLAG_trace_patching) { |
| 2037 OS::PrintErr(" at handler 0x%" Px "\n", patch_pc); | 2048 OS::PrintErr(" at handler 0x%" Px "\n", patch_pc); |
| 2038 } | 2049 } |
| 2039 } | 2050 } |
| 2040 } | 2051 } |
| 2041 #else // !DBC | |
| 2042 uword lazy_deopt_entry = | |
| 2043 StubCode::DeoptimizeLazyFromReturn_entry()->EntryPoint(); | |
| 2044 if (frame->pc() == lazy_deopt_entry) { | |
| 2045 // Deopt already scheduled. | |
| 2046 if (FLAG_trace_deoptimization) { | |
| 2047 THR_Print("Lazy deopt already scheduled for fp=%" Pp "\n", frame->fp()); | |
| 2048 } | |
| 2049 } else { | |
| 2050 uword deopt_pc = frame->pc(); | |
| 2051 ASSERT(optimized_code.ContainsInstructionAt(deopt_pc)); | |
| 2052 PendingLazyDeopt pair(frame->fp(), deopt_pc); | |
| 2053 thread->isolate()->pending_deopts()->Add(pair); | |
| 2054 frame->set_pc(lazy_deopt_entry); | |
| 2055 | |
| 2056 if (FLAG_trace_deoptimization) { | |
| 2057 THR_Print("Lazy deopt scheduled for fp=%" Pp ", pc=%" Pp "\n", | |
| 2058 frame->fp(), deopt_pc); | |
| 2059 } | |
| 2060 } | |
| 2061 #endif // !DBC | |
| 2062 | |
| 2063 // Mark code as dead (do not GC its embedded objects). | 2052 // Mark code as dead (do not GC its embedded objects). |
| 2064 optimized_code.set_is_alive(false); | 2053 optimized_code.set_is_alive(false); |
| 2065 } | 2054 } |
| 2066 | 2055 |
| 2067 | 2056 |
| 2068 // Currently checks only that all optimized frames have kDeoptIndex | 2057 // Currently checks only that all optimized frames have kDeoptIndex |
| 2069 // and unoptimized code has the kDeoptAfter. | 2058 // and unoptimized code has the kDeoptAfter. |
| 2070 void DeoptimizeFunctionsOnStack() { | 2059 void DeoptimizeFunctionsOnStack() { |
| 2071 DartFrameIterator iterator; | 2060 DartFrameIterator iterator; |
| 2072 StackFrame* frame = iterator.NextFrame(); | 2061 StackFrame* frame = iterator.NextFrame(); |
| 2073 Code& optimized_code = Code::Handle(); | 2062 Code& optimized_code = Code::Handle(); |
| 2074 while (frame != NULL) { | 2063 while (frame != NULL) { |
| 2075 optimized_code = frame->LookupDartCode(); | 2064 optimized_code = frame->LookupDartCode(); |
| 2076 if (optimized_code.is_optimized()) { | 2065 if (optimized_code.is_optimized()) { |
| 2077 DeoptimizeAt(optimized_code, frame); | 2066 DeoptimizeAt(optimized_code, frame->pc()); |
| 2078 } | 2067 } |
| 2079 frame = iterator.NextFrame(); | 2068 frame = iterator.NextFrame(); |
| 2080 } | 2069 } |
| 2081 } | 2070 } |
| 2082 | 2071 |
| 2083 #if !defined(DART_PRECOMPILED_RUNTIME) | 2072 #if !defined(DART_PRECOMPILED_RUNTIME) |
| 2084 #if !defined(TARGET_ARCH_DBC) | 2073 #if !defined(TARGET_ARCH_DBC) |
| 2085 static const intptr_t kNumberOfSavedCpuRegisters = kNumberOfCpuRegisters; | 2074 static const intptr_t kNumberOfSavedCpuRegisters = kNumberOfCpuRegisters; |
| 2086 static const intptr_t kNumberOfSavedFpuRegisters = kNumberOfFpuRegisters; | 2075 static const intptr_t kNumberOfSavedFpuRegisters = kNumberOfFpuRegisters; |
| 2087 #else | 2076 #else |
| (...skipping 60 matching lines...) |
| 2148 Function::Handle(thread->zone(), optimized_code.function()); | 2137 Function::Handle(thread->zone(), optimized_code.function()); |
| 2149 const bool deoptimizing_code = top_function.HasOptimizedCode(); | 2138 const bool deoptimizing_code = top_function.HasOptimizedCode(); |
| 2150 if (FLAG_trace_deoptimization) { | 2139 if (FLAG_trace_deoptimization) { |
| 2151 const Function& function = Function::Handle(optimized_code.function()); | 2140 const Function& function = Function::Handle(optimized_code.function()); |
| 2152 THR_Print("== Deoptimizing code for '%s', %s, %s\n", | 2141 THR_Print("== Deoptimizing code for '%s', %s, %s\n", |
| 2153 function.ToFullyQualifiedCString(), | 2142 function.ToFullyQualifiedCString(), |
| 2154 deoptimizing_code ? "code & frame" : "frame", | 2143 deoptimizing_code ? "code & frame" : "frame", |
| 2155 is_lazy_deopt ? "lazy-deopt" : ""); | 2144 is_lazy_deopt ? "lazy-deopt" : ""); |
| 2156 } | 2145 } |
| 2157 | 2146 |
| 2158 #if !defined(TARGET_ARCH_DBC) | |
| 2159 if (is_lazy_deopt) { | |
| 2160 uword deopt_pc = 0; | |
| 2161 MallocGrowableArray<PendingLazyDeopt>* pending_deopts = | |
| 2162 isolate->pending_deopts(); | |
| 2163 for (intptr_t i = pending_deopts->length() - 1; i >= 0; i--) { | |
| 2164 if ((*pending_deopts)[i].fp() == caller_frame->fp()) { | |
| 2165 deopt_pc = (*pending_deopts)[i].pc(); | |
| 2166 break; | |
| 2167 } | |
| 2168 } | |
| 2169 for (intptr_t i = pending_deopts->length() - 1; i >= 0; i--) { | |
| 2170 if ((*pending_deopts)[i].fp() <= caller_frame->fp()) { | |
| 2171 pending_deopts->RemoveAt(i); | |
| 2172 } | |
| 2173 } | |
| 2174 if (FLAG_trace_deoptimization) { | |
| 2175 THR_Print("Lazy deopt fp=%" Pp " pc=%" Pp "\n", | |
| 2176 caller_frame->fp(), deopt_pc); | |
| 2177 THR_Print("%" Pd " pending lazy deopts\n", | |
| 2178 pending_deopts->length()); | |
| 2179 } | |
| 2180 ASSERT(deopt_pc != 0); | |
| 2181 caller_frame->set_pc(deopt_pc); | |
| 2182 ASSERT(caller_frame->pc() == deopt_pc); | |
| 2183 } else { | |
| 2184 if (FLAG_trace_deoptimization) { | |
| 2185 THR_Print("Eager deopt fp=%" Pp " pc=%" Pp "\n", | |
| 2186 caller_frame->fp(), caller_frame->pc()); | |
| 2187 } | |
| 2188 } | |
| 2189 ASSERT(optimized_code.ContainsInstructionAt(caller_frame->pc())); | |
| 2190 #endif // !DBC | |
| 2191 | |
| 2192 // Copy the saved registers from the stack. | 2147 // Copy the saved registers from the stack. |
| 2193 fpu_register_t* fpu_registers; | 2148 fpu_register_t* fpu_registers; |
| 2194 intptr_t* cpu_registers; | 2149 intptr_t* cpu_registers; |
| 2195 CopySavedRegisters(saved_registers_address, &fpu_registers, &cpu_registers); | 2150 CopySavedRegisters(saved_registers_address, &fpu_registers, &cpu_registers); |
| 2196 | 2151 |
| 2197 // Create the DeoptContext. | 2152 // Create the DeoptContext. |
| 2198 DeoptContext* deopt_context = | 2153 DeoptContext* deopt_context = |
| 2199 new DeoptContext(caller_frame, | 2154 new DeoptContext(caller_frame, |
| 2200 optimized_code, | 2155 optimized_code, |
| 2201 DeoptContext::kDestIsOriginalFrame, | 2156 DeoptContext::kDestIsOriginalFrame, |
| (...skipping 40 matching lines...) |
| 2242 // The code will be the same as before. | 2197 // The code will be the same as before. |
| 2243 ASSERT(code.raw() == optimized_code.raw()); | 2198 ASSERT(code.raw() == optimized_code.raw()); |
| 2244 | 2199 |
| 2245 // Some sanity checking of the optimized code. | 2200 // Some sanity checking of the optimized code. |
| 2246 ASSERT(!optimized_code.IsNull() && optimized_code.is_optimized()); | 2201 ASSERT(!optimized_code.IsNull() && optimized_code.is_optimized()); |
| 2247 } | 2202 } |
| 2248 #endif | 2203 #endif |
| 2249 | 2204 |
| 2250 deopt_context->set_dest_frame(caller_frame); | 2205 deopt_context->set_dest_frame(caller_frame); |
| 2251 deopt_context->FillDestFrame(); | 2206 deopt_context->FillDestFrame(); |
| 2252 | |
| 2253 #else | 2207 #else |
| 2254 UNREACHABLE(); | 2208 UNREACHABLE(); |
| 2255 #endif // !DART_PRECOMPILED_RUNTIME | 2209 #endif // !DART_PRECOMPILED_RUNTIME |
| 2256 } | 2210 } |
| 2257 END_LEAF_RUNTIME_ENTRY | 2211 END_LEAF_RUNTIME_ENTRY |
| 2258 | 2212 |
| 2259 | 2213 |
| 2260 // This is the last step in the deoptimization, GC can occur. | 2214 // This is the last step in the deoptimization, GC can occur. |
| 2261 // Returns number of bytes to remove from the expression stack of the | 2215 // Returns number of bytes to remove from the expression stack of the |
| 2262 // bottom-most deoptimized frame. Those arguments were artificially injected | 2216 // bottom-most deoptimized frame. Those arguments were artificially injected |
| (...skipping 93 matching lines...) |
| 2356 const intptr_t elm_size = old_data.ElementSizeInBytes(); | 2310 const intptr_t elm_size = old_data.ElementSizeInBytes(); |
| 2357 const TypedData& new_data = | 2311 const TypedData& new_data = |
| 2358 TypedData::Handle(TypedData::New(cid, new_size, Heap::kOld)); | 2312 TypedData::Handle(TypedData::New(cid, new_size, Heap::kOld)); |
| 2359 TypedData::Copy(new_data, 0, old_data, 0, old_size * elm_size); | 2313 TypedData::Copy(new_data, 0, old_data, 0, old_size * elm_size); |
| 2360 typed_data_cell.SetAt(0, new_data); | 2314 typed_data_cell.SetAt(0, new_data); |
| 2361 arguments.SetReturn(new_data); | 2315 arguments.SetReturn(new_data); |
| 2362 } | 2316 } |
| 2363 | 2317 |
| 2364 | 2318 |
| 2365 } // namespace dart | 2319 } // namespace dart |