| OLD | NEW |
| 1 // Copyright (c) 2011, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2011, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #include "vm/exceptions.h" | 5 #include "vm/exceptions.h" |
| 6 | 6 |
| 7 #include "vm/dart_api_impl.h" | 7 #include "vm/dart_api_impl.h" |
| 8 #include "vm/dart_entry.h" | 8 #include "vm/dart_entry.h" |
| 9 #include "vm/debugger.h" | 9 #include "vm/debugger.h" |
| 10 #include "vm/flags.h" | 10 #include "vm/flags.h" |
| 11 #include "vm/object.h" | 11 #include "vm/object.h" |
| 12 #include "vm/object_store.h" | 12 #include "vm/object_store.h" |
| 13 #include "vm/stack_frame.h" | 13 #include "vm/stack_frame.h" |
| 14 #include "vm/stub_code.h" | 14 #include "vm/stub_code.h" |
| 15 #include "vm/symbols.h" | 15 #include "vm/symbols.h" |
| 16 | 16 |
| 17 namespace dart { | 17 namespace dart { |
| 18 | 18 |
| 19 DEFINE_FLAG(bool, print_stacktrace_at_throw, false, | 19 DEFINE_FLAG(bool, print_stacktrace_at_throw, false, |
| 20 "Prints a stack trace everytime a throw occurs."); | 20 "Prints a stack trace everytime a throw occurs."); |
| 21 DEFINE_FLAG(bool, heap_profile_out_of_memory, false, | 21 DEFINE_FLAG(bool, heap_profile_out_of_memory, false, |
| 22 "Writes a heap profile on unhandled out-of-memory exceptions."); | 22 "Writes a heap profile on unhandled out-of-memory exceptions."); |
| 23 DEFINE_FLAG(bool, verbose_stacktrace, false, | 23 DEFINE_FLAG(bool, verbose_stacktrace, false, |
| 24 "Stack traces will include methods marked invisible."); | 24 "Stack traces will include methods marked invisible."); |
| 25 | 25 |
| 26 const char* Exceptions::kCastErrorDstName = "type cast"; | 26 const char* Exceptions::kCastErrorDstName = "type cast"; |
| 27 | 27 |
| 28 | 28 |
| 29 class StacktraceBuilder : public ValueObject { | |
| 30 public: | |
| 31 StacktraceBuilder() { } | |
| 32 virtual ~StacktraceBuilder() { } | |
| 33 | |
| 34 virtual void AddFrame(const Function& func, | |
| 35 const Code& code, | |
| 36 const Smi& offset) = 0; | |
| 37 }; | |
| 38 | |
| 39 | |
| 40 class RegularStacktraceBuilder : public StacktraceBuilder { | |
| 41 public: | |
| 42 RegularStacktraceBuilder(const GrowableObjectArray& func_list, | |
| 43 const GrowableObjectArray& code_list, | |
| 44 const GrowableObjectArray& pc_offset_list) | |
| 45 : func_list_(func_list), | |
| 46 code_list_(code_list), | |
| 47 pc_offset_list_(pc_offset_list) { } | |
| 48 ~RegularStacktraceBuilder() { } | |
| 49 | |
| 50 const GrowableObjectArray& func_list() const { return func_list_; } | |
| 51 const GrowableObjectArray& code_list() const { return code_list_; } | |
| 52 const GrowableObjectArray& pc_offset_list() const { return pc_offset_list_; } | |
| 53 | |
| 54 void AddFrame(const Function& func, const Code& code, const Smi& offset) { | |
| 55 func_list_.Add(func); | |
| 56 code_list_.Add(code); | |
| 57 pc_offset_list_.Add(offset); | |
| 58 } | |
| 59 | |
| 60 private: | |
| 61 const GrowableObjectArray& func_list_; | |
| 62 const GrowableObjectArray& code_list_; | |
| 63 const GrowableObjectArray& pc_offset_list_; | |
| 64 | |
| 65 DISALLOW_COPY_AND_ASSIGN(RegularStacktraceBuilder); | |
| 66 }; | |
| 67 | |
| 68 | |
| 69 class PreallocatedStacktraceBuilder : public StacktraceBuilder { | |
| 70 public: | |
| 71 explicit PreallocatedStacktraceBuilder(const Stacktrace& stacktrace) | |
| 72 : stacktrace_(stacktrace), | |
| 73 cur_index_(0) { | |
| 74 ASSERT(stacktrace_.raw() == | |
| 75 Isolate::Current()->object_store()->preallocated_stack_trace()); | |
| 76 } | |
| 77 ~PreallocatedStacktraceBuilder() { } | |
| 78 | |
| 79 void AddFrame(const Function& func, const Code& code, const Smi& offset); | |
| 80 | |
| 81 private: | |
| 82 static const int kNumTopframes = 3; | |
| 83 | |
| 84 const Stacktrace& stacktrace_; | |
| 85 intptr_t cur_index_; | |
| 86 | |
| 87 DISALLOW_COPY_AND_ASSIGN(PreallocatedStacktraceBuilder); | |
| 88 }; | |
| 89 | |
| 90 | |
| 91 void PreallocatedStacktraceBuilder::AddFrame(const Function& func, | |
| 92 const Code& code, | |
| 93 const Smi& offset) { | |
| 94 if (cur_index_ >= Stacktrace::kPreallocatedStackdepth) { | |
| 95 // The number of frames is overflowing the preallocated stack trace object. | |
| 96 Function& frame_func = Function::Handle(); | |
| 97 Code& frame_code = Code::Handle(); | |
| 98 Smi& frame_offset = Smi::Handle(); | |
| 99 intptr_t start = Stacktrace::kPreallocatedStackdepth - (kNumTopframes - 1); | |
| 100 intptr_t null_slot = start - 2; | |
| 101 // Add an empty slot to indicate the overflow so that the toString | |
| 102 // method can account for the overflow. | |
| 103 if (stacktrace_.FunctionAtFrame(null_slot) != Function::null()) { | |
| 104 stacktrace_.SetFunctionAtFrame(null_slot, frame_func); | |
| 105 stacktrace_.SetCodeAtFrame(null_slot, frame_code); | |
| 106 } | |
| 107 // Move frames one slot down so that we can accommodate the new frame. | |
| 108 for (intptr_t i = start; i < Stacktrace::kPreallocatedStackdepth; i++) { | |
| 109 intptr_t prev = (i - 1); | |
| 110 frame_func = stacktrace_.FunctionAtFrame(i); | |
| 111 frame_code = stacktrace_.CodeAtFrame(i); | |
| 112 frame_offset = stacktrace_.PcOffsetAtFrame(i); | |
| 113 stacktrace_.SetFunctionAtFrame(prev, frame_func); | |
| 114 stacktrace_.SetCodeAtFrame(prev, frame_code); | |
| 115 stacktrace_.SetPcOffsetAtFrame(prev, frame_offset); | |
| 116 } | |
| 117 cur_index_ = (Stacktrace::kPreallocatedStackdepth - 1); | |
| 118 } | |
| 119 stacktrace_.SetFunctionAtFrame(cur_index_, func); | |
| 120 stacktrace_.SetCodeAtFrame(cur_index_, code); | |
| 121 stacktrace_.SetPcOffsetAtFrame(cur_index_, offset); | |
| 122 cur_index_ += 1; | |
| 123 } | |
| 124 | |
| 125 | |
| 126 static bool ShouldShowFunction(const Function& function) { | 29 static bool ShouldShowFunction(const Function& function) { |
| 127 if (FLAG_verbose_stacktrace) { | 30 if (FLAG_verbose_stacktrace) { |
| 128 return true; | 31 return true; |
| 129 } | 32 } |
| 130 return function.is_visible(); | 33 return function.is_visible(); |
| 131 } | 34 } |
| 132 | 35 |
| 133 | 36 |
| 134 // Iterate through the stack frames and try to find a frame with an | 37 // Iterate through the stack frames and try to find a frame with an |
| 135 // exception handler. Once found, set the pc, sp and fp so that execution | 38 // exception handler. Once found, set the pc, sp and fp so that execution |
| 136 // can continue in that frame. | 39 // can continue in that frame. |
| 137 static bool FindExceptionHandler(uword* handler_pc, | 40 static bool FindExceptionHandler(uword* handler_pc, |
| 138 uword* handler_sp, | 41 uword* handler_sp, |
| 139 uword* handler_fp, | 42 uword* handler_fp, |
| 140 StacktraceBuilder* builder) { | 43 const GrowableObjectArray& func_list, |
| 44 const GrowableObjectArray& code_list, |
| 45 const GrowableObjectArray& pc_offset_list) { |
| 141 StackFrameIterator frames(StackFrameIterator::kDontValidateFrames); | 46 StackFrameIterator frames(StackFrameIterator::kDontValidateFrames); |
| 142 StackFrame* frame = frames.NextFrame(); | 47 StackFrame* frame = frames.NextFrame(); |
| 143 ASSERT(frame != NULL); // We expect to find a dart invocation frame. | 48 ASSERT(frame != NULL); // We expect to find a dart invocation frame. |
| 144 Function& func = Function::Handle(); | 49 Function& func = Function::Handle(); |
| 145 Code& code = Code::Handle(); | 50 Code& code = Code::Handle(); |
| 146 Smi& offset = Smi::Handle(); | 51 Smi& offset = Smi::Handle(); |
| 147 while (!frame->IsEntryFrame()) { | 52 while (!frame->IsEntryFrame()) { |
| 148 if (frame->IsDartFrame()) { | 53 if (frame->IsDartFrame()) { |
| 149 code = frame->LookupDartCode(); | 54 code = frame->LookupDartCode(); |
| 150 if (code.is_optimized()) { | 55 if (code.is_optimized()) { |
| 151 // For optimized frames, extract all the inlined functions if any | 56 // For optimized frames, extract all the inlined functions if any |
| 152 // into the stack trace. | 57 // into the stack trace. |
| 153 for (InlinedFunctionsIterator it(frame); !it.Done(); it.Advance()) { | 58 for (InlinedFunctionsIterator it(frame); !it.Done(); it.Advance()) { |
| 154 func = it.function(); | 59 func = it.function(); |
| 155 code = it.code(); | 60 code = it.code(); |
| 156 uword pc = it.pc(); | 61 uword pc = it.pc(); |
| 157 ASSERT(pc != 0); | 62 ASSERT(pc != 0); |
| 158 ASSERT(code.EntryPoint() <= pc); | 63 ASSERT(code.EntryPoint() <= pc); |
| 159 ASSERT(pc < (code.EntryPoint() + code.Size())); | 64 ASSERT(pc < (code.EntryPoint() + code.Size())); |
| 160 if (ShouldShowFunction(func)) { | 65 if (ShouldShowFunction(func)) { |
| 161 offset = Smi::New(pc - code.EntryPoint()); | 66 offset = Smi::New(pc - code.EntryPoint()); |
| 162 builder->AddFrame(func, code, offset); | 67 func_list.Add(func); |
| 68 code_list.Add(code); |
| 69 pc_offset_list.Add(offset); |
| 163 } | 70 } |
| 164 } | 71 } |
| 165 } else { | 72 } else { |
| 166 offset = Smi::New(frame->pc() - code.EntryPoint()); | 73 offset = Smi::New(frame->pc() - code.EntryPoint()); |
| 167 func = code.function(); | 74 func = code.function(); |
| 168 if (ShouldShowFunction(func)) { | 75 if (ShouldShowFunction(func)) { |
| 169 builder->AddFrame(func, code, offset); | 76 func_list.Add(func); |
| 77 code_list.Add(code); |
| 78 pc_offset_list.Add(offset); |
| 170 } | 79 } |
| 171 } | 80 } |
| 172 if (frame->FindExceptionHandler(handler_pc)) { | 81 if (frame->FindExceptionHandler(handler_pc)) { |
| 173 *handler_sp = frame->sp(); | 82 *handler_sp = frame->sp(); |
| 174 *handler_fp = frame->fp(); | 83 *handler_fp = frame->fp(); |
| 175 return true; | 84 return true; |
| 176 } | 85 } |
| 177 } | 86 } |
| 178 frame = frames.NextFrame(); | 87 frame = frames.NextFrame(); |
| 179 ASSERT(frame != NULL); | 88 ASSERT(frame != NULL); |
| (...skipping 75 matching lines...) |
| 255 typedef void (*ErrorHandler)(uword, uword, uword, RawError*); | 164 typedef void (*ErrorHandler)(uword, uword, uword, RawError*); |
| 256 ErrorHandler func = reinterpret_cast<ErrorHandler>( | 165 ErrorHandler func = reinterpret_cast<ErrorHandler>( |
| 257 StubCode::JumpToErrorHandlerEntryPoint()); | 166 StubCode::JumpToErrorHandlerEntryPoint()); |
| 258 func(program_counter, stack_pointer, frame_pointer, raw_error); | 167 func(program_counter, stack_pointer, frame_pointer, raw_error); |
| 259 UNREACHABLE(); | 168 UNREACHABLE(); |
| 260 } | 169 } |
| 261 | 170 |
| 262 | 171 |
| 263 static void ThrowExceptionHelper(const Instance& incoming_exception, | 172 static void ThrowExceptionHelper(const Instance& incoming_exception, |
| 264 const Instance& existing_stacktrace) { | 173 const Instance& existing_stacktrace) { |
| 265 bool use_preallocated_stacktrace = false; | 174 Instance& exception = Instance::Handle(incoming_exception.raw()); |
| 266 Isolate* isolate = Isolate::Current(); | |
| 267 Instance& exception = Instance::Handle(isolate, incoming_exception.raw()); | |
| 268 if (exception.IsNull()) { | 175 if (exception.IsNull()) { |
| 269 exception ^= Exceptions::Create(Exceptions::kNullThrown, | 176 exception ^= Exceptions::Create(Exceptions::kNullThrown, |
| 270 Object::empty_array()); | 177 Object::empty_array()); |
| 271 } else if (exception.raw() == isolate->object_store()->out_of_memory() || | |
| 272 exception.raw() == isolate->object_store()->stack_overflow()) { | |
| 273 use_preallocated_stacktrace = true; | |
| 274 } | 178 } |
| 275 uword handler_pc = 0; | 179 uword handler_pc = 0; |
| 276 uword handler_sp = 0; | 180 uword handler_sp = 0; |
| 277 uword handler_fp = 0; | 181 uword handler_fp = 0; |
| 278 Stacktrace& stacktrace = Stacktrace::Handle(isolate); | 182 const GrowableObjectArray& func_list = |
| 279 bool handler_exists = false; | 183 GrowableObjectArray::Handle(GrowableObjectArray::New()); |
| 280 if (use_preallocated_stacktrace) { | 184 const GrowableObjectArray& code_list = |
| 281 stacktrace ^= isolate->object_store()->preallocated_stack_trace(); | 185 GrowableObjectArray::Handle(GrowableObjectArray::New()); |
| 282 PreallocatedStacktraceBuilder frame_builder(stacktrace); | 186 const GrowableObjectArray& pc_offset_list = |
| 283 handler_exists = FindExceptionHandler(&handler_pc, | 187 GrowableObjectArray::Handle(GrowableObjectArray::New()); |
| 284 &handler_sp, | 188 bool handler_exists = FindExceptionHandler(&handler_pc, |
| 285 &handler_fp, | 189 &handler_sp, |
| 286 &frame_builder); | 190 &handler_fp, |
| 287 } else { | 191 func_list, |
| 288 RegularStacktraceBuilder frame_builder( | 192 code_list, |
| 289 GrowableObjectArray::Handle(isolate, GrowableObjectArray::New()), | 193 pc_offset_list); |
| 290 GrowableObjectArray::Handle(isolate, GrowableObjectArray::New()), | |
| 291 GrowableObjectArray::Handle(isolate, GrowableObjectArray::New())); | |
| 292 handler_exists = FindExceptionHandler(&handler_pc, | |
| 293 &handler_sp, | |
| 294 &handler_fp, | |
| 295 &frame_builder); | |
| 296 // TODO(5411263): At some point we can optimize by figuring out if a | |
| 297 // stack trace is needed based on whether the catch code specifies a | |
| 298 // stack trace object or there is a rethrow in the catch clause. | |
| 299 if (frame_builder.pc_offset_list().Length() != 0) { | |
| 300 // Create arrays for function, code and pc_offset triplet for each frame. | |
| 301 const Array& func_array = | |
| 302 Array::Handle(isolate, Array::MakeArray(frame_builder.func_list())); | |
| 303 const Array& code_array = | |
| 304 Array::Handle(isolate, Array::MakeArray(frame_builder.code_list())); | |
| 305 const Array& pc_offset_array = | |
| 306 Array::Handle(isolate, | |
| 307 Array::MakeArray(frame_builder.pc_offset_list())); | |
| 308 if (existing_stacktrace.IsNull()) { | |
| 309 stacktrace = Stacktrace::New(func_array, code_array, pc_offset_array); | |
| 310 } else { | |
| 311 stacktrace ^= existing_stacktrace.raw(); | |
| 312 stacktrace.Append(func_array, code_array, pc_offset_array); | |
| 313 } | |
| 314 } else { | |
| 315 stacktrace ^= existing_stacktrace.raw(); | |
| 316 } | |
| 317 } | |
| 318 // We expect to find a handler_pc, if the exception is unhandled | 194 // We expect to find a handler_pc, if the exception is unhandled |
| 319 // then we expect to at least have the dart entry frame on the | 195 // then we expect to at least have the dart entry frame on the |
| 320 // stack as Exceptions::Throw should happen only after a dart | 196 // stack as Exceptions::Throw should happen only after a dart |
| 321 // invocation has been done. | 197 // invocation has been done. |
| 322 ASSERT(handler_pc != 0); | 198 ASSERT(handler_pc != 0); |
| 323 | 199 |
| 200 // TODO(5411263): At some point we can optimize by figuring out if a |
| 201 // stack trace is needed based on whether the catch code specifies a |
| 202 // stack trace object or there is a rethrow in the catch clause. |
| 203 Stacktrace& stacktrace = Stacktrace::Handle(); |
| 204 if (pc_offset_list.Length() != 0) { |
| 205 if (existing_stacktrace.IsNull()) { |
| 206 stacktrace = Stacktrace::New(func_list, code_list, pc_offset_list); |
| 207 } else { |
| 208 stacktrace ^= existing_stacktrace.raw(); |
| 209 stacktrace.Append(func_list, code_list, pc_offset_list); |
| 210 } |
| 211 } else { |
| 212 stacktrace ^= existing_stacktrace.raw(); |
| 213 } |
| 324 if (FLAG_print_stacktrace_at_throw) { | 214 if (FLAG_print_stacktrace_at_throw) { |
| 325 OS::Print("Exception '%s' thrown:\n", exception.ToCString()); | 215 OS::Print("Exception '%s' thrown:\n", exception.ToCString()); |
| 326 OS::Print("%s\n", stacktrace.ToCString()); | 216 OS::Print("%s\n", stacktrace.ToCString()); |
| 327 } | 217 } |
| 328 if (handler_exists) { | 218 if (handler_exists) { |
| 329 // Found a dart handler for the exception, jump to it. | 219 // Found a dart handler for the exception, jump to it. |
| 330 JumpToExceptionHandler(handler_pc, | 220 JumpToExceptionHandler(handler_pc, |
| 331 handler_sp, | 221 handler_sp, |
| 332 handler_fp, | 222 handler_fp, |
| 333 exception, | 223 exception, |
| 334 stacktrace); | 224 stacktrace); |
| 335 } else { | 225 } else { |
| 336 if (FLAG_heap_profile_out_of_memory) { | 226 if (FLAG_heap_profile_out_of_memory) { |
| 227 Isolate* isolate = Isolate::Current(); |
| 337 if (exception.raw() == isolate->object_store()->out_of_memory()) { | 228 if (exception.raw() == isolate->object_store()->out_of_memory()) { |
| 338 isolate->heap()->ProfileToFile("out-of-memory"); | 229 isolate->heap()->ProfileToFile("out-of-memory"); |
| 339 } | 230 } |
| 340 } | 231 } |
| 341 // No dart exception handler found in this invocation sequence, | 232 // No dart exception handler found in this invocation sequence, |
| 342 // so we create an unhandled exception object and return to the | 233 // so we create an unhandled exception object and return to the |
| 343 // invocation stub so that it returns this unhandled exception | 234 // invocation stub so that it returns this unhandled exception |
| 344 // object. The C++ code which invoked this dart sequence can check | 235 // object. The C++ code which invoked this dart sequence can check |
| 345 // and do the appropriate thing (rethrow the exception to the | 236 // and do the appropriate thing (rethrow the exception to the |
| 346 // dart invocation sequence above it, print diagnostics and terminate | 237 // dart invocation sequence above it, print diagnostics and terminate |
| (...skipping 242 matching lines...) |
| 589 break; | 480 break; |
| 590 } | 481 } |
| 591 | 482 |
| 592 return DartLibraryCalls::ExceptionCreate(library, | 483 return DartLibraryCalls::ExceptionCreate(library, |
| 593 *class_name, | 484 *class_name, |
| 594 *constructor_name, | 485 *constructor_name, |
| 595 arguments); | 486 arguments); |
| 596 } | 487 } |
| 597 | 488 |
| 598 } // namespace dart | 489 } // namespace dart |
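
Note on the removed PreallocatedStacktraceBuilder (old lines 69-123): the subtle part is AddFrame's overflow handling, which clears one slot as an elision marker and shifts the deepest frames down one position so the newest frame always fits in the fixed-size, preallocated trace. Below is a minimal standalone sketch of that behavior; Frame, FixedTraceSketch, kCapacity, and kNumTopFrames are hypothetical stand-ins for the VM's Function/Code/Smi handles, the builder, Stacktrace::kPreallocatedStackdepth, and kNumTopframes, not VM code.

// Illustrative sketch only (not VM code): models the overflow handling of the
// removed PreallocatedStacktraceBuilder::AddFrame with plain value types.
#include <array>
#include <cstdio>
#include <optional>

struct Frame { int function_id; int pc_offset; };

constexpr int kCapacity = 6;      // stand-in for kPreallocatedStackdepth
constexpr int kNumTopFrames = 3;  // deepest frames kept rotating on overflow

class FixedTraceSketch {
 public:
  void AddFrame(const Frame& frame) {
    if (cur_index_ >= kCapacity) {
      // Overflow: clear one slot as a marker so printing code can show that
      // frames were dropped, then shift the deepest frames down by one.
      const int start = kCapacity - (kNumTopFrames - 1);
      const int null_slot = start - 2;
      slots_[null_slot].reset();
      for (int i = start; i < kCapacity; i++) {
        slots_[i - 1] = slots_[i];
      }
      cur_index_ = kCapacity - 1;
    }
    slots_[cur_index_] = frame;
    cur_index_ += 1;
  }

  void Print() const {
    for (int i = 0; i < kCapacity; i++) {
      if (slots_[i]) {
        std::printf("#%d fn=%d pc+%d\n", i, slots_[i]->function_id,
                    slots_[i]->pc_offset);
      } else {
        std::printf("#%d ... frames elided ...\n", i);
      }
    }
  }

 private:
  std::array<std::optional<Frame>, kCapacity> slots_;
  int cur_index_ = 0;
};

int main() {
  FixedTraceSketch trace;
  for (int i = 0; i < 10; i++) {
    trace.AddFrame(Frame{i, i * 4});  // deliberately deeper than the capacity
  }
  trace.Print();
  return 0;
}

Running this with more frames than the capacity leaves the earliest frames in place, one empty marker slot, and the most recent frames at the end, which is the shape the old toString path would render as an elided middle.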
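
For the new code path (ThrowExceptionHelper plus the widened FindExceptionHandler signature), the flow shown in the right-hand column is: walk the stack collecting (function, code, pc-offset) entries into growable lists until a handler is found, then either build a fresh Stacktrace or append to the existing one on a rethrow. Here is a rough model of that control flow under stated assumptions; TraceEntry, Trace, ThrowResult, FindHandlerSketch, and ThrowSketch are invented stand-ins for the VM's handle types and functions, not the real API.

// Illustrative sketch only (not VM code): control flow of the new
// ThrowExceptionHelper, with std::vector standing in for GrowableObjectArray.
#include <optional>
#include <string>
#include <vector>

struct TraceEntry { std::string function; int pc_offset; };
struct Trace { std::vector<TraceEntry> entries; };
struct ThrowResult { bool handled; Trace trace; };

// Stand-in for FindExceptionHandler: records an entry per visible frame and
// reports whether a frame with a catch clause was found.
bool FindHandlerSketch(const std::vector<std::string>& frames,
                       std::vector<TraceEntry>* entries) {
  int pc = 0;
  for (const std::string& name : frames) {
    entries->push_back(TraceEntry{name, pc});
    pc += 4;
    if (name == "handler") return true;  // frame with an exception handler
  }
  return false;  // unhandled: the VM would return an UnhandledException error
}

ThrowResult ThrowSketch(const std::vector<std::string>& frames,
                        std::optional<Trace> existing_trace) {
  std::vector<TraceEntry> entries;
  const bool handled = FindHandlerSketch(frames, &entries);
  Trace trace;
  if (entries.empty()) {
    // Nothing collected: keep whatever trace the caller already had.
    trace = existing_trace.value_or(Trace{});
  } else if (!existing_trace) {
    trace = Trace{entries};  // fresh throw: build a new trace
  } else {
    // Rethrow: append the newly collected frames to the existing trace.
    existing_trace->entries.insert(existing_trace->entries.end(),
                                   entries.begin(), entries.end());
    trace = *existing_trace;
  }
  return ThrowResult{handled, trace};
}

int main() {
  ThrowResult first = ThrowSketch({"throwing_fn", "caller", "handler"},
                                  std::nullopt);
  // Rethrow from the handler: the second walk's frames are appended.
  ThrowResult again = ThrowSketch({"handler", "outer", "main"}, first.trace);
  return (first.handled && again.handled) ? 0 : 1;
}

As the diff shows, once the preallocated-trace special case is gone the builder hierarchy has a single user left, so the new version simply passes the three growable lists straight through to the handler search.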