| OLD | NEW |
| 1 // Copyright (c) 2015, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2015, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #include "vm/thread.h" | 5 #include "vm/thread.h" |
| 6 | 6 |
| 7 #include "vm/compiler_stats.h" | 7 #include "vm/compiler_stats.h" |
| 8 #include "vm/dart_api_state.h" | 8 #include "vm/dart_api_state.h" |
| 9 #include "vm/growable_array.h" | 9 #include "vm/growable_array.h" |
| 10 #include "vm/isolate.h" | 10 #include "vm/isolate.h" |
| (...skipping 38 matching lines...) |
| 49 } | 49 } |
| 50 | 50 |
| 51 | 51 |
| 52 #if defined(DEBUG) | 52 #if defined(DEBUG) |
| 53 #define REUSABLE_HANDLE_SCOPE_INIT(object) \ | 53 #define REUSABLE_HANDLE_SCOPE_INIT(object) \ |
| 54 reusable_##object##_handle_scope_active_(false), | 54 reusable_##object##_handle_scope_active_(false), |
| 55 #else | 55 #else |
| 56 #define REUSABLE_HANDLE_SCOPE_INIT(object) | 56 #define REUSABLE_HANDLE_SCOPE_INIT(object) |
| 57 #endif // defined(DEBUG) | 57 #endif // defined(DEBUG) |
| 58 | 58 |
| 59 #define REUSABLE_HANDLE_INITIALIZERS(object) \ | 59 #define REUSABLE_HANDLE_INITIALIZERS(object) object##_handle_(NULL), |
| 60 object##_handle_(NULL), | |
| 61 | 60 |
| 62 | 61 |
| 63 Thread::Thread(Isolate* isolate) | 62 Thread::Thread(Isolate* isolate) |
| 64 : BaseThread(false), | 63 : BaseThread(false), |
| 65 stack_limit_(0), | 64 stack_limit_(0), |
| 66 stack_overflow_flags_(0), | 65 stack_overflow_flags_(0), |
| 67 isolate_(NULL), | 66 isolate_(NULL), |
| 68 heap_(NULL), | 67 heap_(NULL), |
| 69 top_exit_frame_info_(0), | 68 top_exit_frame_info_(0), |
| 70 store_buffer_block_(NULL), | 69 store_buffer_block_(NULL), |
| (...skipping 19 matching lines...) |
| 90 deferred_interrupts_mask_(0), | 89 deferred_interrupts_mask_(0), |
| 91 deferred_interrupts_(0), | 90 deferred_interrupts_(0), |
| 92 stack_overflow_count_(0), | 91 stack_overflow_count_(0), |
| 93 cha_(NULL), | 92 cha_(NULL), |
| 94 type_range_cache_(NULL), | 93 type_range_cache_(NULL), |
| 95 deopt_id_(0), | 94 deopt_id_(0), |
| 96 pending_functions_(GrowableObjectArray::null()), | 95 pending_functions_(GrowableObjectArray::null()), |
| 97 sticky_error_(Error::null()), | 96 sticky_error_(Error::null()), |
| 98 compiler_stats_(NULL), | 97 compiler_stats_(NULL), |
| 99 REUSABLE_HANDLE_LIST(REUSABLE_HANDLE_INITIALIZERS) | 98 REUSABLE_HANDLE_LIST(REUSABLE_HANDLE_INITIALIZERS) |
| 100 REUSABLE_HANDLE_LIST(REUSABLE_HANDLE_SCOPE_INIT) | 99 REUSABLE_HANDLE_LIST(REUSABLE_HANDLE_SCOPE_INIT) safepoint_state_(0), |
| 101 safepoint_state_(0), | |
| 102 execution_state_(kThreadInNative), | 100 execution_state_(kThreadInNative), |
| 103 next_(NULL) { | 101 next_(NULL) { |
| 104 NOT_IN_PRODUCT( | 102 #if !defined(PRODUCT) |
| 105 dart_stream_ = Timeline::GetDartStream(); | 103 dart_stream_ = Timeline::GetDartStream(); |
| 106 ASSERT(dart_stream_ != NULL); | 104 ASSERT(dart_stream_ != NULL); |
| 107 ) | 105 #endif |
| 108 #define DEFAULT_INIT(type_name, member_name, init_expr, default_init_value) \ | 106 #define DEFAULT_INIT(type_name, member_name, init_expr, default_init_value) \ |
| 109 member_name = default_init_value; | 107 member_name = default_init_value; |
| 110 CACHED_CONSTANTS_LIST(DEFAULT_INIT) | 108 CACHED_CONSTANTS_LIST(DEFAULT_INIT) |
| 111 #undef DEFAULT_INIT | 109 #undef DEFAULT_INIT |
| 112 | 110 |
| 113 #define DEFAULT_INIT(name) \ | 111 #define DEFAULT_INIT(name) name##_entry_point_ = 0; |
| 114 name##_entry_point_ = 0; | 112 RUNTIME_ENTRY_LIST(DEFAULT_INIT) |
| 115 RUNTIME_ENTRY_LIST(DEFAULT_INIT) | |
| 116 #undef DEFAULT_INIT | 113 #undef DEFAULT_INIT |
| 117 | 114 |
| 118 #define DEFAULT_INIT(returntype, name, ...) \ | 115 #define DEFAULT_INIT(returntype, name, ...) name##_entry_point_ = 0; |
| 119 name##_entry_point_ = 0; | 116 LEAF_RUNTIME_ENTRY_LIST(DEFAULT_INIT) |
| 120 LEAF_RUNTIME_ENTRY_LIST(DEFAULT_INIT) | |
| 121 #undef DEFAULT_INIT | 117 #undef DEFAULT_INIT |
| 122 | 118 |
| 123 // We cannot initialize the VM constants here for the vm isolate thread | 119 // We cannot initialize the VM constants here for the vm isolate thread |
| 124 // due to boot strapping issues. | 120 // due to boot strapping issues. |
| 125 if ((Dart::vm_isolate() != NULL) && (isolate != Dart::vm_isolate())) { | 121 if ((Dart::vm_isolate() != NULL) && (isolate != Dart::vm_isolate())) { |
| 126 InitVMConstants(); | 122 InitVMConstants(); |
| 127 } | 123 } |
| 128 | 124 |
| 129 if (FLAG_support_compiler_stats) { | 125 if (FLAG_support_compiler_stats) { |
| 130 compiler_stats_ = new CompilerStats(isolate); | 126 compiler_stats_ = new CompilerStats(isolate); |
| 131 if (FLAG_compiler_benchmark) { | 127 if (FLAG_compiler_benchmark) { |
| 132 compiler_stats_->EnableBenchmark(); | 128 compiler_stats_->EnableBenchmark(); |
| 133 } | 129 } |
| 134 } | 130 } |
| 135 } | 131 } |
| 136 | 132 |
| 137 | 133 |
| 138 static const struct ALIGN16 { | 134 static const struct ALIGN16 { |
| 139 uint64_t a; | 135 uint64_t a; |
| 140 uint64_t b; | 136 uint64_t b; |
| 141 } double_negate_constant = | 137 } double_negate_constant = {0x8000000000000000LL, 0x8000000000000000LL}; |
| 142 {0x8000000000000000LL, 0x8000000000000000LL}; | |
| 143 | 138 |
| 144 static const struct ALIGN16 { | 139 static const struct ALIGN16 { |
| 145 uint64_t a; | 140 uint64_t a; |
| 146 uint64_t b; | 141 uint64_t b; |
| 147 } double_abs_constant = | 142 } double_abs_constant = {0x7FFFFFFFFFFFFFFFLL, 0x7FFFFFFFFFFFFFFFLL}; |
| 148 {0x7FFFFFFFFFFFFFFFLL, 0x7FFFFFFFFFFFFFFFLL}; | |
| 149 | 143 |
| 150 static const struct ALIGN16 { | 144 static const struct ALIGN16 { |
| 151 uint32_t a; | 145 uint32_t a; |
| 152 uint32_t b; | 146 uint32_t b; |
| 153 uint32_t c; | 147 uint32_t c; |
| 154 uint32_t d; | 148 uint32_t d; |
| 155 } float_not_constant = | 149 } float_not_constant = {0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF}; |
| 156 { 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF }; | |
| 157 | 150 |
| 158 static const struct ALIGN16 { | 151 static const struct ALIGN16 { |
| 159 uint32_t a; | 152 uint32_t a; |
| 160 uint32_t b; | 153 uint32_t b; |
| 161 uint32_t c; | 154 uint32_t c; |
| 162 uint32_t d; | 155 uint32_t d; |
| 163 } float_negate_constant = | 156 } float_negate_constant = {0x80000000, 0x80000000, 0x80000000, 0x80000000}; |
| 164 { 0x80000000, 0x80000000, 0x80000000, 0x80000000 }; | |
| 165 | 157 |
| 166 static const struct ALIGN16 { | 158 static const struct ALIGN16 { |
| 167 uint32_t a; | 159 uint32_t a; |
| 168 uint32_t b; | 160 uint32_t b; |
| 169 uint32_t c; | 161 uint32_t c; |
| 170 uint32_t d; | 162 uint32_t d; |
| 171 } float_absolute_constant = | 163 } float_absolute_constant = {0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF}; |
| 172 { 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF }; | |
| 173 | 164 |
| 174 static const struct ALIGN16 { | 165 static const struct ALIGN16 { |
| 175 uint32_t a; | 166 uint32_t a; |
| 176 uint32_t b; | 167 uint32_t b; |
| 177 uint32_t c; | 168 uint32_t c; |
| 178 uint32_t d; | 169 uint32_t d; |
| 179 } float_zerow_constant = | 170 } float_zerow_constant = {0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0x00000000}; |
| 180 { 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0x00000000 }; | |
| 181 | 171 |
| 182 | 172 |
| 183 void Thread::InitVMConstants() { | 173 void Thread::InitVMConstants() { |
| 184 #define ASSERT_VM_HEAP(type_name, member_name, init_expr, default_init_value) \ | 174 #define ASSERT_VM_HEAP(type_name, member_name, init_expr, default_init_value) \ |
| 185 ASSERT((init_expr)->IsOldObject()); | 175 ASSERT((init_expr)->IsOldObject()); |
| 186 CACHED_VM_OBJECTS_LIST(ASSERT_VM_HEAP) | 176 CACHED_VM_OBJECTS_LIST(ASSERT_VM_HEAP) |
| 187 #undef ASSERT_VM_HEAP | 177 #undef ASSERT_VM_HEAP |
| 188 | 178 |
| 189 #define INIT_VALUE(type_name, member_name, init_expr, default_init_value) \ | 179 #define INIT_VALUE(type_name, member_name, init_expr, default_init_value) \ |
| 190 ASSERT(member_name == default_init_value); \ | 180 ASSERT(member_name == default_init_value); \ |
| 191 member_name = (init_expr); | 181 member_name = (init_expr); |
| 192 CACHED_CONSTANTS_LIST(INIT_VALUE) | 182 CACHED_CONSTANTS_LIST(INIT_VALUE) |
| 193 #undef INIT_VALUE | 183 #undef INIT_VALUE |
| 194 | 184 |
| 195 #define INIT_VALUE(name) \ | 185 #define INIT_VALUE(name) \ |
| 196 ASSERT(name##_entry_point_ == 0); \ | 186 ASSERT(name##_entry_point_ == 0); \ |
| 197 name##_entry_point_ = k##name##RuntimeEntry.GetEntryPoint(); | 187 name##_entry_point_ = k##name##RuntimeEntry.GetEntryPoint(); |
| 198 RUNTIME_ENTRY_LIST(INIT_VALUE) | 188 RUNTIME_ENTRY_LIST(INIT_VALUE) |
| 199 #undef INIT_VALUE | 189 #undef INIT_VALUE |
| 200 | 190 |
| 201 #define INIT_VALUE(returntype, name, ...) \ | 191 #define INIT_VALUE(returntype, name, ...) \ |
| 202 ASSERT(name##_entry_point_ == 0); \ | 192 ASSERT(name##_entry_point_ == 0); \ |
| 203 name##_entry_point_ = k##name##RuntimeEntry.GetEntryPoint(); | 193 name##_entry_point_ = k##name##RuntimeEntry.GetEntryPoint(); |
| 204 LEAF_RUNTIME_ENTRY_LIST(INIT_VALUE) | 194 LEAF_RUNTIME_ENTRY_LIST(INIT_VALUE) |
| 205 #undef INIT_VALUE | 195 #undef INIT_VALUE |
| 206 | 196 |
| 207 // Setup the thread specific reusable handles. | 197 // Setup the thread specific reusable handles. |
| 208 #define REUSABLE_HANDLE_ALLOCATION(object) \ | 198 #define REUSABLE_HANDLE_ALLOCATION(object) \ |
| 209 this->object##_handle_ = this->AllocateReusableHandle<object>(); | 199 this->object##_handle_ = this->AllocateReusableHandle<object>(); |
| 210 REUSABLE_HANDLE_LIST(REUSABLE_HANDLE_ALLOCATION) | 200 REUSABLE_HANDLE_LIST(REUSABLE_HANDLE_ALLOCATION) |
| 211 #undef REUSABLE_HANDLE_ALLOCATION | 201 #undef REUSABLE_HANDLE_ALLOCATION |
| 212 } | 202 } |
| 213 | 203 |
| 214 | 204 |
| 215 RawGrowableObjectArray* Thread::pending_functions() { | 205 RawGrowableObjectArray* Thread::pending_functions() { |
| 216 if (pending_functions_ == GrowableObjectArray::null()) { | 206 if (pending_functions_ == GrowableObjectArray::null()) { |
| 217 pending_functions_ = GrowableObjectArray::New(Heap::kOld); | 207 pending_functions_ = GrowableObjectArray::New(Heap::kOld); |
| (...skipping 55 matching lines...) |
| 273 const bool kIsMutatorThread = true; | 263 const bool kIsMutatorThread = true; |
| 274 isolate->UnscheduleThread(thread, kIsMutatorThread); | 264 isolate->UnscheduleThread(thread, kIsMutatorThread); |
| 275 } | 265 } |
| 276 | 266 |
| 277 | 267 |
| 278 bool Thread::EnterIsolateAsHelper(Isolate* isolate, | 268 bool Thread::EnterIsolateAsHelper(Isolate* isolate, |
| 279 TaskKind kind, | 269 TaskKind kind, |
| 280 bool bypass_safepoint) { | 270 bool bypass_safepoint) { |
| 281 ASSERT(kind != kMutatorTask); | 271 ASSERT(kind != kMutatorTask); |
| 282 const bool kIsNotMutatorThread = false; | 272 const bool kIsNotMutatorThread = false; |
| 283 Thread* thread = isolate->ScheduleThread(kIsNotMutatorThread, | 273 Thread* thread = |
| 284 bypass_safepoint); | 274 isolate->ScheduleThread(kIsNotMutatorThread, bypass_safepoint); |
| 285 if (thread != NULL) { | 275 if (thread != NULL) { |
| 286 ASSERT(thread->store_buffer_block_ == NULL); | 276 ASSERT(thread->store_buffer_block_ == NULL); |
| 287 // TODO(koda): Use StoreBufferAcquire once we properly flush | 277 // TODO(koda): Use StoreBufferAcquire once we properly flush |
| 288 // before Scavenge. | 278 // before Scavenge. |
| 289 thread->store_buffer_block_ = | 279 thread->store_buffer_block_ = |
| 290 thread->isolate()->store_buffer()->PopEmptyBlock(); | 280 thread->isolate()->store_buffer()->PopEmptyBlock(); |
| 291 // This thread should not be the main mutator. | 281 // This thread should not be the main mutator. |
| 292 thread->task_kind_ = kind; | 282 thread->task_kind_ = kind; |
| 293 ASSERT(!thread->IsMutatorThread()); | 283 ASSERT(!thread->IsMutatorThread()); |
| 294 return true; | 284 return true; |
| (...skipping 24 matching lines...) |
| 319 ASSERT(store_buffer_block_ != NULL); | 309 ASSERT(store_buffer_block_ != NULL); |
| 320 StoreBufferRelease(StoreBuffer::kIgnoreThreshold); | 310 StoreBufferRelease(StoreBuffer::kIgnoreThreshold); |
| 321 // Make sure to get an *empty* block; the isolate needs all entries | 311 // Make sure to get an *empty* block; the isolate needs all entries |
| 322 // at GC time. | 312 // at GC time. |
| 323 // TODO(koda): Replace with an epilogue (PrepareAfterGC) that acquires. | 313 // TODO(koda): Replace with an epilogue (PrepareAfterGC) that acquires. |
| 324 store_buffer_block_ = isolate()->store_buffer()->PopEmptyBlock(); | 314 store_buffer_block_ = isolate()->store_buffer()->PopEmptyBlock(); |
| 325 } | 315 } |
| 326 | 316 |
| 327 | 317 |
| 328 void Thread::SetStackLimitFromStackBase(uword stack_base) { | 318 void Thread::SetStackLimitFromStackBase(uword stack_base) { |
| 329 // Set stack limit. | 319 // Set stack limit. |
| 330 #if !defined(TARGET_ARCH_DBC) | 320 #if !defined(TARGET_ARCH_DBC) |
| 331 #if defined(USING_SIMULATOR) | 321 #if defined(USING_SIMULATOR) |
| 332 // Ignore passed-in native stack top and use Simulator stack top. | 322 // Ignore passed-in native stack top and use Simulator stack top. |
| 333 Simulator* sim = Simulator::Current(); // May allocate a simulator. | 323 Simulator* sim = Simulator::Current(); // May allocate a simulator. |
| 334 ASSERT(isolate()->simulator() == sim); // Isolate's simulator is current one. | 324 ASSERT(isolate()->simulator() == sim); // Isolate's simulator is current one. |
| 335 stack_base = sim->StackTop(); | 325 stack_base = sim->StackTop(); |
| 336 // The overflow area is accounted for by the simulator. | 326 // The overflow area is accounted for by the simulator. |
| 337 #endif | 327 #endif |
| 338 SetStackLimit(stack_base - OSThread::GetSpecifiedStackSize()); | 328 SetStackLimit(stack_base - OSThread::GetSpecifiedStackSize()); |
| 339 #else | 329 #else |
| 340 SetStackLimit(Simulator::Current()->StackTop()); | 330 SetStackLimit(Simulator::Current()->StackTop()); |
| 341 #endif // !defined(TARGET_ARCH_DBC) | 331 #endif // !defined(TARGET_ARCH_DBC) |
| 342 } | 332 } |
| 343 | 333 |
| 344 | 334 |
| 345 void Thread::SetStackLimit(uword limit) { | 335 void Thread::SetStackLimit(uword limit) { |
| 346 // The thread setting the stack limit is not necessarily the thread which | 336 // The thread setting the stack limit is not necessarily the thread which |
| (...skipping 16 matching lines...) |
| 363 uword Thread::GetCurrentStackPointer() { | 353 uword Thread::GetCurrentStackPointer() { |
| 364 #if !defined(TARGET_ARCH_DBC) | 354 #if !defined(TARGET_ARCH_DBC) |
| 365 // Since AddressSanitizer's detect_stack_use_after_return instruments the | 355 // Since AddressSanitizer's detect_stack_use_after_return instruments the |
| 366 // C++ code to give out fake stack addresses, we call a stub in that case. | 356 // C++ code to give out fake stack addresses, we call a stub in that case. |
| 367 ASSERT(StubCode::GetStackPointer_entry() != NULL); | 357 ASSERT(StubCode::GetStackPointer_entry() != NULL); |
| 368 uword (*func)() = reinterpret_cast<uword (*)()>( | 358 uword (*func)() = reinterpret_cast<uword (*)()>( |
| 369 StubCode::GetStackPointer_entry()->EntryPoint()); | 359 StubCode::GetStackPointer_entry()->EntryPoint()); |
| 370 #else | 360 #else |
| 371 uword (*func)() = NULL; | 361 uword (*func)() = NULL; |
| 372 #endif | 362 #endif |
| 373 // But for performance (and to support simulators), we normally use a local. | 363 // But for performance (and to support simulators), we normally use a local. |
| 374 #if defined(__has_feature) | 364 #if defined(__has_feature) |
| 375 #if __has_feature(address_sanitizer) | 365 #if __has_feature(address_sanitizer) |
| 376 uword current_sp = func(); | 366 uword current_sp = func(); |
| 377 return current_sp; | 367 return current_sp; |
| 378 #else | 368 #else |
| 379 uword stack_allocated_local_address = reinterpret_cast<uword>(&func); | 369 uword stack_allocated_local_address = reinterpret_cast<uword>(&func); |
| 380 return stack_allocated_local_address; | 370 return stack_allocated_local_address; |
| 381 #endif | 371 #endif |
| 382 #else | 372 #else |
| 383 uword stack_allocated_local_address = reinterpret_cast<uword>(&func); | 373 uword stack_allocated_local_address = reinterpret_cast<uword>(&func); |
| (...skipping 115 matching lines...) |
| 499 heap()->CollectGarbage(Heap::kNew); | 489 heap()->CollectGarbage(Heap::kNew); |
| 500 } | 490 } |
| 501 } | 491 } |
| 502 if ((interrupt_bits & kMessageInterrupt) != 0) { | 492 if ((interrupt_bits & kMessageInterrupt) != 0) { |
| 503 MessageHandler::MessageStatus status = | 493 MessageHandler::MessageStatus status = |
| 504 isolate()->message_handler()->HandleOOBMessages(); | 494 isolate()->message_handler()->HandleOOBMessages(); |
| 505 if (status != MessageHandler::kOK) { | 495 if (status != MessageHandler::kOK) { |
| 506 // False result from HandleOOBMessages signals that the isolate should | 496 // False result from HandleOOBMessages signals that the isolate should |
| 507 // be terminating. | 497 // be terminating. |
| 508 if (FLAG_trace_isolates) { | 498 if (FLAG_trace_isolates) { |
| 509 OS::Print("[!] Terminating isolate due to OOB message:\n" | 499 OS::Print( |
| 510 "\tisolate: %s\n", isolate()->name()); | 500 "[!] Terminating isolate due to OOB message:\n" |
| 501 "\tisolate: %s\n", |
| 502 isolate()->name()); |
| 511 } | 503 } |
| 512 Thread* thread = Thread::Current(); | 504 Thread* thread = Thread::Current(); |
| 513 const Error& error = Error::Handle(thread->sticky_error()); | 505 const Error& error = Error::Handle(thread->sticky_error()); |
| 514 ASSERT(!error.IsNull() && error.IsUnwindError()); | 506 ASSERT(!error.IsNull() && error.IsUnwindError()); |
| 515 thread->clear_sticky_error(); | 507 thread->clear_sticky_error(); |
| 516 return error.raw(); | 508 return error.raw(); |
| 517 } | 509 } |
| 518 } | 510 } |
| 519 return Error::null(); | 511 return Error::null(); |
| 520 } | 512 } |
| (...skipping 50 matching lines...) |
| 571 // thread is at a safepoint in the following situations : | 563 // thread is at a safepoint in the following situations : |
| 572 // - background compiler thread finalizing and installing code | 564 // - background compiler thread finalizing and installing code |
| 573 // - disassembly of the generated code is done after compilation | 565 // - disassembly of the generated code is done after compilation |
| 574 // So essentially we state that garbage collection is possible only | 566 // So essentially we state that garbage collection is possible only |
| 575 // when we are not at a safepoint. | 567 // when we are not at a safepoint. |
| 576 return !IsAtSafepoint(); | 568 return !IsAtSafepoint(); |
| 577 } | 569 } |
| 578 | 570 |
| 579 | 571 |
| 580 bool Thread::IsExecutingDartCode() const { | 572 bool Thread::IsExecutingDartCode() const { |
| 581 return (top_exit_frame_info() == 0) && | 573 return (top_exit_frame_info() == 0) && (vm_tag() == VMTag::kDartTagId); |
| 582 (vm_tag() == VMTag::kDartTagId); | |
| 583 } | 574 } |
| 584 | 575 |
| 585 | 576 |
| 586 bool Thread::HasExitedDartCode() const { | 577 bool Thread::HasExitedDartCode() const { |
| 587 return (top_exit_frame_info() != 0) && | 578 return (top_exit_frame_info() != 0) && (vm_tag() != VMTag::kDartTagId); |
| 588 (vm_tag() != VMTag::kDartTagId); | |
| 589 } | 579 } |
| 590 | 580 |
| 591 | 581 |
| 592 template<class C> | 582 template <class C> |
| 593 C* Thread::AllocateReusableHandle() { | 583 C* Thread::AllocateReusableHandle() { |
| 594 C* handle = reinterpret_cast<C*>(reusable_handles_.AllocateScopedHandle()); | 584 C* handle = reinterpret_cast<C*>(reusable_handles_.AllocateScopedHandle()); |
| 595 C::initializeHandle(handle, C::null()); | 585 C::initializeHandle(handle, C::null()); |
| 596 return handle; | 586 return handle; |
| 597 } | 587 } |
| 598 | 588 |
| 599 | 589 |
| 600 void Thread::ClearReusableHandles() { | 590 void Thread::ClearReusableHandles() { |
| 601 #define CLEAR_REUSABLE_HANDLE(object) \ | 591 #define CLEAR_REUSABLE_HANDLE(object) *object##_handle_ = object::null(); |
| 602 *object##_handle_ = object::null(); | |
| 603 REUSABLE_HANDLE_LIST(CLEAR_REUSABLE_HANDLE) | 592 REUSABLE_HANDLE_LIST(CLEAR_REUSABLE_HANDLE) |
| 604 #undef CLEAR_REUSABLE_HANDLE | 593 #undef CLEAR_REUSABLE_HANDLE |
| 605 } | 594 } |
| 606 | 595 |
| 607 | 596 |
| 608 void Thread::VisitObjectPointers(ObjectPointerVisitor* visitor, | 597 void Thread::VisitObjectPointers(ObjectPointerVisitor* visitor, |
| 609 bool validate_frames) { | 598 bool validate_frames) { |
| 610 ASSERT(visitor != NULL); | 599 ASSERT(visitor != NULL); |
| 611 | 600 |
| 612 if (zone_ != NULL) { | 601 if (zone_ != NULL) { |
| 613 zone_->VisitObjectPointers(visitor); | 602 zone_->VisitObjectPointers(visitor); |
| 614 } | 603 } |
| 615 | 604 |
| 616 // Visit objects in thread specific handles area. | 605 // Visit objects in thread specific handles area. |
| 617 reusable_handles_.VisitObjectPointers(visitor); | 606 reusable_handles_.VisitObjectPointers(visitor); |
| 618 | 607 |
| 619 visitor->VisitPointer( | 608 visitor->VisitPointer(reinterpret_cast<RawObject**>(&pending_functions_)); |
| 620 reinterpret_cast<RawObject**>(&pending_functions_)); | 609 visitor->VisitPointer(reinterpret_cast<RawObject**>(&sticky_error_)); |
| 621 visitor->VisitPointer( | |
| 622 reinterpret_cast<RawObject**>(&sticky_error_)); | |
| 623 | 610 |
| 624 // Visit the api local scope as it has all the api local handles. | 611 // Visit the api local scope as it has all the api local handles. |
| 625 ApiLocalScope* scope = api_top_scope_; | 612 ApiLocalScope* scope = api_top_scope_; |
| 626 while (scope != NULL) { | 613 while (scope != NULL) { |
| 627 scope->local_handles()->VisitObjectPointers(visitor); | 614 scope->local_handles()->VisitObjectPointers(visitor); |
| 628 scope = scope->previous(); | 615 scope = scope->previous(); |
| 629 } | 616 } |
| 630 | 617 |
| 631 // Iterate over all the stack frames and visit objects on the stack. | 618 // Iterate over all the stack frames and visit objects on the stack. |
| 632 StackFrameIterator frames_iterator(top_exit_frame_info(), | 619 StackFrameIterator frames_iterator(top_exit_frame_info(), validate_frames); |
| 633 validate_frames); | |
| 634 StackFrame* frame = frames_iterator.NextFrame(); | 620 StackFrame* frame = frames_iterator.NextFrame(); |
| 635 while (frame != NULL) { | 621 while (frame != NULL) { |
| 636 frame->VisitObjectPointers(visitor); | 622 frame->VisitObjectPointers(visitor); |
| 637 frame = frames_iterator.NextFrame(); | 623 frame = frames_iterator.NextFrame(); |
| 638 } | 624 } |
| 639 } | 625 } |
| 640 | 626 |
| 641 | 627 |
| 642 bool Thread::CanLoadFromThread(const Object& object) { | 628 bool Thread::CanLoadFromThread(const Object& object) { |
| 643 #define CHECK_OBJECT(type_name, member_name, expr, default_init_value) \ | 629 #define CHECK_OBJECT(type_name, member_name, expr, default_init_value) \ |
| 644 if (object.raw() == expr) return true; | 630 if (object.raw() == expr) return true; |
| 645 CACHED_VM_OBJECTS_LIST(CHECK_OBJECT) | 631 CACHED_VM_OBJECTS_LIST(CHECK_OBJECT) |
| 646 #undef CHECK_OBJECT | 632 #undef CHECK_OBJECT |
| 647 return false; | 633 return false; |
| 648 } | 634 } |
| 649 | 635 |
| 650 | 636 |
| 651 intptr_t Thread::OffsetFromThread(const Object& object) { | 637 intptr_t Thread::OffsetFromThread(const Object& object) { |
| 652 #define COMPUTE_OFFSET(type_name, member_name, expr, default_init_value) \ | 638 #define COMPUTE_OFFSET(type_name, member_name, expr, default_init_value) \ |
| 653 ASSERT((expr)->IsVMHeapObject()); \ | 639 ASSERT((expr)->IsVMHeapObject()); \ |
| 654 if (object.raw() == expr) return Thread::member_name##offset(); | 640 if (object.raw() == expr) return Thread::member_name##offset(); |
| 655 CACHED_VM_OBJECTS_LIST(COMPUTE_OFFSET) | 641 CACHED_VM_OBJECTS_LIST(COMPUTE_OFFSET) |
| 656 #undef COMPUTE_OFFSET | 642 #undef COMPUTE_OFFSET |
| 657 UNREACHABLE(); | 643 UNREACHABLE(); |
| 658 return -1; | 644 return -1; |
| 659 } | 645 } |
| 660 | 646 |
| 661 | 647 |
| 662 bool Thread::ObjectAtOffset(intptr_t offset, Object* object) { | 648 bool Thread::ObjectAtOffset(intptr_t offset, Object* object) { |
| 663 if (Isolate::Current() == Dart::vm_isolate()) { | 649 if (Isolate::Current() == Dart::vm_isolate()) { |
| 664 // --disassemble-stubs runs before all the references through | 650 // --disassemble-stubs runs before all the references through |
| 665 // thread have targets | 651 // thread have targets |
| 666 return false; | 652 return false; |
| 667 } | 653 } |
| 668 | 654 |
| 669 #define COMPUTE_OFFSET(type_name, member_name, expr, default_init_value) \ | 655 #define COMPUTE_OFFSET(type_name, member_name, expr, default_init_value) \ |
| 670 if (Thread::member_name##offset() == offset) { \ | 656 if (Thread::member_name##offset() == offset) { \ |
| 671 *object = expr; \ | 657 *object = expr; \ |
| 672 return true; \ | 658 return true; \ |
| 673 } | 659 } |
| 674 CACHED_VM_OBJECTS_LIST(COMPUTE_OFFSET) | 660 CACHED_VM_OBJECTS_LIST(COMPUTE_OFFSET) |
| 675 #undef COMPUTE_OFFSET | 661 #undef COMPUTE_OFFSET |
| 676 return false; | 662 return false; |
| 677 } | 663 } |
| 678 | 664 |
| 679 | 665 |
| 680 intptr_t Thread::OffsetFromThread(const RuntimeEntry* runtime_entry) { | 666 intptr_t Thread::OffsetFromThread(const RuntimeEntry* runtime_entry) { |
| 681 #define COMPUTE_OFFSET(name) \ | 667 #define COMPUTE_OFFSET(name) \ |
| 682 if (runtime_entry->function() == k##name##RuntimeEntry.function()) { \ | 668 if (runtime_entry->function() == k##name##RuntimeEntry.function()) { \ |
| 683 return Thread::name##_entry_point_offset(); \ | 669 return Thread::name##_entry_point_offset(); \ |
| 684 } | 670 } |
| 685 RUNTIME_ENTRY_LIST(COMPUTE_OFFSET) | 671 RUNTIME_ENTRY_LIST(COMPUTE_OFFSET) |
| 686 #undef COMPUTE_OFFSET | 672 #undef COMPUTE_OFFSET |
| 687 | 673 |
| 688 #define COMPUTE_OFFSET(returntype, name, ...) \ | 674 #define COMPUTE_OFFSET(returntype, name, ...) \ |
| 689 if (runtime_entry->function() == k##name##RuntimeEntry.function()) { \ | 675 if (runtime_entry->function() == k##name##RuntimeEntry.function()) { \ |
| 690 return Thread::name##_entry_point_offset(); \ | 676 return Thread::name##_entry_point_offset(); \ |
| 691 } | 677 } |
| 692 LEAF_RUNTIME_ENTRY_LIST(COMPUTE_OFFSET) | 678 LEAF_RUNTIME_ENTRY_LIST(COMPUTE_OFFSET) |
| 693 #undef COMPUTE_OFFSET | 679 #undef COMPUTE_OFFSET |
| 694 | 680 |
| 695 UNREACHABLE(); | 681 UNREACHABLE(); |
| 696 return -1; | 682 return -1; |
| 697 } | 683 } |
| 698 | 684 |
| 699 | 685 |
| 700 bool Thread::IsValidLocalHandle(Dart_Handle object) const { | 686 bool Thread::IsValidLocalHandle(Dart_Handle object) const { |
| 701 ApiLocalScope* scope = api_top_scope_; | 687 ApiLocalScope* scope = api_top_scope_; |
| 702 while (scope != NULL) { | 688 while (scope != NULL) { |
| (...skipping 25 matching lines...) |
| 728 scope = scope->previous(); | 714 scope = scope->previous(); |
| 729 } | 715 } |
| 730 return total; | 716 return total; |
| 731 } | 717 } |
| 732 | 718 |
| 733 | 719 |
| 734 void Thread::UnwindScopes(uword stack_marker) { | 720 void Thread::UnwindScopes(uword stack_marker) { |
| 735 // Unwind all scopes using the same stack_marker, i.e. all scopes allocated | 721 // Unwind all scopes using the same stack_marker, i.e. all scopes allocated |
| 736 // under the same top_exit_frame_info. | 722 // under the same top_exit_frame_info. |
| 737 ApiLocalScope* scope = api_top_scope_; | 723 ApiLocalScope* scope = api_top_scope_; |
| 738 while (scope != NULL && | 724 while (scope != NULL && scope->stack_marker() != 0 && |
| 739 scope->stack_marker() != 0 && | |
| 740 scope->stack_marker() == stack_marker) { | 725 scope->stack_marker() == stack_marker) { |
| 741 api_top_scope_ = scope->previous(); | 726 api_top_scope_ = scope->previous(); |
| 742 delete scope; | 727 delete scope; |
| 743 scope = api_top_scope_; | 728 scope = api_top_scope_; |
| 744 } | 729 } |
| 745 } | 730 } |
| 746 | 731 |
| 747 | 732 |
| 748 void Thread::EnterSafepointUsingLock() { | 733 void Thread::EnterSafepointUsingLock() { |
| 749 isolate()->safepoint_handler()->EnterSafepointUsingLock(this); | 734 isolate()->safepoint_handler()->EnterSafepointUsingLock(this); |
| (...skipping 22 matching lines...) |
| 772 | 757 |
| 773 DisableThreadInterruptsScope::~DisableThreadInterruptsScope() { | 758 DisableThreadInterruptsScope::~DisableThreadInterruptsScope() { |
| 774 if (thread() != NULL) { | 759 if (thread() != NULL) { |
| 775 OSThread* os_thread = thread()->os_thread(); | 760 OSThread* os_thread = thread()->os_thread(); |
| 776 ASSERT(os_thread != NULL); | 761 ASSERT(os_thread != NULL); |
| 777 os_thread->EnableThreadInterrupts(); | 762 os_thread->EnableThreadInterrupts(); |
| 778 } | 763 } |
| 779 } | 764 } |
| 780 | 765 |
| 781 } // namespace dart | 766 } // namespace dart |
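
Two patterns in this file are worth a quick illustration for readers new to the VM code. First, the constructor and InitVMConstants() are built almost entirely from X-macro lists (REUSABLE_HANDLE_LIST, CACHED_CONSTANTS_LIST, RUNTIME_ENTRY_LIST): a single list macro is expanded with different per-entry macros to generate field declarations, default initialization, and later setup. The sketch below is a minimal, self-contained version of that pattern; MY_CACHED_LIST, ThreadLike, and the member names are invented for illustration and are not the VM's actual lists.

```cpp
// Minimal sketch of the X-macro pattern used by Thread (hypothetical names).
#include <cstdio>

// The "list" macro: one V(...) entry per cached member.
#define MY_CACHED_LIST(V)                                                      \
  V(int, cached_a_, 42, 0)                                                     \
  V(int, cached_b_, 7, 0)

struct ThreadLike {
  // Expand the list once to declare one field per entry.
#define DECLARE_FIELD(type_name, member_name, init_expr, default_init_value)   \
  type_name member_name;
  MY_CACHED_LIST(DECLARE_FIELD)
#undef DECLARE_FIELD

  ThreadLike() {
    // Expand it again to default-initialize every field (cf. the constructor).
#define DEFAULT_INIT(type_name, member_name, init_expr, default_init_value)    \
  member_name = default_init_value;
    MY_CACHED_LIST(DEFAULT_INIT)
#undef DEFAULT_INIT
  }

  void InitConstants() {
    // And once more to install the real values (cf. InitVMConstants()).
#define INIT_VALUE(type_name, member_name, init_expr, default_init_value)      \
  member_name = (init_expr);
    MY_CACHED_LIST(INIT_VALUE)
#undef INIT_VALUE
  }
};

int main() {
  ThreadLike t;
  t.InitConstants();
  std::printf("%d %d\n", t.cached_a_, t.cached_b_);  // prints "42 7"
  return 0;
}
```

Adding a new cached member then only touches the list macro; every expansion site picks it up automatically, which is also why the churn in this CL is confined to macro layout rather than logic.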
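Second, the ALIGN16 constants (double_negate_constant, double_abs_constant, float_not_constant, and friends) are bit masks, presumably loaded with aligned 128-bit SIMD moves, hence the 16-byte alignment and the same word duplicated across lanes. XORing a float or double with its sign bit flips the sign, and ANDing with the complement clears it for absolute value. Below is a single-lane sketch of the same bit math with plain integers; it relies only on IEEE-754 layout, and BitOp is a hypothetical helper rather than VM code.

```cpp
// Single-lane illustration of the negate/abs masks defined in thread.cc.
#include <cstdint>
#include <cstdio>
#include <cstring>

static double BitOp(double value, uint64_t mask, bool use_and) {
  uint64_t bits;
  std::memcpy(&bits, &value, sizeof(bits));  // type-pun via memcpy (no UB)
  bits = use_and ? (bits & mask) : (bits ^ mask);
  double result;
  std::memcpy(&result, &bits, sizeof(result));
  return result;
}

int main() {
  const uint64_t kSignBit = 0x8000000000000000ULL;  // one lane of double_negate_constant
  const uint64_t kAbsMask = 0x7FFFFFFFFFFFFFFFULL;  // one lane of double_abs_constant
  std::printf("%f\n", BitOp(1.5, kSignBit, false));   // -1.500000 (sign flipped)
  std::printf("%f\n", BitOp(-2.25, kAbsMask, true));  //  2.250000 (sign cleared)
  return 0;
}
```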