Chromium Code Reviews

Side by Side Diff: runtime/vm/thread.h

Issue 2481873005: clang-format runtime/vm (Closed)
Patch Set: Merge (created 4 years, 1 month ago)
1 // Copyright (c) 2015, the Dart project authors. Please see the AUTHORS file 1 // Copyright (c) 2015, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a 2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file. 3 // BSD-style license that can be found in the LICENSE file.
4 4
5 #ifndef RUNTIME_VM_THREAD_H_ 5 #ifndef RUNTIME_VM_THREAD_H_
6 #define RUNTIME_VM_THREAD_H_ 6 #define RUNTIME_VM_THREAD_H_
7 7
8 #include "include/dart_api.h" 8 #include "include/dart_api.h"
9 #include "platform/assert.h" 9 #include "platform/assert.h"
10 #include "vm/atomic.h" 10 #include "vm/atomic.h"
(...skipping 53 matching lines...)
64 V(Field) \ 64 V(Field) \
65 V(Function) \ 65 V(Function) \
66 V(GrowableObjectArray) \ 66 V(GrowableObjectArray) \
67 V(Instance) \ 67 V(Instance) \
68 V(Library) \ 68 V(Library) \
69 V(Object) \ 69 V(Object) \
70 V(PcDescriptors) \ 70 V(PcDescriptors) \
71 V(Smi) \ 71 V(Smi) \
72 V(String) \ 72 V(String) \
73 V(TypeArguments) \ 73 V(TypeArguments) \
74 V(TypeParameter) \ 74 V(TypeParameter)
75 75
76 76
77 #if defined(TARGET_ARCH_DBC) 77 #if defined(TARGET_ARCH_DBC)
78 #define CACHED_VM_STUBS_LIST(V) 78 #define CACHED_VM_STUBS_LIST(V)
79 #else 79 #else
80 #define CACHED_VM_STUBS_LIST(V) \ 80 #define CACHED_VM_STUBS_LIST(V) \
81 V(RawCode*, update_store_buffer_code_, \ 81 V(RawCode*, update_store_buffer_code_, \
82 StubCode::UpdateStoreBuffer_entry()->code(), NULL) \ 82 StubCode::UpdateStoreBuffer_entry()->code(), NULL) \
83 V(RawCode*, fix_callers_target_code_, \ 83 V(RawCode*, fix_callers_target_code_, \
84 StubCode::FixCallersTarget_entry()->code(), NULL) \ 84 StubCode::FixCallersTarget_entry()->code(), NULL) \
85 V(RawCode*, fix_allocation_stub_code_, \ 85 V(RawCode*, fix_allocation_stub_code_, \
86 StubCode::FixAllocationStubTarget_entry()->code(), NULL) \ 86 StubCode::FixAllocationStubTarget_entry()->code(), NULL) \
87 V(RawCode*, invoke_dart_code_stub_, \ 87 V(RawCode*, invoke_dart_code_stub_, \
88 StubCode::InvokeDartCode_entry()->code(), NULL) \ 88 StubCode::InvokeDartCode_entry()->code(), NULL) \
89 V(RawCode*, call_to_runtime_stub_, \ 89 V(RawCode*, call_to_runtime_stub_, StubCode::CallToRuntime_entry()->code(), \
90 StubCode::CallToRuntime_entry()->code(), NULL) \ 90 NULL) \
91 V(RawCode*, monomorphic_miss_stub_, \ 91 V(RawCode*, monomorphic_miss_stub_, \
92 StubCode::MonomorphicMiss_entry()->code(), NULL) \ 92 StubCode::MonomorphicMiss_entry()->code(), NULL) \
93 V(RawCode*, ic_lookup_through_code_stub_, \ 93 V(RawCode*, ic_lookup_through_code_stub_, \
94 StubCode::ICCallThroughCode_entry()->code(), NULL) \ 94 StubCode::ICCallThroughCode_entry()->code(), NULL) \
95 V(RawCode*, lazy_deopt_from_return_stub_, \ 95 V(RawCode*, lazy_deopt_from_return_stub_, \
96 StubCode::DeoptimizeLazyFromReturn_entry()->code(), NULL) \ 96 StubCode::DeoptimizeLazyFromReturn_entry()->code(), NULL) \
97 V(RawCode*, lazy_deopt_from_throw_stub_, \ 97 V(RawCode*, lazy_deopt_from_throw_stub_, \
98 StubCode::DeoptimizeLazyFromThrow_entry()->code(), NULL) \ 98 StubCode::DeoptimizeLazyFromThrow_entry()->code(), NULL)
99 99
100 #endif 100 #endif
101 101
102 // List of VM-global objects/addresses cached in each Thread object. 102 // List of VM-global objects/addresses cached in each Thread object.
103 #define CACHED_VM_OBJECTS_LIST(V) \ 103 #define CACHED_VM_OBJECTS_LIST(V) \
104 V(RawObject*, object_null_, Object::null(), NULL) \ 104 V(RawObject*, object_null_, Object::null(), NULL) \
105 V(RawBool*, bool_true_, Object::bool_true().raw(), NULL) \ 105 V(RawBool*, bool_true_, Object::bool_true().raw(), NULL) \
106 V(RawBool*, bool_false_, Object::bool_false().raw(), NULL) \ 106 V(RawBool*, bool_false_, Object::bool_false().raw(), NULL) \
107 CACHED_VM_STUBS_LIST(V) \ 107 CACHED_VM_STUBS_LIST(V)
108 108
109 #if defined(TARGET_ARCH_DBC) 109 #if defined(TARGET_ARCH_DBC)
110 #define CACHED_VM_STUBS_ADDRESSES_LIST(V) 110 #define CACHED_VM_STUBS_ADDRESSES_LIST(V)
111 #else 111 #else
112 #define CACHED_VM_STUBS_ADDRESSES_LIST(V) \ 112 #define CACHED_VM_STUBS_ADDRESSES_LIST(V) \
113 V(uword, update_store_buffer_entry_point_, \ 113 V(uword, update_store_buffer_entry_point_, \
114 StubCode::UpdateStoreBuffer_entry()->EntryPoint(), 0) \ 114 StubCode::UpdateStoreBuffer_entry()->EntryPoint(), 0) \
115 V(uword, call_to_runtime_entry_point_, \ 115 V(uword, call_to_runtime_entry_point_, \
116 StubCode::CallToRuntime_entry()->EntryPoint(), 0) \ 116 StubCode::CallToRuntime_entry()->EntryPoint(), 0) \
117 V(uword, megamorphic_call_checked_entry_, \ 117 V(uword, megamorphic_call_checked_entry_, \
118 StubCode::MegamorphicCall_entry()->EntryPoint(), 0) \ 118 StubCode::MegamorphicCall_entry()->EntryPoint(), 0) \
119 V(uword, monomorphic_miss_entry_, \ 119 V(uword, monomorphic_miss_entry_, \
120 StubCode::MonomorphicMiss_entry()->EntryPoint(), 0) \ 120 StubCode::MonomorphicMiss_entry()->EntryPoint(), 0)
121 121
122 #endif 122 #endif
123 123
124 #define CACHED_ADDRESSES_LIST(V) \ 124 #define CACHED_ADDRESSES_LIST(V) \
125 CACHED_VM_STUBS_ADDRESSES_LIST(V) \ 125 CACHED_VM_STUBS_ADDRESSES_LIST(V) \
126 V(uword, native_call_wrapper_entry_point_, \ 126 V(uword, native_call_wrapper_entry_point_, \
127 NativeEntry::NativeCallWrapperEntry(), 0) \ 127 NativeEntry::NativeCallWrapperEntry(), 0) \
128 V(RawString**, predefined_symbols_address_, \ 128 V(RawString**, predefined_symbols_address_, Symbols::PredefinedAddress(), \
129 Symbols::PredefinedAddress(), NULL) \ 129 NULL) \
130 V(uword, double_negate_address_, \ 130 V(uword, double_negate_address_, \
131 reinterpret_cast<uword>(&double_negate_constant), 0) \ 131 reinterpret_cast<uword>(&double_negate_constant), 0) \
132 V(uword, double_abs_address_, \ 132 V(uword, double_abs_address_, reinterpret_cast<uword>(&double_abs_constant), \
133 reinterpret_cast<uword>(&double_abs_constant), 0) \ 133 0) \
134 V(uword, float_not_address_, \ 134 V(uword, float_not_address_, reinterpret_cast<uword>(&float_not_constant), \
135 reinterpret_cast<uword>(&float_not_constant), 0) \ 135 0) \
136 V(uword, float_negate_address_, \ 136 V(uword, float_negate_address_, \
137 reinterpret_cast<uword>(&float_negate_constant), 0) \ 137 reinterpret_cast<uword>(&float_negate_constant), 0) \
138 V(uword, float_absolute_address_, \ 138 V(uword, float_absolute_address_, \
139 reinterpret_cast<uword>(&float_absolute_constant), 0) \ 139 reinterpret_cast<uword>(&float_absolute_constant), 0) \
140 V(uword, float_zerow_address_, \ 140 V(uword, float_zerow_address_, \
141 reinterpret_cast<uword>(&float_zerow_constant), 0) \ 141 reinterpret_cast<uword>(&float_zerow_constant), 0)
142 142
143 #define CACHED_CONSTANTS_LIST(V) \ 143 #define CACHED_CONSTANTS_LIST(V) \
144 CACHED_VM_OBJECTS_LIST(V) \ 144 CACHED_VM_OBJECTS_LIST(V) \
145 CACHED_ADDRESSES_LIST(V) \ 145 CACHED_ADDRESSES_LIST(V)
146 146
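
The CACHED_* tables above are X-macros: every entry is a (type, member name, initializer expression, default value) tuple, and the same list is expanded several times in the class below with different definitions of V to declare fields, generate offset accessors, and fill in the cache. A minimal self-contained sketch of the pattern, with invented entries rather than the VM's:

```cpp
#include <cstdio>

// Hypothetical list in the same (type, name, expr, default) shape as
// CACHED_VM_OBJECTS_LIST; the two entries are purely illustrative.
#define DEMO_CACHED_LIST(V)                                                    \
  V(int, answer_, 42, 0)                                                       \
  V(double, pi_, 3.14159, 0.0)

struct DemoThread {
// One expansion declares the fields, initialized to the 'default' column.
#define DECLARE_FIELD(type, name, expr, default_value) type name = default_value;
  DEMO_CACHED_LIST(DECLARE_FIELD)
#undef DECLARE_FIELD

  // A second expansion assigns each field its 'expr' column value,
  // mirroring what Thread::InitVMConstants does for the real lists.
  void InitVMConstants() {
#define INIT_FIELD(type, name, expr, default_value) name = expr;
    DEMO_CACHED_LIST(INIT_FIELD)
#undef INIT_FIELD
  }
};

int main() {
  DemoThread t;
  t.InitVMConstants();
  std::printf("%d %f\n", t.answer_, t.pi_);  // prints: 42 3.141590
  return 0;
}
```
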
147 // A VM thread; may be executing Dart code or performing helper tasks like 147 // A VM thread; may be executing Dart code or performing helper tasks like
148 // garbage collection or compilation. The Thread structure associated with 148 // garbage collection or compilation. The Thread structure associated with
149 // a thread is allocated by EnsureInit before entering an isolate, and destroyed 149 // a thread is allocated by EnsureInit before entering an isolate, and destroyed
150 // automatically when the underlying OS thread exits. NOTE: On Windows, CleanUp 150 // automatically when the underlying OS thread exits. NOTE: On Windows, CleanUp
151 // must currently be called manually (issue 23474). 151 // must currently be called manually (issue 23474).
152 class Thread : public BaseThread { 152 class Thread : public BaseThread {
153 public: 153 public:
154 // The kind of task this thread is performing. Sampled by the profiler. 154 // The kind of task this thread is performing. Sampled by the profiler.
155 enum TaskKind { 155 enum TaskKind {
(...skipping 47 matching lines...)
203 } 203 }
204 static intptr_t stack_limit_offset() { 204 static intptr_t stack_limit_offset() {
205 return OFFSET_OF(Thread, stack_limit_); 205 return OFFSET_OF(Thread, stack_limit_);
206 } 206 }
207 207
208 // The true stack limit for this isolate. 208 // The true stack limit for this isolate.
209 uword saved_stack_limit() const { return saved_stack_limit_; } 209 uword saved_stack_limit() const { return saved_stack_limit_; }
210 210
211 #if defined(TARGET_ARCH_DBC) 211 #if defined(TARGET_ARCH_DBC)
212 // Access to the current stack limit for DBC interpreter. 212 // Access to the current stack limit for DBC interpreter.
213 uword stack_limit() const { 213 uword stack_limit() const { return stack_limit_; }
214 return stack_limit_;
215 }
216 #endif 214 #endif
217 215
218 // Stack overflow flags 216 // Stack overflow flags
219 enum { 217 enum {
220 kOsrRequest = 0x1, // Current stack overflow caused by OSR request. 218 kOsrRequest = 0x1, // Current stack overflow caused by OSR request.
221 }; 219 };
222 220
223 uword stack_overflow_flags_address() const { 221 uword stack_overflow_flags_address() const {
224 return reinterpret_cast<uword>(&stack_overflow_flags_); 222 return reinterpret_cast<uword>(&stack_overflow_flags_);
225 } 223 }
226 static intptr_t stack_overflow_flags_offset() { 224 static intptr_t stack_overflow_flags_offset() {
227 return OFFSET_OF(Thread, stack_overflow_flags_); 225 return OFFSET_OF(Thread, stack_overflow_flags_);
228 } 226 }
229 227
230 int32_t IncrementAndGetStackOverflowCount() { 228 int32_t IncrementAndGetStackOverflowCount() {
231 return ++stack_overflow_count_; 229 return ++stack_overflow_count_;
232 } 230 }
233 231
234 TaskKind task_kind() const { 232 TaskKind task_kind() const { return task_kind_; }
235 return task_kind_;
236 }
237 233
238 // Retrieves and clears the stack overflow flags. These are set by 234 // Retrieves and clears the stack overflow flags. These are set by
239 // the generated code before the slow path runtime routine for a 235 // the generated code before the slow path runtime routine for a
240 // stack overflow is called. 236 // stack overflow is called.
241 uword GetAndClearStackOverflowFlags(); 237 uword GetAndClearStackOverflowFlags();
242 238
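
kOsrRequest above is the only overflow flag currently defined. A slow-path handler would be expected to consume the flags roughly as follows; this is a fragment assuming a valid Thread* named thread, not VM source:

```cpp
// Hedged sketch: decide between a real stack-overflow error and an
// on-stack-replacement request using the accessors declared above.
uword flags = thread->GetAndClearStackOverflowFlags();
if ((flags & Thread::kOsrRequest) != 0) {
  // The generated overflow check fired to request OSR, not because the
  // stack is actually exhausted; trigger optimizing recompilation here.
}
```
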
243 // Interrupt bits. 239 // Interrupt bits.
244 enum { 240 enum {
245 kVMInterrupt = 0x1, // Internal VM checks: safepoints, store buffers, etc. 241 kVMInterrupt = 0x1, // Internal VM checks: safepoints, store buffers, etc.
246 kMessageInterrupt = 0x2, // An interrupt to process an out of band message. 242 kMessageInterrupt = 0x2, // An interrupt to process an out of band message.
247 243
248 kInterruptsMask = (kVMInterrupt | kMessageInterrupt), 244 kInterruptsMask = (kVMInterrupt | kMessageInterrupt),
249 }; 245 };
250 246
251 void ScheduleInterrupts(uword interrupt_bits); 247 void ScheduleInterrupts(uword interrupt_bits);
252 void ScheduleInterruptsLocked(uword interrupt_bits); 248 void ScheduleInterruptsLocked(uword interrupt_bits);
253 RawError* HandleInterrupts(); 249 RawError* HandleInterrupts();
254 uword GetAndClearInterrupts(); 250 uword GetAndClearInterrupts();
255 251
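
The interrupt bits form a small mask-based protocol: scheduling ORs requested bits into a pending set, and the handler fetches and clears that set in one step. A standalone sketch of the idea (illustrative globals; the real ScheduleInterrupts/GetAndClearInterrupts operate on Thread state under the thread lock):

```cpp
#include <cstdio>

// Constants mirror the enum above; everything else here is illustrative.
enum : unsigned {
  kVMInterrupt = 0x1,
  kMessageInterrupt = 0x2,
  kInterruptsMask = kVMInterrupt | kMessageInterrupt,
};

unsigned g_pending_interrupts = 0;

void ScheduleInterrupts(unsigned bits) {
  g_pending_interrupts |= (bits & kInterruptsMask);  // set requested bits
}

unsigned GetAndClearInterrupts() {
  unsigned bits = g_pending_interrupts;  // snapshot the pending set...
  g_pending_interrupts = 0;              // ...and reset it for the next round
  return bits;
}

int main() {
  ScheduleInterrupts(kMessageInterrupt);
  if (GetAndClearInterrupts() & kMessageInterrupt) {
    std::printf("process out-of-band message\n");
  }
  return 0;
}
```
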
256 // OSThread corresponding to this thread. 252 // OSThread corresponding to this thread.
257 OSThread* os_thread() const { return os_thread_; } 253 OSThread* os_thread() const { return os_thread_; }
258 void set_os_thread(OSThread* os_thread) { 254 void set_os_thread(OSThread* os_thread) { os_thread_ = os_thread; }
259 os_thread_ = os_thread;
260 }
261 255
262 // Monitor corresponding to this thread. 256 // Monitor corresponding to this thread.
263 Monitor* thread_lock() const { return thread_lock_; } 257 Monitor* thread_lock() const { return thread_lock_; }
264 258
265 // The topmost zone used for allocation in this thread. 259 // The topmost zone used for allocation in this thread.
266 Zone* zone() const { return zone_; } 260 Zone* zone() const { return zone_; }
267 261
268 bool ZoneIsOwnedByThread(Zone* zone) const; 262 bool ZoneIsOwnedByThread(Zone* zone) const;
269 263
270 // The reusable api local scope for this thread. 264 // The reusable api local scope for this thread.
271 ApiLocalScope* api_reusable_scope() const { return api_reusable_scope_; } 265 ApiLocalScope* api_reusable_scope() const { return api_reusable_scope_; }
272 void set_api_reusable_scope(ApiLocalScope* value) { 266 void set_api_reusable_scope(ApiLocalScope* value) {
273 ASSERT(value == NULL || api_reusable_scope_ == NULL); 267 ASSERT(value == NULL || api_reusable_scope_ == NULL);
274 api_reusable_scope_ = value; 268 api_reusable_scope_ = value;
275 } 269 }
276 270
277 // The api local scope for this thread; this is where all local handles 271 // The api local scope for this thread; this is where all local handles
278 // are allocated. 272 // are allocated.
279 ApiLocalScope* api_top_scope() const { return api_top_scope_; } 273 ApiLocalScope* api_top_scope() const { return api_top_scope_; }
280 void set_api_top_scope(ApiLocalScope* value) { api_top_scope_ = value; } 274 void set_api_top_scope(ApiLocalScope* value) { api_top_scope_ = value; }
281 275
282 // The isolate that this thread is operating on, or NULL if none. 276 // The isolate that this thread is operating on, or NULL if none.
283 Isolate* isolate() const { return isolate_; } 277 Isolate* isolate() const { return isolate_; }
284 static intptr_t isolate_offset() { 278 static intptr_t isolate_offset() { return OFFSET_OF(Thread, isolate_); }
285 return OFFSET_OF(Thread, isolate_);
286 }
287 bool IsMutatorThread() const; 279 bool IsMutatorThread() const;
288 bool CanCollectGarbage() const; 280 bool CanCollectGarbage() const;
289 281
290 // Offset of Dart TimelineStream object. 282 // Offset of Dart TimelineStream object.
291 static intptr_t dart_stream_offset() { 283 static intptr_t dart_stream_offset() {
292 return OFFSET_OF(Thread, dart_stream_); 284 return OFFSET_OF(Thread, dart_stream_);
293 } 285 }
294 286
295 // Is |this| executing Dart code? 287 // Is |this| executing Dart code?
296 bool IsExecutingDartCode() const; 288 bool IsExecutingDartCode() const;
(...skipping 10 matching lines...)
307 void set_cha(CHA* value) { 299 void set_cha(CHA* value) {
308 ASSERT(isolate_ != NULL); 300 ASSERT(isolate_ != NULL);
309 cha_ = value; 301 cha_ = value;
310 } 302 }
311 303
312 TypeRangeCache* type_range_cache() const { return type_range_cache_; } 304 TypeRangeCache* type_range_cache() const { return type_range_cache_; }
313 void set_type_range_cache(TypeRangeCache* value) { 305 void set_type_range_cache(TypeRangeCache* value) {
314 type_range_cache_ = value; 306 type_range_cache_ = value;
315 } 307 }
316 308
317 int32_t no_callback_scope_depth() const { 309 int32_t no_callback_scope_depth() const { return no_callback_scope_depth_; }
318 return no_callback_scope_depth_;
319 }
320 310
321 void IncrementNoCallbackScopeDepth() { 311 void IncrementNoCallbackScopeDepth() {
322 ASSERT(no_callback_scope_depth_ < INT_MAX); 312 ASSERT(no_callback_scope_depth_ < INT_MAX);
323 no_callback_scope_depth_ += 1; 313 no_callback_scope_depth_ += 1;
324 } 314 }
325 315
326 void DecrementNoCallbackScopeDepth() { 316 void DecrementNoCallbackScopeDepth() {
327 ASSERT(no_callback_scope_depth_ > 0); 317 ASSERT(no_callback_scope_depth_ > 0);
328 no_callback_scope_depth_ -= 1; 318 no_callback_scope_depth_ -= 1;
329 } 319 }
330 320
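
The paired increment/decrement calls with their depth asserts are the kind of API normally driven by a RAII guard. A minimal sketch of such a guard over the accessors declared above; the VM's actual scope class, if one exists, lives outside this file:

```cpp
// Hedged sketch (not VM source): enters a no-callback region for the
// lifetime of the object, so unbalanced Increment/Decrement pairs are
// impossible even on early returns.
class NoCallbackScope {
 public:
  explicit NoCallbackScope(Thread* thread) : thread_(thread) {
    thread_->IncrementNoCallbackScopeDepth();
  }
  ~NoCallbackScope() { thread_->DecrementNoCallbackScopeDepth(); }

 private:
  Thread* const thread_;
};
```
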
331 void StoreBufferAddObject(RawObject* obj); 321 void StoreBufferAddObject(RawObject* obj);
332 void StoreBufferAddObjectGC(RawObject* obj); 322 void StoreBufferAddObjectGC(RawObject* obj);
333 #if defined(TESTING) 323 #if defined(TESTING)
334 bool StoreBufferContains(RawObject* obj) const { 324 bool StoreBufferContains(RawObject* obj) const {
335 return store_buffer_block_->Contains(obj); 325 return store_buffer_block_->Contains(obj);
336 } 326 }
337 #endif 327 #endif
338 void StoreBufferBlockProcess(StoreBuffer::ThresholdPolicy policy); 328 void StoreBufferBlockProcess(StoreBuffer::ThresholdPolicy policy);
339 static intptr_t store_buffer_block_offset() { 329 static intptr_t store_buffer_block_offset() {
340 return OFFSET_OF(Thread, store_buffer_block_); 330 return OFFSET_OF(Thread, store_buffer_block_);
341 } 331 }
342 332
343 uword top_exit_frame_info() const { 333 uword top_exit_frame_info() const { return top_exit_frame_info_; }
344 return top_exit_frame_info_;
345 }
346 void set_top_exit_frame_info(uword top_exit_frame_info) { 334 void set_top_exit_frame_info(uword top_exit_frame_info) {
347 top_exit_frame_info_ = top_exit_frame_info; 335 top_exit_frame_info_ = top_exit_frame_info;
348 } 336 }
349 static intptr_t top_exit_frame_info_offset() { 337 static intptr_t top_exit_frame_info_offset() {
350 return OFFSET_OF(Thread, top_exit_frame_info_); 338 return OFFSET_OF(Thread, top_exit_frame_info_);
351 } 339 }
352 340
353 StackResource* top_resource() const { return top_resource_; } 341 StackResource* top_resource() const { return top_resource_; }
354 void set_top_resource(StackResource* value) { 342 void set_top_resource(StackResource* value) { top_resource_ = value; }
355 top_resource_ = value;
356 }
357 static intptr_t top_resource_offset() { 343 static intptr_t top_resource_offset() {
358 return OFFSET_OF(Thread, top_resource_); 344 return OFFSET_OF(Thread, top_resource_);
359 } 345 }
360 346
361 // Heap of the isolate that this thread is operating on. 347 // Heap of the isolate that this thread is operating on.
362 Heap* heap() const { return heap_; } 348 Heap* heap() const { return heap_; }
363 static intptr_t heap_offset() { 349 static intptr_t heap_offset() { return OFFSET_OF(Thread, heap_); }
364 return OFFSET_OF(Thread, heap_);
365 }
366 350
367 int32_t no_handle_scope_depth() const { 351 int32_t no_handle_scope_depth() const {
368 #if defined(DEBUG) 352 #if defined(DEBUG)
369 return no_handle_scope_depth_; 353 return no_handle_scope_depth_;
370 #else 354 #else
371 return 0; 355 return 0;
372 #endif 356 #endif
373 } 357 }
374 358
375 void IncrementNoHandleScopeDepth() { 359 void IncrementNoHandleScopeDepth() {
(...skipping 43 matching lines...)
419 #if defined(DEBUG) 403 #if defined(DEBUG)
420 ASSERT(no_safepoint_scope_depth_ > 0); 404 ASSERT(no_safepoint_scope_depth_ > 0);
421 no_safepoint_scope_depth_ -= 1; 405 no_safepoint_scope_depth_ -= 1;
422 #endif 406 #endif
423 } 407 }
424 408
425 #define DEFINE_OFFSET_METHOD(type_name, member_name, expr, default_init_value) \ 409 #define DEFINE_OFFSET_METHOD(type_name, member_name, expr, default_init_value) \
426 static intptr_t member_name##offset() { \ 410 static intptr_t member_name##offset() { \
427 return OFFSET_OF(Thread, member_name); \ 411 return OFFSET_OF(Thread, member_name); \
428 } 412 }
429 CACHED_CONSTANTS_LIST(DEFINE_OFFSET_METHOD) 413 CACHED_CONSTANTS_LIST(DEFINE_OFFSET_METHOD)
430 #undef DEFINE_OFFSET_METHOD 414 #undef DEFINE_OFFSET_METHOD
431 415
432 #define DEFINE_OFFSET_METHOD(name) \ 416 #define DEFINE_OFFSET_METHOD(name) \
433 static intptr_t name##_entry_point_offset() { \ 417 static intptr_t name##_entry_point_offset() { \
434 return OFFSET_OF(Thread, name##_entry_point_); \ 418 return OFFSET_OF(Thread, name##_entry_point_); \
435 } 419 }
436 RUNTIME_ENTRY_LIST(DEFINE_OFFSET_METHOD) 420 RUNTIME_ENTRY_LIST(DEFINE_OFFSET_METHOD)
437 #undef DEFINE_OFFSET_METHOD 421 #undef DEFINE_OFFSET_METHOD
438 422
439 #define DEFINE_OFFSET_METHOD(returntype, name, ...) \ 423 #define DEFINE_OFFSET_METHOD(returntype, name, ...) \
440 static intptr_t name##_entry_point_offset() { \ 424 static intptr_t name##_entry_point_offset() { \
441 return OFFSET_OF(Thread, name##_entry_point_); \ 425 return OFFSET_OF(Thread, name##_entry_point_); \
442 } 426 }
443 LEAF_RUNTIME_ENTRY_LIST(DEFINE_OFFSET_METHOD) 427 LEAF_RUNTIME_ENTRY_LIST(DEFINE_OFFSET_METHOD)
444 #undef DEFINE_OFFSET_METHOD 428 #undef DEFINE_OFFSET_METHOD
445 429
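
Note the token paste: member_name already ends in an underscore, so the field object_null_ yields an accessor spelled object_null_offset(). The OFFSET_OF value it returns is what generated code adds to the thread register to reach a cached entry; the same arithmetic written out in C++, as a fragment assuming a valid Thread* named thread:

```cpp
// Illustrative only: load the cached null object the way generated code
// would address it, i.e. *(Thread base address + cached-field offset).
intptr_t off = Thread::object_null_offset();
RawObject* cached_null =
    *reinterpret_cast<RawObject**>(reinterpret_cast<uword>(thread) + off);
```
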
446 static bool CanLoadFromThread(const Object& object); 430 static bool CanLoadFromThread(const Object& object);
447 static intptr_t OffsetFromThread(const Object& object); 431 static intptr_t OffsetFromThread(const Object& object);
448 static bool ObjectAtOffset(intptr_t offset, Object* object); 432 static bool ObjectAtOffset(intptr_t offset, Object* object);
449 static intptr_t OffsetFromThread(const RuntimeEntry* runtime_entry); 433 static intptr_t OffsetFromThread(const RuntimeEntry* runtime_entry);
450 434
451 static const intptr_t kNoDeoptId = -1; 435 static const intptr_t kNoDeoptId = -1;
452 static const intptr_t kDeoptIdStep = 2; 436 static const intptr_t kDeoptIdStep = 2;
453 static const intptr_t kDeoptIdBeforeOffset = 0; 437 static const intptr_t kDeoptIdBeforeOffset = 0;
(...skipping 17 matching lines...)
471 455
472 static bool IsDeoptBefore(intptr_t deopt_id) { 456 static bool IsDeoptBefore(intptr_t deopt_id) {
473 return (deopt_id % kDeoptIdStep) == kDeoptIdBeforeOffset; 457 return (deopt_id % kDeoptIdStep) == kDeoptIdBeforeOffset;
474 } 458 }
475 459
476 static bool IsDeoptAfter(intptr_t deopt_id) { 460 static bool IsDeoptAfter(intptr_t deopt_id) {
477 return (deopt_id % kDeoptIdStep) == kDeoptIdAfterOffset; 461 return (deopt_id % kDeoptIdStep) == kDeoptIdAfterOffset;
478 } 462 }
479 463
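
With kDeoptIdStep equal to 2, deopt ids come in before/after pairs distinguished only by parity, which is all IsDeoptBefore and IsDeoptAfter test:

```cpp
// Worked example. kDeoptIdBeforeOffset == 0 is visible above;
// kDeoptIdAfterOffset falls in the elided lines, so 1 is assumed here.
//   IsDeoptBefore(6): 6 % 2 == 0 -> true   (a "before" id)
//   IsDeoptAfter(7):  7 % 2 == 1 -> true   (its paired "after" id, assumed)
```
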
480 LongJumpScope* long_jump_base() const { return long_jump_base_; } 464 LongJumpScope* long_jump_base() const { return long_jump_base_; }
481 void set_long_jump_base(LongJumpScope* value) { 465 void set_long_jump_base(LongJumpScope* value) { long_jump_base_ = value; }
482 long_jump_base_ = value;
483 }
484 466
485 uword vm_tag() const { 467 uword vm_tag() const { return vm_tag_; }
486 return vm_tag_; 468 void set_vm_tag(uword tag) { vm_tag_ = tag; }
487 } 469 static intptr_t vm_tag_offset() { return OFFSET_OF(Thread, vm_tag_); }
488 void set_vm_tag(uword tag) {
489 vm_tag_ = tag;
490 }
491 static intptr_t vm_tag_offset() {
492 return OFFSET_OF(Thread, vm_tag_);
493 }
494 470
495 RawGrowableObjectArray* pending_functions(); 471 RawGrowableObjectArray* pending_functions();
496 void clear_pending_functions(); 472 void clear_pending_functions();
497 473
498 RawError* sticky_error() const; 474 RawError* sticky_error() const;
499 void set_sticky_error(const Error& value); 475 void set_sticky_error(const Error& value);
500 void clear_sticky_error(); 476 void clear_sticky_error();
501 477
502 CompilerStats* compiler_stats() { return compiler_stats_; } 478 CompilerStats* compiler_stats() { return compiler_stats_; }
503 479
504 #if defined(DEBUG) 480 #if defined(DEBUG)
505 #define REUSABLE_HANDLE_SCOPE_ACCESSORS(object) \ 481 #define REUSABLE_HANDLE_SCOPE_ACCESSORS(object) \
506 void set_reusable_##object##_handle_scope_active(bool value) { \ 482 void set_reusable_##object##_handle_scope_active(bool value) { \
507 reusable_##object##_handle_scope_active_ = value; \ 483 reusable_##object##_handle_scope_active_ = value; \
508 } \ 484 } \
509 bool reusable_##object##_handle_scope_active() const { \ 485 bool reusable_##object##_handle_scope_active() const { \
510 return reusable_##object##_handle_scope_active_; \ 486 return reusable_##object##_handle_scope_active_; \
511 } 487 }
512 REUSABLE_HANDLE_LIST(REUSABLE_HANDLE_SCOPE_ACCESSORS) 488 REUSABLE_HANDLE_LIST(REUSABLE_HANDLE_SCOPE_ACCESSORS)
513 #undef REUSABLE_HANDLE_SCOPE_ACCESSORS 489 #undef REUSABLE_HANDLE_SCOPE_ACCESSORS
514 490
515 bool IsAnyReusableHandleScopeActive() const { 491 bool IsAnyReusableHandleScopeActive() const {
516 #define IS_REUSABLE_HANDLE_SCOPE_ACTIVE(object) \ 492 #define IS_REUSABLE_HANDLE_SCOPE_ACTIVE(object) \
517 if (reusable_##object##_handle_scope_active_) return true; 493 if (reusable_##object##_handle_scope_active_) { \
494 return true; \
495 }
518 REUSABLE_HANDLE_LIST(IS_REUSABLE_HANDLE_SCOPE_ACTIVE) 496 REUSABLE_HANDLE_LIST(IS_REUSABLE_HANDLE_SCOPE_ACTIVE)
519 return false; 497 return false;
520 #undef IS_REUSABLE_HANDLE_SCOPE_ACTIVE 498 #undef IS_REUSABLE_HANDLE_SCOPE_ACTIVE
521 } 499 }
522 #endif // defined(DEBUG) 500 #endif // defined(DEBUG)
523 501
524 void ClearReusableHandles(); 502 void ClearReusableHandles();
525 503
526 #define REUSABLE_HANDLE(object) \ 504 #define REUSABLE_HANDLE(object) \
527 object& object##Handle() const { \ 505 object& object##Handle() const { return *object##_handle_; }
528 return *object##_handle_; \
529 }
530 REUSABLE_HANDLE_LIST(REUSABLE_HANDLE) 506 REUSABLE_HANDLE_LIST(REUSABLE_HANDLE)
531 #undef REUSABLE_HANDLE 507 #undef REUSABLE_HANDLE
532 508
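
REUSABLE_HANDLE expands the same list that drives the DEBUG-only scope flags above. For a sample entry, say V(Object) (hypothetical here, since REUSABLE_HANDLE_LIST itself is outside this diff), the paste works out to:

```cpp
// What REUSABLE_HANDLE(Object) produces, given the macro above:
//   Object& ObjectHandle() const { return *Object_handle_; }
// Each Thread thus hands out one preallocated scratch handle per listed
// type, with the *_scope_active_ flags guarding against reentrant use.
```
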
533 /* 509 /*
534 * Fields used to support safepointing a thread. 510 * Fields used to support safepointing a thread.
535 * 511 *
536 * - Bit 0 of the safepoint_state_ field is used to indicate if the thread is 512 * - Bit 0 of the safepoint_state_ field is used to indicate if the thread is
537 * already at a safepoint, 513 * already at a safepoint,
538 * - Bit 1 of the safepoint_state_ field is used to indicate if a safepoint 514 * - Bit 1 of the safepoint_state_ field is used to indicate if a safepoint
539 * operation is requested for this thread. 515 * operation is requested for this thread.
(...skipping 30 matching lines...)
570 static uint32_t SetSafepointRequested(bool value, uint32_t state) { 546 static uint32_t SetSafepointRequested(bool value, uint32_t state) {
571 return SafepointRequestedField::update(value, state); 547 return SafepointRequestedField::update(value, state);
572 } 548 }
573 uint32_t SetSafepointRequested(bool value) { 549 uint32_t SetSafepointRequested(bool value) {
574 ASSERT(thread_lock()->IsOwnedByCurrentThread()); 550 ASSERT(thread_lock()->IsOwnedByCurrentThread());
575 uint32_t old_state; 551 uint32_t old_state;
576 uint32_t new_state; 552 uint32_t new_state;
577 do { 553 do {
578 old_state = safepoint_state_; 554 old_state = safepoint_state_;
579 new_state = SafepointRequestedField::update(value, old_state); 555 new_state = SafepointRequestedField::update(value, old_state);
580 } while (AtomicOperations::CompareAndSwapUint32(&safepoint_state_, 556 } while (AtomicOperations::CompareAndSwapUint32(
581 old_state, 557 &safepoint_state_, old_state, new_state) != old_state);
582 new_state) != old_state);
583 return old_state; 558 return old_state;
584 } 559 }
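
SetSafepointRequested is a classic compare-and-swap retry loop: recompute the new state from a fresh snapshot until the swap succeeds, then report the pre-update bits. The same shape in portable C++, a standalone sketch with std::atomic standing in for AtomicOperations::CompareAndSwapUint32:

```cpp
#include <atomic>
#include <cstdint>

std::atomic<uint32_t> g_state{0};

// Set or clear bit 1 (the position SafepointRequestedField occupies above)
// and return the state observed before the update, like the VM method.
uint32_t SetBit1(bool value) {
  uint32_t old_state = g_state.load();
  uint32_t new_state;
  do {
    new_state = value ? (old_state | (1u << 1)) : (old_state & ~(1u << 1));
    // On failure, compare_exchange_weak reloads old_state with the current
    // value, mirroring the retry in the header's do/while loop.
  } while (!g_state.compare_exchange_weak(old_state, new_state));
  return old_state;
}
```
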
585 static bool IsBlockedForSafepoint(uint32_t state) { 560 static bool IsBlockedForSafepoint(uint32_t state) {
586 return BlockedForSafepointField::decode(state); 561 return BlockedForSafepointField::decode(state);
587 } 562 }
588 bool IsBlockedForSafepoint() const { 563 bool IsBlockedForSafepoint() const {
589 return BlockedForSafepointField::decode(safepoint_state_); 564 return BlockedForSafepointField::decode(safepoint_state_);
590 } 565 }
591 void SetBlockedForSafepoint(bool value) { 566 void SetBlockedForSafepoint(bool value) {
592 ASSERT(thread_lock()->IsOwnedByCurrentThread()); 567 ASSERT(thread_lock()->IsOwnedByCurrentThread());
(...skipping 56 matching lines...)
649 void VisitObjectPointers(ObjectPointerVisitor* visitor, bool validate_frames); 624 void VisitObjectPointers(ObjectPointerVisitor* visitor, bool validate_frames);
650 625
651 bool IsValidLocalHandle(Dart_Handle object) const; 626 bool IsValidLocalHandle(Dart_Handle object) const;
652 int CountLocalHandles() const; 627 int CountLocalHandles() const;
653 int ZoneSizeInBytes() const; 628 int ZoneSizeInBytes() const;
654 void UnwindScopes(uword stack_marker); 629 void UnwindScopes(uword stack_marker);
655 630
656 void InitVMConstants(); 631 void InitVMConstants();
657 632
658 private: 633 private:
659 template<class T> T* AllocateReusableHandle(); 634 template <class T>
635 T* AllocateReusableHandle();
660 636
661 // Accessed from generated code: 637 // Accessed from generated code:
662 uword stack_limit_; 638 uword stack_limit_;
663 uword stack_overflow_flags_; 639 uword stack_overflow_flags_;
664 Isolate* isolate_; 640 Isolate* isolate_;
665 Heap* heap_; 641 Heap* heap_;
666 uword top_exit_frame_info_; 642 uword top_exit_frame_info_;
667 StoreBufferBlock* store_buffer_block_; 643 StoreBufferBlock* store_buffer_block_;
668 uword vm_tag_; 644 uword vm_tag_;
669 TaskKind task_kind_; 645 TaskKind task_kind_;
670 // State that is cached in the TLS for fast access in generated code. 646 // State that is cached in the TLS for fast access in generated code.
671 #define DECLARE_MEMBERS(type_name, member_name, expr, default_init_value) \ 647 #define DECLARE_MEMBERS(type_name, member_name, expr, default_init_value) \
672 type_name member_name; 648 type_name member_name;
673 CACHED_CONSTANTS_LIST(DECLARE_MEMBERS) 649 CACHED_CONSTANTS_LIST(DECLARE_MEMBERS)
674 #undef DECLARE_MEMBERS 650 #undef DECLARE_MEMBERS
675 651
676 #define DECLARE_MEMBERS(name) \ 652 #define DECLARE_MEMBERS(name) uword name##_entry_point_;
677 uword name##_entry_point_; 653 RUNTIME_ENTRY_LIST(DECLARE_MEMBERS)
678 RUNTIME_ENTRY_LIST(DECLARE_MEMBERS)
679 #undef DECLARE_MEMBERS 654 #undef DECLARE_MEMBERS
680 655
681 #define DECLARE_MEMBERS(returntype, name, ...) \ 656 #define DECLARE_MEMBERS(returntype, name, ...) uword name##_entry_point_;
682 uword name##_entry_point_; 657 LEAF_RUNTIME_ENTRY_LIST(DECLARE_MEMBERS)
683 LEAF_RUNTIME_ENTRY_LIST(DECLARE_MEMBERS)
684 #undef DECLARE_MEMBERS 658 #undef DECLARE_MEMBERS
685 659
686 TimelineStream* dart_stream_; 660 TimelineStream* dart_stream_;
687 OSThread* os_thread_; 661 OSThread* os_thread_;
688 Monitor* thread_lock_; 662 Monitor* thread_lock_;
689 Zone* zone_; 663 Zone* zone_;
690 ApiLocalScope* api_reusable_scope_; 664 ApiLocalScope* api_reusable_scope_;
691 ApiLocalScope* api_top_scope_; 665 ApiLocalScope* api_top_scope_;
692 StackResource* top_resource_; 666 StackResource* top_resource_;
693 LongJumpScope* long_jump_base_; 667 LongJumpScope* long_jump_base_;
(...skipping 13 matching lines...)
707 // Compiler state: 681 // Compiler state:
708 CHA* cha_; 682 CHA* cha_;
709 TypeRangeCache* type_range_cache_; 683 TypeRangeCache* type_range_cache_;
710 intptr_t deopt_id_; // Compilation specific counter. 684 intptr_t deopt_id_; // Compilation specific counter.
711 RawGrowableObjectArray* pending_functions_; 685 RawGrowableObjectArray* pending_functions_;
712 686
713 RawError* sticky_error_; 687 RawError* sticky_error_;
714 688
715 CompilerStats* compiler_stats_; 689 CompilerStats* compiler_stats_;
716 690
717 // Reusable handles support. 691 // Reusable handles support.
718 #define REUSABLE_HANDLE_FIELDS(object) \ 692 #define REUSABLE_HANDLE_FIELDS(object) object* object##_handle_;
719 object* object##_handle_;
720 REUSABLE_HANDLE_LIST(REUSABLE_HANDLE_FIELDS) 693 REUSABLE_HANDLE_LIST(REUSABLE_HANDLE_FIELDS)
721 #undef REUSABLE_HANDLE_FIELDS 694 #undef REUSABLE_HANDLE_FIELDS
722 695
723 #if defined(DEBUG) 696 #if defined(DEBUG)
724 #define REUSABLE_HANDLE_SCOPE_VARIABLE(object) \ 697 #define REUSABLE_HANDLE_SCOPE_VARIABLE(object) \
725 bool reusable_##object##_handle_scope_active_; 698 bool reusable_##object##_handle_scope_active_;
726 REUSABLE_HANDLE_LIST(REUSABLE_HANDLE_SCOPE_VARIABLE); 699 REUSABLE_HANDLE_LIST(REUSABLE_HANDLE_SCOPE_VARIABLE);
727 #undef REUSABLE_HANDLE_SCOPE_VARIABLE 700 #undef REUSABLE_HANDLE_SCOPE_VARIABLE
728 #endif // defined(DEBUG) 701 #endif // defined(DEBUG)
729 702
730 class AtSafepointField : public BitField<uint32_t, bool, 0, 1> {}; 703 class AtSafepointField : public BitField<uint32_t, bool, 0, 1> {};
731 class SafepointRequestedField : public BitField<uint32_t, bool, 1, 1> {}; 704 class SafepointRequestedField : public BitField<uint32_t, bool, 1, 1> {};
732 class BlockedForSafepointField : public BitField<uint32_t, bool, 2, 1> {}; 705 class BlockedForSafepointField : public BitField<uint32_t, bool, 2, 1> {};
733 uint32_t safepoint_state_; 706 uint32_t safepoint_state_;
734 uint32_t execution_state_; 707 uint32_t execution_state_;
735 708
736 Thread* next_; // Used to chain the thread structures in an isolate. 709 Thread* next_; // Used to chain the thread structures in an isolate.
737 710
738 explicit Thread(Isolate* isolate); 711 explicit Thread(Isolate* isolate);
739 712
740 void StoreBufferRelease( 713 void StoreBufferRelease(
741 StoreBuffer::ThresholdPolicy policy = StoreBuffer::kCheckThreshold); 714 StoreBuffer::ThresholdPolicy policy = StoreBuffer::kCheckThreshold);
742 void StoreBufferAcquire(); 715 void StoreBufferAcquire();
743 716
744 void set_zone(Zone* zone) { 717 void set_zone(Zone* zone) { zone_ = zone; }
745 zone_ = zone;
746 }
747 718
748 void set_safepoint_state(uint32_t value) { 719 void set_safepoint_state(uint32_t value) { safepoint_state_ = value; }
749 safepoint_state_ = value;
750 }
751 void EnterSafepointUsingLock(); 720 void EnterSafepointUsingLock();
752 void ExitSafepointUsingLock(); 721 void ExitSafepointUsingLock();
753 void BlockForSafepoint(); 722 void BlockForSafepoint();
754 723
755 static void SetCurrent(Thread* current) { 724 static void SetCurrent(Thread* current) {
756 OSThread::SetCurrentTLS(reinterpret_cast<uword>(current)); 725 OSThread::SetCurrentTLS(reinterpret_cast<uword>(current));
757 } 726 }
758 727
759 void DeferOOBMessageInterrupts(); 728 void DeferOOBMessageInterrupts();
760 void RestoreOOBMessageInterrupts(); 729 void RestoreOOBMessageInterrupts();
761 730
762 #define REUSABLE_FRIEND_DECLARATION(name) \ 731 #define REUSABLE_FRIEND_DECLARATION(name) \
763 friend class Reusable##name##HandleScope; 732 friend class Reusable##name##HandleScope;
764 REUSABLE_HANDLE_LIST(REUSABLE_FRIEND_DECLARATION) 733 REUSABLE_HANDLE_LIST(REUSABLE_FRIEND_DECLARATION)
765 #undef REUSABLE_FRIEND_DECLARATION 734 #undef REUSABLE_FRIEND_DECLARATION
766 735
767 friend class ApiZone; 736 friend class ApiZone;
768 friend class InterruptChecker; 737 friend class InterruptChecker;
769 friend class Isolate; 738 friend class Isolate;
770 friend class IsolateTestHelper; 739 friend class IsolateTestHelper;
771 friend class NoOOBMessageScope; 740 friend class NoOOBMessageScope;
772 friend class Simulator; 741 friend class Simulator;
773 friend class StackZone; 742 friend class StackZone;
774 friend class ThreadRegistry; 743 friend class ThreadRegistry;
(...skipping 11 matching lines...)
786 // Disable thread interrupts. 755 // Disable thread interrupts.
787 class DisableThreadInterruptsScope : public StackResource { 756 class DisableThreadInterruptsScope : public StackResource {
788 public: 757 public:
789 explicit DisableThreadInterruptsScope(Thread* thread); 758 explicit DisableThreadInterruptsScope(Thread* thread);
790 ~DisableThreadInterruptsScope(); 759 ~DisableThreadInterruptsScope();
791 }; 760 };
792 761
793 } // namespace dart 762 } // namespace dart
794 763
795 #endif // RUNTIME_VM_THREAD_H_ 764 #endif // RUNTIME_VM_THREAD_H_