OLD | NEW |
1 // Copyright 2008 the V8 project authors. All rights reserved. | 1 // Copyright 2008 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 18 matching lines...) Expand all Loading... |
29 | 29 |
30 #include "api.h" | 30 #include "api.h" |
31 #include "bootstrapper.h" | 31 #include "bootstrapper.h" |
32 #include "debug.h" | 32 #include "debug.h" |
33 #include "execution.h" | 33 #include "execution.h" |
34 #include "v8threads.h" | 34 #include "v8threads.h" |
35 #include "regexp-stack.h" | 35 #include "regexp-stack.h" |
36 | 36 |
37 namespace v8 { | 37 namespace v8 { |
38 | 38 |
39 static internal::Thread::LocalStorageKey thread_state_key = | |
40 internal::Thread::CreateThreadLocalKey(); | |
41 static internal::Thread::LocalStorageKey thread_id_key = | |
42 internal::Thread::CreateThreadLocalKey(); | |
43 | |
44 | |
45 // Track whether this V8 instance has ever called v8::Locker. This allows the | |
46 // API code to verify that the lock is always held when V8 is being entered. | |
47 bool Locker::active_ = false; | |
48 | |
49 | |
50 // Constructor for the Locker object. Once the Locker is constructed the | 39 // Constructor for the Locker object. Once the Locker is constructed the |
51 // current thread will be guaranteed to have the big V8 lock. | 40 // current thread will be guaranteed to have the big V8 lock. |
52 Locker::Locker() : has_lock_(false), top_level_(true) { | 41 Locker::Locker() : has_lock_(false), top_level_(true) { |
| 42 internal::V8Context* const v8context = v8_context(); |
| 43 |
53 // Record that the Locker has been used at least once. | 44 // Record that the Locker has been used at least once. |
54 active_ = true; | 45 v8context->v8_data_.active_ = true; |
55 // Get the big lock if necessary. | 46 // Get the big lock if necessary. |
56 if (!internal::ThreadManager::IsLockedByCurrentThread()) { | 47 if (!internal::ThreadManager::IsLockedByCurrentThread()) { |
57 internal::ThreadManager::Lock(); | 48 internal::ThreadManager::Lock(); |
58 has_lock_ = true; | 49 has_lock_ = true; |
59 // Make sure that V8 is initialized. Archiving of threads interferes | 50 // Make sure that V8 is initialized. Archiving of threads interferes |
60 // with deserialization by adding additional root pointers, so we must | 51 // with deserialization by adding additional root pointers, so we must |
61 // initialize here, before anyone can call ~Locker() or Unlocker(). | 52 // initialize here, before anyone can call ~Locker() or Unlocker(). |
62 if (!internal::V8::IsRunning()) { | 53 if (!internal::V8::IsRunning()) { |
63 V8::Initialize(); | 54 V8::Initialize(); |
64 } | 55 } |
65 // This may be a locker within an unlocker in which case we have to | 56 // This may be a locker within an unlocker in which case we have to |
66 // get the saved state for this thread and restore it. | 57 // get the saved state for this thread and restore it. |
67 if (internal::ThreadManager::RestoreThread()) { | 58 if (internal::ThreadManager::RestoreThread()) { |
68 top_level_ = false; | 59 top_level_ = false; |
69 } else { | 60 } else { |
70 internal::ExecutionAccess access; | 61 internal::ExecutionAccess access; |
71 internal::StackGuard::ClearThread(access); | 62 internal::StackGuard::ClearThread(access); |
72 internal::StackGuard::InitThread(access); | 63 internal::StackGuard::InitThread(access); |
73 } | 64 } |
74 } | 65 } |
75 ASSERT(internal::ThreadManager::IsLockedByCurrentThread()); | 66 ASSERT(internal::ThreadManager::IsLockedByCurrentThread()); |
76 | 67 |
77 // Make sure this thread is assigned a thread id. | 68 // Make sure this thread is assigned a thread id. |
78 internal::ThreadManager::AssignId(); | 69 internal::ThreadManager::AssignId(); |
79 } | 70 } |
80 | 71 |
| 72 bool Locker::IsActive() { |
| 73 return v8_context()->v8_data_.active_; |
| 74 } |
81 | 75 |
82 bool Locker::IsLocked() { | 76 bool Locker::IsLocked() { |
83 return internal::ThreadManager::IsLockedByCurrentThread(); | 77 return internal::ThreadManager::IsLockedByCurrentThread(); |
84 } | 78 } |
85 | 79 |
86 | 80 |
87 Locker::~Locker() { | 81 Locker::~Locker() { |
88 ASSERT(internal::ThreadManager::IsLockedByCurrentThread()); | 82 ASSERT(internal::ThreadManager::IsLockedByCurrentThread()); |
89 if (has_lock_) { | 83 if (has_lock_) { |
90 if (top_level_) { | 84 if (top_level_) { |
(...skipping 27 matching lines...) Expand all Loading... |
118 | 112 |
119 void Locker::StopPreemption() { | 113 void Locker::StopPreemption() { |
120 v8::internal::ContextSwitcher::StopPreemption(); | 114 v8::internal::ContextSwitcher::StopPreemption(); |
121 } | 115 } |
122 | 116 |
123 | 117 |
124 namespace internal { | 118 namespace internal { |
125 | 119 |
126 | 120 |
127 bool ThreadManager::RestoreThread() { | 121 bool ThreadManager::RestoreThread() { |
| 122 ThreadManagerData& thread_manager_data = v8_context()->thread_manager_data_; |
128 // First check whether the current thread has been 'lazily archived', ie | 123 // First check whether the current thread has been 'lazily archived', ie |
129 // not archived at all. If that is the case we put the state storage we | 124 // not archived at all. If that is the case we put the state storage we |
130 // had prepared back in the free list, since we didn't need it after all. | 125 // had prepared back in the free list, since we didn't need it after all. |
131 if (lazily_archived_thread_.IsSelf()) { | 126 if (thread_manager_data.lazily_archived_thread_.IsSelf()) { |
132 lazily_archived_thread_.Initialize(ThreadHandle::INVALID); | 127 thread_manager_data.lazily_archived_thread_.Initialize( |
133 ASSERT(Thread::GetThreadLocal(thread_state_key) == | 128 ThreadHandle::INVALID); |
134 lazily_archived_thread_state_); | 129 ASSERT(Thread::GetThreadLocal(thread_manager_data.thread_state_key_) == |
135 lazily_archived_thread_state_->set_id(kInvalidId); | 130 thread_manager_data.lazily_archived_thread_state_); |
136 lazily_archived_thread_state_->LinkInto(ThreadState::FREE_LIST); | 131 thread_manager_data.lazily_archived_thread_state_->set_id(kInvalidId); |
137 lazily_archived_thread_state_ = NULL; | 132 thread_manager_data.lazily_archived_thread_state_->LinkInto( |
138 Thread::SetThreadLocal(thread_state_key, NULL); | 133 ThreadState::FREE_LIST); |
| 134 |
| 135 thread_manager_data.lazily_archived_thread_state_ = NULL; |
| 136 Thread::SetThreadLocal(thread_manager_data.thread_state_key_, NULL); |
139 return true; | 137 return true; |
140 } | 138 } |
141 | 139 |
142 // Make sure that the preemption thread cannot modify the thread state while | 140 // Make sure that the preemption thread cannot modify the thread state while |
143 // it is being archived or restored. | 141 // it is being archived or restored. |
144 ExecutionAccess access; | 142 ExecutionAccess access; |
145 | 143 |
146 // If there is another thread that was lazily archived then we have to really | 144 // If there is another thread that was lazily archived then we have to really |
147 // archive it now. | 145 // archive it now. |
148 if (lazily_archived_thread_.IsValid()) { | 146 if (thread_manager_data.lazily_archived_thread_.IsValid()) { |
149 EagerlyArchiveThread(); | 147 EagerlyArchiveThread(); |
150 } | 148 } |
151 ThreadState* state = | 149 ThreadState* state = reinterpret_cast<ThreadState*>( |
152 reinterpret_cast<ThreadState*>(Thread::GetThreadLocal(thread_state_key)); | 150 Thread::GetThreadLocal(thread_manager_data.thread_state_key_)); |
153 if (state == NULL) { | 151 if (state == NULL) { |
154 // This is a new thread. | 152 // This is a new thread. |
155 StackGuard::InitThread(access); | 153 StackGuard::InitThread(access); |
156 return false; | 154 return false; |
157 } | 155 } |
158 char* from = state->data(); | 156 char* from = state->data(); |
159 from = HandleScopeImplementer::RestoreThread(from); | 157 from = HandleScopeImplementer::RestoreThread(from); |
160 from = Top::RestoreThread(from); | 158 from = Top::RestoreThread(from); |
161 from = Relocatable::RestoreState(from); | 159 from = Relocatable::RestoreState(from); |
162 #ifdef ENABLE_DEBUGGER_SUPPORT | 160 #ifdef ENABLE_DEBUGGER_SUPPORT |
163 from = Debug::RestoreDebug(from); | 161 from = Debug::RestoreDebug(from); |
164 #endif | 162 #endif |
165 from = StackGuard::RestoreStackGuard(from); | 163 from = StackGuard::RestoreStackGuard(from); |
166 from = RegExpStack::RestoreStack(from); | 164 from = RegExpStack::RestoreStack(from); |
167 from = Bootstrapper::RestoreState(from); | 165 from = Bootstrapper::RestoreState(from); |
168 Thread::SetThreadLocal(thread_state_key, NULL); | 166 Thread::SetThreadLocal(thread_manager_data.thread_state_key_, NULL); |
169 if (state->terminate_on_restore()) { | 167 if (state->terminate_on_restore()) { |
170 StackGuard::TerminateExecution(); | 168 StackGuard::TerminateExecution(); |
171 state->set_terminate_on_restore(false); | 169 state->set_terminate_on_restore(false); |
172 } | 170 } |
173 state->set_id(kInvalidId); | 171 state->set_id(kInvalidId); |
174 state->Unlink(); | 172 state->Unlink(); |
175 state->LinkInto(ThreadState::FREE_LIST); | 173 state->LinkInto(ThreadState::FREE_LIST); |
176 return true; | 174 return true; |
177 } | 175 } |
178 | 176 |
179 | 177 |
180 void ThreadManager::Lock() { | 178 void ThreadManager::Lock() { |
181 mutex_->Lock(); | 179 v8_context()->thread_manager_data_.mutex_->Lock(); |
182 mutex_owner_.Initialize(ThreadHandle::SELF); | 180 v8_context()->thread_manager_data_.mutex_owner_.Initialize( |
| 181 ThreadHandle::SELF); |
183 ASSERT(IsLockedByCurrentThread()); | 182 ASSERT(IsLockedByCurrentThread()); |
184 } | 183 } |
185 | 184 |
186 | 185 |
187 void ThreadManager::Unlock() { | 186 void ThreadManager::Unlock() { |
188 mutex_owner_.Initialize(ThreadHandle::INVALID); | 187 v8_context()->thread_manager_data_.mutex_owner_.Initialize( |
189 mutex_->Unlock(); | 188 ThreadHandle::INVALID); |
| 189 v8_context()->thread_manager_data_.mutex_->Unlock(); |
190 } | 190 } |
191 | 191 |
192 | 192 |
193 static int ArchiveSpacePerThread() { | 193 static int ArchiveSpacePerThread() { |
194 return HandleScopeImplementer::ArchiveSpacePerThread() + | 194 return HandleScopeImplementer::ArchiveSpacePerThread() + |
195 Top::ArchiveSpacePerThread() + | 195 Top::ArchiveSpacePerThread() + |
196 #ifdef ENABLE_DEBUGGER_SUPPORT | 196 #ifdef ENABLE_DEBUGGER_SUPPORT |
197 Debug::ArchiveSpacePerThread() + | 197 Debug::ArchiveSpacePerThread() + |
198 #endif | 198 #endif |
199 StackGuard::ArchiveSpacePerThread() + | 199 StackGuard::ArchiveSpacePerThread() + |
200 RegExpStack::ArchiveSpacePerThread() + | 200 RegExpStack::ArchiveSpacePerThread() + |
201 Bootstrapper::ArchiveSpacePerThread() + | 201 Bootstrapper::ArchiveSpacePerThread() + |
202 Relocatable::ArchiveSpacePerThread(); | 202 Relocatable::ArchiveSpacePerThread(); |
203 } | 203 } |
204 | 204 |
205 | 205 |
206 ThreadState* ThreadState::free_anchor_ = new ThreadState(); | |
207 ThreadState* ThreadState::in_use_anchor_ = new ThreadState(); | |
208 | 206 |
209 | 207 |
210 ThreadState::ThreadState() : id_(ThreadManager::kInvalidId), | 208 ThreadState::ThreadState() : id_(ThreadManager::kInvalidId), |
211 terminate_on_restore_(false), | 209 terminate_on_restore_(false), |
212 next_(this), previous_(this) { | 210 next_(this), previous_(this) { |
213 } | 211 } |
214 | 212 |
215 | 213 |
216 void ThreadState::AllocateSpace() { | 214 void ThreadState::AllocateSpace() { |
217 data_ = NewArray<char>(ArchiveSpacePerThread()); | 215 data_ = NewArray<char>(ArchiveSpacePerThread()); |
218 } | 216 } |
219 | 217 |
220 | 218 |
221 void ThreadState::Unlink() { | 219 void ThreadState::Unlink() { |
222 next_->previous_ = previous_; | 220 next_->previous_ = previous_; |
223 previous_->next_ = next_; | 221 previous_->next_ = next_; |
224 } | 222 } |
225 | 223 |
226 | 224 |
227 void ThreadState::LinkInto(List list) { | 225 void ThreadState::LinkInto(List list) { |
228 ThreadState* flying_anchor = | 226 ThreadState* flying_anchor = list == FREE_LIST ? |
229 list == FREE_LIST ? free_anchor_ : in_use_anchor_; | 227 v8_context()->thread_manager_data_.free_anchor_ |
| 228 : v8_context()->thread_manager_data_.in_use_anchor_; |
230 next_ = flying_anchor->next_; | 229 next_ = flying_anchor->next_; |
231 previous_ = flying_anchor; | 230 previous_ = flying_anchor; |
232 flying_anchor->next_ = this; | 231 flying_anchor->next_ = this; |
233 next_->previous_ = this; | 232 next_->previous_ = this; |
234 } | 233 } |
235 | 234 |
236 | 235 |
237 ThreadState* ThreadState::GetFree() { | 236 ThreadState* ThreadState::GetFree() { |
238 ThreadState* gotten = free_anchor_->next_; | 237 ThreadState* gotten = v8_context()->thread_manager_data_.free_anchor_->next_; |
239 if (gotten == free_anchor_) { | 238 if (gotten == v8_context()->thread_manager_data_.free_anchor_) { |
240 ThreadState* new_thread_state = new ThreadState(); | 239 ThreadState* new_thread_state = new ThreadState(); |
241 new_thread_state->AllocateSpace(); | 240 new_thread_state->AllocateSpace(); |
242 return new_thread_state; | 241 return new_thread_state; |
243 } | 242 } |
244 return gotten; | 243 return gotten; |
245 } | 244 } |
246 | 245 |
247 | 246 |
248 // Gets the first in the list of archived threads. | 247 // Gets the first in the list of archived threads. |
249 ThreadState* ThreadState::FirstInUse() { | 248 ThreadState* ThreadState::FirstInUse() { |
250 return in_use_anchor_->Next(); | 249 return v8_context()->thread_manager_data_.in_use_anchor_->Next(); |
251 } | 250 } |
252 | 251 |
253 | 252 |
254 ThreadState* ThreadState::Next() { | 253 ThreadState* ThreadState::Next() { |
255 if (next_ == in_use_anchor_) return NULL; | 254 if (next_ == v8_context()->thread_manager_data_.in_use_anchor_) return NULL; |
256 return next_; | 255 return next_; |
257 } | 256 } |
258 | 257 |
259 | 258 |
| 259 ThreadManagerData::ThreadManagerData() |
260 // Thread ids must start with 1, because in TLS having thread id 0 can't | 260 // Thread ids must start with 1, because in TLS having thread id 0 can't |
261 // be distinguished from not having a thread id at all (since NULL is | 261 // be distinguished from not having a thread id at all (since NULL is |
262 // defined as 0.) | 262 // defined as 0.) |
263 int ThreadManager::last_id_ = 0; | 263 : last_id_(0), |
264 Mutex* ThreadManager::mutex_ = OS::CreateMutex(); | 264 mutex_(OS::CreateMutex()), |
265 ThreadHandle ThreadManager::mutex_owner_(ThreadHandle::INVALID); | 265 mutex_owner_(ThreadHandle::INVALID), |
266 ThreadHandle ThreadManager::lazily_archived_thread_(ThreadHandle::INVALID); | 266 lazily_archived_thread_(ThreadHandle::INVALID), |
267 ThreadState* ThreadManager::lazily_archived_thread_state_ = NULL; | 267 lazily_archived_thread_state_(NULL), |
268 | 268 free_anchor_(new ThreadState()), |
| 269 in_use_anchor_(new ThreadState()), |
| 270 singleton_(NULL), |
| 271 thread_state_key_(Thread::CreateThreadLocalKey()), |
| 272 thread_id_key_(Thread::CreateThreadLocalKey()) { |
| 273 } |
269 | 274 |
270 void ThreadManager::ArchiveThread() { | 275 void ThreadManager::ArchiveThread() { |
271 ASSERT(!lazily_archived_thread_.IsValid()); | 276 ThreadManagerData& thread_manager_data = v8_context()->thread_manager_data_; |
| 277 ASSERT(!thread_manager_data.lazily_archived_thread_.IsValid()); |
272 ASSERT(!IsArchived()); | 278 ASSERT(!IsArchived()); |
273 ThreadState* state = ThreadState::GetFree(); | 279 ThreadState* state = ThreadState::GetFree(); |
274 state->Unlink(); | 280 state->Unlink(); |
275 Thread::SetThreadLocal(thread_state_key, reinterpret_cast<void*>(state)); | 281 Thread::SetThreadLocal(thread_manager_data.thread_state_key_, |
276 lazily_archived_thread_.Initialize(ThreadHandle::SELF); | 282 reinterpret_cast<void*>(state)); |
277 lazily_archived_thread_state_ = state; | 283 |
| 284 thread_manager_data.lazily_archived_thread_.Initialize(ThreadHandle::SELF); |
| 285 thread_manager_data.lazily_archived_thread_state_ = state; |
278 ASSERT(state->id() == kInvalidId); | 286 ASSERT(state->id() == kInvalidId); |
279 state->set_id(CurrentId()); | 287 state->set_id(CurrentId()); |
280 ASSERT(state->id() != kInvalidId); | 288 ASSERT(state->id() != kInvalidId); |
281 } | 289 } |
282 | 290 |
283 | 291 |
284 void ThreadManager::EagerlyArchiveThread() { | 292 void ThreadManager::EagerlyArchiveThread() { |
285 ThreadState* state = lazily_archived_thread_state_; | 293 ThreadManagerData& thread_manager_data = v8_context()->thread_manager_data_; |
| 294 ThreadState* state = thread_manager_data.lazily_archived_thread_state_; |
286 state->LinkInto(ThreadState::IN_USE_LIST); | 295 state->LinkInto(ThreadState::IN_USE_LIST); |
287 char* to = state->data(); | 296 char* to = state->data(); |
288 // Ensure that data containing GC roots are archived first, and handle them | 297 // Ensure that data containing GC roots are archived first, and handle them |
289 // in ThreadManager::Iterate(ObjectVisitor*). | 298 // in ThreadManager::Iterate(ObjectVisitor*). |
290 to = HandleScopeImplementer::ArchiveThread(to); | 299 to = HandleScopeImplementer::ArchiveThread(to); |
291 to = Top::ArchiveThread(to); | 300 to = Top::ArchiveThread(to); |
292 to = Relocatable::ArchiveState(to); | 301 to = Relocatable::ArchiveState(to); |
293 #ifdef ENABLE_DEBUGGER_SUPPORT | 302 #ifdef ENABLE_DEBUGGER_SUPPORT |
294 to = Debug::ArchiveDebug(to); | 303 to = Debug::ArchiveDebug(to); |
295 #endif | 304 #endif |
296 to = StackGuard::ArchiveStackGuard(to); | 305 to = StackGuard::ArchiveStackGuard(to); |
297 to = RegExpStack::ArchiveStack(to); | 306 to = RegExpStack::ArchiveStack(to); |
298 to = Bootstrapper::ArchiveState(to); | 307 to = Bootstrapper::ArchiveState(to); |
299 lazily_archived_thread_.Initialize(ThreadHandle::INVALID); | 308 thread_manager_data.lazily_archived_thread_.Initialize(ThreadHandle::INVALID); |
300 lazily_archived_thread_state_ = NULL; | 309 thread_manager_data.lazily_archived_thread_state_ = NULL; |
301 } | 310 } |
302 | 311 |
303 | 312 |
304 void ThreadManager::FreeThreadResources() { | 313 void ThreadManager::FreeThreadResources() { |
305 HandleScopeImplementer::FreeThreadResources(); | 314 HandleScopeImplementer::FreeThreadResources(); |
306 Top::FreeThreadResources(); | 315 Top::FreeThreadResources(); |
307 #ifdef ENABLE_DEBUGGER_SUPPORT | 316 #ifdef ENABLE_DEBUGGER_SUPPORT |
308 Debug::FreeThreadResources(); | 317 Debug::FreeThreadResources(); |
309 #endif | 318 #endif |
310 StackGuard::FreeThreadResources(); | 319 StackGuard::FreeThreadResources(); |
311 RegExpStack::FreeThreadResources(); | 320 RegExpStack::FreeThreadResources(); |
312 Bootstrapper::FreeThreadResources(); | 321 Bootstrapper::FreeThreadResources(); |
313 } | 322 } |
314 | 323 |
315 | 324 |
316 bool ThreadManager::IsArchived() { | 325 bool ThreadManager::IsArchived() { |
317 return Thread::HasThreadLocal(thread_state_key); | 326 return Thread::HasThreadLocal( |
| 327 v8_context()->thread_manager_data_.thread_state_key_); |
318 } | 328 } |
319 | 329 |
320 | 330 |
321 void ThreadManager::Iterate(ObjectVisitor* v) { | 331 void ThreadManager::Iterate(ObjectVisitor* v) { |
322 // Expecting no threads during serialization/deserialization | 332 // Expecting no threads during serialization/deserialization |
323 for (ThreadState* state = ThreadState::FirstInUse(); | 333 for (ThreadState* state = ThreadState::FirstInUse(); |
324 state != NULL; | 334 state != NULL; |
325 state = state->Next()) { | 335 state = state->Next()) { |
326 char* data = state->data(); | 336 char* data = state->data(); |
327 data = HandleScopeImplementer::Iterate(v, data); | 337 data = HandleScopeImplementer::Iterate(v, data); |
(...skipping 19 matching lines...) Expand all Loading... |
347 state != NULL; | 357 state != NULL; |
348 state = state->Next()) { | 358 state = state->Next()) { |
349 char* data = state->data(); | 359 char* data = state->data(); |
350 data += HandleScopeImplementer::ArchiveSpacePerThread(); | 360 data += HandleScopeImplementer::ArchiveSpacePerThread(); |
351 Top::MarkCompactEpilogue(is_compacting, data); | 361 Top::MarkCompactEpilogue(is_compacting, data); |
352 } | 362 } |
353 } | 363 } |
354 | 364 |
355 | 365 |
356 int ThreadManager::CurrentId() { | 366 int ThreadManager::CurrentId() { |
357 return Thread::GetThreadLocalInt(thread_id_key); | 367 return Thread::GetThreadLocalInt( |
| 368 v8_context()->thread_manager_data_.thread_id_key_); |
358 } | 369 } |
359 | 370 |
360 | 371 |
361 void ThreadManager::AssignId() { | 372 void ThreadManager::AssignId() { |
362 if (!HasId()) { | 373 if (!HasId()) { |
363 ASSERT(Locker::IsLocked()); | 374 ASSERT(Locker::IsLocked()); |
364 int thread_id = ++last_id_; | 375 ThreadManagerData& thread_manager_data = v8_context()->thread_manager_data_; |
| 376 int thread_id = ++thread_manager_data.last_id_; |
365 ASSERT(thread_id > 0); // see the comment near last_id_ definition. | 377 ASSERT(thread_id > 0); // see the comment near last_id_ definition. |
366 Thread::SetThreadLocalInt(thread_id_key, thread_id); | 378 Thread::SetThreadLocalInt(thread_manager_data.thread_id_key_, thread_id); |
367 Top::set_thread_id(thread_id); | 379 Top::set_thread_id(thread_id); |
368 } | 380 } |
369 } | 381 } |
370 | 382 |
371 | 383 |
372 bool ThreadManager::HasId() { | 384 bool ThreadManager::HasId() { |
373 return Thread::HasThreadLocal(thread_id_key); | 385 return Thread::HasThreadLocal( |
| 386 v8_context()->thread_manager_data_.thread_id_key_); |
374 } | 387 } |
375 | 388 |
376 | 389 |
377 void ThreadManager::TerminateExecution(int thread_id) { | 390 void ThreadManager::TerminateExecution(int thread_id) { |
378 for (ThreadState* state = ThreadState::FirstInUse(); | 391 for (ThreadState* state = ThreadState::FirstInUse(); |
379 state != NULL; | 392 state != NULL; |
380 state = state->Next()) { | 393 state = state->Next()) { |
381 if (thread_id == state->id()) { | 394 if (thread_id == state->id()) { |
382 state->set_terminate_on_restore(true); | 395 state->set_terminate_on_restore(true); |
383 } | 396 } |
384 } | 397 } |
385 } | 398 } |
386 | 399 |
387 | |
388 // This is the ContextSwitcher singleton. There is at most a single thread | |
389 // running which delivers preemption events to V8 threads. | |
390 ContextSwitcher* ContextSwitcher::singleton_ = NULL; | |
391 | |
392 | |
393 ContextSwitcher::ContextSwitcher(int every_n_ms) | 400 ContextSwitcher::ContextSwitcher(int every_n_ms) |
394 : keep_going_(true), | 401 : keep_going_(true), |
395 sleep_ms_(every_n_ms) { | 402 sleep_ms_(every_n_ms) { |
396 } | 403 } |
397 | 404 |
398 | 405 |
399 // Set the scheduling interval of V8 threads. This function starts the | 406 // Set the scheduling interval of V8 threads. This function starts the |
400 // ContextSwitcher thread if needed. | 407 // ContextSwitcher thread if needed. |
401 void ContextSwitcher::StartPreemption(int every_n_ms) { | 408 void ContextSwitcher::StartPreemption(int every_n_ms) { |
402 ASSERT(Locker::IsLocked()); | 409 ASSERT(Locker::IsLocked()); |
403 if (singleton_ == NULL) { | 410 ContextSwitcher*& singleton = v8_context()->thread_manager_data_.singleton_; |
| 411 if (singleton == NULL) { |
404 // If the ContextSwitcher thread is not running at the moment start it now. | 412 // If the ContextSwitcher thread is not running at the moment start it now. |
405 singleton_ = new ContextSwitcher(every_n_ms); | 413 singleton = new ContextSwitcher(every_n_ms); |
406 singleton_->Start(); | 414 singleton->Start(); |
407 } else { | 415 } else { |
408 // ContextSwitcher thread is already running, so we just change the | 416 // ContextSwitcher thread is already running, so we just change the |
409 // scheduling interval. | 417 // scheduling interval. |
410 singleton_->sleep_ms_ = every_n_ms; | 418 singleton->sleep_ms_ = every_n_ms; |
411 } | 419 } |
412 } | 420 } |
413 | 421 |
414 | 422 |
415 // Disable preemption of V8 threads. If multiple threads want to use V8 they | 423 // Disable preemption of V8 threads. If multiple threads want to use V8 they |
416 // must cooperatively schedule amongst them from this point on. | 424 // must cooperatively schedule amongst them from this point on. |
417 void ContextSwitcher::StopPreemption() { | 425 void ContextSwitcher::StopPreemption() { |
418 ASSERT(Locker::IsLocked()); | 426 ASSERT(Locker::IsLocked()); |
419 if (singleton_ != NULL) { | 427 ContextSwitcher*& singleton = v8_context()->thread_manager_data_.singleton_; |
| 428 if (singleton != NULL) { |
420 // The ContextSwitcher thread is running. We need to stop it and release | 429 // The ContextSwitcher thread is running. We need to stop it and release |
421 // its resources. | 430 // its resources. |
422 singleton_->keep_going_ = false; | 431 singleton->keep_going_ = false; |
423 singleton_->Join(); // Wait for the ContextSwitcher thread to exit. | 432 singleton->Join(); // Wait for the ContextSwitcher thread to exit. |
424 // Thread has exited, now we can delete it. | 433 // Thread has exited, now we can delete it. |
425 delete(singleton_); | 434 delete(singleton); |
426 singleton_ = NULL; | 435 singleton = NULL; |
427 } | 436 } |
428 } | 437 } |
429 | 438 |
430 | 439 |
431 // Main loop of the ContextSwitcher thread: Preempt the currently running V8 | 440 // Main loop of the ContextSwitcher thread: Preempt the currently running V8 |
432 // thread at regular intervals. | 441 // thread at regular intervals. |
433 void ContextSwitcher::Run() { | 442 void ContextSwitcher::Run() { |
434 while (keep_going_) { | 443 while (keep_going_) { |
435 OS::Sleep(sleep_ms_); | 444 OS::Sleep(sleep_ms_); |
436 StackGuard::Preempt(); | 445 StackGuard::Preempt(); |
437 } | 446 } |
438 } | 447 } |
439 | 448 |
440 | 449 |
441 // Acknowledge the preemption by the receiving thread. | 450 // Acknowledge the preemption by the receiving thread. |
442 void ContextSwitcher::PreemptionReceived() { | 451 void ContextSwitcher::PreemptionReceived() { |
443 ASSERT(Locker::IsLocked()); | 452 ASSERT(Locker::IsLocked()); |
444 // There is currently no accounting being done for this. But could be in the | 453 // There is currently no accounting being done for this. But could be in the |
445 // future, which is why we leave this in. | 454 // future, which is why we leave this in. |
446 } | 455 } |
447 | 456 |
448 | 457 |
449 } // namespace internal | 458 } // namespace internal |
450 } // namespace v8 | 459 } // namespace v8 |
OLD | NEW |