| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (C) 2014 Google Inc. All rights reserved. | 2 * Copyright (C) 2014 Google Inc. All rights reserved. |
| 3 * | 3 * |
| 4 * Redistribution and use in source and binary forms, with or without | 4 * Redistribution and use in source and binary forms, with or without |
| 5 * modification, are permitted provided that the following conditions are | 5 * modification, are permitted provided that the following conditions are |
| 6 * met: | 6 * met: |
| 7 * | 7 * |
| 8 * * Redistributions of source code must retain the above copyright | 8 * * Redistributions of source code must retain the above copyright |
| 9 * notice, this list of conditions and the following disclaimer. | 9 * notice, this list of conditions and the following disclaimer. |
| 10 * * Redistributions in binary form must reproduce the above | 10 * * Redistributions in binary form must reproduce the above |
| (...skipping 163 matching lines...) |
| 174 return *this; | 174 return *this; |
| 175 } | 175 } |
| 176 | 176 |
| 177 template<typename U> | 177 template<typename U> |
| 178 PersistentBase& operator=(const Member<U>& other) | 178 PersistentBase& operator=(const Member<U>& other) |
| 179 { | 179 { |
| 180 assign(other); | 180 assign(other); |
| 181 return *this; | 181 return *this; |
| 182 } | 182 } |
| 183 | 183 |
| 184 #if defined(LEAK_SANITIZER) | 184 // Register the persistent node as a 'static reference', |
| 185 // belonging to the current thread and a persistent that must |
| 186 // be cleared when the ThreadState itself is cleared out and |
| 187 // destructed. |
| 188 // |
| 189 // Static singletons arrange for this to happen, either to ensure |
| 190 // clean LSan leak reports or to register a thread-local persistent |
| 191 // needing to be cleared out before the thread is terminated. |
| 185 PersistentBase* registerAsStaticReference() | 192 PersistentBase* registerAsStaticReference() |
| 186 { | 193 { |
| 187 if (m_persistentNode) { | 194 if (m_persistentNode) { |
| 188 ASSERT(ThreadState::current()); | 195 ASSERT(ThreadState::current()); |
| 189 ThreadState::current()->registerStaticPersistentNode(m_persistentNode); | 196 ThreadState::current()->registerStaticPersistentNode(m_persistentNode, nullptr); |
| 190 LEAK_SANITIZER_IGNORE_OBJECT(this); | 197 LEAK_SANITIZER_IGNORE_OBJECT(this); |
| 191 } | 198 } |
| 192 return this; | 199 return this; |
| 193 } | 200 } |
| 194 #endif | |
| 195 | 201 |
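The new comment (new lines 184-191) describes the static-singleton use of registerAsStaticReference(). A minimal sketch of that pattern, assuming a hypothetical Foo type and sharedFoo() accessor (neither is part of this change):

    Foo& sharedFoo()
    {
        // Leaked on purpose; registering the handle as a static reference
        // keeps LSan reports clean and lets the current ThreadState clear
        // it when it is shut down.
        static Persistent<Foo>* handle = nullptr;
        if (!handle) {
            handle = new Persistent<Foo>(new Foo);
            handle->registerAsStaticReference();
        }
        return *handle->get();
    }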
| 196 protected: | 202 protected: |
| 197 T* atomicGet() { return reinterpret_cast<T*>(acquireLoad(reinterpret_cast<void* volatile*>(&m_raw))); } | 203 T* atomicGet() { return reinterpret_cast<T*>(acquireLoad(reinterpret_cast<void* volatile*>(&m_raw))); } |
| 198 | 204 |
| 199 private: | 205 private: |
| 200 NO_LAZY_SWEEP_SANITIZE_ADDRESS | 206 NO_LAZY_SWEEP_SANITIZE_ADDRESS |
| 201 void assign(T* ptr) | 207 void assign(T* ptr) |
| 202 { | 208 { |
| 203 if (crossThreadnessConfiguration == CrossThreadPersistentConfiguration) | 209 if (crossThreadnessConfiguration == CrossThreadPersistentConfiguration) |
| 204 releaseStore(reinterpret_cast<void* volatile*>(&m_raw), ptr); | 210 releaseStore(reinterpret_cast<void* volatile*>(&m_raw), ptr); |
| (...skipping 34 matching lines...) |
| 239 if (!m_persistentNode) | 245 if (!m_persistentNode) |
| 240 return; | 246 return; |
| 241 | 247 |
| 242 if (crossThreadnessConfiguration == CrossThreadPersistentConfiguration) { | 248 if (crossThreadnessConfiguration == CrossThreadPersistentConfiguration) { |
| 243 ProcessHeap::crossThreadPersistentRegion().freePersistentNode(m_persistentNode); | 249 ProcessHeap::crossThreadPersistentRegion().freePersistentNode(m_persistentNode); |
| 244 } else { | 250 } else { |
| 245 ThreadState* state = ThreadStateFor<ThreadingTrait<T>::Affinity>::state(); | 251 ThreadState* state = ThreadStateFor<ThreadingTrait<T>::Affinity>::state(); |
| 246 ASSERT(state->checkThread()); | 252 ASSERT(state->checkThread()); |
| 247 // Persistent handle must be created and destructed in the same thread. | 253 // Persistent handle must be created and destructed in the same thread. |
| 248 ASSERT(m_state == state); | 254 ASSERT(m_state == state); |
| 249 state->getPersistentRegion()->freePersistentNode(m_persistentNode); | 255 state->freePersistentNode(m_persistentNode); |
| 250 } | 256 } |
| 251 m_persistentNode = nullptr; | 257 m_persistentNode = nullptr; |
| 252 } | 258 } |
| 253 | 259 |
| 254 void checkPointer() | 260 void checkPointer() |
| 255 { | 261 { |
| 256 #if ENABLE(ASSERT) && defined(ADDRESS_SANITIZER) | 262 #if ENABLE(ASSERT) && defined(ADDRESS_SANITIZER) |
| 257 if (!m_raw) | 263 if (!m_raw) |
| 258 return; | 264 return; |
| 259 | 265 |
| (...skipping 62 matching lines...) |
| 322 Parent::operator=(other); | 328 Parent::operator=(other); |
| 323 return *this; | 329 return *this; |
| 324 } | 330 } |
| 325 | 331 |
| 326 template<typename U> | 332 template<typename U> |
| 327 Persistent& operator=(const Member<U>& other) | 333 Persistent& operator=(const Member<U>& other) |
| 328 { | 334 { |
| 329 Parent::operator=(other); | 335 Parent::operator=(other); |
| 330 return *this; | 336 return *this; |
| 331 } | 337 } |
| 332 | |
| 333 // Requests that the thread state clear this handle when the thread shuts | |
| 334 // down. This is intended for use with ThreadSpecific<Persistent<T>>. | |
| 335 // It's important that the Persistent<T> exist until then, because this | |
| 336 // takes a raw pointer to that handle. | |
| 337 // | |
| 338 // Example: | |
| 339 // Foo& sharedFoo() | |
| 340 // { | |
| 341 // DEFINE_THREAD_SAFE_STATIC_LOCAL( | |
| 342 // ThreadSpecific<Persistent<Foo>>, threadSpecificFoo, | |
| 343 // new ThreadSpecific<Persistent<Foo>>); | |
| 344 // Persistent<Foo>& fooHandle = *threadSpecificFoo; | |
| 345 // if (!fooHandle) { | |
| 346 // fooHandle = new Foo; | |
| 347 // fooHandle.clearOnThreadShutdown(); | |
| 348 // } | |
| 349 // return *fooHandle; | |
| 350 // } | |
| 351 void clearOnThreadShutdown() | |
| 352 { | |
| 353 void (*closure)(Persistent<T>*) = [](Persistent<T>* handle) | |
| 354 { | |
| 355 *handle = nullptr; | |
| 356 }; | |
| 357 ThreadState::current()->registerThreadShutdownHook(WTF::bind(closure, this)); | |
| 358 } | |
| 359 }; | 338 }; |
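The clearOnThreadShutdown() helper and its example removed above (old lines 333-358) can be expressed with the new API instead; a hedged sketch reusing the names from that example (Foo and sharedFoo() remain hypothetical), under the assumption that registerAsStaticReference() is the intended replacement for thread-local persistents:

    Foo& sharedFoo()
    {
        DEFINE_THREAD_SAFE_STATIC_LOCAL(
            ThreadSpecific<Persistent<Foo>>, threadSpecificFoo,
            new ThreadSpecific<Persistent<Foo>>);
        Persistent<Foo>& fooHandle = *threadSpecificFoo;
        if (!fooHandle) {
            fooHandle = new Foo;
            // Registered so this thread's ThreadState clears the handle
            // before the thread terminates.
            fooHandle.registerAsStaticReference();
        }
        return *fooHandle;
    }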
| 360 | 339 |
| 361 // WeakPersistent is a way to create a weak pointer from an off-heap object | 340 // WeakPersistent is a way to create a weak pointer from an off-heap object |
| 362 // to an on-heap object. The m_raw is automatically cleared when the pointee | 341 // to an on-heap object. The m_raw is automatically cleared when the pointee |
| 363 // gets collected. | 342 // gets collected. |
| 364 // | 343 // |
| 365 // We have to construct and destruct WeakPersistent in the same thread. | 344 // We have to construct and destruct WeakPersistent in the same thread. |
| 366 // | 345 // |
| 367 // Note that collections of WeakPersistents are not supported. Use a persistent | 346 // Note that collections of WeakPersistents are not supported. Use a persistent |
| 368 // collection of WeakMembers instead. | 347 // collection of WeakMembers instead. |
| (...skipping 178 matching lines...) |
| 547 uninitialize(); | 526 uninitialize(); |
| 548 } | 527 } |
| 549 | 528 |
| 550 template<typename VisitorDispatcher> | 529 template<typename VisitorDispatcher> |
| 551 void trace(VisitorDispatcher visitor) | 530 void trace(VisitorDispatcher visitor) |
| 552 { | 531 { |
| 553 static_assert(sizeof(Collection), "Collection must be fully defined"); | 532 static_assert(sizeof(Collection), "Collection must be fully defined"); |
| 554 visitor->trace(*static_cast<Collection*>(this)); | 533 visitor->trace(*static_cast<Collection*>(this)); |
| 555 } | 534 } |
| 556 | 535 |
| 557 #if defined(LEAK_SANITIZER) | 536 // See PersistentBase::registerAsStaticReference() comment. |
| 558 PersistentHeapCollectionBase* registerAsStaticReference() | 537 PersistentHeapCollectionBase* registerAsStaticReference() |
| 559 { | 538 { |
| 560 if (m_persistentNode) { | 539 if (m_persistentNode) { |
| 561 ASSERT(ThreadState::current()); | 540 ASSERT(ThreadState::current()); |
| 562 ThreadState::current()->registerStaticPersistentNode(m_persistentNode); | 541 ThreadState::current()->registerStaticPersistentNode(m_persistentNode, &PersistentHeapCollectionBase<Collection>::clearPersistentNode); |
| 563 LEAK_SANITIZER_IGNORE_OBJECT(this); | 542 LEAK_SANITIZER_IGNORE_OBJECT(this); |
| 564 } | 543 } |
| 565 return this; | 544 return this; |
| 566 } | 545 } |
| 567 #endif | |
| 568 | 546 |
| 569 private: | 547 private: |
| 570 | 548 |
| 549 // Used when the registered PersistentNode of this object is |
| 550 // released during ThreadState shutdown, clearing the association. |
| 551 static void clearPersistentNode(void *self) |
| 552 { |
| 553 (reinterpret_cast<PersistentHeapCollectionBase<Collection>*>(self))->uninitialize(); |
| 554 } |
| 555 |
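For collections, the &PersistentHeapCollectionBase<Collection>::clearPersistentNode callback registered above (new line 541) uninitializes the whole handle at ThreadState shutdown. A sketch of a static heap collection relying on that, assuming the PersistentHeapVector wrapper defined later in this header and a hypothetical Foo:

    PersistentHeapVector<Member<Foo>>& liveFoos()
    {
        // The collection handle is leaked; its persistent node is released
        // (and the handle uninitialized) when the owning ThreadState is
        // cleared out.
        static PersistentHeapVector<Member<Foo>>* foos = nullptr;
        if (!foos) {
            foos = new PersistentHeapVector<Member<Foo>>;
            foos->registerAsStaticReference();
        }
        return *foos;
    }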
| 571 NO_LAZY_SWEEP_SANITIZE_ADDRESS | 556 NO_LAZY_SWEEP_SANITIZE_ADDRESS |
| 572 void initialize() | 557 void initialize() |
| 573 { | 558 { |
| 574 // FIXME: Derive affinity based on the collection. | 559 // FIXME: Derive affinity based on the collection. |
| 575 ThreadState* state = ThreadState::current(); | 560 ThreadState* state = ThreadState::current(); |
| 576 ASSERT(state->checkThread()); | 561 ASSERT(state->checkThread()); |
| 577 m_persistentNode = state->getPersistentRegion()->allocatePersistentNode(this, TraceMethodDelegate<PersistentHeapCollectionBase<Collection>, &PersistentHeapCollectionBase<Collection>::trace>::trampoline); | 562 m_persistentNode = state->getPersistentRegion()->allocatePersistentNode(this, TraceMethodDelegate<PersistentHeapCollectionBase<Collection>, &PersistentHeapCollectionBase<Collection>::trace>::trampoline); |
| 578 #if ENABLE(ASSERT) | 563 #if ENABLE(ASSERT) |
| 579 m_state = state; | 564 m_state = state; |
| 580 #endif | 565 #endif |
| 581 } | 566 } |
| 582 | 567 |
| 583 void uninitialize() | 568 void uninitialize() |
| 584 { | 569 { |
| 570 if (!m_persistentNode) |
| 571 return; |
| 585 ThreadState* state = ThreadState::current(); | 572 ThreadState* state = ThreadState::current(); |
| 586 ASSERT(state->checkThread()); | 573 ASSERT(state->checkThread()); |
| 587 // Persistent handle must be created and destructed in the same thread. | 574 // Persistent handle must be created and destructed in the same thread. |
| 588 ASSERT(m_state == state); | 575 ASSERT(m_state == state); |
| 589 state->getPersistentRegion()->freePersistentNode(m_persistentNode); | 576 state->freePersistentNode(m_persistentNode); |
| 577 m_persistentNode = nullptr; |
| 590 } | 578 } |
| 591 | 579 |
| 592 PersistentNode* m_persistentNode; | 580 PersistentNode* m_persistentNode; |
| 593 #if ENABLE(ASSERT) | 581 #if ENABLE(ASSERT) |
| 594 ThreadState* m_state; | 582 ThreadState* m_state; |
| 595 #endif | 583 #endif |
| 596 }; | 584 }; |
| 597 | 585 |
| 598 template< | 586 template< |
| 599 typename KeyArg, | 587 typename KeyArg, |
| (...skipping 626 matching lines...) |
| 1226 // into it. | 1214 // into it. |
| 1227 // | 1215 // |
| 1228 // TODO(sof): remove this hack once wtf/Functional.h can also work with a type like | 1216 // TODO(sof): remove this hack once wtf/Functional.h can also work with a type like |
| 1229 // CrossThreadWeakPersistent<>. | 1217 // CrossThreadWeakPersistent<>. |
| 1230 static WeakPtr<T> unwrap(const StorageType& value) { return WeakPtr<T>(WeakReference<T>::create(value.get())); } | 1218 static WeakPtr<T> unwrap(const StorageType& value) { return WeakPtr<T>(WeakReference<T>::create(value.get())); } |
| 1231 }; | 1219 }; |
| 1232 | 1220 |
| 1233 } // namespace WTF | 1221 } // namespace WTF |
| 1234 | 1222 |
| 1235 #endif | 1223 #endif |