| Index: base/threading/thread_local_storage.cc
|
| diff --git a/base/threading/thread_local_storage_win.cc b/base/threading/thread_local_storage.cc
|
| similarity index 59%
|
| copy from base/threading/thread_local_storage_win.cc
|
| copy to base/threading/thread_local_storage.cc
|
| index 0ae3cb4c8cd39471b9b532a5dc87e2b5d61647a8..523a0d0ded5d480b6180877fb035c4761f9cb0e3 100644
|
| --- a/base/threading/thread_local_storage_win.cc
|
| +++ b/base/threading/thread_local_storage.cc
|
| @@ -4,21 +4,22 @@
|
|
|
| #include "base/threading/thread_local_storage.h"
|
|
|
| -#include <windows.h>
|
| -
|
| +#include "base/atomicops.h"
|
| #include "base/logging.h"
|
|
|
| +using base::internal::PlatformThreadLocalStorage;
|
|
|
| namespace {
|
| -// In order to make TLS destructors work, we need to keep function
|
| -// pointers to the destructor for each TLS that we allocate.
|
| -// We make this work by allocating a single OS-level TLS, which
|
| -// contains an array of slots for the application to use. In
|
| -// parallel, we also allocate an array of destructors, which we
|
| -// keep track of and call when threads terminate.
|
| +// In order to make TLS destructors work, we need to keep around a function
|
| +// pointer to the destructor for each slot. We keep these pointers in a
|
| +// global (static) array.
|
| +// We use the single OS-level TLS slot (giving us one pointer per thread) to
|
| +// hold a pointer to a per-thread array (table) of slots that we allocate to
|
| +// Chromium consumers.
|
|
|
| // g_native_tls_key is the one native TLS that we use. It stores our table.
|
| -long g_native_tls_key = TLS_OUT_OF_INDEXES;
|
| +base::subtle::AtomicWord g_native_tls_key =
|
| + PlatformThreadLocalStorage::TLS_KEY_OUT_OF_INDEXES;
|
|
|
| // g_last_used_tls_key is the high-water-mark of allocated thread local storage.
|
| // Each allocation is an index into our g_tls_destructors[]. Each such index is
|
| @@ -27,7 +28,7 @@ long g_native_tls_key = TLS_OUT_OF_INDEXES;
|
| // instance of ThreadLocalStorage::Slot has been freed (i.e., destructor called,
|
| // etc.). This reserved use of 0 is then stated as the initial value of
|
| // g_last_used_tls_key, so that the first issued index will be 1.
|
| -long g_last_used_tls_key = 0;
|
| +base::subtle::Atomic32 g_last_used_tls_key = 0;
|
|
|
| // The maximum number of 'slots' in our thread local storage stack.
|
| const int kThreadLocalStorageSize = 64;
|
| @@ -48,23 +49,43 @@ const int kMaxDestructorIterations = kThreadLocalStorageSize;
|
| volatile base::ThreadLocalStorage::TLSDestructorFunc
|
| g_tls_destructors[kThreadLocalStorageSize];
|
|
|
| +// This function is called to initialize our entire Chromium TLS system.
|
| +// It may be called very early, and we need to complete almost all of the setup
|
| +// (initialization) before calling *any* memory allocator functions, which may
|
| +// recursively depend on this initialization.
|
| +// As a result, we use Atomics, and avoid anything (like a singleton) that might
|
| +// require memory allocations.
|
| void** ConstructTlsVector() {
|
| - if (g_native_tls_key == TLS_OUT_OF_INDEXES) {
|
| - long value = TlsAlloc();
|
| - DCHECK(value != TLS_OUT_OF_INDEXES);
|
| -
|
| - // Atomically test-and-set the tls_key. If the key is TLS_OUT_OF_INDEXES,
|
| - // go ahead and set it. Otherwise, do nothing, as another
|
| - // thread already did our dirty work.
|
| - if (TLS_OUT_OF_INDEXES != InterlockedCompareExchange(
|
| - &g_native_tls_key, value, TLS_OUT_OF_INDEXES)) {
|
| + PlatformThreadLocalStorage::TLSKey key =
|
| + base::subtle::NoBarrier_Load(&g_native_tls_key);
|
| + if (key == PlatformThreadLocalStorage::TLS_KEY_OUT_OF_INDEXES) {
|
| + CHECK(PlatformThreadLocalStorage::AllocTLS(&key));
|
| +
|
| +    // TLS_KEY_OUT_OF_INDEXES is what the NoBarrier_CompareAndSwap below uses
|
| +    // to detect whether the key has been set yet. POSIX has no reserved
|
| +    // invalid key value, so we define an unlikely value to serve as the
|
| +    // sentinel. If AllocTLS() really does return TLS_KEY_OUT_OF_INDEXES,
|
| +    // just allocate another TLS slot and release the colliding one.
|
| + if (key == PlatformThreadLocalStorage::TLS_KEY_OUT_OF_INDEXES) {
|
| + PlatformThreadLocalStorage::TLSKey tmp = key;
|
| + CHECK(PlatformThreadLocalStorage::AllocTLS(&key) &&
|
| + key != PlatformThreadLocalStorage::TLS_KEY_OUT_OF_INDEXES);
|
| + PlatformThreadLocalStorage::FreeTLS(tmp);
|
| + }
|
| + // Atomically test-and-set the tls_key. If the key is
|
| + // TLS_KEY_OUT_OF_INDEXES, go ahead and set it. Otherwise, do nothing, as
|
| + // another thread already did our dirty work.
|
| + if (PlatformThreadLocalStorage::TLS_KEY_OUT_OF_INDEXES !=
|
| + base::subtle::NoBarrier_CompareAndSwap(&g_native_tls_key,
|
| + PlatformThreadLocalStorage::TLS_KEY_OUT_OF_INDEXES, key)) {
|
| // We've been shortcut. Another thread replaced g_native_tls_key first so
|
| // we need to destroy our index and use the one the other thread got
|
| // first.
|
| - TlsFree(value);
|
| + PlatformThreadLocalStorage::FreeTLS(key);
|
| + key = base::subtle::NoBarrier_Load(&g_native_tls_key);
|
| }
|
| }
|
| - DCHECK(!TlsGetValue(g_native_tls_key));
|
| + CHECK(!PlatformThreadLocalStorage::GetTLSValue(key));
|
|
|
| // Some allocators, such as TCMalloc, make use of thread local storage.
|
| // As a result, any attempt to call new (or malloc) will lazily cause such a
|
| @@ -77,26 +98,18 @@ void** ConstructTlsVector() {
|
| void* stack_allocated_tls_data[kThreadLocalStorageSize];
|
| memset(stack_allocated_tls_data, 0, sizeof(stack_allocated_tls_data));
|
| // Ensure that any rentrant calls change the temp version.
|
| - TlsSetValue(g_native_tls_key, stack_allocated_tls_data);
|
| + PlatformThreadLocalStorage::SetTLSValue(key, stack_allocated_tls_data);
|
|
|
| // Allocate an array to store our data.
|
| void** tls_data = new void*[kThreadLocalStorageSize];
|
| memcpy(tls_data, stack_allocated_tls_data, sizeof(stack_allocated_tls_data));
|
| - TlsSetValue(g_native_tls_key, tls_data);
|
| + PlatformThreadLocalStorage::SetTLSValue(key, tls_data);
|
| return tls_data;
|
| }
|
|
|
| -// Called when we terminate a thread, this function calls any TLS destructors
|
| -// that are pending for this thread.
|
| -void WinThreadExit() {
|
| - if (g_native_tls_key == TLS_OUT_OF_INDEXES)
|
| - return;
|
| -
|
| - void** tls_data = static_cast<void**>(TlsGetValue(g_native_tls_key));
|
| - // Maybe we have never initialized TLS for this thread.
|
| - if (!tls_data)
|
| - return;
|
| -
|
| +void OnThreadExitInternal(void* value) {
|
| + DCHECK(value);
|
| + void** tls_data = static_cast<void**>(value);
|
| // Some allocators, such as TCMalloc, use TLS. As a result, when a thread
|
| // terminates, one of the destructor calls we make may be to shut down an
|
| // allocator. We have to be careful that after we've shutdown all of the
|
| @@ -109,7 +122,9 @@ void WinThreadExit() {
|
| void* stack_allocated_tls_data[kThreadLocalStorageSize];
|
| memcpy(stack_allocated_tls_data, tls_data, sizeof(stack_allocated_tls_data));
|
| // Ensure that any re-entrant calls change the temp version.
|
| - TlsSetValue(g_native_tls_key, stack_allocated_tls_data);
|
| + PlatformThreadLocalStorage::TLSKey key =
|
| + base::subtle::NoBarrier_Load(&g_native_tls_key);
|
| + PlatformThreadLocalStorage::SetTLSValue(key, stack_allocated_tls_data);
|
| delete[] tls_data; // Our last dependence on an allocator.
|
|
|
| int remaining_attempts = kMaxDestructorIterations;
|
| @@ -122,10 +137,13 @@ void WinThreadExit() {
|
| // allocator) and should also be destroyed last. If we get the order wrong,
|
| // then we'll itterate several more times, so it is really not that
|
| // critical (but it might help).
|
| - for (int slot = g_last_used_tls_key; slot > 0; --slot) {
|
| + base::subtle::Atomic32 last_used_tls_key =
|
| + base::subtle::NoBarrier_Load(&g_last_used_tls_key);
|
| + for (int slot = last_used_tls_key; slot > 0; --slot) {
|
| void* value = stack_allocated_tls_data[slot];
|
| if (value == NULL)
|
| continue;
|
| +
|
| base::ThreadLocalStorage::TLSDestructorFunc destructor =
|
| g_tls_destructors[slot];
|
| if (destructor == NULL)
|
| @@ -144,13 +162,35 @@ void WinThreadExit() {
|
| }
|
|
|
| // Remove our stack allocated vector.
|
| - TlsSetValue(g_native_tls_key, NULL);
|
| + PlatformThreadLocalStorage::SetTLSValue(key, NULL);
|
| }
|
|
|
| } // namespace
|
|
|
| namespace base {
|
|
|
| +namespace internal {
|
| +
|
| +#if defined(OS_WIN)
|
| +void PlatformThreadLocalStorage::OnThreadExit() {
|
| + PlatformThreadLocalStorage::TLSKey key =
|
| + base::subtle::NoBarrier_Load(&g_native_tls_key);
|
| + if (key == PlatformThreadLocalStorage::TLS_KEY_OUT_OF_INDEXES)
|
| + return;
|
| +  void* tls_data = GetTLSValue(key);
|
| + // Maybe we have never initialized TLS for this thread.
|
| + if (!tls_data)
|
| + return;
|
| + OnThreadExitInternal(tls_data);
|
| +}
|
| +#elif defined(OS_POSIX)
|
| +void PlatformThreadLocalStorage::OnThreadExit(void* value) {
|
| + OnThreadExitInternal(value);
|
| +}
|
| +#endif // defined(OS_WIN)
|
| +
|
| +} // namespace internal
|
| +
|
| ThreadLocalStorage::Slot::Slot(TLSDestructorFunc destructor) {
|
| initialized_ = false;
|
| slot_ = 0;
|
| @@ -158,16 +198,16 @@ ThreadLocalStorage::Slot::Slot(TLSDestructorFunc destructor) {
|
| }
|
|
|
| bool ThreadLocalStorage::StaticSlot::Initialize(TLSDestructorFunc destructor) {
|
| - if (g_native_tls_key == TLS_OUT_OF_INDEXES || !TlsGetValue(g_native_tls_key))
|
| + PlatformThreadLocalStorage::TLSKey key =
|
| + base::subtle::NoBarrier_Load(&g_native_tls_key);
|
| + if (key == PlatformThreadLocalStorage::TLS_KEY_OUT_OF_INDEXES ||
|
| + !PlatformThreadLocalStorage::GetTLSValue(key))
|
| ConstructTlsVector();
|
|
|
| // Grab a new slot.
|
| - slot_ = InterlockedIncrement(&g_last_used_tls_key);
|
| + slot_ = base::subtle::NoBarrier_AtomicIncrement(&g_last_used_tls_key, 1);
|
| DCHECK_GT(slot_, 0);
|
| - if (slot_ >= kThreadLocalStorageSize) {
|
| - NOTREACHED();
|
| - return false;
|
| - }
|
| + CHECK_LT(slot_, kThreadLocalStorageSize);
|
|
|
| // Setup our destructor.
|
| g_tls_destructors[slot_] = destructor;
|
| @@ -186,7 +226,9 @@ void ThreadLocalStorage::StaticSlot::Free() {
|
| }
|
|
|
| void* ThreadLocalStorage::StaticSlot::Get() const {
|
| - void** tls_data = static_cast<void**>(TlsGetValue(g_native_tls_key));
|
| + void** tls_data = static_cast<void**>(
|
| + PlatformThreadLocalStorage::GetTLSValue(
|
| + base::subtle::NoBarrier_Load(&g_native_tls_key)));
|
| if (!tls_data)
|
| tls_data = ConstructTlsVector();
|
| DCHECK_GT(slot_, 0);
|
| @@ -195,7 +237,9 @@ void* ThreadLocalStorage::StaticSlot::Get() const {
|
| }
|
|
|
| void ThreadLocalStorage::StaticSlot::Set(void* value) {
|
| - void** tls_data = static_cast<void**>(TlsGetValue(g_native_tls_key));
|
| + void** tls_data = static_cast<void**>(
|
| + PlatformThreadLocalStorage::GetTLSValue(
|
| + base::subtle::NoBarrier_Load(&g_native_tls_key)));
|
| if (!tls_data)
|
| tls_data = ConstructTlsVector();
|
| DCHECK_GT(slot_, 0);
|
| @@ -204,74 +248,3 @@ void ThreadLocalStorage::StaticSlot::Set(void* value) {
|
| }
|
|
|
| } // namespace base
|
| -
|
| -// Thread Termination Callbacks.
|
| -// Windows doesn't support a per-thread destructor with its
|
| -// TLS primitives. So, we build it manually by inserting a
|
| -// function to be called on each thread's exit.
|
| -// This magic is from http://www.codeproject.com/threads/tls.asp
|
| -// and it works for VC++ 7.0 and later.
|
| -
|
| -// Force a reference to _tls_used to make the linker create the TLS directory
|
| -// if it's not already there. (e.g. if __declspec(thread) is not used).
|
| -// Force a reference to p_thread_callback_base to prevent whole program
|
| -// optimization from discarding the variable.
|
| -#ifdef _WIN64
|
| -
|
| -#pragma comment(linker, "/INCLUDE:_tls_used")
|
| -#pragma comment(linker, "/INCLUDE:p_thread_callback_base")
|
| -
|
| -#else // _WIN64
|
| -
|
| -#pragma comment(linker, "/INCLUDE:__tls_used")
|
| -#pragma comment(linker, "/INCLUDE:_p_thread_callback_base")
|
| -
|
| -#endif // _WIN64
|
| -
|
| -// Static callback function to call with each thread termination.
|
| -void NTAPI OnThreadExit(PVOID module, DWORD reason, PVOID reserved) {
|
| - // On XP SP0 & SP1, the DLL_PROCESS_ATTACH is never seen. It is sent on SP2+
|
| - // and on W2K and W2K3. So don't assume it is sent.
|
| - if (DLL_THREAD_DETACH == reason || DLL_PROCESS_DETACH == reason)
|
| - WinThreadExit();
|
| -}
|
| -
|
| -// .CRT$XLA to .CRT$XLZ is an array of PIMAGE_TLS_CALLBACK pointers that are
|
| -// called automatically by the OS loader code (not the CRT) when the module is
|
| -// loaded and on thread creation. They are NOT called if the module has been
|
| -// loaded by a LoadLibrary() call. It must have implicitly been loaded at
|
| -// process startup.
|
| -// By implicitly loaded, I mean that it is directly referenced by the main EXE
|
| -// or by one of its dependent DLLs. Delay-loaded DLL doesn't count as being
|
| -// implicitly loaded.
|
| -//
|
| -// See VC\crt\src\tlssup.c for reference.
|
| -
|
| -// extern "C" suppresses C++ name mangling so we know the symbol name for the
|
| -// linker /INCLUDE:symbol pragma above.
|
| -extern "C" {
|
| -// The linker must not discard p_thread_callback_base. (We force a reference
|
| -// to this variable with a linker /INCLUDE:symbol pragma to ensure that.) If
|
| -// this variable is discarded, the OnThreadExit function will never be called.
|
| -#ifdef _WIN64
|
| -
|
| -// .CRT section is merged with .rdata on x64 so it must be constant data.
|
| -#pragma const_seg(".CRT$XLB")
|
| -// When defining a const variable, it must have external linkage to be sure the
|
| -// linker doesn't discard it.
|
| -extern const PIMAGE_TLS_CALLBACK p_thread_callback_base;
|
| -const PIMAGE_TLS_CALLBACK p_thread_callback_base = OnThreadExit;
|
| -
|
| -// Reset the default section.
|
| -#pragma const_seg()
|
| -
|
| -#else // _WIN64
|
| -
|
| -#pragma data_seg(".CRT$XLB")
|
| -PIMAGE_TLS_CALLBACK p_thread_callback_base = OnThreadExit;
|
| -
|
| -// Reset the default section.
|
| -#pragma data_seg()
|
| -
|
| -#endif // _WIN64
|
| -} // extern "C"
|
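Note: the new cross-platform file routes every OS-specific operation through base::internal::PlatformThreadLocalStorage (AllocTLS, FreeTLS, GetTLSValue, SetTLSValue, OnThreadExit), presumably declared in base/threading/thread_local_storage.h, which is not shown in this diff. Below is a minimal sketch of what a pthread-based implementation of that interface could look like, inferred only from the call sites above; the method names come from the diff, while the bodies and the sentinel value are assumptions for illustration.

    // Sketch only: inferred from call sites in the diff, not the real header.
    #include <pthread.h>

    namespace base {
    namespace internal {

    class PlatformThreadLocalStorage {
     public:
      typedef pthread_key_t TLSKey;

      // POSIX has no reserved "invalid" key, so an unlikely value acts as the
      // sentinel (see the comment in ConstructTlsVector()). Value is assumed.
      static const TLSKey TLS_KEY_OUT_OF_INDEXES = 0x7FFFFFFF;

      // Allocates a native key whose destructor runs at thread exit.
      static bool AllocTLS(TLSKey* key) {
        return pthread_key_create(key, OnThreadExit) == 0;
      }
      static void FreeTLS(TLSKey key) { pthread_key_delete(key); }
      static void SetTLSValue(TLSKey key, void* value) {
        pthread_setspecific(key, value);
      }
      static void* GetTLSValue(TLSKey key) { return pthread_getspecific(key); }

      // pthreads hands us the slot's last non-NULL value; the shared code in
      // thread_local_storage.cc forwards it to OnThreadExitInternal().
      static void OnThreadExit(void* value);
    };

    }  // namespace internal
    }  // namespace base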
|
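Note: the .CRT$XLB thread-detach callback removed at the bottom of this diff is not simply dropped; the OS_WIN branch of PlatformThreadLocalStorage::OnThreadExit() added above still needs to be invoked when a thread terminates, so that callback machinery presumably moves into the Windows-specific platform file. A rough sketch of how such a callback would forward into the shared code (the function name here is hypothetical):

    #include <windows.h>

    #include "base/threading/thread_local_storage.h"

    // Hypothetical forwarding callback, registered through the same
    // .CRT$XLB / PIMAGE_TLS_CALLBACK machinery shown in the removed block.
    static void NTAPI PlatformThreadExitCallback(PVOID module,
                                                 DWORD reason,
                                                 PVOID reserved) {
      if (reason == DLL_THREAD_DETACH || reason == DLL_PROCESS_DETACH)
        base::internal::PlatformThreadLocalStorage::OnThreadExit();
    }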
|
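Note: for orientation, a hypothetical consumer of the slot API whose internals are reworked above. It relies only on the ThreadLocalStorage::Slot constructor and the Get()/Set() accessors that appear in this diff (Slot is assumed to expose the StaticSlot accessors); the scratch-buffer example itself is made up.

    #include "base/threading/thread_local_storage.h"

    namespace {

    // Destructor registered with the slot; it runs at thread exit for each
    // thread whose slot value is non-NULL at that point.
    void FreeScratchBuffer(void* buffer) {
      delete[] static_cast<char*>(buffer);
    }

    // One Slot shared by all threads; each thread sees its own value.
    base::ThreadLocalStorage::Slot g_scratch_slot(&FreeScratchBuffer);

    }  // namespace

    // Lazily allocates a 4 KB scratch buffer for the calling thread.
    char* GetThreadScratchBuffer() {
      char* buffer = static_cast<char*>(g_scratch_slot.Get());
      if (!buffer) {
        buffer = new char[4096];
        g_scratch_slot.Set(buffer);
      }
      return buffer;
    }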