Chromium Code Reviews

Side by Side Diff: base/threading/thread_local_storage.cc

Issue 1039143004: Add base::ThreadLocalStorage (Closed) Base URL: https://chromium.googlesource.com/chromium/mini_chromium@master
Patch Set: Created 5 years, 9 months ago
1 // Copyright 2013 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "base/threading/thread_local_storage.h"
6
7 #include "base/atomicops.h"
8 #include "base/logging.h"
9
10 using base::internal::PlatformThreadLocalStorage;
11
12 namespace {
13
14 // In order to make TLS destructors work, we need to keep around a function
15 // pointer to the destructor for each slot. We keep these pointers in a
16 // global (static) array.
17 // We use the single OS-level TLS slot (giving us one pointer per thread) to
18 // hold a pointer to a per-thread array (table) of slots that we allocate to
19 // Chromium consumers.
20
21 // g_native_tls_key is the one native TLS that we use. It stores our table.
22 base::subtle::Atomic32 g_native_tls_key =
23 PlatformThreadLocalStorage::TLS_KEY_OUT_OF_INDEXES;
24
25 // g_last_used_tls_key is the high-water-mark of allocated thread local storage.
26 // Each allocation is an index into our g_tls_destructors[]. Each such index is
27 // assigned to the instance variable slot_ in a ThreadLocalStorage::Slot
28 // instance. We reserve the value slot_ == 0 to indicate that the corresponding
29 // instance of ThreadLocalStorage::Slot has been freed (i.e., destructor called,
30 // etc.). Because 0 is reserved, it is also used as the initial value of
31 // g_last_used_tls_key, so that the first issued index will be 1.
32 base::subtle::Atomic32 g_last_used_tls_key = 0;
33
34 // The maximum number of 'slots' in our thread local storage vector.
35 const int kThreadLocalStorageSize = 256;
36
37 // The maximum number of times to try to clear slots by calling destructors.
38 // Use pthread naming convention for clarity.
39 const int kMaxDestructorIterations = kThreadLocalStorageSize;
40
41 // An array of destructor function pointers for the slots. If a slot has a
42 // destructor, it will be stored in its corresponding entry in this array.
43 // The elements are volatile to ensure that when the compiler reads the value
44 // to potentially call the destructor, it reads it exactly once: the value is
45 // tested for null-ness and then used. Re-fetching the array element would be
46 // a strange de-optimization, but it is cheap enough on some machines that we
47 // want to be sure a call to free the key (i.e., null out the destructor
48 // entry) that happens on a separate thread can't turn the racy destructor
49 // calls on another thread into calls through a NULL pointer.
50 volatile base::ThreadLocalStorage::TLSDestructorFunc
51 g_tls_destructors[kThreadLocalStorageSize];
52
53 // This function is called to initialize our entire Chromium TLS system.
54 // It may be called very early, and we need to complete nearly all of the setup
55 // (initialization) before calling *any* memory allocator functions, which may
56 // recursively depend on this initialization.
57 // As a result, we use Atomics, and avoid anything (like a singleton) that might
58 // require memory allocations.
59 void** ConstructTlsVector() {
60 PlatformThreadLocalStorage::TLSKey key =
61 base::subtle::NoBarrier_Load(&g_native_tls_key);
62 if (key == PlatformThreadLocalStorage::TLS_KEY_OUT_OF_INDEXES) {
63 CHECK(PlatformThreadLocalStorage::AllocTLS(&key));
64
65 // TLS_KEY_OUT_OF_INDEXES is used by the NoBarrier_CompareAndSwap below to
66 // tell whether the key has been set yet, but POSIX has no invalid key value,
67 // so we define an almost-impossible value to stand in for it.
68 // If AllocTLS really does return TLS_KEY_OUT_OF_INDEXES as the key, just
69 // allocate another TLS slot.
70 if (key == PlatformThreadLocalStorage::TLS_KEY_OUT_OF_INDEXES) {
71 PlatformThreadLocalStorage::TLSKey tmp = key;
72 CHECK(PlatformThreadLocalStorage::AllocTLS(&key) &&
73 key != PlatformThreadLocalStorage::TLS_KEY_OUT_OF_INDEXES);
74 PlatformThreadLocalStorage::FreeTLS(tmp);
75 }
76 // Atomically test-and-set the tls_key. If the key is
77 // TLS_KEY_OUT_OF_INDEXES, go ahead and set it. Otherwise, do nothing, as
78 // another thread already did our dirty work.
79 if (PlatformThreadLocalStorage::TLS_KEY_OUT_OF_INDEXES !=
80 base::subtle::NoBarrier_CompareAndSwap(&g_native_tls_key,
81 PlatformThreadLocalStorage::TLS_KEY_OUT_OF_INDEXES, key)) {
82 // Another thread beat us to it: it installed g_native_tls_key first, so
83 // we need to free the key we just allocated and use the one the other
84 // thread got.
85 PlatformThreadLocalStorage::FreeTLS(key);
86 key = base::subtle::NoBarrier_Load(&g_native_tls_key);
87 }
88 }
89 CHECK(!PlatformThreadLocalStorage::GetTLSValue(key));
90
91 // Some allocators, such as TCMalloc, make use of thread local storage.
92 // As a result, any attempt to call new (or malloc) will lazily cause such a
93 // system to initialize, which will include registering for a TLS key. If we
94 // are not careful here, then that request to create a key will call new back,
95 // and we'll have an infinite loop. We avoid that as follows:
96 // Use a stack-allocated vector, so that we have no dependence on our
97 // allocator until our service is in place (i.e., don't even call new until
98 // after we're set up).
99 void* stack_allocated_tls_data[kThreadLocalStorageSize];
100 memset(stack_allocated_tls_data, 0, sizeof(stack_allocated_tls_data));
101 // Ensure that any re-entrant calls change the temp version.
102 PlatformThreadLocalStorage::SetTLSValue(key, stack_allocated_tls_data);
103
104 // Allocate an array to store our data.
105 void** tls_data = new void*[kThreadLocalStorageSize];
106 memcpy(tls_data, stack_allocated_tls_data, sizeof(stack_allocated_tls_data));
107 PlatformThreadLocalStorage::SetTLSValue(key, tls_data);
108 return tls_data;
109 }
110
111 void OnThreadExitInternal(void* value) {
112 DCHECK(value);
113 void** tls_data = static_cast<void**>(value);
114 // Some allocators, such as TCMalloc, use TLS. As a result, when a thread
115 // terminates, one of the destructor calls we make may be to shut down an
116 // allocator. We have to be careful that after we've run all of the known
117 // destructors (possibly including an allocator's), we don't call the
118 // allocator and cause it to resurrect itself (with no later destructor
119 // call to clean it up). We handle this problem as follows:
120 // Switch to a stack-allocated vector, so that we have no dependence on our
121 // allocator after we have called all of the g_tls_destructors (i.e., don't
122 // even call delete[] after we're done with the destructors).
123 void* stack_allocated_tls_data[kThreadLocalStorageSize];
124 memcpy(stack_allocated_tls_data, tls_data, sizeof(stack_allocated_tls_data));
125 // Ensure that any re-entrant calls change the temp version.
126 PlatformThreadLocalStorage::TLSKey key =
127 base::subtle::NoBarrier_Load(&g_native_tls_key);
128 PlatformThreadLocalStorage::SetTLSValue(key, stack_allocated_tls_data);
129 delete[] tls_data; // Our last dependence on an allocator.
130
131 int remaining_attempts = kMaxDestructorIterations;
132 bool need_to_scan_destructors = true;
133 while (need_to_scan_destructors) {
134 need_to_scan_destructors = false;
135 // Try to destroy the first-created slot (which is slot 1) in our last
136 // destructor call. That user was able to function, and define a slot, with
137 // no other services running, so perhaps it is a basic service (like an
138 // allocator) and should also be destroyed last. If we get the order wrong,
139 // then we'll iterate several more times, so it is really not that
140 // critical (but it might help).
141 base::subtle::Atomic32 last_used_tls_key =
142 base::subtle::NoBarrier_Load(&g_last_used_tls_key);
143 for (int slot = last_used_tls_key; slot > 0; --slot) {
144 void* tls_value = stack_allocated_tls_data[slot];
145 if (tls_value == NULL)
146 continue;
147
148 base::ThreadLocalStorage::TLSDestructorFunc destructor =
149 g_tls_destructors[slot];
150 if (destructor == NULL)
151 continue;
152 stack_allocated_tls_data[slot] = NULL; // pre-clear the slot.
153 destructor(tls_value);
154 // Any destructor might have called a different service, which then set
155 // a different slot to a non-NULL value. Hence we need to check the whole
156 // vector again; this mirrors the behavior required by the pthread standard.
157 need_to_scan_destructors = true;
158 }
159 if (--remaining_attempts <= 0) {
160 NOTREACHED(); // Destructors might not have been called.
161 break;
162 }
163 }
164
165 // Remove our stack allocated vector.
166 PlatformThreadLocalStorage::SetTLSValue(key, NULL);
167 }
168
169 } // namespace
170
171 namespace base {
172
173 namespace internal {
174
175 #if defined(OS_WIN)
176 void PlatformThreadLocalStorage::OnThreadExit() {
177 PlatformThreadLocalStorage::TLSKey key =
178 base::subtle::NoBarrier_Load(&g_native_tls_key);
179 if (key == PlatformThreadLocalStorage::TLS_KEY_OUT_OF_INDEXES)
180 return;
181 void *tls_data = GetTLSValue(key);
182 // We may never have initialized TLS for this thread.
183 if (!tls_data)
184 return;
185 OnThreadExitInternal(tls_data);
186 }
187 #elif defined(OS_POSIX)
188 void PlatformThreadLocalStorage::OnThreadExit(void* value) {
189 OnThreadExitInternal(value);
190 }
191 #endif // defined(OS_WIN)
192
193 } // namespace internal
194
195 ThreadLocalStorage::Slot::Slot(TLSDestructorFunc destructor) {
196 initialized_ = false;
197 slot_ = 0;
198 Initialize(destructor);
199 }
200
201 bool ThreadLocalStorage::StaticSlot::Initialize(TLSDestructorFunc destructor) {
202 PlatformThreadLocalStorage::TLSKey key =
203 base::subtle::NoBarrier_Load(&g_native_tls_key);
204 if (key == PlatformThreadLocalStorage::TLS_KEY_OUT_OF_INDEXES ||
205 !PlatformThreadLocalStorage::GetTLSValue(key))
206 ConstructTlsVector();
207
208 // Grab a new slot.
209 slot_ = base::subtle::NoBarrier_AtomicIncrement(&g_last_used_tls_key, 1);
210 DCHECK_GT(slot_, 0);
211 CHECK_LT(slot_, kThreadLocalStorageSize);
212
213 // Setup our destructor.
214 g_tls_destructors[slot_] = destructor;
215 initialized_ = true;
216 return true;
217 }
218
219 void ThreadLocalStorage::StaticSlot::Free() {
220 // At this time, we don't reclaim old indices for TLS slots.
221 // So all we need to do is wipe the destructor.
222 DCHECK_GT(slot_, 0);
223 DCHECK_LT(slot_, kThreadLocalStorageSize);
224 g_tls_destructors[slot_] = NULL;
225 slot_ = 0;
226 initialized_ = false;
227 }
228
229 void* ThreadLocalStorage::StaticSlot::Get() const {
230 void** tls_data = static_cast<void**>(
231 PlatformThreadLocalStorage::GetTLSValue(
232 base::subtle::NoBarrier_Load(&g_native_tls_key)));
233 if (!tls_data)
234 tls_data = ConstructTlsVector();
235 DCHECK_GT(slot_, 0);
236 DCHECK_LT(slot_, kThreadLocalStorageSize);
237 return tls_data[slot_];
238 }
239
240 void ThreadLocalStorage::StaticSlot::Set(void* value) {
241 void** tls_data = static_cast<void**>(
242 PlatformThreadLocalStorage::GetTLSValue(
243 base::subtle::NoBarrier_Load(&g_native_tls_key)));
244 if (!tls_data)
245 tls_data = ConstructTlsVector();
246 DCHECK_GT(slot_, 0);
247 DCHECK_LT(slot_, kThreadLocalStorageSize);
248 tls_data[slot_] = value;
249 }
250
251 } // namespace base
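
For reference, here is a minimal usage sketch of the API this file implements. It is not part of this CL; it assumes that ThreadLocalStorage::Slot inherits StaticSlot's Get()/Set() interface declared in thread_local_storage.h, as the constructor at line 195 suggests, and the ThreadCounters type and GetThreadCounters() helper are hypothetical.

#include "base/threading/thread_local_storage.h"

namespace {

// Hypothetical per-thread state owned through TLS.
struct ThreadCounters {
  int allocations;
};

// Destructor run at thread exit for every thread that stored a value.
void DestroyCounters(void* value) {
  delete static_cast<ThreadCounters*>(value);
}

// One process-wide slot; each thread sees its own value through it.
base::ThreadLocalStorage::Slot g_counters_slot(&DestroyCounters);

// Lazily creates this thread's ThreadCounters on first use.
ThreadCounters* GetThreadCounters() {
  ThreadCounters* counters =
      static_cast<ThreadCounters*>(g_counters_slot.Get());
  if (!counters) {
    counters = new ThreadCounters();
    counters->allocations = 0;
    g_counters_slot.Set(counters);  // DestroyCounters() frees it at thread exit.
  }
  return counters;
}

}  // namespace

The value stored per thread is an opaque void*, so ownership and typing are the caller's responsibility; the slot only promises to hand the pointer back to the registered destructor when the thread exits.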
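And a hypothetical illustration of why OnThreadExitInternal() keeps rescanning the slot vector (the need_to_scan_destructors loop above): a destructor for one slot may store into another slot while it runs, making an already-cleared entry non-NULL again. Both slots and destructors below are invented for the example.

#include <stdlib.h>

#include "base/threading/thread_local_storage.h"

namespace {

// A scratch slot whose values are plain malloc'd buffers released with free().
void FreeScratch(void* value) {
  free(value);
}
base::ThreadLocalStorage::Slot g_scratch_slot(&FreeScratch);

// Destructor for a second slot that touches the scratch slot during teardown.
void DestroyWidget(void* value) {
  // This Set() makes g_scratch_slot non-NULL again, so the thread-exit loop
  // must make another pass (bounded by kMaxDestructorIterations) to run
  // FreeScratch() on the newly stored buffer.
  g_scratch_slot.Set(malloc(16));
  free(value);
}

base::ThreadLocalStorage::Slot g_widget_slot(&DestroyWidget);

}  // namespace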
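Finally, for context on the PlatformThreadLocalStorage primitives used throughout (AllocTLS, FreeTLS, GetTLSValue, SetTLSValue, OnThreadExit): a plausible POSIX mapping onto pthread keys is sketched below. The real implementation lives in base/threading/thread_local_storage_posix.cc, which is not shown in this diff, so treat these bodies as an assumption rather than that file's contents.

#include <pthread.h>

#include "base/threading/thread_local_storage.h"

namespace base {
namespace internal {

bool PlatformThreadLocalStorage::AllocTLS(TLSKey* key) {
  // Registering OnThreadExit() as the pthread destructor is what routes the
  // per-thread table into OnThreadExitInternal() when a thread dies.
  return pthread_key_create(key, PlatformThreadLocalStorage::OnThreadExit) == 0;
}

void PlatformThreadLocalStorage::FreeTLS(TLSKey key) {
  pthread_key_delete(key);
}

void PlatformThreadLocalStorage::SetTLSValue(TLSKey key, void* value) {
  pthread_setspecific(key, value);
}

void* PlatformThreadLocalStorage::GetTLSValue(TLSKey key) {
  return pthread_getspecific(key);
}

}  // namespace internal
}  // namespace base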