Chromium Code Reviews

Unified Diff: Source/wtf/Atomics.h

Issue 1256053006: Protect AudioScheduledSourceNode::m_startTime and m_endTime. (Closed) Base URL: https://chromium.googlesource.com/chromium/blink.git@master
Patch Set: Add static_assert (created 5 years, 4 months ago)
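This patch adds float and double overloads of WTF::releaseStore and WTF::acquireLoad; in the diff below, lines added by the patch are marked with "+". As a hedged illustration of why AudioScheduledSourceNode needs them, here is a minimal sketch of the intended usage pattern. The class shape and method names are assumptions based on the issue title, not code from this CL:

#include "wtf/Atomics.h"

class AudioScheduledSourceNode {
public:
    void start(double when)
    {
        // Writer thread: publish the new start time with release
        // semantics so earlier writes become visible before the time does.
        releaseStore(&m_startTime, when);
    }

    bool hasStartedBy(double currentTime) const
    {
        // Reader thread: the acquire-load pairs with the release-store.
        return acquireLoad(&m_startTime) <= currentTime;
    }

private:
    volatile double m_startTime = 0;
};

The release store on the writer side pairs with the acquire load on the reader side, so a reader that observes the new time value also observes every write the writer made before publishing it.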
/*
 * Copyright (C) 2007, 2008, 2010, 2012 Apple Inc. All rights reserved.
 * Copyright (C) 2007 Justin Haygood (jhaygood@reaktix.com)
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
(...skipping 144 matching lines...)
    __tsan_atomic64_store(reinterpret_cast<volatile __tsan_atomic64*>(ptr), static_cast<__tsan_atomic64>(value), __tsan_memory_order_release);
}
ALWAYS_INLINE void releaseStore(volatile unsigned long long* ptr, unsigned long long value)
{
    __tsan_atomic64_store(reinterpret_cast<volatile __tsan_atomic64*>(ptr), static_cast<__tsan_atomic64>(value), __tsan_memory_order_release);
}
ALWAYS_INLINE void releaseStore(void* volatile* ptr, void* value)
{
    __tsan_atomic64_store(reinterpret_cast<volatile __tsan_atomic64*>(ptr), reinterpret_cast<__tsan_atomic64>(value), __tsan_memory_order_release);
}
+ALWAYS_INLINE void releaseStore(volatile float* ptr, float value)
+{
+    static_assert(sizeof(int) == sizeof(float), "int and float are different sizes");
+    union {
+        int ivalue;
+        float fvalue;
+    } u;
+    u.fvalue = value;
+    __tsan_atomic32_store(reinterpret_cast<volatile __tsan_atomic32*>(ptr), u.ivalue, __tsan_memory_order_release);
+}
+ALWAYS_INLINE void releaseStore(volatile double* ptr, double value)
+{
+    static_assert(sizeof(long) == sizeof(double), "long and double are different sizes");
+    union {
+        long ivalue;
+        double dvalue;
+    } u;
+    u.dvalue = value;
+    __tsan_atomic64_store(reinterpret_cast<volatile __tsan_atomic64*>(ptr), u.ivalue, __tsan_memory_order_release);
+}
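Under TSan, the new overloads reuse the existing 32- and 64-bit integer atomics by punning the floating-point value through a union. For reference, the same bit-preserving reinterpretation can be written with memcpy, which is the form strict ISO C++ guarantees; this standalone sketch is illustrative and not part of Atomics.h:

#include <cstdint>
#include <cstring>

static_assert(sizeof(std::uint32_t) == sizeof(float), "int and float are different sizes");

// Bit-preserving float <-> integer conversion via memcpy; optimizing
// compilers typically lower this to a plain register move.
inline std::uint32_t floatBits(float f)
{
    std::uint32_t bits;
    std::memcpy(&bits, &f, sizeof bits);
    return bits;
}

inline float floatFromBits(std::uint32_t bits)
{
    float f;
    std::memcpy(&f, &bits, sizeof f);
    return f;
}

The static_assert added in this patch set guards the size assumption; note that sizeof(long) == sizeof(double) holds on LP64 targets (e.g. Linux x86_64, where TSan builds run) but not on LLP64 Windows.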

ALWAYS_INLINE int acquireLoad(volatile const int* ptr)
{
    return __tsan_atomic32_load(ptr, __tsan_memory_order_acquire);
}
ALWAYS_INLINE unsigned acquireLoad(volatile const unsigned* ptr)
{
    return static_cast<unsigned>(__tsan_atomic32_load(reinterpret_cast<volatile const int*>(ptr), __tsan_memory_order_acquire));
}
ALWAYS_INLINE long acquireLoad(volatile const long* ptr)
{
    return static_cast<long>(__tsan_atomic64_load(reinterpret_cast<volatile const __tsan_atomic64*>(ptr), __tsan_memory_order_acquire));
}
ALWAYS_INLINE unsigned long acquireLoad(volatile const unsigned long* ptr)
{
    return static_cast<unsigned long>(__tsan_atomic64_load(reinterpret_cast<volatile const __tsan_atomic64*>(ptr), __tsan_memory_order_acquire));
}
ALWAYS_INLINE void* acquireLoad(void* volatile const* ptr)
{
    return reinterpret_cast<void*>(__tsan_atomic64_load(reinterpret_cast<volatile const __tsan_atomic64*>(ptr), __tsan_memory_order_acquire));
}
+ALWAYS_INLINE float acquireLoad(volatile const float* ptr)
+{
+    static_assert(sizeof(int) == sizeof(float), "int and float are different sizes");
+    union {
+        int ivalue;
+        float fvalue;
+    } u;
+    u.ivalue = __tsan_atomic32_load(reinterpret_cast<volatile const int*>(ptr), __tsan_memory_order_acquire);
+    return u.fvalue;
+}
+ALWAYS_INLINE double acquireLoad(volatile const double* ptr)
+{
+    static_assert(sizeof(long) == sizeof(double), "long and double are different sizes");
+    union {
+        long ivalue;
+        double dvalue;
+    } u;
+    u.ivalue = static_cast<long>(__tsan_atomic64_load(reinterpret_cast<volatile const __tsan_atomic64*>(ptr), __tsan_memory_order_acquire));
+    return u.dvalue;
+}
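Reading u.fvalue after writing u.ivalue (and vice versa) is type punning through the inactive union member: undefined in strict ISO C++, but documented and well-defined in GCC and Clang, the compilers TSan builds use. A tiny illustrative check, assuming the overloads added above, that the store/load round trip preserves the value:

#include <cmath>
#include <cassert>

void roundTripCheck()
{
    volatile double d = 0;
    releaseStore(&d, std::nan(""));       // release-store a NaN
    assert(std::isnan(acquireLoad(&d)));  // the acquire-load still observes a NaN
}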
#endif

#else // defined(THREAD_SANITIZER)

#if CPU(X86) || CPU(X86_64)
// Only compiler barrier is needed.
#if COMPILER(MSVC)
// Starting from Visual Studio 2005 compiler guarantees acquire and release
// semantics for operations on volatile variables. See MSDN entry for
// MemoryBarrier macro.
(...skipping 40 matching lines...)
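The definition of MEMORY_BARRIER() itself sits in the lines elided above. On x86/x86_64, aligned loads and stores already carry acquire/release ordering at the hardware level, so the macro only needs to stop the compiler from reordering; a typical compiler-only fence looks like this sketch (illustrative, not the actual Blink definition):

#if defined(__GNUC__) || defined(__clang__)
// Empty asm with a "memory" clobber: emits no instruction, but the
// compiler may not move memory accesses across it.
#define COMPILER_ONLY_BARRIER() __asm__ __volatile__("" ::: "memory")
#elif defined(_MSC_VER)
#include <intrin.h>
// MSVC's compiler-level fence (no CPU fence instruction).
#define COMPILER_ONLY_BARRIER() _ReadWriteBarrier()
#endif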
ALWAYS_INLINE void releaseStore(volatile unsigned long long* ptr, unsigned long long value)
{
    MEMORY_BARRIER();
    *ptr = value;
}
ALWAYS_INLINE void releaseStore(void* volatile* ptr, void* value)
{
    MEMORY_BARRIER();
    *ptr = value;
}
+ALWAYS_INLINE void releaseStore(volatile float* ptr, float value)
+{
+    MEMORY_BARRIER();
+    *ptr = value;
+}
+ALWAYS_INLINE void releaseStore(volatile double* ptr, double value)
+{
+    MEMORY_BARRIER();
+    *ptr = value;
+}

ALWAYS_INLINE int acquireLoad(volatile const int* ptr)
{
    int value = *ptr;
    MEMORY_BARRIER();
    return value;
}
ALWAYS_INLINE unsigned acquireLoad(volatile const unsigned* ptr)
{
    unsigned value = *ptr;
    MEMORY_BARRIER();
    return value;
}
ALWAYS_INLINE long acquireLoad(volatile const long* ptr)
{
    long value = *ptr;
    MEMORY_BARRIER();
    return value;
}
ALWAYS_INLINE unsigned long acquireLoad(volatile const unsigned long* ptr)
{
    unsigned long value = *ptr;
    MEMORY_BARRIER();
    return value;
}
ALWAYS_INLINE unsigned long long acquireLoad(volatile const unsigned long long* ptr)
{
    unsigned long long value = *ptr;
Alexander Potapenko 2015/08/10 14:01:30: Just noticed that this function (and the correspon…
Raymond Toy 2015/08/10 16:36:42: I think a bug should be filed for that and fixed i…
tkent 2015/08/11 00:41:28: I filed crbug.com/519096.
    MEMORY_BARRIER();
    return value;
}
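The reviewer thread above is about atomicity rather than ordering: on a 32-bit target, a plain 64-bit load or store is typically compiled as two 32-bit accesses, so a concurrent reader can observe a half-written value (tearing). The fix tracked in crbug.com/519096 is outside this CL; one possible direction is std::atomic, which guarantees an indivisible access, as in this sketch:

#include <atomic>

std::atomic<unsigned long long> tickCount{0};

void writerThread()
{
    // A single indivisible store, even on 32-bit targets.
    tickCount.store(42, std::memory_order_release);
}

unsigned long long readerThread()
{
    // Never observes a torn, half-written value.
    return tickCount.load(std::memory_order_acquire);
}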
ALWAYS_INLINE void* acquireLoad(void* volatile const* ptr)
{
    void* value = *ptr;
    MEMORY_BARRIER();
    return value;
}
+ALWAYS_INLINE float acquireLoad(volatile const float* ptr)
+{
+    float value = *ptr;
+    MEMORY_BARRIER();
+    return value;
+}
+ALWAYS_INLINE double acquireLoad(volatile const double* ptr)
Alexander Potapenko 2015/08/10 14:01:30: I'm no expert in ARM CPUs, do you have an idea whe…
+{
+    double value = *ptr;
+    MEMORY_BARRIER();
+    return value;
+}

#if defined(ADDRESS_SANITIZER)

NO_SANITIZE_ADDRESS ALWAYS_INLINE void asanUnsafeReleaseStore(volatile unsigned* ptr, unsigned value)
{
    MEMORY_BARRIER();
    *ptr = value;
}

NO_SANITIZE_ADDRESS ALWAYS_INLINE unsigned asanUnsafeAcquireLoad(volatile const unsigned* ptr)
(...skipping 34 matching lines...)
using WTF::acquireLoad;
using WTF::releaseStore;

// These methods allow loading from and storing to poisoned memory. Only
// use these methods if you know what you are doing since they will
// silence use-after-poison errors from ASan.
using WTF::asanUnsafeAcquireLoad;
using WTF::asanUnsafeReleaseStore;

#endif // Atomics_h
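As the comment above says, the asanUnsafe variants exist for the rare call sites that legitimately touch memory ASan has poisoned. A hypothetical call site, for illustration only (the function name and scenario are assumptions, not Blink code):

// Reads a flag that may live in an ASan-poisoned region (for example,
// allocator metadata). NO_SANITIZE_ADDRESS on the wrapper suppresses the
// use-after-poison report while keeping acquire ordering.
unsigned readFlagFromPoisonedRegion(volatile const unsigned* flag)
{
    return WTF::asanUnsafeAcquireLoad(flag);
}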