| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (C) 2007, 2008, 2010, 2012 Apple Inc. All rights reserved. | 2 * Copyright (C) 2007, 2008, 2010, 2012 Apple Inc. All rights reserved. |
| 3 * Copyright (C) 2007 Justin Haygood (jhaygood@reaktix.com) | 3 * Copyright (C) 2007 Justin Haygood (jhaygood@reaktix.com) |
| 4 * | 4 * |
| 5 * Redistribution and use in source and binary forms, with or without | 5 * Redistribution and use in source and binary forms, with or without |
| 6 * modification, are permitted provided that the following conditions | 6 * modification, are permitted provided that the following conditions |
| 7 * are met: | 7 * are met: |
| 8 * | 8 * |
| 9 * 1. Redistributions of source code must retain the above copyright | 9 * 1. Redistributions of source code must retain the above copyright |
| 10 * notice, this list of conditions and the following disclaimer. | 10 * notice, this list of conditions and the following disclaimer. |
| (...skipping 201 matching lines...) |
| 212 #if CPU(X86) || CPU(X86_64) | 212 #if CPU(X86) || CPU(X86_64) |
| 213 // Only compiler barrier is needed. | 213 // Only compiler barrier is needed. |
| 214 #if COMPILER(MSVC) | 214 #if COMPILER(MSVC) |
| 215 // Starting from Visual Studio 2005, the compiler guarantees acquire and | 215 // Starting from Visual Studio 2005, the compiler guarantees acquire and |
| 216 // release semantics for operations on volatile variables. See the MSDN | 216 // release semantics for operations on volatile variables. See the MSDN |
| 217 // entry for the MemoryBarrier macro. | 217 // entry for the MemoryBarrier macro. |
| 218 #define MEMORY_BARRIER() | 218 #define MEMORY_BARRIER() |
| 219 #else | 219 #else |
| 220 #define MEMORY_BARRIER() __asm__ __volatile__("" : : : "memory") | 220 #define MEMORY_BARRIER() __asm__ __volatile__("" : : : "memory") |
| 221 #endif | 221 #endif |
| 222 #elif CPU(ARM) && (OS(LINUX) || OS(ANDROID)) | 222 #elif CPU(ARM) && OS(ANDROID) |
| 223 // On ARM, __sync_synchronize generates a dmb, which is very expensive on | 223 // On ARM, __sync_synchronize generates a dmb, which is very expensive on |
| 224 // single-core devices that don't actually need it. Avoid the cost by | 224 // single-core devices that don't actually need it. Avoid the cost by |
| 225 // calling into the kuser_memory_barrier helper. | 225 // calling into the kuser_memory_barrier helper. |
| 226 inline void memoryBarrier() | 226 inline void memoryBarrier() |
| 227 { | 227 { |
| 228 // Note: This is a function call, which is also an implicit compiler barrier. | 228 // Note: This is a function call, which is also an implicit compiler barrier. |
| 229 typedef void (*KernelMemoryBarrierFunc)(); | 229 typedef void (*KernelMemoryBarrierFunc)(); |
| 230 ((KernelMemoryBarrierFunc)0xffff0fa0)(); | 230 ((KernelMemoryBarrierFunc)0xffff0fa0)(); |
| 231 } | 231 } |
| 232 #define MEMORY_BARRIER() memoryBarrier() | 232 #define MEMORY_BARRIER() memoryBarrier() |
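For context: MEMORY_BARRIER() is consumed by the acquire/release helpers defined in the skipped portion of this file. Below is a minimal sketch of how such helpers are typically layered on top of it; the names and signatures are illustrative assumptions, not this file's actual definitions.

    // Hypothetical sketch only. A release store orders all earlier writes
    // before the publishing store; an acquire load keeps later accesses from
    // moving above the load. MEMORY_BARRIER() supplies the ordering, and on
    // x86 it compiles away to a pure compiler barrier.
    inline void releaseStoreSketch(volatile int* ptr, int value)
    {
        MEMORY_BARRIER(); // earlier writes become visible before *ptr = value
        *ptr = value;
    }

    inline int acquireLoadSketch(const volatile int* ptr)
    {
        int value = *ptr;
        MEMORY_BARRIER(); // later reads/writes cannot move above this load
        return value;
    }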
| (...skipping 134 matching lines...) |
| 367 using WTF::noBarrierLoad; | 367 using WTF::noBarrierLoad; |
| 368 using WTF::noBarrierStore; | 368 using WTF::noBarrierStore; |
| 369 | 369 |
| 370 // These methods allow loading from and storing to poisoned memory. Only | 370 // These methods allow loading from and storing to poisoned memory. Only |
| 371 // use these methods if you know what you are doing since they will | 371 // use these methods if you know what you are doing since they will |
| 372 // silence use-after-poison errors from ASan. | 372 // silence use-after-poison errors from ASan. |
| 373 using WTF::asanUnsafeAcquireLoad; | 373 using WTF::asanUnsafeAcquireLoad; |
| 374 using WTF::asanUnsafeReleaseStore; | 374 using WTF::asanUnsafeReleaseStore; |
| 375 | 375 |
| 376 #endif // Atomics_h | 376 #endif // Atomics_h |
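For context: the asanUnsafe variants suppress AddressSanitizer's use-after-poison reports for a single access while keeping the same ordering as the regular helpers. A minimal sketch of the idea, assuming a GCC/Clang-style no_sanitize_address attribute; the macro and function names below are illustrative, not this file's actual implementation.

    #if defined(__clang__) || defined(__GNUC__)
    #define NO_SANITIZE_ADDRESS_SKETCH __attribute__((no_sanitize_address))
    #else
    #define NO_SANITIZE_ADDRESS_SKETCH
    #endif

    // The attribute disables ASan instrumentation for this function, so the
    // load is not flagged even if *ptr lies in a poisoned region.
    NO_SANITIZE_ADDRESS_SKETCH inline int asanUnsafeAcquireLoadSketch(const volatile int* ptr)
    {
        int value = *ptr;
        MEMORY_BARRIER(); // same acquire ordering as the regular load
        return value;
    }

Callers should still prefer the regular acquireLoad/releaseStore helpers; the asanUnsafe forms are only for code that deliberately touches poisoned memory, as the comment above warns.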