| OLD | NEW |
| 1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 150 matching lines...) |
| 161 // See comments in Atomic64 version of Release_Store(), below. | 161 // See comments in Atomic64 version of Release_Store(), below. |
| 162 ATOMICOPS_COMPILER_BARRIER(); | 162 ATOMICOPS_COMPILER_BARRIER(); |
| 163 return value; | 163 return value; |
| 164 } | 164 } |
| 165 | 165 |
| 166 inline Atomic32 Release_Load(volatile const Atomic32* ptr) { | 166 inline Atomic32 Release_Load(volatile const Atomic32* ptr) { |
| 167 MemoryBarrier(); | 167 MemoryBarrier(); |
| 168 return *ptr; | 168 return *ptr; |
| 169 } | 169 } |
| 170 | 170 |
| 171 #if defined(__x86_64__) | 171 #if defined(__x86_64__) && defined(V8_HOST_ARCH_64_BIT) |
| 172 | 172 |
| 173 // 64-bit low-level operations on 64-bit platform. | 173 // 64-bit low-level operations on 64-bit platform. |
| 174 | 174 |
| 175 inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64* ptr, | 175 inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64* ptr, |
| 176 Atomic64 old_value, | 176 Atomic64 old_value, |
| 177 Atomic64 new_value) { | 177 Atomic64 new_value) { |
| 178 Atomic64 prev; | 178 Atomic64 prev; |
| 179 __asm__ __volatile__("lock; cmpxchgq %1,%2" | 179 __asm__ __volatile__("lock; cmpxchgq %1,%2" |
| 180 : "=a" (prev) | 180 : "=a" (prev) |
| 181 : "q" (new_value), "m" (*ptr), "0" (old_value) | 181 : "q" (new_value), "m" (*ptr), "0" (old_value) |
| (...skipping 96 matching lines...) |
| 278 return NoBarrier_CompareAndSwap(ptr, old_value, new_value); | 278 return NoBarrier_CompareAndSwap(ptr, old_value, new_value); |
| 279 } | 279 } |
| 280 | 280 |
| 281 #endif // defined(__x86_64__) | 281 #endif // defined(__x86_64__) && defined(V8_HOST_ARCH_64_BIT) |
| 282 | 282 |
| 283 } } // namespace v8::internal | 283 } } // namespace v8::internal |
| 284 | 284 |
| 285 #undef ATOMICOPS_COMPILER_BARRIER | 285 #undef ATOMICOPS_COMPILER_BARRIER |
| 286 | 286 |
| 287 #endif // V8_ATOMICOPS_INTERNALS_X86_GCC_H_ | 287 #endif // V8_ATOMICOPS_INTERNALS_X86_GCC_H_ |
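
Note on the guard change at line 171 above: on the x32 ABI, GCC defines __x86_64__ even though pointers are only 32 bits wide, and V8 defines Atomic64 (and V8_HOST_ARCH_64_BIT) based on pointer width. The extra test therefore keeps the cmpxchgq-based 64-bit operations out of builds where Atomic64 is not available. The following is a minimal, self-contained sketch of the guarded CAS, not part of this CL; it uses UINTPTR_MAX == UINT64_MAX as a stand-in for V8_HOST_ARCH_64_BIT, which is likewise derived from pointer width.

    // Sketch only: mirrors the cmpxchgq inline assembly shown in the diff.
    #include <cstdint>

    #if defined(__x86_64__) && UINTPTR_MAX == UINT64_MAX
    typedef int64_t Atomic64;

    // Returns the value observed at *ptr; the swap took effect iff the
    // return value equals old_value.
    inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64* ptr,
                                             Atomic64 old_value,
                                             Atomic64 new_value) {
      Atomic64 prev;
      __asm__ __volatile__("lock; cmpxchgq %1,%2"
                           : "=a" (prev)
                           : "q" (new_value), "m" (*ptr), "0" (old_value)
                           : "memory");
      return prev;
    }
    #endif  // defined(__x86_64__) && UINTPTR_MAX == UINT64_MAX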
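For reference, a hypothetical caller-side snippet (not from the CL; the name counter is invented for illustration):

    Atomic64 counter = 0;
    // Attempt to claim the slot: the CAS succeeds only if counter is still 0.
    if (NoBarrier_CompareAndSwap(&counter, 0, 42) == 0) {
      // We won the race; counter is now 42.
    }

Returning the previously observed value rather than a bool lets a retry loop reuse that value as the next expected one without an extra load.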