OLD | NEW |
1 /* | 1 /* |
2 * Copyright (C) 2007, 2008, 2010, 2012 Apple Inc. All rights reserved. | 2 * Copyright (C) 2007, 2008, 2010, 2012 Apple Inc. All rights reserved. |
3 * Copyright (C) 2007 Justin Haygood (jhaygood@reaktix.com) | 3 * Copyright (C) 2007 Justin Haygood (jhaygood@reaktix.com) |
4 * | 4 * |
5 * Redistribution and use in source and binary forms, with or without | 5 * Redistribution and use in source and binary forms, with or without |
6 * modification, are permitted provided that the following conditions | 6 * modification, are permitted provided that the following conditions |
7 * are met: | 7 * are met: |
8 * | 8 * |
9 * 1. Redistributions of source code must retain the above copyright | 9 * 1. Redistributions of source code must retain the above copyright |
10 * notice, this list of conditions and the following disclaimer. | 10 * notice, this list of conditions and the following disclaimer. |
(...skipping 38 matching lines...) |
49 | 49 |
50 namespace WTF { | 50 namespace WTF { |
51 | 51 |
52 #if COMPILER(MSVC) | 52 #if COMPILER(MSVC) |
53 | 53 |
54 // atomicAdd returns the result of the addition. | 54 // atomicAdd returns the result of the addition. |
55 ALWAYS_INLINE int atomicAdd(int volatile* addend, int increment) | 55 ALWAYS_INLINE int atomicAdd(int volatile* addend, int increment) |
56 { | 56 { |
57 return InterlockedExchangeAdd(reinterpret_cast<long volatile*>(addend), static_cast<long>(increment)) + increment; | 57 return InterlockedExchangeAdd(reinterpret_cast<long volatile*>(addend), static_cast<long>(increment)) + increment; |
58 } | 58 } |
59 ALWAYS_INLINE unsigned atomicAdd(unsigned volatile* addend, unsigned increment) | |
60 { | |
61 return InterlockedExchangeAdd(reinterpret_cast<long volatile*>(addend), static_cast<long>(increment)) + increment; | |
62 } | |
63 #if defined(_WIN64) | |
64 ALWAYS_INLINE unsigned long atomicAdd(unsigned long volatile* addend, unsigned long increment) | |
65 { | |
66 return InterlockedExchangeAdd64(reinterpret_cast<long long volatile*>(addend), static_cast<long long>(increment)) + increment; | |
67 } | |
68 #endif | |
69 | 59 |
70 // atomicSubtract returns the result of the subtraction. | 60 // atomicSubtract returns the result of the subtraction. |
71 ALWAYS_INLINE int atomicSubtract(int volatile* addend, int decrement) | 61 ALWAYS_INLINE int atomicSubtract(int volatile* addend, int decrement) |
72 { | 62 { |
73 return InterlockedExchangeAdd(reinterpret_cast<long volatile*>(addend), static_cast<long>(-decrement)) - decrement; | 63 return InterlockedExchangeAdd(reinterpret_cast<long volatile*>(addend), static_cast<long>(-decrement)) - decrement; |
74 } | 64 } |
75 ALWAYS_INLINE unsigned atomicSubtract(unsigned volatile* addend, unsigned decrement) | |
76 { | |
77 return InterlockedExchangeAdd(reinterpret_cast<long volatile*>(addend), -static_cast<long>(decrement)) - decrement; | |
78 } | |
79 #if defined(_WIN64) | |
80 ALWAYS_INLINE unsigned long atomicSubtract(unsigned long volatile* addend, unsigned long decrement) | |
81 { | |
82 return InterlockedExchangeAdd64(reinterpret_cast<long long volatile*>(addend), -static_cast<long long>(decrement)) - decrement; | |
83 } | |
84 #endif | |
85 | 65 |
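Note: on this MSVC path, InterlockedExchangeAdd returns the value the addend held *before* the addition, which is why each helper adds the increment (or subtracts the decrement) back to satisfy the "returns the result" contract in the comment above. A minimal standalone sketch of the same fetch-then-adjust pattern, using std::atomic in place of the Windows intrinsic so it compiles anywhere; atomicAddSketch is a hypothetical name for illustration, not part of this patch:

    #include <atomic>
    #include <cassert>

    // Like InterlockedExchangeAdd, fetch_add returns the value *before*
    // the addition; adding the increment back yields the new value.
    int atomicAddSketch(std::atomic<int>& addend, int increment)
    {
        return addend.fetch_add(increment) + increment;
    }

    int main()
    {
        std::atomic<int> counter{5};
        assert(atomicAddSketch(counter, 3) == 8); // the result of the addition
        assert(counter.load() == 8);
    }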
86 ALWAYS_INLINE int atomicIncrement(int volatile* addend) { return InterlockedIncrement(reinterpret_cast<long volatile*>(addend)); } | 66 ALWAYS_INLINE int atomicIncrement(int volatile* addend) { return InterlockedIncrement(reinterpret_cast<long volatile*>(addend)); } |
87 ALWAYS_INLINE int atomicDecrement(int volatile* addend) { return InterlockedDecrement(reinterpret_cast<long volatile*>(addend)); } | 67 ALWAYS_INLINE int atomicDecrement(int volatile* addend) { return InterlockedDecrement(reinterpret_cast<long volatile*>(addend)); } |
88 | 68 |
89 ALWAYS_INLINE int64_t atomicIncrement(int64_t volatile* addend) { return InterlockedIncrement64(reinterpret_cast<long long volatile*>(addend)); } | 69 ALWAYS_INLINE int64_t atomicIncrement(int64_t volatile* addend) { return InterlockedIncrement64(reinterpret_cast<long long volatile*>(addend)); } |
90 ALWAYS_INLINE int64_t atomicDecrement(int64_t volatile* addend) { return InterlockedDecrement64(reinterpret_cast<long long volatile*>(addend)); } | 70 ALWAYS_INLINE int64_t atomicDecrement(int64_t volatile* addend) { return InterlockedDecrement64(reinterpret_cast<long long volatile*>(addend)); } |
91 | 71 |
92 ALWAYS_INLINE int atomicTestAndSetToOne(int volatile* ptr) | 72 ALWAYS_INLINE int atomicTestAndSetToOne(int volatile* ptr) |
93 { | 73 { |
94 int ret = InterlockedExchange(reinterpret_cast<long volatile*>(ptr), 1); | 74 int ret = InterlockedExchange(reinterpret_cast<long volatile*>(ptr), 1); |
95 ASSERT(!ret || ret == 1); | 75 ASSERT(!ret || ret == 1); |
96 return ret; | 76 return ret; |
97 } | 77 } |
98 | 78 |
99 ALWAYS_INLINE void atomicSetOneToZero(int volatile* ptr) | 79 ALWAYS_INLINE void atomicSetOneToZero(int volatile* ptr) |
100 { | 80 { |
101 ASSERT(*ptr == 1); | 81 ASSERT(*ptr == 1); |
102 InterlockedExchange(reinterpret_cast<long volatile*>(ptr), 0); | 82 InterlockedExchange(reinterpret_cast<long volatile*>(ptr), 0); |
103 } | 83 } |
104 | 84 |
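Note: atomicTestAndSetToOne and atomicSetOneToZero together give the semantics of a test-and-set spin lock; the exchange returns the flag's previous value, so a return of 0 means the caller took the flag. A sketch of that usage, assuming std::atomic in place of InterlockedExchange (the intrinsic is a full barrier; the acquire/release orderings below are the weakest that keep the pattern correct, not something WTF promises); TasLockSketch is a hypothetical illustration, not WTF API:

    #include <atomic>

    class TasLockSketch {
    public:
        void lock()
        {
            // Spin until the exchange observes 0, i.e. until
            // atomicTestAndSetToOne would have returned 0.
            while (m_flag.exchange(1, std::memory_order_acquire) != 0) { }
        }
        void unlock()
        {
            // atomicSetOneToZero: drop the flag back to 0.
            m_flag.store(0, std::memory_order_release);
        }
    private:
        std::atomic<int> m_flag{0};
    };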
105 #else | 85 #else |
106 | 86 |
107 // atomicAdd returns the result of the addition. | 87 // atomicAdd returns the result of the addition. |
108 ALWAYS_INLINE int atomicAdd(int volatile* addend, int increment) { return __sync_add_and_fetch(addend, increment); } | 88 ALWAYS_INLINE int atomicAdd(int volatile* addend, int increment) { return __sync_add_and_fetch(addend, increment); } |
109 ALWAYS_INLINE unsigned atomicAdd(unsigned volatile* addend, unsigned increment) { return __sync_add_and_fetch(addend, increment); } | |
110 ALWAYS_INLINE unsigned long atomicAdd(unsigned long volatile* addend, unsigned long increment) { return __sync_add_and_fetch(addend, increment); } | |
111 // atomicSubtract returns the result of the subtraction. | 89 // atomicSubtract returns the result of the subtraction. |
112 ALWAYS_INLINE int atomicSubtract(int volatile* addend, int decrement) { return __sync_sub_and_fetch(addend, decrement); } | 90 ALWAYS_INLINE int atomicSubtract(int volatile* addend, int decrement) { return __sync_sub_and_fetch(addend, decrement); } |
113 ALWAYS_INLINE unsigned atomicSubtract(unsigned volatile* addend, unsigned decrement) { return __sync_sub_and_fetch(addend, decrement); } | |
114 ALWAYS_INLINE unsigned long atomicSubtract(unsigned long volatile* addend, unsigned long decrement) { return __sync_sub_and_fetch(addend, decrement); } | |
115 | 91 |
116 ALWAYS_INLINE int atomicIncrement(int volatile* addend) { return atomicAdd(addend, 1); } | 92 ALWAYS_INLINE int atomicIncrement(int volatile* addend) { return atomicAdd(addend, 1); } |
117 ALWAYS_INLINE int atomicDecrement(int volatile* addend) { return atomicSubtract(addend, 1); } | 93 ALWAYS_INLINE int atomicDecrement(int volatile* addend) { return atomicSubtract(addend, 1); } |
118 | 94 |
119 ALWAYS_INLINE int64_t atomicIncrement(int64_t volatile* addend) { return __sync_add_and_fetch(addend, 1); } | 95 ALWAYS_INLINE int64_t atomicIncrement(int64_t volatile* addend) { return __sync_add_and_fetch(addend, 1); } |
120 ALWAYS_INLINE int64_t atomicDecrement(int64_t volatile* addend) { return __sync_sub_and_fetch(addend, 1); } | 96 ALWAYS_INLINE int64_t atomicDecrement(int64_t volatile* addend) { return __sync_sub_and_fetch(addend, 1); } |
121 | 97 |
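Note: unlike the Interlocked calls on the MSVC path, the GCC __sync builtins come in both orders: __sync_add_and_fetch returns the *new* value, matching the atomicAdd contract directly with no post-adjustment, while __sync_fetch_and_add returns the old one. A small sketch of the difference (GCC/Clang only):

    #include <cassert>

    int main()
    {
        int value = 5;
        // add_and_fetch returns the result of the addition.
        assert(__sync_add_and_fetch(&value, 3) == 8);
        // fetch_and_add returns the value *before* the addition.
        assert(__sync_fetch_and_add(&value, 2) == 8);
        assert(value == 10);
    }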
122 ALWAYS_INLINE int atomicTestAndSetToOne(int volatile* ptr) | 98 ALWAYS_INLINE int atomicTestAndSetToOne(int volatile* ptr) |
123 { | 99 { |
124 int ret = __sync_lock_test_and_set(ptr, 1); | 100 int ret = __sync_lock_test_and_set(ptr, 1); |
(...skipping 139 matching lines...) |
264 using WTF::acquireLoad; | 240 using WTF::acquireLoad; |
265 using WTF::releaseStore; | 241 using WTF::releaseStore; |
266 | 242 |
267 // These methods allow loading from and storing to poisoned memory. Only | 243 // These methods allow loading from and storing to poisoned memory. Only |
268 // use these methods if you know what you are doing since they will | 244 // use these methods if you know what you are doing since they will |
269 // silence use-after-poison errors from ASan. | 245 // silence use-after-poison errors from ASan. |
270 using WTF::asanUnsafeAcquireLoad; | 246 using WTF::asanUnsafeAcquireLoad; |
271 using WTF::asanUnsafeReleaseStore; | 247 using WTF::asanUnsafeReleaseStore; |
272 | 248 |
273 #endif // Atomics_h | 249 #endif // Atomics_h |
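Note: the acquireLoad/releaseStore pair exists to support the usual publication pattern: a plain write to the payload, then a release store to a flag; a reader that acquire-loads the flag and sees it set is guaranteed to also see the payload. A standalone sketch of that pattern using std::atomic equivalents, assuming the acquire/release contract the WTF names suggest (the WTF helpers themselves take plain int pointers and insert explicit barriers):

    #include <atomic>
    #include <cassert>
    #include <thread>

    int payload = 0;
    std::atomic<int> ready{0};

    void producer()
    {
        payload = 42;                              // plain write...
        ready.store(1, std::memory_order_release); // ...published by the release store
    }

    void consumer()
    {
        // The acquire load pairs with the release store above.
        while (!ready.load(std::memory_order_acquire)) { }
        assert(payload == 42); // visible once the flag is observed set
    }

    int main()
    {
        std::thread t1(producer), t2(consumer);
        t1.join();
        t2.join();
    }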