Index: src/runtime/runtime-atomics.cc
diff --git a/src/runtime/runtime-atomics.cc b/src/runtime/runtime-atomics.cc
index c9b78769cdc5a8630b3e254de8719c2d7e2583c5..e8c4d9e9c2ecd5344af7997fadee96ddf77b09db 100644
--- a/src/runtime/runtime-atomics.cc
+++ b/src/runtime/runtime-atomics.cc
@@ -103,12 +103,15 @@ inline void StoreSeqCst(uint64_t* p, uint64_t value) {
#elif V8_CC_MSVC
-#define _InterlockedCompareExchange32 _InterlockedCompareExchange
-#define _InterlockedExchange32 _InterlockedExchange
-#define _InterlockedExchangeAdd32 _InterlockedExchangeAdd
-#define _InterlockedAnd32 _InterlockedAnd
-#define _InterlockedOr32 _InterlockedOr
-#define _InterlockedXor32 _InterlockedXor
+#define InterlockedCompareExchange32 _InterlockedCompareExchange
+#define InterlockedExchange32 _InterlockedExchange
+#define InterlockedExchangeAdd32 _InterlockedExchangeAdd
+#define InterlockedAnd32 _InterlockedAnd
+#define InterlockedOr32 _InterlockedOr
+#define InterlockedXor32 _InterlockedXor
+#define InterlockedExchangeAdd16 _InterlockedExchangeAdd16
+#define InterlockedCompareExchange8 _InterlockedCompareExchange8
+#define InterlockedExchangeAdd8 _InterlockedExchangeAdd8
#define INTEGER_TYPES(V) \
  V(int8_t, 8, char) \
@@ -120,52 +123,54 @@ inline void StoreSeqCst(uint64_t* p, uint64_t value) {
 V(int64_t, 64, LONGLONG) \
 V(uint64_t, 64, LONGLONG)
-#define ATOMIC_OPS(type, suffix, vctype) \
- inline type CompareExchangeSeqCst(volatile type* p, type oldval, \
- type newval) { \
- return _InterlockedCompareExchange##suffix( \
- reinterpret_cast<volatile vctype*>(p), bit_cast<vctype>(newval), \
- bit_cast<vctype>(oldval)); \
- } \
- inline type LoadSeqCst(volatile type* p) { return *p; } \
- inline void StoreSeqCst(volatile type* p, type value) { \
- _InterlockedExchange##suffix(reinterpret_cast<volatile vctype*>(p), \
- bit_cast<vctype>(value)); \
- } \
- inline type AddSeqCst(volatile type* p, type value) { \
- return _InterlockedExchangeAdd##suffix( \
- reinterpret_cast<volatile vctype*>(p), bit_cast<vctype>(value)); \
- } \
- inline type SubSeqCst(volatile type* p, type value) { \
- return _InterlockedExchangeAdd##suffix( \
- reinterpret_cast<volatile vctype*>(p), -bit_cast<vctype>(value)); \
- } \
- inline type AndSeqCst(volatile type* p, type value) { \
- return _InterlockedAnd##suffix(reinterpret_cast<volatile vctype*>(p), \
- bit_cast<vctype>(value)); \
- } \
- inline type OrSeqCst(volatile type* p, type value) { \
- return _InterlockedOr##suffix(reinterpret_cast<volatile vctype*>(p), \
- bit_cast<vctype>(value)); \
- } \
- inline type XorSeqCst(volatile type* p, type value) { \
- return _InterlockedXor##suffix(reinterpret_cast<volatile vctype*>(p), \
- bit_cast<vctype>(value)); \
- } \
- inline type ExchangeSeqCst(volatile type* p, type value) { \
- return _InterlockedExchange##suffix(reinterpret_cast<volatile vctype*>(p), \
- bit_cast<vctype>(value)); \
+#define ATOMIC_OPS(type, suffix, vctype) \
+ inline type CompareExchangeSeqCst(type* p, type oldval, type newval) { \
+ return InterlockedCompareExchange##suffix(reinterpret_cast<vctype*>(p), \
+ bit_cast<vctype>(newval), \
+ bit_cast<vctype>(oldval)); \
+ } \
+ inline type LoadSeqCst(type* p) { return *p; } \
+ inline void StoreSeqCst(type* p, type value) { \
+ InterlockedExchange##suffix(reinterpret_cast<vctype*>(p), \
+ bit_cast<vctype>(value)); \
+ } \
+ inline type AddSeqCst(type* p, type value) { \
+ return InterlockedExchangeAdd##suffix(reinterpret_cast<vctype*>(p), \
+ bit_cast<vctype>(value)); \
+ } \
+ inline type SubSeqCst(type* p, type value) { \
+ return InterlockedExchangeAdd##suffix(reinterpret_cast<vctype*>(p), \
+ -bit_cast<vctype>(value)); \
+ } \
+ inline type AndSeqCst(type* p, type value) { \
+ return InterlockedAnd##suffix(reinterpret_cast<vctype*>(p), \
+ bit_cast<vctype>(value)); \
+ } \
+ inline type OrSeqCst(type* p, type value) { \
+ return InterlockedOr##suffix(reinterpret_cast<vctype*>(p), \
+ bit_cast<vctype>(value)); \
+ } \
+ inline type XorSeqCst(type* p, type value) { \
+ return InterlockedXor##suffix(reinterpret_cast<vctype*>(p), \
+ bit_cast<vctype>(value)); \
+ } \
+ inline type ExchangeSeqCst(type* p, type value) { \
+ return InterlockedExchange##suffix(reinterpret_cast<vctype*>(p), \
+ bit_cast<vctype>(value)); \
  }
INTEGER_TYPES(ATOMIC_OPS)
#undef ATOMIC_OPS
#undef INTEGER_TYPES
-#undef _InterlockedCompareExchange32
-#undef _InterlockedExchange32
-#undef _InterlockedExchangeAdd32
-#undef _InterlockedAnd32
-#undef _InterlockedOr32
-#undef _InterlockedXor32
+#undef InterlockedCompareExchange32
+#undef InterlockedExchange32
+#undef InterlockedExchangeAdd32
+#undef InterlockedAnd32
+#undef InterlockedOr32
+#undef InterlockedXor32
+#undef InterlockedExchangeAdd16
+#undef InterlockedCompareExchange8
+#undef InterlockedExchangeAdd8
#else