Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(233)

Side by Side Diff: Source/wtf/Atomics.h

Issue 723513002: Oilpan: Refactor the way we calculate heap statistics (Closed) Base URL: svn://svn.chromium.org/blink/trunk
Patch Set: Created 6 years, 1 month ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
OLDNEW
1 /* 1 /*
2 * Copyright (C) 2007, 2008, 2010, 2012 Apple Inc. All rights reserved. 2 * Copyright (C) 2007, 2008, 2010, 2012 Apple Inc. All rights reserved.
3 * Copyright (C) 2007 Justin Haygood (jhaygood@reaktix.com) 3 * Copyright (C) 2007 Justin Haygood (jhaygood@reaktix.com)
4 * 4 *
5 * Redistribution and use in source and binary forms, with or without 5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions 6 * modification, are permitted provided that the following conditions
7 * are met: 7 * are met:
8 * 8 *
9 * 1. Redistributions of source code must retain the above copyright 9 * 1. Redistributions of source code must retain the above copyright
10 * notice, this list of conditions and the following disclaimer. 10 * notice, this list of conditions and the following disclaimer.
(...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after
47 #include <sanitizer/asan_interface.h> 47 #include <sanitizer/asan_interface.h>
48 #endif 48 #endif
49 49
50 namespace WTF { 50 namespace WTF {
51 51
52 #if COMPILER(MSVC) 52 #if COMPILER(MSVC)
53 53
54 // atomicAdd returns the result of the addition. 54 // atomicAdd returns the result of the addition.
55 ALWAYS_INLINE int atomicAdd(int volatile* addend, int increment) 55 ALWAYS_INLINE int atomicAdd(int volatile* addend, int increment)
56 { 56 {
57 return InterlockedExchangeAdd(reinterpret_cast<long volatile*>(addend), stat ic_cast<long>(increment)) + increment; 57 return InterlockedExchangeAdd(reinterpret_cast<LONG volatile*>(addend), stat ic_cast<LONG>(increment)) + increment;
tkent 2014/11/17 10:30:15 Replacing |long| with |LONG| is inconsistent with
haraken 2014/11/17 10:43:36 Done.
58 } 58 }
59 ALWAYS_INLINE unsigned atomicAdd(unsigned volatile* addend, unsigned increment)
60 {
61 return InterlockedExchangeAdd(reinterpret_cast<ULONG volatile*>(addend), sta tic_cast<ULONG>(increment)) + increment;
tkent 2014/11/17 10:30:15 Why do you cast to ULONG? The arguments of Interl
haraken 2014/11/17 10:43:36 Fixed.
62 }
63 #if defined(_WIN64)
64 ALWAYS_INLINE unsigned long atomicAdd(unsigned long volatile* addend, unsigned l ong increment)
65 {
66 return InterlockedExchangeAdd64(reinterpret_cast<ULONGLONG volatile*>(addend ), static_cast<ULONGLONG>(increment)) + increment;
tkent 2014/11/17 10:30:15 Ditto. |LONGLONG volatile*| and |LONGLONG|.
haraken 2014/11/17 10:43:36 Done.
67 }
68 #endif
59 69
60 // atomicSubtract returns the result of the subtraction. 70 // atomicSubtract returns the result of the subtraction.
61 ALWAYS_INLINE int atomicSubtract(int volatile* addend, int decrement) 71 ALWAYS_INLINE int atomicSubtract(int volatile* addend, int decrement)
62 { 72 {
63 return InterlockedExchangeAdd(reinterpret_cast<long volatile*>(addend), stat ic_cast<long>(-decrement)) - decrement; 73 return InterlockedExchangeAdd(reinterpret_cast<LONG volatile*>(addend), stat ic_cast<LONG>(-decrement)) - decrement;
64 } 74 }
75 ALWAYS_INLINE unsigned atomicSubtract(unsigned volatile* addend, unsigned decrem ent)
76 {
77 return InterlockedExchangeAdd(reinterpret_cast<ULONG volatile*>(addend), -st atic_cast<LONG>(decrement)) - decrement;
78 }
79 #if defined(_WIN64)
80 ALWAYS_INLINE unsigned long atomicSubtract(unsigned long volatile* addend, unsig ned long decrement)
81 {
82 return InterlockedExchangeAdd64(reinterpret_cast<ULONGLONG volatile*>(addend ), -static_cast<LONGLONG>(decrement)) - decrement;
83 }
84 #endif
65 85
66 ALWAYS_INLINE int atomicIncrement(int volatile* addend) { return InterlockedIncr ement(reinterpret_cast<long volatile*>(addend)); } 86 ALWAYS_INLINE int atomicIncrement(int volatile* addend) { return InterlockedIncr ement(reinterpret_cast<long volatile*>(addend)); }
67 ALWAYS_INLINE int atomicDecrement(int volatile* addend) { return InterlockedDecr ement(reinterpret_cast<long volatile*>(addend)); } 87 ALWAYS_INLINE int atomicDecrement(int volatile* addend) { return InterlockedDecr ement(reinterpret_cast<long volatile*>(addend)); }
68 88
69 ALWAYS_INLINE int64_t atomicIncrement(int64_t volatile* addend) { return Interlo ckedIncrement64(reinterpret_cast<long long volatile*>(addend)); } 89 ALWAYS_INLINE int64_t atomicIncrement(int64_t volatile* addend) { return Interlo ckedIncrement64(reinterpret_cast<long long volatile*>(addend)); }
70 ALWAYS_INLINE int64_t atomicDecrement(int64_t volatile* addend) { return Interlo ckedDecrement64(reinterpret_cast<long long volatile*>(addend)); } 90 ALWAYS_INLINE int64_t atomicDecrement(int64_t volatile* addend) { return Interlo ckedDecrement64(reinterpret_cast<long long volatile*>(addend)); }
71 91
72 ALWAYS_INLINE int atomicTestAndSetToOne(int volatile* ptr) 92 ALWAYS_INLINE int atomicTestAndSetToOne(int volatile* ptr)
73 { 93 {
74 int ret = InterlockedExchange(reinterpret_cast<long volatile*>(ptr), 1); 94 int ret = InterlockedExchange(reinterpret_cast<long volatile*>(ptr), 1);
75 ASSERT(!ret || ret == 1); 95 ASSERT(!ret || ret == 1);
76 return ret; 96 return ret;
77 } 97 }
78 98
79 ALWAYS_INLINE void atomicSetOneToZero(int volatile* ptr) 99 ALWAYS_INLINE void atomicSetOneToZero(int volatile* ptr)
80 { 100 {
81 ASSERT(*ptr == 1); 101 ASSERT(*ptr == 1);
82 InterlockedExchange(reinterpret_cast<long volatile*>(ptr), 0); 102 InterlockedExchange(reinterpret_cast<long volatile*>(ptr), 0);
83 } 103 }
84 104
85 #else 105 #else
86 106
// atomicAdd returns the result of the addition (the value after the add).
ALWAYS_INLINE int atomicAdd(int volatile* addend, int increment)
{
    return __sync_add_and_fetch(addend, increment);
}
ALWAYS_INLINE unsigned atomicAdd(unsigned volatile* addend, unsigned increment)
{
    return __sync_add_and_fetch(addend, increment);
}
ALWAYS_INLINE unsigned long atomicAdd(unsigned long volatile* addend, unsigned long increment)
{
    return __sync_add_and_fetch(addend, increment);
}

// atomicSubtract returns the result of the subtraction.
ALWAYS_INLINE int atomicSubtract(int volatile* addend, int decrement)
{
    return __sync_sub_and_fetch(addend, decrement);
}
ALWAYS_INLINE unsigned atomicSubtract(unsigned volatile* addend, unsigned decrement)
{
    return __sync_sub_and_fetch(addend, decrement);
}
ALWAYS_INLINE unsigned long atomicSubtract(unsigned long volatile* addend, unsigned long decrement)
{
    return __sync_sub_and_fetch(addend, decrement);
}

// Increment/decrement are expressed in terms of the add/subtract helpers
// above for the |int| flavor, and directly via the GCC builtins for int64_t.
ALWAYS_INLINE int atomicIncrement(int volatile* addend)
{
    return atomicAdd(addend, 1);
}
ALWAYS_INLINE int atomicDecrement(int volatile* addend)
{
    return atomicSubtract(addend, 1);
}

ALWAYS_INLINE int64_t atomicIncrement(int64_t volatile* addend)
{
    return __sync_add_and_fetch(addend, 1);
}
ALWAYS_INLINE int64_t atomicDecrement(int64_t volatile* addend)
{
    return __sync_sub_and_fetch(addend, 1);
}
97 121
98 ALWAYS_INLINE int atomicTestAndSetToOne(int volatile* ptr) 122 ALWAYS_INLINE int atomicTestAndSetToOne(int volatile* ptr)
99 { 123 {
100 int ret = __sync_lock_test_and_set(ptr, 1); 124 int ret = __sync_lock_test_and_set(ptr, 1);
(...skipping 139 matching lines...) Expand 10 before | Expand all | Expand 10 after
// Hoist the atomic helpers out of the WTF namespace so callers can use them
// unqualified (the standard convention for WTF headers).
240 using WTF::acquireLoad; 264 using WTF::acquireLoad;
241 using WTF::releaseStore; 265 using WTF::releaseStore;
242 266
243 // These methods allow loading from and storing to poisoned memory. Only 267 // These methods allow loading from and storing to poisoned memory. Only
244 // use these methods if you know what you are doing since they will 268 // use these methods if you know what you are doing since they will
245 // silence use-after-poison errors from ASan. 269 // silence use-after-poison errors from ASan.
246 using WTF::asanUnsafeAcquireLoad; 270 using WTF::asanUnsafeAcquireLoad;
247 using WTF::asanUnsafeReleaseStore; 271 using WTF::asanUnsafeReleaseStore;
248 272
249 #endif // Atomics_h 273 #endif // Atomics_h
OLDNEW
« Source/platform/heap/ThreadState.cpp ('K') | « Source/platform/heap/ThreadState.cpp ('k') | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698