/* code.delx.au - pulseaudio/blob - src/pulsecore/atomic.h
 * unify static TLS support, make use of gcc __thread attribute if available
 * [pulseaudio] / src / pulsecore / atomic.h */
1 #ifndef foopulseatomichfoo
2 #define foopulseatomichfoo
3
4 /* $Id$ */
5
6 /***
7 This file is part of PulseAudio.
8
9 Copyright 2006 Lennart Poettering
10
11 PulseAudio is free software; you can redistribute it and/or modify
12 it under the terms of the GNU Lesser General Public License as
13 published by the Free Software Foundation; either version 2 of the
14 License, or (at your option) any later version.
15
16 PulseAudio is distributed in the hope that it will be useful, but
17 WITHOUT ANY WARRANTY; without even the implied warranty of
18 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
19 General Public License for more details.
20
21 You should have received a copy of the GNU Lesser General Public
22 License along with PulseAudio; if not, write to the Free Software
23 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
24 USA.
25 ***/
26
27 /*
 * atomic_ops guarantees us that sizeof(AO_t) == sizeof(void*). It is
 * not guaranteed, however, that sizeof(AO_t) == sizeof(size_t),
 * although that is very likely.
31 *
32 * For now we do only full memory barriers. Eventually we might want
33 * to support more elaborate memory barriers, in which case we will add
34 * suffixes to the function names.
35 *
36 * On gcc >= 4.1 we use the builtin atomic functions. otherwise we use
37 * libatomic_ops
38 */
39
40 /* We have to include config.h here (for the __sync stuff), which sucks */
41 #ifdef HAVE_CONFIG_H
42 #include <config.h>
43 #endif
44
45 #ifdef HAVE_ATOMIC_BUILTINS
46
47 /* __sync based implementation */
48
/* __sync-builtin based implementation: a plain int wrapped in a struct
 * so it can only be manipulated through the pa_atomic_* accessors. */
typedef struct pa_atomic {
    volatile int value;
} pa_atomic_t;

/* Static initializer, e.g.: static pa_atomic_t a = PA_ATOMIC_INIT(0); */
#define PA_ATOMIC_INIT(v) { .value = (v) }
54
55 static inline int pa_atomic_load(const pa_atomic_t *a) {
56 __sync_synchronize();
57 return a->value;
58 }
59
/* Atomically write i; the full memory barrier afterwards makes the
 * store visible before any subsequent operations. */
static inline void pa_atomic_store(pa_atomic_t *a, int i) {
    a->value = i;
    __sync_synchronize();
}
64
/* Returns the previously set value */
static inline int pa_atomic_add(pa_atomic_t *a, int i) {
    /* __sync_fetch_and_add is a full barrier and yields the old value. */
    return __sync_fetch_and_add(&a->value, i);
}
69
/* Returns the previously set value */
static inline int pa_atomic_sub(pa_atomic_t *a, int i) {
    /* __sync_fetch_and_sub is a full barrier and yields the old value. */
    return __sync_fetch_and_sub(&a->value, i);
}
74
75 /* Returns the previously set value */
76 static inline int pa_atomic_inc(pa_atomic_t *a) {
77 return pa_atomic_add(a, 1);
78 }
79
80 /* Returns the previously set value */
81 static inline int pa_atomic_dec(pa_atomic_t *a) {
82 return pa_atomic_sub(a, 1);
83 }
84
/* Returns non-zero when the operation was successful. */
static inline int pa_atomic_cmpxchg(pa_atomic_t *a, int old_i, int new_i) {
    /* Swaps in new_i only if the value still equals old_i. */
    return __sync_bool_compare_and_swap(&a->value, old_i, new_i);
}
89
/* Atomic pointer cell: the pointer is stored as an unsigned long. */
typedef struct pa_atomic_ptr {
    volatile unsigned long value;
} pa_atomic_ptr_t;

/* Static initializer. Cast via (unsigned long) to match the type of
 * .value; the original (long) cast relied on implementation-defined
 * behaviour for pointers that do not fit in a signed long. */
#define PA_ATOMIC_PTR_INIT(v) { .value = (unsigned long) (v) }
95
96 static inline void* pa_atomic_ptr_load(const pa_atomic_ptr_t *a) {
97 __sync_synchronize();
98 return (void*) a->value;
99 }
100
/* Store pointer p, followed by a full memory barrier. */
static inline void pa_atomic_ptr_store(pa_atomic_ptr_t *a, void *p) {
    a->value = (unsigned long) p;
    __sync_synchronize();
}
105
106 static inline int pa_atomic_ptr_cmpxchg(pa_atomic_ptr_t *a, void *old_p, void* new_p) {
107 return __sync_bool_compare_and_swap(&a->value, (long) old_p, (long) new_p);
108 }
109
110 #elif defined(__GNUC__) && (defined(__amd64__) || defined(__x86_64__))
111
112 #error "The native atomic operations implementation for AMD64 has not been tested. libatomic_ops is known to not work properly on AMD64 and your gcc version is too old for the gcc-builtin atomic ops support. You have three options now: make the native atomic operations implementation for AMD64 work, fix libatomic_ops, or upgrade your GCC."
113
/* Adapted from glibc */

/* Native AMD64 implementation (currently disabled by the #error above). */
typedef struct pa_atomic {
    volatile int value;
} pa_atomic_t;

/* Static initializer, e.g.: static pa_atomic_t a = PA_ATOMIC_INIT(0); */
#define PA_ATOMIC_INIT(v) { .value = (v) }
121
/* Plain volatile load — no explicit barrier, unlike the __sync variant.
 * NOTE(review): presumably relies on x86-64's strong memory ordering
 * for aligned int loads — confirm before enabling this branch. */
static inline int pa_atomic_load(const pa_atomic_t *a) {
    return a->value;
}
125
/* Plain volatile store — no explicit barrier, unlike the __sync variant.
 * NOTE(review): presumably relies on x86-64's strong memory ordering
 * for aligned int stores — confirm before enabling this branch. */
static inline void pa_atomic_store(pa_atomic_t *a, int i) {
    a->value = i;
}
129
/* Returns the previously set value. LOCK XADD atomically exchanges and
 * adds, leaving the old value of a->value in 'result'. */
static inline int pa_atomic_add(pa_atomic_t *a, int i) {
    int result;

    __asm __volatile ("lock; xaddl %0, %1"
                      : "=r" (result), "=m" (a->value)
                      : "0" (i), "m" (a->value));

    return result;
}
139
/* Returns the previously set value; implemented as an add of -i. */
static inline int pa_atomic_sub(pa_atomic_t *a, int i) {
    return pa_atomic_add(a, -i);
}
143
/* Returns the previously set value */
static inline int pa_atomic_inc(pa_atomic_t *a) {
    return pa_atomic_add(a, 1);
}
147
/* Returns the previously set value */
static inline int pa_atomic_dec(pa_atomic_t *a) {
    return pa_atomic_sub(a, 1);
}
151
152 static inline int pa_atomic_cmpxchg(pa_atomic_t *a, int old_i, int new_i) {
153 int result;
154
155 __asm__ __volatile__ ("lock; cmpxchgl %2, %1"
156 : "=a" (result), "=m" (a->value)
157 : "r" (new_i), "m" (a->value), "0" (old_i));
158
159 return result == oldval;
160 }
161
/* Atomic pointer cell: the pointer is stored as an unsigned long. */
typedef struct pa_atomic_ptr {
    volatile unsigned long value;
} pa_atomic_ptr_t;

/* Static initializer. Cast via (unsigned long) to match the type of
 * .value; the original (long) cast relied on implementation-defined
 * behaviour for pointers that do not fit in a signed long. */
#define PA_ATOMIC_PTR_INIT(v) { .value = (unsigned long) (v) }
167
/* Plain volatile load of the stored pointer — no explicit barrier.
 * NOTE(review): same ordering assumption as pa_atomic_load above. */
static inline void* pa_atomic_ptr_load(const pa_atomic_ptr_t *a) {
    return (void*) a->value;
}
171
/* Plain volatile store of pointer p — no explicit barrier.
 * NOTE(review): same ordering assumption as pa_atomic_store above. */
static inline void pa_atomic_ptr_store(pa_atomic_ptr_t *a, void *p) {
    a->value = (unsigned long) p;
}
175
176 static inline int pa_atomic_ptr_cmpxchg(pa_atomic_ptr_t *a, void *old_p, void* new_p) {
177 void *result;
178
179 __asm__ __volatile__ ("lock; cmpxchgq %q2, %1"
180 : "=a" (result), "=m" (a->value)
181 : "r" (new_p), "m" (a->value), "0" (old_p));
182
183 return result;
184 }
185
186 #else
187
188 /* libatomic_ops based implementation */
189
190 #include <atomic_ops.h>
191
/* libatomic_ops based implementation: the counter lives in the
 * library's word-sized AO_t (see the note at the top of this file
 * about sizeof(AO_t)). */
typedef struct pa_atomic {
    volatile AO_t value;
} pa_atomic_t;

/* Static initializer, e.g.: static pa_atomic_t a = PA_ATOMIC_INIT(0); */
#define PA_ATOMIC_INIT(v) { .value = (v) }
197
/* Load with a full memory barrier. The cast drops the const/volatile
 * qualifiers on &a->value to match AO_load_full()'s parameter type. */
static inline int pa_atomic_load(const pa_atomic_t *a) {
    return (int) AO_load_full((AO_t*) &a->value);
}
201
/* Store with a full memory barrier. */
static inline void pa_atomic_store(pa_atomic_t *a, int i) {
    AO_store_full(&a->value, (AO_t) i);
}
205
/* Returns the previously set value */
static inline int pa_atomic_add(pa_atomic_t *a, int i) {
    return AO_fetch_and_add_full(&a->value, (AO_t) i);
}
209
/* Returns the previously set value. Negating i before converting to
 * AO_t produces the subtrahend as a wrapped addend.
 * NOTE(review): assumes AO_t is an unsigned word type — confirm
 * against the installed libatomic_ops headers. */
static inline int pa_atomic_sub(pa_atomic_t *a, int i) {
    return AO_fetch_and_add_full(&a->value, (AO_t) -i);
}
213
/* Returns the previously set value */
static inline int pa_atomic_inc(pa_atomic_t *a) {
    return AO_fetch_and_add1_full(&a->value);
}
217
/* Returns the previously set value */
static inline int pa_atomic_dec(pa_atomic_t *a) {
    return AO_fetch_and_sub1_full(&a->value);
}
221
/* Returns non-zero when the operation was successful, i.e. when the
 * value still equalled old_i and has been replaced by new_i. */
static inline int pa_atomic_cmpxchg(pa_atomic_t *a, int old_i, int new_i) {
    return AO_compare_and_swap_full(&a->value, old_i, new_i);
}
225
/* Atomic pointer cell. atomic_ops guarantees sizeof(AO_t) ==
 * sizeof(void*) (see the note at the top of this file), so a pointer
 * always fits in .value. */
typedef struct pa_atomic_ptr {
    volatile AO_t value;
} pa_atomic_ptr_t;

/* Static initializer. */
#define PA_ATOMIC_PTR_INIT(v) { .value = (AO_t) (v) }
231
/* Load the stored pointer with a full memory barrier; the cast drops
 * const/volatile to match AO_load_full()'s parameter type. */
static inline void* pa_atomic_ptr_load(const pa_atomic_ptr_t *a) {
    return (void*) AO_load_full((AO_t*) &a->value);
}
235
/* Store pointer p with a full memory barrier. */
static inline void pa_atomic_ptr_store(pa_atomic_ptr_t *a, void *p) {
    AO_store_full(&a->value, (AO_t) p);
}
239
/* Returns non-zero when the operation was successful, i.e. when the
 * stored pointer still equalled old_p and has been replaced by new_p. */
static inline int pa_atomic_ptr_cmpxchg(pa_atomic_ptr_t *a, void *old_p, void* new_p) {
    return AO_compare_and_swap_full(&a->value, (AO_t) old_p, (AO_t) new_p);
}
243
244 #endif
245
246 #endif