#ifndef foopulseatomichfoo
#define foopulseatomichfoo

/* $Id$ */

/***
  This file is part of PulseAudio.

  Copyright 2006 Lennart Poettering

  PulseAudio is free software; you can redistribute it and/or modify
  it under the terms of the GNU Lesser General Public License as
  published by the Free Software Foundation; either version 2 of the
  License, or (at your option) any later version.

  PulseAudio is distributed in the hope that it will be useful, but
  WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  General Public License for more details.

  You should have received a copy of the GNU Lesser General Public
  License along with PulseAudio; if not, write to the Free Software
  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
  USA.
***/
/*
 * atomic_ops guarantees us that sizeof(AO_t) == sizeof(void*). It is,
 * however, not guaranteed that sizeof(AO_t) == sizeof(size_t), although
 * that is very likely.
 *
 * For now we use only full memory barriers. Eventually we might want
 * to support more elaborate memory barriers, in which case we will add
 * suffixes to the function names.
 *
 * On gcc >= 4.1 we use the builtin atomic functions, otherwise we fall
 * back to libatomic_ops.
 */
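
/*
 * Usage sketch (illustrative only, nothing below is part of PulseAudio):
 * a shared counter built on the integer API. The names are invented for
 * the example; pa_atomic_inc()/pa_atomic_dec() return the previous value.
 *
 *   static pa_atomic_t n_clients = PA_ATOMIC_INIT(0);
 *
 *   static void client_new(void) {
 *       pa_atomic_inc(&n_clients);      // old value is returned, ignored here
 *   }
 *
 *   static void client_free(void) {
 *       pa_atomic_dec(&n_clients);
 *   }
 *
 *   static int client_count(void) {
 *       return pa_atomic_load(&n_clients);
 *   }
 */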

/* We have to include config.h here, which sucks */
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif

#ifdef HAVE_ATOMIC_BUILTINS

/* __sync based implementation */

typedef struct pa_atomic {
    volatile int value;
} pa_atomic_t;

#define PA_ATOMIC_INIT(v) { .value = (v) }

static inline int pa_atomic_load(const pa_atomic_t *a) {
    __sync_synchronize();
    return a->value;
}

static inline void pa_atomic_store(pa_atomic_t *a, int i) {
    a->value = i;
    __sync_synchronize();
}

/* Returns the previously set value */
static inline int pa_atomic_add(pa_atomic_t *a, int i) {
    return __sync_fetch_and_add(&a->value, i);
}

/* Returns the previously set value */
static inline int pa_atomic_sub(pa_atomic_t *a, int i) {
    return __sync_fetch_and_sub(&a->value, i);
}

/* Returns the previously set value */
static inline int pa_atomic_inc(pa_atomic_t *a) {
    return pa_atomic_add(a, 1);
}

/* Returns the previously set value */
static inline int pa_atomic_dec(pa_atomic_t *a) {
    return pa_atomic_sub(a, 1);
}

/* Returns non-zero when the operation was successful. */
static inline int pa_atomic_cmpxchg(pa_atomic_t *a, int old_i, int new_i) {
    return __sync_bool_compare_and_swap(&a->value, old_i, new_i);
}
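
/*
 * Sketch of how the return value is typically consumed: a retry loop that
 * atomically stores the maximum of the old and the new value. The function
 * is hypothetical, not part of PulseAudio.
 *
 *   static void atomic_store_max(pa_atomic_t *a, int i) {
 *       int current;
 *
 *       do {
 *           current = pa_atomic_load(a);
 *           if (current >= i)
 *               return;                                   // already large enough
 *       } while (!pa_atomic_cmpxchg(a, current, i));      // retry if somebody raced us
 *   }
 */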

typedef struct pa_atomic_ptr {
    volatile unsigned long value;
} pa_atomic_ptr_t;

#define PA_ATOMIC_PTR_INIT(v) { .value = (long) (v) }

static inline void* pa_atomic_ptr_load(const pa_atomic_ptr_t *a) {
    __sync_synchronize();
    return (void*) a->value;
}

static inline void pa_atomic_ptr_store(pa_atomic_ptr_t *a, void *p) {
    a->value = (unsigned long) p;
    __sync_synchronize();
}

static inline int pa_atomic_ptr_cmpxchg(pa_atomic_ptr_t *a, void *old_p, void* new_p) {
    return __sync_bool_compare_and_swap(&a->value, (long) old_p, (long) new_p);
}
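
/*
 * Sketch of the intended use of the pointer variant: pushing onto a lock-free
 * singly linked list. The item type and names are invented for the example.
 *
 *   struct item {
 *       struct item *next;
 *   };
 *
 *   static pa_atomic_ptr_t list_head = PA_ATOMIC_PTR_INIT(NULL);
 *
 *   static void push(struct item *i) {
 *       void *head;
 *
 *       do {
 *           head = pa_atomic_ptr_load(&list_head);
 *           i->next = head;                               // link new item in front of the old head
 *       } while (!pa_atomic_ptr_cmpxchg(&list_head, head, i));  // retry if the head changed
 *   }
 */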

#elif defined(__GNUC__) && (defined(__amd64__) || defined(__x86_64__))

/* Adapted from glibc */

typedef struct pa_atomic {
    volatile int value;
} pa_atomic_t;

#define PA_ATOMIC_INIT(v) { .value = (v) }

static inline int pa_atomic_load(const pa_atomic_t *a) {
    return a->value;
}

static inline void pa_atomic_store(pa_atomic_t *a, int i) {
    a->value = i;
}

/* Returns the previously set value */
static inline int pa_atomic_add(pa_atomic_t *a, int i) {
    int result;

    /* xadd exchanges the operands and stores their sum in the destination,
     * so with the lock prefix "result" atomically picks up the old value. */
    __asm__ __volatile__ ("lock; xaddl %0, %1"
                          : "=r" (result), "=m" (a->value)
                          : "0" (i), "m" (a->value));

    return result;
}

static inline int pa_atomic_sub(pa_atomic_t *a, int i) {
    return pa_atomic_add(a, -i);
}

static inline int pa_atomic_inc(pa_atomic_t *a) {
    return pa_atomic_add(a, 1);
}

static inline int pa_atomic_dec(pa_atomic_t *a) {
    return pa_atomic_sub(a, 1);
}

/* Returns non-zero when the operation was successful. */
static inline int pa_atomic_cmpxchg(pa_atomic_t *a, int old_i, int new_i) {
    int result;

    /* cmpxchg compares %eax (old_i) with a->value and stores new_i on a
     * match; either way %eax ends up holding the value that was in memory. */
    __asm__ __volatile__ ("lock; cmpxchgl %2, %1"
                          : "=a" (result), "=m" (a->value)
                          : "r" (new_i), "m" (a->value), "0" (old_i));

    return result == old_i;
}

typedef struct pa_atomic_ptr {
    volatile unsigned long value;
} pa_atomic_ptr_t;

#define PA_ATOMIC_PTR_INIT(v) { .value = (long) (v) }

static inline void* pa_atomic_ptr_load(const pa_atomic_ptr_t *a) {
    return (void*) a->value;
}

static inline void pa_atomic_ptr_store(pa_atomic_ptr_t *a, void *p) {
    a->value = (unsigned long) p;
}

/* Returns non-zero when the operation was successful. */
static inline int pa_atomic_ptr_cmpxchg(pa_atomic_ptr_t *a, void *old_p, void* new_p) {
    void *result;

    __asm__ __volatile__ ("lock; cmpxchgq %q2, %1"
                          : "=a" (result), "=m" (a->value)
                          : "r" (new_p), "m" (a->value), "0" (old_p));

    return result == old_p;
}

#else

/* libatomic_ops based implementation */

#include <atomic_ops.h>

typedef struct pa_atomic {
    volatile AO_t value;
} pa_atomic_t;

#define PA_ATOMIC_INIT(v) { .value = (v) }

static inline int pa_atomic_load(const pa_atomic_t *a) {
    return (int) AO_load_full((AO_t*) &a->value);
}

static inline void pa_atomic_store(pa_atomic_t *a, int i) {
    AO_store_full(&a->value, (AO_t) i);
}

static inline int pa_atomic_add(pa_atomic_t *a, int i) {
    return AO_fetch_and_add_full(&a->value, (AO_t) i);
}

static inline int pa_atomic_sub(pa_atomic_t *a, int i) {
    return AO_fetch_and_add_full(&a->value, (AO_t) -i);
}

static inline int pa_atomic_inc(pa_atomic_t *a) {
    return AO_fetch_and_add1_full(&a->value);
}

static inline int pa_atomic_dec(pa_atomic_t *a) {
    return AO_fetch_and_sub1_full(&a->value);
}

static inline int pa_atomic_cmpxchg(pa_atomic_t *a, int old_i, int new_i) {
    return AO_compare_and_swap_full(&a->value, old_i, new_i);
}

typedef struct pa_atomic_ptr {
    volatile AO_t value;
} pa_atomic_ptr_t;

#define PA_ATOMIC_PTR_INIT(v) { .value = (AO_t) (v) }

static inline void* pa_atomic_ptr_load(const pa_atomic_ptr_t *a) {
    return (void*) AO_load_full((AO_t*) &a->value);
}

static inline void pa_atomic_ptr_store(pa_atomic_ptr_t *a, void *p) {
    AO_store_full(&a->value, (AO_t) p);
}

static inline int pa_atomic_ptr_cmpxchg(pa_atomic_ptr_t *a, void *old_p, void* new_p) {
    return AO_compare_and_swap_full(&a->value, (AO_t) old_p, (AO_t) new_p);
}

#endif
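
/*
 * A common pattern on top of this API is reference counting. The sketch below
 * is illustrative only; the struct and object_free() are hypothetical. Since
 * pa_atomic_dec() returns the previous value, a return value of 1 means the
 * count just dropped to zero.
 *
 *   struct object {
 *       pa_atomic_t ref;
 *   };
 *
 *   static void object_ref(struct object *o) {
 *       pa_atomic_inc(&o->ref);
 *   }
 *
 *   static void object_unref(struct object *o) {
 *       if (pa_atomic_dec(&o->ref) <= 1)
 *           object_free(o);                      // hypothetical destructor
 *   }
 */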

#endif