]>
code.delx.au - pulseaudio/blob - src/pulsecore/atomic.h
1 #ifndef foopulseatomichfoo
2 #define foopulseatomichfoo
5 This file is part of PulseAudio.
7 Copyright 2006-2008 Lennart Poettering
8 Copyright 2008 Nokia Corporation
10 PulseAudio is free software; you can redistribute it and/or modify
11 it under the terms of the GNU Lesser General Public License as
12 published by the Free Software Foundation; either version 2.1 of the
13 License, or (at your option) any later version.
15 PulseAudio is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
20 You should have received a copy of the GNU Lesser General Public
21 License along with PulseAudio; if not, write to the Free Software
22 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
26 #include <pulsecore/macro.h>
29 * atomic_ops guarantees us that sizeof(AO_t) == sizeof(void*). It is
30 * not guaranteed however, that sizeof(AO_t) == sizeof(size_t).
31  * It is, however, very likely that they are the same.
33 * For now we do only full memory barriers. Eventually we might want
34 * to support more elaborate memory barriers, in which case we will add
35 * suffixes to the function names.
37 * On gcc >= 4.1 we use the builtin atomic functions. otherwise we use
42 #error "Please include config.h before including this file!"
45 #ifdef HAVE_ATOMIC_BUILTINS
47 /* __sync based implementation */
/* Atomic integer counter for the gcc __sync-builtin backend.
 * NOTE(review): the struct body and closing "} pa_atomic_t;" were lost in
 * extraction; reconstructed as a volatile int from the int-typed accesses
 * to a->value in the functions below -- confirm against upstream. */
typedef struct pa_atomic {
    volatile int value;
} pa_atomic_t;

#define PA_ATOMIC_INIT(v) { .value = (v) }
/* Load the current counter value.
 * NOTE(review): the function body and closing brace were lost in the
 * extraction of this file; only the signature survives -- restore from
 * upstream. */
static inline int pa_atomic_load(const pa_atomic_t *a) {
/* Store i into the counter.
 * NOTE(review): the function body and closing brace were lost in the
 * extraction; only the signature survives -- restore from upstream. */
static inline void pa_atomic_store(pa_atomic_t *a, int i) {
65 /* Returns the previously set value */
66 static inline int pa_atomic_add ( pa_atomic_t
* a
, int i
) {
67 return __sync_fetch_and_add (& a
-> value
, i
);
70 /* Returns the previously set value */
71 static inline int pa_atomic_sub ( pa_atomic_t
* a
, int i
) {
72 return __sync_fetch_and_sub (& a
-> value
, i
);
75 /* Returns the previously set value */
76 static inline int pa_atomic_inc ( pa_atomic_t
* a
) {
77 return pa_atomic_add ( a
, 1 );
80 /* Returns the previously set value */
81 static inline int pa_atomic_dec ( pa_atomic_t
* a
) {
82 return pa_atomic_sub ( a
, 1 );
85 /* Returns TRUE when the operation was successful. */
86 static inline pa_bool_t
pa_atomic_cmpxchg ( pa_atomic_t
* a
, int old_i
, int new_i
) {
87 return __sync_bool_compare_and_swap (& a
-> value
, old_i
, new_i
);
/* Atomic pointer cell; the pointer is stored as an unsigned long so the
 * __sync builtins can treat it as an integer word.
 * NOTE(review): the closing "} pa_atomic_ptr_t;" was lost in extraction;
 * reconstructed from the uses of pa_atomic_ptr_t below. */
typedef struct pa_atomic_ptr {
    volatile unsigned long value;
} pa_atomic_ptr_t;

#define PA_ATOMIC_PTR_INIT(v) { .value = (long) (v) }
96 static inline void * pa_atomic_ptr_load ( const pa_atomic_ptr_t
* a
) {
98 return ( void *) a
-> value
;
101 static inline void pa_atomic_ptr_store ( pa_atomic_ptr_t
* a
, void * p
) {
102 a
-> value
= ( unsigned long ) p
;
103 __sync_synchronize ();
106 static inline pa_bool_t
pa_atomic_ptr_cmpxchg ( pa_atomic_ptr_t
* a
, void * old_p
, void * new_p
) {
107 return __sync_bool_compare_and_swap (& a
-> value
, ( long ) old_p
, ( long ) new_p
);
110 #elif defined(__NetBSD__) && defined(HAVE_SYS_ATOMIC_H)
112 /* NetBSD 5.0+ atomic_ops(3) implementation */
114 #include <sys/atomic.h>
/* Counter cell for the NetBSD atomic_ops(3) backend; unsigned because the
 * atomic_*_uint primitives operate on unsigned int.
 * NOTE(review): the closing "} pa_atomic_t;" was lost in extraction and is
 * reconstructed from the uses of pa_atomic_t below. */
typedef struct pa_atomic {
    volatile unsigned int value;
} pa_atomic_t;

#define PA_ATOMIC_INIT(v) { .value = (unsigned int) (v) }
/* Load the current counter value (NetBSD backend).
 * NOTE(review): one interior line (original line 123, possibly a memory
 * barrier) and the closing brace were lost in extraction -- confirm
 * against upstream. */
static inline int pa_atomic_load(const pa_atomic_t *a) {
    return (int) a->value;
/* Store i into the counter (NetBSD backend).
 * NOTE(review): trailing lines (possibly a barrier, plus the closing
 * brace) were lost in extraction -- confirm against upstream. */
static inline void pa_atomic_store(pa_atomic_t *a, int i) {
    a->value = (unsigned int) i;
/* Returns the previously set value */
/* atomic_add_int_nv returns the NEW value, so the pre-add value must be
 * recovered from it.
 * NOTE(review): the trailing lines were lost in extraction; given the
 * contract above this presumably ended in "return nv - i;" -- confirm
 * against upstream. */
static inline int pa_atomic_add(pa_atomic_t *a, int i) {
    int nv = (int) atomic_add_int_nv(&a->value, i);
/* Returns the previously set value */
/* Implemented as an add of -i; atomic_add_int_nv returns the NEW value.
 * NOTE(review): the trailing lines were lost in extraction; presumably
 * "return nv + i;" per the contract above -- confirm against upstream. */
static inline int pa_atomic_sub(pa_atomic_t *a, int i) {
    int nv = (int) atomic_add_int_nv(&a->value, -i);
/* Returns the previously set value */
/* atomic_inc_uint_nv returns the NEW (post-increment) value.
 * NOTE(review): trailing lines lost in extraction; presumably
 * "return nv - 1;" -- confirm against upstream. */
static inline int pa_atomic_inc(pa_atomic_t *a) {
    int nv = (int) atomic_inc_uint_nv(&a->value);
/* Returns the previously set value */
/* atomic_dec_uint_nv returns the NEW (post-decrement) value.
 * NOTE(review): trailing lines lost in extraction; presumably
 * "return nv + 1;" -- confirm against upstream. */
static inline int pa_atomic_dec(pa_atomic_t *a) {
    int nv = (int) atomic_dec_uint_nv(&a->value);
156 /* Returns TRUE when the operation was successful. */
157 static inline pa_bool_t
pa_atomic_cmpxchg ( pa_atomic_t
* a
, int old_i
, int new_i
) {
158 unsigned int r
= atomic_cas_uint (& a
-> value
, ( unsigned int ) old_i
, ( unsigned int ) new_i
);
159 return ( int ) r
== old_i
;
/* Atomic pointer cell for the NetBSD backend.
 * NOTE(review): the closing "} pa_atomic_ptr_t;" was lost in extraction;
 * reconstructed from the uses of pa_atomic_ptr_t below. */
typedef struct pa_atomic_ptr {
    volatile void *value;
} pa_atomic_ptr_t;

#define PA_ATOMIC_PTR_INIT(v) { .value = (v) }
/* Load the stored pointer.
 * NOTE(review): original line 169 (possibly a barrier) and the closing
 * brace were lost in extraction -- confirm against upstream. */
static inline void* pa_atomic_ptr_load(const pa_atomic_ptr_t *a) {
    return (void*) a->value;
/* Store pointer p.
 * NOTE(review): the function body and closing brace were lost in the
 * extraction; only the signature survives -- restore from upstream. */
static inline void pa_atomic_ptr_store(pa_atomic_ptr_t *a, void *p) {
/* Compare-and-swap on the stored pointer.
 * atomic_cas_ptr returns the previously observed value.
 * NOTE(review): the trailing lines were lost in extraction; presumably
 * "return r == old_p;" plus the closing brace -- confirm upstream. */
static inline pa_bool_t pa_atomic_ptr_cmpxchg(pa_atomic_ptr_t *a, void *old_p, void *new_p) {
    void *r = atomic_cas_ptr(&a->value, old_p, new_p);
183 #elif defined(__GNUC__) && (defined(__amd64__) || defined(__x86_64__))
185 #warn "The native atomic operations implementation for AMD64 has not been tested thoroughly. libatomic_ops is known to not work properly on AMD64 and your gcc version is too old for the gcc-builtin atomic ops support. You have three options now: test the native atomic operations implementation for AMD64, fix libatomic_ops, or upgrade your GCC."
187 /* Adapted from glibc */
/* Counter cell for the native AMD64 asm backend.
 * NOTE(review): the struct body was lost in extraction; reconstructed as
 * a volatile int, matching the 32-bit xaddl/cmpxchgl accesses below --
 * confirm against upstream. */
typedef struct pa_atomic {
    volatile int value;
} pa_atomic_t;

#define PA_ATOMIC_INIT(v) { .value = (v) }
/* Load the current counter value.
 * NOTE(review): the function body and closing brace were lost in the
 * extraction; only the signature survives -- restore from upstream. */
static inline int pa_atomic_load(const pa_atomic_t *a) {
/* Store i into the counter.
 * NOTE(review): the function body and closing brace were lost in the
 * extraction; only the signature survives -- restore from upstream. */
static inline void pa_atomic_store(pa_atomic_t *a, int i) {
203 static inline int pa_atomic_add ( pa_atomic_t
* a
, int i
) {
206 __asm
__volatile ( "lock; xaddl %0, %1"
207 : "=r" ( result
), "=m" ( a
-> value
)
208 : "0" ( i
), "m" ( a
-> value
));
213 static inline int pa_atomic_sub ( pa_atomic_t
* a
, int i
) {
214 return pa_atomic_add ( a
, - i
);
217 static inline int pa_atomic_inc ( pa_atomic_t
* a
) {
218 return pa_atomic_add ( a
, 1 );
221 static inline int pa_atomic_dec ( pa_atomic_t
* a
) {
222 return pa_atomic_sub ( a
, 1 );
225 static inline pa_bool_t
pa_atomic_cmpxchg ( pa_atomic_t
* a
, int old_i
, int new_i
) {
228 __asm__
__volatile__ ( "lock; cmpxchgl %2, %1"
229 : "=a" ( result
), "=m" ( a
-> value
)
230 : "r" ( new_i
), "m" ( a
-> value
), "0" ( old_i
));
232 return result
== old_i
;
/* Atomic pointer cell, stored as an integer word for cmpxchgq.
 * NOTE(review): the closing "} pa_atomic_ptr_t;" was lost in extraction;
 * reconstructed from the uses of pa_atomic_ptr_t below. */
typedef struct pa_atomic_ptr {
    volatile unsigned long value;
} pa_atomic_ptr_t;

#define PA_ATOMIC_PTR_INIT(v) { .value = (long) (v) }
241 static inline void * pa_atomic_ptr_load ( const pa_atomic_ptr_t
* a
) {
242 return ( void *) a
-> value
;
245 static inline void pa_atomic_ptr_store ( pa_atomic_ptr_t
* a
, void * p
) {
246 a
-> value
= ( unsigned long ) p
;
249 static inline pa_bool_t
pa_atomic_ptr_cmpxchg ( pa_atomic_ptr_t
* a
, void * old_p
, void * new_p
) {
252 __asm__
__volatile__ ( "lock; cmpxchgq %q2, %1"
253 : "=a" ( result
), "=m" ( a
-> value
)
254 : "r" ( new_p
), "m" ( a
-> value
), "0" ( old_p
));
256 return result
== old_p
;
259 #elif defined(ATOMIC_ARM_INLINE_ASM)
262 These should only be enabled if we have ARMv6 or better.
/* Counter cell for the ARM inline-asm (ARMv6 ldrex/strex) backend.
 * NOTE(review): the struct body was lost in extraction; reconstructed as
 * a volatile int, matching the int old_val/new_val accesses below --
 * confirm against upstream. */
typedef struct pa_atomic {
    volatile int value;
} pa_atomic_t;

#define PA_ATOMIC_INIT(v) { .value = (v) }
/* Data memory barrier via the CP15 coprocessor (pre-ARMv7 "dmb"); a no-op
 * unless ATOMIC_ARM_MEMORY_BARRIER_ENABLED is defined.
 * NOTE(review): the closing #endif/brace were lost in extraction and are
 * reconstructed; the "memory" clobber is restored so the compiler cannot
 * reorder memory accesses across the barrier -- confirm upstream. */
static inline void pa_memory_barrier(void) {
#ifdef ATOMIC_ARM_MEMORY_BARRIER_ENABLED
    asm volatile ("mcr p15, 0, r0, c7, c10, 5 @ dmb" : : : "memory");
#endif
}
/* Load the current counter value.
 * NOTE(review): the function body and closing brace were lost in the
 * extraction; only the signature survives -- restore from upstream. */
static inline int pa_atomic_load(const pa_atomic_t *a) {
/* Store i into the counter.
 * NOTE(review): the function body and closing brace were lost in the
 * extraction; only the signature survives -- restore from upstream. */
static inline void pa_atomic_store(pa_atomic_t *a, int i) {
/* Returns the previously set value */
/* ldrex/strex retry loop: load-exclusive, compute, store-exclusive, and
 * retry while the exclusive store fails (not_exclusive != 0).
 * NOTE(review): several interior lines were lost in extraction -- the
 * "do {" opener, the arithmetic instruction between ldrex and strex, the
 * clobber list, and the trailing "return old_val;" -- restore from
 * upstream before use. */
static inline int pa_atomic_add(pa_atomic_t *a, int i) {
    unsigned long not_exclusive;
    int new_val, old_val;

        asm volatile ("ldrex %0, [%3] \n "
                      "strex %1, %2, [%3] \n "
                      : "=&r" (old_val), "=&r" (not_exclusive), "=&r" (new_val)
                      : "r" (&a->value), "Ir" (i)
                      )
    } while (not_exclusive);
/* Returns the previously set value */
/* Same ldrex/strex retry pattern as pa_atomic_add, subtracting i.
 * NOTE(review): interior lines (the "do {" opener, the arithmetic
 * instruction, the clobbers) and the trailing "return old_val;" were lost
 * in extraction -- restore from upstream before use. */
static inline int pa_atomic_sub(pa_atomic_t *a, int i) {
    unsigned long not_exclusive;
    int new_val, old_val;

        asm volatile ("ldrex %0, [%3] \n "
                      "strex %1, %2, [%3] \n "
                      : "=&r" (old_val), "=&r" (not_exclusive), "=&r" (new_val)
                      : "r" (&a->value), "Ir" (i)
                      )
    } while (not_exclusive);
325 static inline int pa_atomic_inc ( pa_atomic_t
* a
) {
326 return pa_atomic_add ( a
, 1 );
329 static inline int pa_atomic_dec ( pa_atomic_t
* a
) {
330 return pa_atomic_sub ( a
, 1 );
/* Compare-and-swap: load-exclusive the counter, compare with old_i, and
 * strexeq the new value only when they matched; retry while the exclusive
 * store failed but the values still matched.
 * NOTE(review): interior lines (the "do {" opener, the comparison
 * instruction, the clobbers) and the trailing "return ...;" were lost in
 * extraction -- restore from upstream before use. */
static inline pa_bool_t pa_atomic_cmpxchg(pa_atomic_t *a, int old_i, int new_i) {
    unsigned long not_equal, not_exclusive;

        asm volatile ("ldrex %0, [%2] \n "
                      "strexeq %0, %4, [%2] \n "
                      : "=&r" (not_exclusive), "=&r" (not_equal)
                      : "r" (&a->value), "Ir" (old_i), "r" (new_i)
                      )
    } while (not_exclusive && !not_equal);
/* Atomic pointer cell for the ARM inline-asm backend.
 * NOTE(review): the closing "} pa_atomic_ptr_t;" was lost in extraction;
 * reconstructed from the uses of pa_atomic_ptr_t below. */
typedef struct pa_atomic_ptr {
    volatile unsigned long value;
} pa_atomic_ptr_t;

#define PA_ATOMIC_PTR_INIT(v) { .value = (long) (v) }
/* Load the stored pointer.
 * NOTE(review): original line 358 (possibly a pa_memory_barrier() call)
 * and the closing brace were lost in extraction -- confirm upstream. */
static inline void* pa_atomic_ptr_load(const pa_atomic_ptr_t *a) {
    return (void*) a->value;
/* Store pointer p.
 * NOTE(review): trailing lines (possibly a pa_memory_barrier() call plus
 * the closing brace) were lost in extraction -- confirm upstream. */
static inline void pa_atomic_ptr_store(pa_atomic_ptr_t *a, void *p) {
    a->value = (unsigned long) p;
/* Pointer compare-and-swap via ldrex/strexeq; same retry pattern as
 * pa_atomic_cmpxchg above.
 * NOTE(review): interior lines (the "do {" opener, the comparison
 * instruction, the clobbers) and the trailing "return ...;" were lost in
 * extraction -- restore from upstream before use. */
static inline pa_bool_t pa_atomic_ptr_cmpxchg(pa_atomic_ptr_t *a, void *old_p, void *new_p) {
    unsigned long not_equal, not_exclusive;

        asm volatile ("ldrex %0, [%2] \n "
                      "strexeq %0, %4, [%2] \n "
                      : "=&r" (not_exclusive), "=&r" (not_equal)
                      : "r" (&a->value), "Ir" (old_p), "r" (new_p)
                      )
    } while (not_exclusive && !not_equal);
385 #elif defined(ATOMIC_ARM_LINUX_HELPERS)
387 /* See file arch/arm/kernel/entry-armv.S in your kernel sources for more
388 information about these functions. The arm kernel helper functions first
390  Apply the --disable-atomic-arm-linux-helpers flag to configure if you prefer the
391  inline asm implementation or you have an obsolete Linux kernel.
/* Kernel-provided data-memory-barrier helper; on ARM Linux the kernel
 * maps these helpers at fixed addresses in the vector page. */
typedef void (__kernel_dmb_t)(void);
#define __kernel_dmb (*(__kernel_dmb_t *)0xffff0fa0)
/* Memory barrier wrapper around the kernel helper.
 * NOTE(review): the body following the #ifndef (presumably the
 * __kernel_dmb() call, #endif and closing brace) was lost in extraction
 * -- restore from upstream. */
static inline void pa_memory_barrier(void) {
#ifndef ATOMIC_ARM_MEMORY_BARRIER_ENABLED
/* Atomic exchange (__kernel_cmpxchg_t contains memory barriers if needed) */
/* Returns nonzero on failure: see the !! usage in pa_atomic_cmpxchg
 * below, where a nonzero result drives the retry loop. */
typedef int (__kernel_cmpxchg_t)(int oldval, int newval, volatile int *ptr);
#define __kernel_cmpxchg (*(__kernel_cmpxchg_t *)0xffff0fc0)
/* This is just to get rid of all warnings */
/* Same kernel helper address as __kernel_cmpxchg, retyped for unsigned
 * long operands so pointer-sized CAS calls compile without warnings. */
typedef int (__kernel_cmpxchg_u_t)(unsigned long oldval, unsigned long newval, volatile unsigned long *ptr);
#define __kernel_cmpxchg_u (*(__kernel_cmpxchg_u_t *)0xffff0fc0)
/* Counter cell for the ARM Linux kernel-helper backend.
 * NOTE(review): the struct body was lost in extraction; reconstructed as
 * a volatile int, matching __kernel_cmpxchg's "volatile int *ptr"
 * parameter which receives &a->value below. */
typedef struct pa_atomic {
    volatile int value;
} pa_atomic_t;

#define PA_ATOMIC_INIT(v) { .value = (v) }
/* Load the current counter value.
 * NOTE(review): the function body and closing brace were lost in the
 * extraction; only the signature survives -- restore from upstream. */
static inline int pa_atomic_load(const pa_atomic_t *a) {
/* Store i into the counter.
 * NOTE(review): the function body and closing brace were lost in the
 * extraction; only the signature survives -- restore from upstream. */
static inline void pa_atomic_store(pa_atomic_t *a, int i) {
/* Returns the previously set value */
/* Retry loop: re-read the counter and attempt the kernel cmpxchg until
 * it succeeds (returns 0).
 * NOTE(review): the loop opener (presumably "do { old_val = a->value;")
 * and the trailing "return old_val;" were lost in extraction -- restore
 * from upstream before use. */
static inline int pa_atomic_add(pa_atomic_t *a, int i) {
    } while (__kernel_cmpxchg(old_val, old_val + i, &a->value));
/* Returns the previously set value */
/* Same retry pattern as pa_atomic_add, subtracting i.
 * NOTE(review): the loop opener and the trailing "return old_val;" were
 * lost in extraction -- restore from upstream before use. */
static inline int pa_atomic_sub(pa_atomic_t *a, int i) {
    } while (__kernel_cmpxchg(old_val, old_val - i, &a->value));
445 /* Returns the previously set value */
446 static inline int pa_atomic_inc ( pa_atomic_t
* a
) {
447 return pa_atomic_add ( a
, 1 );
450 /* Returns the previously set value */
451 static inline int pa_atomic_dec ( pa_atomic_t
* a
) {
452 return pa_atomic_sub ( a
, 1 );
/* Returns TRUE when the operation was successful. */
/* Retry while the kernel cmpxchg fails but the value still equals old_i,
 * i.e. the failure was due to contention rather than a mismatch.
 * NOTE(review): the declaration of `failed`, the "do {" opener, and the
 * trailing return were lost in extraction -- restore from upstream. */
static inline pa_bool_t pa_atomic_cmpxchg(pa_atomic_t *a, int old_i, int new_i) {
        failed = !!__kernel_cmpxchg(old_i, new_i, &a->value);
    } while (failed && a->value == old_i);
/* Atomic pointer cell for the kernel-helper backend.
 * NOTE(review): the closing "} pa_atomic_ptr_t;" was lost in extraction;
 * reconstructed from the uses of pa_atomic_ptr_t below. */
typedef struct pa_atomic_ptr {
    volatile unsigned long value;
} pa_atomic_ptr_t;

#define PA_ATOMIC_PTR_INIT(v) { .value = (unsigned long) (v) }
/* Load the stored pointer.
 * NOTE(review): original line 471 (contents unknown) and the closing
 * brace were lost in extraction -- confirm against upstream. */
static inline void* pa_atomic_ptr_load(const pa_atomic_ptr_t *a) {
    return (void*) a->value;
/* Store pointer p.
 * NOTE(review): trailing lines (possibly a barrier call plus the closing
 * brace) were lost in extraction -- confirm against upstream. */
static inline void pa_atomic_ptr_store(pa_atomic_ptr_t *a, void *p) {
    a->value = (unsigned long) p;
/* Pointer compare-and-swap via the unsigned-long-typed kernel helper;
 * same retry-on-contention pattern as pa_atomic_cmpxchg above.
 * NOTE(review): the declaration of `failed`, the "do {" opener, and the
 * trailing return were lost in extraction -- restore from upstream. */
static inline pa_bool_t pa_atomic_ptr_cmpxchg(pa_atomic_ptr_t *a, void *old_p, void *new_p) {
        failed = !!__kernel_cmpxchg_u((unsigned long) old_p, (unsigned long) new_p, &a->value);
    } while (failed && a->value == (unsigned long) old_p);
490 /* libatomic_ops based implementation */
492 #include <atomic_ops.h>
494 typedef struct pa_atomic
{
498 #define PA_ATOMIC_INIT(v) { .value = (AO_t) (v) }
500 static inline int pa_atomic_load ( const pa_atomic_t
* a
) {
501 return ( int ) AO_load_full (( AO_t
*) & a
-> value
);
504 static inline void pa_atomic_store ( pa_atomic_t
* a
, int i
) {
505 AO_store_full (& a
-> value
, ( AO_t
) i
);
508 static inline int pa_atomic_add ( pa_atomic_t
* a
, int i
) {
509 return ( int ) AO_fetch_and_add_full (& a
-> value
, ( AO_t
) i
);
512 static inline int pa_atomic_sub ( pa_atomic_t
* a
, int i
) {
513 return ( int ) AO_fetch_and_add_full (& a
-> value
, ( AO_t
) - i
);
516 static inline int pa_atomic_inc ( pa_atomic_t
* a
) {
517 return ( int ) AO_fetch_and_add1_full (& a
-> value
);
520 static inline int pa_atomic_dec ( pa_atomic_t
* a
) {
521 return ( int ) AO_fetch_and_sub1_full (& a
-> value
);
524 static inline pa_bool_t
pa_atomic_cmpxchg ( pa_atomic_t
* a
, int old_i
, int new_i
) {
525 return AO_compare_and_swap_full (& a
-> value
, ( unsigned long ) old_i
, ( unsigned long ) new_i
);
528 typedef struct pa_atomic_ptr
{
532 #define PA_ATOMIC_PTR_INIT(v) { .value = (AO_t) (v) }
534 static inline void * pa_atomic_ptr_load ( const pa_atomic_ptr_t
* a
) {
535 return ( void *) AO_load_full (( AO_t
*) & a
-> value
);
538 static inline void pa_atomic_ptr_store ( pa_atomic_ptr_t
* a
, void * p
) {
539 AO_store_full (& a
-> value
, ( AO_t
) p
);
542 static inline pa_bool_t
pa_atomic_ptr_cmpxchg ( pa_atomic_ptr_t
* a
, void * old_p
, void * new_p
) {
543 return AO_compare_and_swap_full (& a
-> value
, ( AO_t
) old_p
, ( AO_t
) new_p
);