00001
00002
00003
00004
00005
00006
00007
00008
00009
00010
00011
00012
00013
00014
00015
00016
00017
00018
00019
00020 #ifndef _RTAI_ASM_X8664_ATOMIC_H
00021 #define _RTAI_ASM_X8664_ATOMIC_H
00022
00023 #ifdef __KERNEL__
00024
00025 #include <linux/bitops.h>
00026 #include <asm/atomic.h>
00027 #include <asm/system.h>
00028
/*
 * Kernels older than 2.6.11 did not provide atomic_xchg()/atomic_cmpxchg()
 * in <asm/atomic.h>; map them onto the raw xchg()/cmpxchg() primitives.
 */
#if LINUX_VERSION_CODE < KERNEL_VERSION(2,6,11)

#define atomic_xchg(ptr, v) xchg(ptr, v)
#define atomic_cmpxchg(ptr, o, n) cmpxchg((unsigned long *)(ptr), o, n)

#endif
00035
00036 #else
00037
/*
 * User-space build: re-create the minimal kernel primitives we need.
 * The "lock" instruction prefix is only required (and only emitted)
 * on SMP configurations.
 */
#ifdef CONFIG_SMP
#define LOCK_PREFIX "lock ; "
#else
#define LOCK_PREFIX ""
#endif

/* User-space mirror of the kernel's atomic_t counter type. */
typedef struct { volatile int counter; } atomic_t;


/*
 * Oversized dummy type for "m" asm operands: casting the pointer through
 * this tells gcc the asm may touch a large object, preventing it from
 * caching the location in a register across the asm (same trick as the
 * kernel's __xg()).
 */
struct __rtai_xchg_dummy { unsigned long a[100]; };
#define __rtai_xg(x) ((struct __rtai_xchg_dummy *)(x))
00049
/*
 * Atomically exchange the 64-bit word at @ptr with @x; returns the
 * previous contents.  xchgq with a memory operand is implicitly locked
 * by the CPU, so the explicit LOCK_PREFIX is belt-and-braces here.
 * The store to *ptr is expressed only via the "memory" clobber plus the
 * __rtai_xg() input operand (historical kernel idiom, predates "+m").
 */
static inline unsigned long atomic_xchg (volatile void *ptr, unsigned long x)
{
	__asm__ __volatile__(LOCK_PREFIX "xchgq %0,%1"
			     :"=r" (x)
			     :"m" (*__rtai_xg(ptr)), "0" (x)
			     :"memory");
	return x;
}
00058
/*
 * Atomic compare-and-swap on the 64-bit word at @ptr: if it still holds
 * @o, replace it with @n.  Returns the value observed at @ptr before the
 * operation (equal to @o exactly when the swap succeeded).
 */
static inline unsigned long atomic_cmpxchg (volatile void *ptr, unsigned long o, unsigned long n)
{
	unsigned long prev;

	/*
	 * cmpxchgq compares %rax against the memory operand; "0" (o) loads
	 * @o into %rax beforehand and "=a" (prev) reads back the old value.
	 */
	__asm__ __volatile__(LOCK_PREFIX "cmpxchgq %1,%2"
			     : "=a"(prev)
			     : "q"(n), "m" (*__rtai_xg(ptr)), "0" (o)
			     : "memory");

	return prev;
}
00070
/*
 * Atomically decrement @v->counter (32-bit decl) and return non-zero
 * iff the result is 0: sete captures the Zero flag left by decl.
 * The duplicated "=m"/"m" operand pair is the pre-"+m" kernel idiom
 * for a read-modify-write memory operand.
 */
static __inline__ int atomic_dec_and_test(atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "decl %0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"m" (v->counter) : "memory");
	return c != 0;
}
00081
/*
 * Atomically increment @v->counter (32-bit incl).  Same "=m"/"m"
 * read-modify-write operand idiom as atomic_dec_and_test() above.
 */
static __inline__ void atomic_inc(atomic_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "incl %0"
		:"=m" (v->counter)
		:"m" (v->counter));
}
00089
00090
00091 #undef ADDR
00092
00093 #endif
00094
00095 #endif