00001
00002
00003
00004
00005
00006
00007
00008
00009
00010
00011
00012
00013
00014
00015
00016
00017
00018
00019 #ifndef _RTAI_ASM_M68KNOMMU_ATOMIC_H
00020 #define _RTAI_ASM_M68KNOMMU_ATOMIC_H
00021
00022 #include <linux/autoconf.h>
00023
00024 #ifdef __KERNEL__
00025
00026 #include <linux/bitops.h>
00027 #include <asm/atomic.h>
00028 #include <asm/system.h>
00029
00030 #if LINUX_VERSION_CODE < KERNEL_VERSION(2,6,11)
00031
00032 #define atomic_cmpxchg(v, old, new) ((int)cmpxchg(&((v)->counter), old, new))
00033 #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
00034
00035 #endif
00036
#else  /* !__KERNEL__ : user-space side of the RTAI LXRT interface */

/*
 * Provide likely()/unlikely() branch-prediction hints when building in
 * user space. For gcc older than 2.96 (which lacks __builtin_expect)
 * the hint degrades to a no-op that just yields its argument.
 */
#ifndef likely
#if __GNUC__ == 2 && __GNUC_MINOR__ < 96
#define __builtin_expect(x, expected_value) (x)
#endif
#define likely(x) __builtin_expect(!!(x), 1)
#define unlikely(x) __builtin_expect(!!(x), 0)
#endif

/* In user space atomic_t is a bare int; the kernel's wrapper struct is
 * not available here. */
#define atomic_t int

/*
 * Dummy oversized type used to tell gcc that a cast pointer may alias a
 * full memory object (classic xchg idiom from the kernel headers).
 * NOTE(review): __rtai_xg() is not referenced by the functions visible
 * in this chunk — presumably used elsewhere; verify before removing.
 */
struct __rtai_xchg_dummy { unsigned long a[100]; };
#define __rtai_xg(x) ((struct __rtai_xchg_dummy *)(x))
00051
/*
 * User-space atomic exchange, serviced by the RTAI trap #13 handler.
 * Operands are pinned to the registers the handler expects:
 *   d0 = result, a1 = target address, d2 = value to store.
 * NOTE(review): `result` carries a "+d" (read-write) constraint but is
 * never initialized here — the trap handler is assumed to supply d0;
 * confirm against the kernel-side service routine.
 */
static inline unsigned long atomic_xchg(volatile void *ptr, unsigned long x)
{
	register unsigned result __asm__ ("%d0");
	register unsigned target __asm__ ("%a1") = (unsigned)ptr;
	register unsigned long newval __asm__ ("%d2") = x;

	__asm__ __volatile__ ("trap #13\n\t"
			      : "+d" (result)
			      : "a" (target), "d" (newval)
			      : "memory");
	return result;
}
00060
/*
 * User-space atomic compare-and-exchange, serviced by the RTAI trap #12
 * handler. Operands are pinned to the registers the handler expects:
 *   d0 = previous value, a1 = target address, d2 = expected, d3 = new.
 * Returns the value found at *ptr; the swap occurred iff it equals `o`.
 * NOTE(review): `prev` uses a "+d" constraint without initialization —
 * the trap handler is assumed to supply d0; confirm against the
 * kernel-side service routine.
 */
static inline unsigned long atomic_cmpxchg(volatile void *ptr, unsigned long o, unsigned long n)
{
	register unsigned prev __asm__ ("%d0");
	register unsigned target __asm__ ("%a1") = (unsigned)ptr;
	register unsigned long expected __asm__ ("%d2") = o;
	register unsigned long newval __asm__ ("%d3") = n;

	__asm__ __volatile__ ("trap #12\n\t"
			      : "+d" (prev)
			      : "a" (target), "d" (expected), "d" (newval)
			      : "memory");
	return prev;
}
00070
/*
 * Atomically decrement *v and report whether it reached zero.
 * subql decrements the memory operand in one instruction; seq then
 * latches the Z flag into `c` (0xff if zero, 0x00 otherwise).
 * Returns nonzero iff the new value of *v is 0.
 */
static __inline__ int atomic_dec_and_test(atomic_t *v)
{
	char c;
	__asm__ __volatile__("subql #1,%1; seq %0" : "=d" (c), "+m" (*v));
	return c != 0;
}
00077
/*
 * Atomically increment *v: addql operates directly on the memory
 * operand, so the read-modify-write is a single instruction.
 */
static __inline__ void atomic_inc(atomic_t *v)
{
	__asm__ __volatile__("addql #1,%0" : "+m" (*v));
}
00082
00083
/* Drop the ADDR helper macro (defined by the kernel bitops headers) so
 * it does not leak to users of this header. */
#undef ADDR

#endif /* __KERNEL__ / !__KERNEL__ */

#endif /* _RTAI_ASM_M68KNOMMU_ATOMIC_H */