/* gitweb artifact: commit b442718e2af19a7e611aa36d03f04118834f6ab8 */
/*
 * container_of - given @ptr, a pointer to @member inside an instance of
 * @type, recover the address of the enclosing @type object.
 * Uses __typeof__ (rather than plain typeof) so it also compiles under
 * strict -std= modes; const-qualifying __mptr lets the compiler catch
 * callers that pass a pointer of the wrong member type.
 */
#define container_of(ptr, type, member) ({ \
	const __typeof__( ((type *)0)->member ) *__mptr = (ptr); \
	(type *)( (char *)__mptr - offsetof(type,member) );})
/* printk severity prefixes carry no meaning in userspace — map to "". */
#define KERN_NOTICE ""
#define KERN_WARNING ""

/* Error-pointer encoding: the top MAX_ERRNO addresses carry -errno values. */
#define MAX_ERRNO 4095

/* NOTE(review): relies on unlikely(), defined elsewhere in this header. */
#define IS_ERR_VALUE(x) unlikely((x) >= (unsigned long)-MAX_ERRNO)
/*
 * ERR_PTR - encode a negative errno as a pointer value, kernel-style.
 * The result lives in the top MAX_ERRNO addresses and must only be
 * decoded with PTR_ERR()/IS_ERR(), never dereferenced.
 * (Braces were lost in extraction; restored around the surviving body.)
 */
static inline void *ERR_PTR(long error)
{
	return (void *) error;
}
/*
 * PTR_ERR - decode an error pointer produced by ERR_PTR() back into the
 * negative errno it carries.
 * NOTE(review): the one-line body was lost in extraction; reconstructed
 * as the standard kernel implementation (cast back to long).
 */
static inline long PTR_ERR(const void *ptr)
{
	return (long) ptr;
}
/*
 * IS_ERR - non-zero iff @ptr is an encoded error value (see ERR_PTR).
 * Depends on IS_ERR_VALUE()/unlikely() defined earlier in this header.
 * (Braces were lost in extraction; restored around the surviving body.)
 */
static inline long IS_ERR(const void *ptr)
{
	return IS_ERR_VALUE((unsigned long)ptr);
}
/* FIXED SIZE INTEGERS */
/*
 * min_t - type-safe minimum: both operands are converted to @type before
 * comparison, avoiding signed/unsigned promotion surprises. Implemented
 * as a GNU statement expression so each argument is evaluated exactly
 * once. (Middle continuation lines were lost in extraction; restored to
 * the canonical kernel form.)
 */
#define min_t(type, x, y) ({ \
	type __min1 = (x); \
	type __min2 = (y); \
	__min1 < __min2 ? __min1: __min2; })
/*
 * max_t - type-safe maximum; see min_t for rationale. Single evaluation
 * of each argument via GNU statement expression. (Middle continuation
 * lines were lost in extraction; restored to the canonical kernel form.)
 */
#define max_t(type, x, y) ({ \
	type __max1 = (x); \
	type __max2 = (y); \
	__max1 > __max2 ? __max1: __max2; })
/*
 * Kernel mutex API mapped onto pthreads. DEFINE_MUTEX both defines and
 * statically initializes the mutex; DECLARE_MUTEX only declares it
 * (extern). Note both macros expand to a full declaration including the
 * trailing semicolon, matching existing call sites.
 */
#define DEFINE_MUTEX(m) pthread_mutex_t (m) = PTHREAD_MUTEX_INITIALIZER;
#define DECLARE_MUTEX(m) extern pthread_mutex_t (m);

#define mutex_lock(m) pthread_mutex_lock(m)
#define mutex_unlock(m) pthread_mutex_unlock(m)
/*
 * Spinlocks are stubbed out: spinlock_t is a plain int and all lock ops
 * compile to nothing — this compat layer assumes no concurrent access.
 */
typedef int spinlock_t;

#define spin_lock(a)      /* nothing */
#define spin_unlock(a)    /* nothing */
#define spin_lock_init(a) /* nothing */
85 #define kmalloc(s, t) malloc(s)
86 #define kzalloc(s, t) zmalloc(s)
87 #define kfree(p) free((void *)p)
88 #define kstrdup(s, t) strdup(s)
90 #define zmalloc(s) calloc(1, s)
/* printk maps straight to printf; KERN_* severity prefixes are "". */
#define printk(fmt, args...) printf(fmt, ## args)
/*
 * SMP memory barriers: this userspace port assumes no cross-CPU ordering
 * requirements, so all barriers compile to nothing.
 */
#define smp_rmb() do {} while (0)
#define smp_wmb() do {} while (0)
#define smp_mb() do {} while (0)
#define smp_mb__after_atomic_inc() do {} while (0)

#define read_barrier_depends() do {} while (0)
#define smp_read_barrier_depends() do {} while (0)

/*
 * RCU shims. BUG FIX: rcu_assign_pointer() previously expanded to
 * nothing, silently discarding the pointer store; per the kernel
 * contract it must at least perform the assignment. The grace-period
 * primitives stay no-ops (no concurrent readers in this layer).
 */
#define rcu_assign_pointer(a, b) do { (a) = (b); } while (0)
#define call_rcu_sched(a, b) do {} while (0)
#define rcu_barrier_sched() do {} while (0)
#define rcu_read_lock_sched_notrace() do {} while (0)
#define rcu_read_unlock_sched_notrace() do {} while (0)
/*
 * Minimal atomic_t: a bare sig_atomic_t counter. NOTE(review): this only
 * guarantees tear-free single loads/stores w.r.t. signal handlers —
 * read-modify-write on it is NOT atomic across threads.
 */
typedef struct { sig_atomic_t counter; } atomic_t;
123 static inline int atomic_dec_and_test(atomic_t
*p
)
129 static inline void atomic_set(atomic_t
*p
, int v
)
134 static inline void atomic_inc(atomic_t
*p
)
139 static int atomic_read(atomic_t
*p
)
/*
 * atomic_long_t aliases the int-sized atomic_t.
 * NOTE(review): on LP64 targets this silently narrows long values to
 * sig_atomic_t — acceptable only for small counters.
 */
#define atomic_long_t atomic_t
#define atomic_long_set atomic_set
#define atomic_long_read atomic_read
/* Opaque lvalue view of ptr for the asm "m" constraints below. */
#define __xg(x) ((volatile long *)(x))

/*
 * cmpxchg - atomic compare-and-exchange on *ptr: if *ptr == o, store n;
 * either way return the previous value, cast back to *ptr's type.
 */
#define cmpxchg(ptr, o, n) \
	((__typeof__(*(ptr)))__cmpxchg((ptr), (unsigned long)(o), \
				       (unsigned long)(n), sizeof(*(ptr))))

/*
 * x86 lock-prefixed cmpxchg dispatched on operand size.
 * NOTE(review): extraction lost the switch scaffolding and output
 * constraints; reconstructed to the classic kernel implementation that
 * the surviving asm fragments match. The old value travels in via the
 * "0"(old) tie to %eax/%rax and the previous *ptr comes back in "=a".
 * __asm__/__typeof__ spellings keep this valid under strict -std= modes.
 */
static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
				      unsigned long new, int size)
{
	unsigned long prev;

	switch (size) {
	case 1:
		__asm__ __volatile__("lock cmpxchgb %b1,%2"
				     : "=a"(prev)
				     : "q"(new), "m"(*__xg(ptr)), "0"(old)
				     : "memory");
		return prev;
	case 2:
		__asm__ __volatile__("lock cmpxchgw %w1,%2"
				     : "=a"(prev)
				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
				     : "memory");
		return prev;
	case 4:
		__asm__ __volatile__("lock cmpxchgl %k1,%2"
				     : "=a"(prev)
				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
				     : "memory");
		return prev;
	case 8:
		__asm__ __volatile__("lock cmpxchgq %1,%2"
				     : "=a"(prev)
				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
				     : "memory");
		return prev;
	}
	return old;
}
/*
 * cmpxchg wrappers for local_t / atomic_long_t — both forward to the
 * x86 cmpxchg() on the embedded counter field.
 */
#define local_cmpxchg(l, o, n) (cmpxchg(&((l)->a.counter), (o), (n)))
#define atomic_long_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), (old), (new)))
//typedef int local_t;
204 static inline void local_inc(local_t
*l
)
209 static inline void local_set(local_t
*l
, int v
)
214 static inline void local_add(int v
, local_t
*l
)
219 static int local_add_return(int v
, local_t
*l
)
221 return l
->a
.counter
+= v
;
224 static inline int local_read(local_t
*l
)
/* Cacheline-alignment attribute: a no-op in this userspace build. */
#define ____cacheline_aligned
/*
 * hweight32 - population count (number of set bits) of a 32-bit word,
 * using the classic branch-free SWAR reduction: sum bit-pairs, then
 * nibbles, then bytes, then halves. All arithmetic lines survived
 * extraction; only line breaks are restored.
 */
static inline unsigned int hweight32(unsigned int w)
{
	unsigned int res = w - ((w >> 1) & 0x55555555);
	res = (res & 0x33333333) + ((res >> 2) & 0x33333333);
	res = (res + (res >> 4)) & 0x0F0F0F0F;
	res = res + (res >> 8);
	return (res + (res >> 16)) & 0x000000FF;
}
/*
 * fls - find last (most significant) set bit, 1-based: fls(1) == 1,
 * fls(0) == 0.
 * x86 bsrl leaves its destination undefined when the input is zero, so
 * cmovzl substitutes -1, making the +1 below yield 0 (this is the
 * CONFIG_X86_CMOV variant the surviving constraints match).
 * NOTE(review): surrounding lines were lost in extraction; the asm is
 * reconstructed around the surviving ": "=&r"(r) : "rm"(x), "rm"(-1)"
 * fragment, per the kernel x86 implementation. __asm__ spelling keeps
 * this valid under strict -std= modes.
 */
static inline int fls(int x)
{
	int r;

	__asm__("bsrl %1,%0\n\t"
		"cmovzl %2,%0"
		: "=&r" (r) : "rm" (x), "rm" (-1));
	return r + 1;
}
/*
 * get_count_order - order of the power of two >= count, i.e. the
 * smallest n such that (1 << n) >= count. Exact powers of two return
 * their own log2; anything else rounds up by one.
 * NOTE(review): scaffolding lost in extraction; reconstructed around the
 * surviving fls()/rounding lines, matching the kernel function of the
 * same name.
 */
static __inline__
int get_count_order(unsigned int count)
{
	int order;

	order = fls(count) - 1;
	if (count & (count - 1))
		order++;
	return order;
}
/*
 * Power-of-two alignment helpers. ALIGN(x, a) rounds x up to a multiple
 * of a (a must be a power of two); __typeof__ keeps the mask in x's
 * type (spelled __typeof__ for strict -std= modes).
 */
#define ALIGN(x,a) __ALIGN_MASK(x,(__typeof__(x))(a)-1)
#define __ALIGN_MASK(x,mask) (((x)+(mask))&~(mask))

/* Page size is only known at run time in userspace. */
#define PAGE_ALIGN(addr) ALIGN(addr, PAGE_SIZE)
#define PAGE_SIZE sysconf(_SC_PAGE_SIZE)
/*
 * NOTE(review): the kernel's PAGE_MASK is ~(PAGE_SIZE-1); here it is the
 * low-bits mask (PAGE_SIZE-1). Kept as-is — existing callers depend on
 * this meaning — but use with care.
 */
#define PAGE_MASK (PAGE_SIZE-1)

/* Element count of a true array — invalid on pointers/parameters. */
#define ARRAY_SIZE(arr) (sizeof(arr) / sizeof((arr)[0]))
/*
 * Userspace trace clock shims.
 * NOTE(review): the extraction lost all three function bodies (internal
 * lines 294-308); only the signatures survive below, still carrying the
 * fused gitweb line-number residue ("293"/"298"/"303"). Upstream LTTng
 * UST is believed to have read the TSC here — confirm against the
 * original file before relying on these declarations.
 */
293 static inline u64
trace_clock_read64(void)
298 static inline unsigned int trace_clock_frequency(void)
303 static inline u32
trace_clock_freq_scale(void)
/* RCU list operations degrade to their plain-list counterparts here. */
#define list_add_rcu list_add
#define list_for_each_entry_rcu list_for_each_entry

/* Symbol export is meaningless outside the kernel — compiled away. */
#define EXPORT_SYMBOL_GPL(a) /*nothing*/

/* No real CPU id is available in userspace; -1 is the stub sentinel. */
#define smp_processor_id() (-1)
#endif /* KERNELCOMPAT_H */
/* gitweb artifact: "This page took 0.044398 seconds and 3 git commands to generate." */