#ifndef __ARCH_BLACKFIN_ATOMIC__
#define __ARCH_BLACKFIN_ATOMIC__

#include <linux/types.h>
#include <asm/system.h>	/* local_irq_XXX() */

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * Generally we are not concerned with SMP BFIN systems, so we don't
 * have to deal with that.
 *
 * Tony Kou (tonyko@lineo.ca)   Lineo Inc.   2001
 */

#define ATOMIC_INIT(i)	{ (i) }
#define atomic_set(v, i)	(((v)->counter) = (i))
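
/*
 * Illustrative use for resource counting (a sketch only, not part of
 * this header; release_example() is a hypothetical cleanup helper):
 *
 *	static atomic_t refcnt = ATOMIC_INIT(1);
 *
 *	atomic_inc(&refcnt);
 *	if (atomic_dec_and_test(&refcnt))
 *		release_example();
 */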

#ifdef CONFIG_SMP
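
/*
 * On SMP the counter is only touched through the __raw_*_asm helpers
 * declared below; these are assumed to be implemented in assembly
 * elsewhere in the Blackfin tree so that the read-modify-write (and,
 * for atomic_read(), an uncached fetch) is performed atomically with
 * respect to the other core.
 */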

#define atomic_read(v)	__raw_uncached_fetch_asm(&(v)->counter)

asmlinkage int __raw_uncached_fetch_asm(const volatile int *ptr);

asmlinkage int __raw_atomic_update_asm(volatile int *ptr, int value);

asmlinkage int __raw_atomic_clear_asm(volatile int *ptr, int value);

asmlinkage int __raw_atomic_set_asm(volatile int *ptr, int value);

asmlinkage int __raw_atomic_xor_asm(volatile int *ptr, int value);

asmlinkage int __raw_atomic_test_asm(const volatile int *ptr, int value);

static inline void atomic_add(int i, atomic_t *v)
{
	__raw_atomic_update_asm(&v->counter, i);
}

static inline void atomic_sub(int i, atomic_t *v)
{
	__raw_atomic_update_asm(&v->counter, -i);
}

static inline int atomic_add_return(int i, atomic_t *v)
{
	return __raw_atomic_update_asm(&v->counter, i);
}

static inline int atomic_sub_return(int i, atomic_t *v)
{
	return __raw_atomic_update_asm(&v->counter, -i);
}

static inline void atomic_inc(volatile atomic_t *v)
{
	__raw_atomic_update_asm(&v->counter, 1);
}

static inline void atomic_dec(volatile atomic_t *v)
{
	__raw_atomic_update_asm(&v->counter, -1);
}

static inline void atomic_clear_mask(int mask, atomic_t *v)
{
	__raw_atomic_clear_asm(&v->counter, mask);
}

static inline void atomic_set_mask(int mask, atomic_t *v)
{
	__raw_atomic_set_asm(&v->counter, mask);
}

static inline int atomic_test_mask(int mask, atomic_t *v)
{
	return __raw_atomic_test_asm(&v->counter, mask);
}
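
/*
 * Note: atomic_set_mask()/atomic_clear_mask()/atomic_test_mask() treat
 * the counter as a bit mask rather than a count, e.g. for atomically
 * updating a word of flags.
 */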

/* Atomic operations are already serializing */
#define smp_mb__before_atomic_dec()    barrier()
#define smp_mb__after_atomic_dec() barrier()
#define smp_mb__before_atomic_inc()    barrier()
#define smp_mb__after_atomic_inc() barrier()

#else /* !CONFIG_SMP */
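
/*
 * On UP there is no other core to race with, so the plain C
 * read-modify-write sequences below only need to be protected against
 * interrupts; this is done by wrapping them in local_irq_save_hw()/
 * local_irq_restore_hw().
 */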

#define atomic_read(v)	((v)->counter)

static inline void atomic_add(int i, atomic_t *v)
{
	long flags;

	local_irq_save_hw(flags);
	v->counter += i;
	local_irq_restore_hw(flags);
}

static inline void atomic_sub(int i, atomic_t *v)
{
	long flags;

	local_irq_save_hw(flags);
	v->counter -= i;
	local_irq_restore_hw(flags);
}

static inline int atomic_add_return(int i, atomic_t *v)
{
	int __temp = 0;
	long flags;

	local_irq_save_hw(flags);
	v->counter += i;
	__temp = v->counter;
	local_irq_restore_hw(flags);

	return __temp;
}

static inline int atomic_sub_return(int i, atomic_t *v)
{
	int __temp = 0;
	long flags;

	local_irq_save_hw(flags);
	v->counter -= i;
	__temp = v->counter;
	local_irq_restore_hw(flags);

	return __temp;
}

static inline void atomic_inc(volatile atomic_t *v)
{
	long flags;

	local_irq_save_hw(flags);
	v->counter++;
	local_irq_restore_hw(flags);
}

static inline void atomic_dec(volatile atomic_t *v)
{
	long flags;

	local_irq_save_hw(flags);
	v->counter--;
	local_irq_restore_hw(flags);
}

static inline void atomic_clear_mask(unsigned int mask, atomic_t *v)
{
	long flags;

	local_irq_save_hw(flags);
	v->counter &= ~mask;
	local_irq_restore_hw(flags);
}

static inline void atomic_set_mask(unsigned int mask, atomic_t *v)
{
	long flags;

	local_irq_save_hw(flags);
	v->counter |= mask;
	local_irq_restore_hw(flags);
}

/* Atomic operations are already serializing */
#define smp_mb__before_atomic_dec()    barrier()
#define smp_mb__after_atomic_dec() barrier()
#define smp_mb__before_atomic_inc()    barrier()
#define smp_mb__after_atomic_inc() barrier()

#endif /* !CONFIG_SMP */

#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)
#define atomic_dec_return(v) atomic_sub_return(1, (v))
#define atomic_inc_return(v) atomic_add_return(1, (v))

#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

#define atomic_add_unless(v, a, u)				\
({								\
	int c, old;						\
	c = atomic_read(v);					\
	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
		c = old;					\
	c != (u);						\
})
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
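
/*
 * atomic_add_unless(v, a, u) atomically adds @a to @v as long as @v was
 * not already @u, and returns non-zero iff @v was not @u.  It is built on
 * a cmpxchg() retry loop: the counter is re-read and the update retried
 * whenever another updater changed it between the read and the cmpxchg.
 *
 * atomic_inc_not_zero() is the usual special case on reference-count
 * "get" paths, e.g. (sketch only; obj/refs is a hypothetical structure):
 *
 *	if (atomic_inc_not_zero(&obj->refs))
 *		... the object is guaranteed not to be freed here ...
 */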

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)

#include <asm-generic/atomic.h>

#endif				/* __ARCH_BLACKFIN_ATOMIC__ */