Commit 7847777a authored by Arun Sharma, committed by Linus Torvalds

atomic: cleanup asm-generic atomic*.h inclusion

After changing all consumers of atomics to include <linux/atomic.h>, we
ran into some compile time errors due to this dependency chain:

linux/atomic.h
  -> asm/atomic.h
    -> asm-generic/atomic-long.h

where atomic-long.h could use functions defined later in linux/atomic.h
without a prototype.  This patch moves the code that includes
asm-generic/atomic*.h to linux/atomic.h.
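
To make the failure mode concrete, here is a minimal standalone sketch of
the same problem with made-up headers (outer.h standing in for
linux/atomic.h, helper.h for asm-generic/atomic-long.h; none of these
names are from the kernel):

  /* helper.h - stands in for asm-generic/atomic-long.h */
  #ifndef HELPER_H
  #define HELPER_H

  static inline int add_twice(int x)
  {
          return twice(x) + twice(x);  /* no prototype for twice() yet */
  }

  #endif

  /* outer.h - stands in for linux/atomic.h */
  #ifndef OUTER_H
  #define OUTER_H

  #include "helper.h"  /* old order: helper.h uses twice() too early */

  static inline int twice(int x)  /* ...defined only down here */
  {
          return 2 * x;
  }

  #endif

Compiling anything that includes outer.h then fails (implicit declaration
of twice(), followed by a conflicting static definition).  Moving the
#include below the definitions, which is what this patch does at the
linux/atomic.h level, makes the problem go away.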

Archs that need <asm-generic/atomic64.h> need to select
CONFIG_GENERIC_ATOMIC64 from now on (some of them used to include it
unconditionally).
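
Roughly, such an arch's Kconfig entry grows a select line as sketched
below (the arch symbol is illustrative, not from this commit;
GENERIC_ATOMIC64 itself is declared in lib/Kconfig):

  # arch/<arch>/Kconfig - illustrative only
  config MYARCH
          bool
          default y
          select GENERIC_ATOMIC64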

Compile tested on i386 and x86_64 with allnoconfig.
Signed-off-by: Arun Sharma <asharma@fb.com>
Cc: Eric Dumazet <eric.dumazet@gmail.com>
Cc: Ingo Molnar <mingo@elte.hu>
Cc: David Miller <davem@davemloft.net>
Acked-by: Mike Frysinger <vapier@gentoo.org>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
Parent f24219b4
@@ -255,5 +255,4 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
 #define smp_mb__before_atomic_inc() smp_mb()
 #define smp_mb__after_atomic_inc() smp_mb()
-#include <asm-generic/atomic-long.h>
 #endif /* _ALPHA_ATOMIC_H */

@@ -459,9 +459,6 @@ static inline int atomic64_add_unless(atomic64_t *v, u64 a, u64 u)
 #define atomic64_dec_and_test(v) (atomic64_dec_return((v)) == 0)
 #define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1LL, 0LL)
-#else /* !CONFIG_GENERIC_ATOMIC64 */
-#include <asm-generic/atomic64.h>
-#endif
-#include <asm-generic/atomic-long.h>
+#endif /* !CONFIG_GENERIC_ATOMIC64 */
 #endif
 #endif

@@ -188,6 +188,4 @@ static inline int atomic_sub_if_positive(int i, atomic_t *v)
 #define smp_mb__before_atomic_inc() barrier()
 #define smp_mb__after_atomic_inc() barrier()
-#include <asm-generic/atomic-long.h>
 #endif /* __ASM_AVR32_ATOMIC_H */

@@ -111,10 +111,7 @@ static inline void atomic_set_mask(int mask, atomic_t *v)
 #define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)
 #define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
-#include <asm-generic/atomic-long.h>
 #endif
-#include <asm-generic/atomic64.h>
 #endif

@@ -157,5 +157,4 @@ static inline int __atomic_add_unless(atomic_t *v, int a, int u)
 #define smp_mb__before_atomic_inc() barrier()
 #define smp_mb__after_atomic_inc() barrier()
-#include <asm-generic/atomic-long.h>
 #endif

@@ -257,5 +257,4 @@ static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
 }
-#include <asm-generic/atomic-long.h>
 #endif /* _ASM_ATOMIC_H */

@@ -145,5 +145,4 @@ static __inline__ void atomic_set_mask(unsigned long mask, unsigned long *v)
 #define smp_mb__before_atomic_inc() barrier()
 #define smp_mb__after_atomic_inc() barrier()
-#include <asm-generic/atomic-long.h>
 #endif /* __ARCH_H8300_ATOMIC __ */

@@ -215,5 +215,4 @@ atomic64_add_negative (__s64 i, atomic64_t *v)
 #define smp_mb__before_atomic_inc() barrier()
 #define smp_mb__after_atomic_inc() barrier()
-#include <asm-generic/atomic-long.h>
 #endif /* _ASM_IA64_ATOMIC_H */

@@ -313,5 +313,4 @@ static __inline__ void atomic_set_mask(unsigned long mask, atomic_t *addr)
 #define smp_mb__before_atomic_inc() barrier()
 #define smp_mb__after_atomic_inc() barrier()
-#include <asm-generic/atomic-long.h>
 #endif /* _ASM_M32R_ATOMIC_H */

@@ -205,6 +205,4 @@ static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
 #define smp_mb__before_atomic_inc() barrier()
 #define smp_mb__after_atomic_inc() barrier()
-#include <asm-generic/atomic-long.h>
-#include <asm-generic/atomic64.h>
 #endif /* __ARCH_M68K_ATOMIC __ */

@@ -765,10 +765,6 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
  */
 #define atomic64_add_negative(i, v) (atomic64_add_return(i, (v)) < 0)
-#else /* !CONFIG_64BIT */
-#include <asm-generic/atomic64.h>
 #endif /* CONFIG_64BIT */
 /*

@@ -780,6 +776,4 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
 #define smp_mb__before_atomic_inc() smp_mb__before_llsc()
 #define smp_mb__after_atomic_inc() smp_llsc_mb()
-#include <asm-generic/atomic-long.h>
 #endif /* _ASM_ATOMIC_H */

@@ -343,8 +343,6 @@ static inline void atomic_set_mask(unsigned long mask, unsigned long *addr)
 #define smp_mb__before_atomic_inc() barrier()
 #define smp_mb__after_atomic_inc() barrier()
-#include <asm-generic/atomic-long.h>
 #endif /* __KERNEL__ */
 #endif /* CONFIG_SMP */
 #endif /* _ASM_ATOMIC_H */

@@ -335,12 +335,7 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
 #define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
-#else /* CONFIG_64BIT */
-#include <asm-generic/atomic64.h>
 #endif /* !CONFIG_64BIT */
-#include <asm-generic/atomic-long.h>
 #endif /* _ASM_PARISC_ATOMIC_H_ */

@@ -469,11 +469,7 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
 #define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
-#else /* __powerpc64__ */
-#include <asm-generic/atomic64.h>
 #endif /* __powerpc64__ */
-#include <asm-generic/atomic-long.h>
 #endif /* __KERNEL__ */
 #endif /* _ASM_POWERPC_ATOMIC_H_ */

@@ -331,6 +331,4 @@ static inline long long atomic64_dec_if_positive(atomic64_t *v)
 #define smp_mb__before_atomic_inc() smp_mb()
 #define smp_mb__after_atomic_inc() smp_mb()
-#include <asm-generic/atomic-long.h>
 #endif /* __ARCH_S390_ATOMIC__ */

@@ -67,7 +67,4 @@ static inline int __atomic_add_unless(atomic_t *v, int a, int u)
 #define smp_mb__before_atomic_inc() smp_mb()
 #define smp_mb__after_atomic_inc() smp_mb()
-#include <asm-generic/atomic-long.h>
-#include <asm-generic/atomic64.h>
 #endif /* __ASM_SH_ATOMIC_H */

@@ -160,5 +160,4 @@ static inline int __atomic24_sub(int i, atomic24_t *v)
 #endif /* !(__KERNEL__) */
-#include <asm-generic/atomic-long.h>
 #endif /* !(__ARCH_SPARC_ATOMIC__) */

@@ -113,5 +113,4 @@ static inline long atomic64_add_unless(atomic64_t *v, long a, long u)
 #define smp_mb__before_atomic_inc() barrier()
 #define smp_mb__after_atomic_inc() barrier()
-#include <asm-generic/atomic-long.h>
 #endif /* !(__ARCH_SPARC64_ATOMIC__) */

@@ -177,9 +177,4 @@ extern unsigned long __cmpxchg_called_with_bad_pointer(void);
 #include <asm/atomic_64.h>
 #endif
-/* Provide the appropriate atomic_long_t definitions. */
-#ifndef __ASSEMBLY__
-#include <asm-generic/atomic-long.h>
-#endif
 #endif /* _ASM_TILE_ATOMIC_H */

@@ -318,5 +318,4 @@ static inline void atomic_or_long(unsigned long *v1, unsigned long v2)
 # include "atomic64_64.h"
 #endif
-#include <asm-generic/atomic-long.h>
 #endif /* _ASM_X86_ATOMIC_H */

@@ -291,7 +291,6 @@ static inline void atomic_set_mask(unsigned int mask, atomic_t *v)
 #define smp_mb__before_atomic_inc() barrier()
 #define smp_mb__after_atomic_inc() barrier()
-#include <asm-generic/atomic-long.h>
 #endif /* __KERNEL__ */
 #endif /* _XTENSA_ATOMIC_H */

@@ -154,7 +154,5 @@ static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
 #define smp_mb__before_atomic_inc() barrier()
 #define smp_mb__after_atomic_inc() barrier()
-#include <asm-generic/atomic-long.h>
 #endif /* __KERNEL__ */
 #endif /* __ASM_GENERIC_ATOMIC_H */

@@ -96,4 +96,8 @@ static inline void atomic_or(int i, atomic_t *v)
 }
 #endif /* #ifndef CONFIG_ARCH_HAS_ATOMIC_OR */
+#include <asm-generic/atomic-long.h>
+#ifdef CONFIG_GENERIC_ATOMIC64
+#include <asm-generic/atomic64.h>
+#endif
 #endif /* _LINUX_ATOMIC_H */
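
With the includes consolidated, a consumer never touches the asm-generic
headers directly.  A minimal sketch of such a consumer (the counter and
function are hypothetical, not from this commit):

  #include <linux/atomic.h>

  static atomic_long_t nr_events = ATOMIC_LONG_INIT(0);

  static void record_event(void)
  {
          /* atomic_long_inc() comes from asm-generic/atomic-long.h,
           * which linux/atomic.h now pulls in after its own helpers. */
          atomic_long_inc(&nr_events);
  }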