Commit e2afe674 authored by Adrian Bunk, committed by Linus Torvalds

[PATCH] include/asm-i386/: "extern inline" -> "static inline"

"extern inline" doesn't make much sense.
Signed-off-by: Adrian Bunk <bunk@stusta.de>
Signed-off-by: Andrew Morton <akpm@osdl.org>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>
Parent ea0e0a4f
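A minimal sketch of the distinction, assuming gcc's GNU89 inline semantics (which the kernel was built with at the time); the header name and the add_extern/add_static helpers are made up for illustration and are not part of this patch:

/* demo.h -- hypothetical header included by many .c files */

/* GNU89 "extern inline": the body is only an inline substitute and no
 * out-of-line copy is emitted in this translation unit.  A call that is
 * not inlined (e.g. at -O0, or when the function's address is taken)
 * references an external symbol that nothing defines, so the link fails
 * unless a separate non-inline definition is provided elsewhere. */
extern inline int add_extern(int a, int b)
{
	return a + b;
}

/* "static inline": every translation unit that needs an out-of-line body
 * gets its own private copy, so the code always compiles and links. */
static inline int add_static(int a, int b)
{
	return a + b;
}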
@@ -35,7 +35,7 @@
  */
 #define div_long_long_rem(a,b,c) div_ll_X_l_rem(a,b,c)
-extern inline long
+static inline long
 div_ll_X_l_rem(long long divs, long div, long *rem)
 {
 	long dum2;
...
@@ -679,7 +679,7 @@ static inline void rep_nop(void)
    However we don't do prefetches for pre XP Athlons currently
    That should be fixed. */
 #define ARCH_HAS_PREFETCH
-extern inline void prefetch(const void *x)
+static inline void prefetch(const void *x)
 {
 	alternative_input(ASM_NOP4,
 			  "prefetchnta (%1)",
@@ -693,7 +693,7 @@ extern inline void prefetch(const void *x)
 /* 3dnow! prefetch to get an exclusive cache line. Useful for
    spinlocks to avoid one state transition in the cache coherency protocol. */
-extern inline void prefetchw(const void *x)
+static inline void prefetchw(const void *x)
 {
 	alternative_input(ASM_NOP4,
 			  "prefetchw (%1)",
...
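For context, prefetch() and prefetchw() are the read and write-intent software-prefetch hints that pointer-chasing callers rely on. The following self-contained illustration of that usage pattern is not from the kernel: struct item, sum_list() and the __builtin_prefetch-based stand-ins are hypothetical.

struct item {
	struct item *next;
	int payload;
};

/* Hypothetical stand-ins for the helpers touched above, written with the
 * gcc builtin (second argument: 0 = prefetch for read, 1 = for write). */
static inline void prefetch(const void *x)  { __builtin_prefetch(x, 0); }
static inline void prefetchw(const void *x) { __builtin_prefetch(x, 1); }

/* Typical caller: start fetching the next node while the current one is
 * still being processed, hiding part of the cache-miss latency. */
static int sum_list(const struct item *p)
{
	int sum = 0;

	while (p) {
		prefetch(p->next);
		sum += p->payload;
		p = p->next;
	}
	return sum;
}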