[PATCH] include/asm-i386/: "extern inline" -> "static inline"
author: Adrian Bunk <bunk@stusta.de>
Sat, 10 Sep 2005 07:27:16 +0000 (00:27 -0700)
committer: Linus Torvalds <torvalds@g5.osdl.org>
Sat, 10 Sep 2005 17:06:34 +0000 (10:06 -0700)
"extern inline" doesn't make much sense.

Signed-off-by: Adrian Bunk <bunk@stusta.de>
Signed-off-by: Andrew Morton <akpm@osdl.org>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>
include/asm-i386/div64.h
include/asm-i386/processor.h

index 28ed8b296afc137af6b3043179fc97eb230a2f54..75c67c785bb8dfd5ddcebf1e07579b5270db2a7c 100644 (file)
@@ -35,7 +35,7 @@
  */
 #define div_long_long_rem(a,b,c) div_ll_X_l_rem(a,b,c)
 
-extern inline long
+static inline long
 div_ll_X_l_rem(long long divs, long div, long *rem)
 {
        long dum2;
index 37bef8ed7bed12bd33bf3670ccf7991516aa7484..0a4ec764377ca1aa0db8c607a35e9ddfd586d297 100644 (file)
@@ -679,7 +679,7 @@ static inline void rep_nop(void)
    However we don't do prefetches for pre XP Athlons currently
    That should be fixed. */
 #define ARCH_HAS_PREFETCH
-extern inline void prefetch(const void *x)
+static inline void prefetch(const void *x)
 {
        alternative_input(ASM_NOP4,
                          "prefetchnta (%1)",
@@ -693,7 +693,7 @@ extern inline void prefetch(const void *x)
 
 /* 3dnow! prefetch to get an exclusive cache line. Useful for 
    spinlocks to avoid one state transition in the cache coherency protocol. */
-extern inline void prefetchw(const void *x)
+static inline void prefetchw(const void *x)
 {
        alternative_input(ASM_NOP4,
                          "prefetchw (%1)",