x86: remove fastcall from include/asm-x86
author Harvey Harrison <harvey.harrison@gmail.com>
Wed, 30 Jan 2008 12:31:17 +0000 (13:31 +0100)
committer Ingo Molnar <mingo@elte.hu>
Wed, 30 Jan 2008 12:31:17 +0000 (13:31 +0100)
Signed-off-by: Harvey Harrison <harvey.harrison@gmail.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
include/asm-x86/apic.h
include/asm-x86/hw_irq_32.h
include/asm-x86/mutex_32.h
include/asm-x86/semaphore_32.h

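Note: dropping fastcall is meant to be a no-op. A minimal sketch, assuming the linkage.h definitions of the era: on 32-bit x86, fastcall expanded to __attribute__((regparm(3))), which is already the default calling convention when the kernel is built with -mregparm=3, while the generic fallback used on x86-64 expanded to nothing. The handler names below are illustrative only, not part of this patch:

	/* Sketch only -- roughly the i386 definition in effect at the time. */
	#define fastcall __attribute__((regparm(3)))

	/* On a -mregparm=3 build, both declarations use the same
	 * register-based calling convention, so removing the annotation
	 * changes neither the ABI nor the generated code.
	 */
	fastcall void old_style_handler(void);	/* as the headers read before */
	void new_style_handler(void);		/* as they read after */
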
index 423022759cb2e84660c66f6f050366b216d2a109..72bf09cf13ab1dc9fd24ceaea7972948d1ba5e50 100644
@@ -59,17 +59,17 @@ extern unsigned boot_cpu_id;
 #define setup_secondary_clock setup_secondary_APIC_clock
 #endif
 
-static inline fastcall void native_apic_write(unsigned long reg, u32 v)
+static inline void native_apic_write(unsigned long reg, u32 v)
 {
        *((volatile u32 *)(APIC_BASE + reg)) = v;
 }
 
-static inline fastcall void native_apic_write_atomic(unsigned long reg, u32 v)
+static inline void native_apic_write_atomic(unsigned long reg, u32 v)
 {
        (void) xchg((u32*)(APIC_BASE + reg), v);
 }
 
-static inline fastcall u32 native_apic_read(unsigned long reg)
+static inline u32 native_apic_read(unsigned long reg)
 {
        return *((volatile u32 *)(APIC_BASE + reg));
 }
index 0bedbdf5e9078af5d579f80832a057911067f6b8..b93e35a708ace5e3bdcff3b9fe3664d0fe756873 100644
 extern void (*interrupt[NR_IRQS])(void);
 
 #ifdef CONFIG_SMP
-fastcall void reschedule_interrupt(void);
-fastcall void invalidate_interrupt(void);
-fastcall void call_function_interrupt(void);
+void reschedule_interrupt(void);
+void invalidate_interrupt(void);
+void call_function_interrupt(void);
 #endif
 
 #ifdef CONFIG_X86_LOCAL_APIC
-fastcall void apic_timer_interrupt(void);
-fastcall void error_interrupt(void);
-fastcall void spurious_interrupt(void);
-fastcall void thermal_interrupt(void);
+void apic_timer_interrupt(void);
+void error_interrupt(void);
+void spurious_interrupt(void);
+void thermal_interrupt(void);
 #define platform_legacy_irq(irq)       ((irq) < 16)
 #endif
 
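The mutex fastpath macros in the file below run the slow-path pointer through typecheck_fn, so the expected function-pointer type has to lose the fastcall qualifier together with the prototypes. For reference, a sketch of typecheck_fn, roughly as include/linux/kernel.h defined it at the time:

	/* Compile-time check that 'function' is assignable to 'type';
	 * the statement expression evaluates to void and emits no code.
	 */
	#define typecheck_fn(type, function)		\
	({	typeof(type) __tmp = function;		\
		(void)__tmp;				\
	})
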
index 7a17d9e58ad6586140e84a91a0a850b1b527e77a..bbeefb96ddfd6062657334b15915598ed4b26595 100644
@@ -26,7 +26,7 @@ do {                                                                  \
        unsigned int dummy;                                             \
                                                                        \
        typecheck(atomic_t *, count);                                   \
-       typecheck_fn(fastcall void (*)(atomic_t *), fail_fn);           \
+       typecheck_fn(void (*)(atomic_t *), fail_fn);            \
                                                                        \
        __asm__ __volatile__(                                           \
                LOCK_PREFIX "   decl (%%eax)    \n"                     \
@@ -51,8 +51,7 @@ do {                                                                  \
  * or anything the slow path function returns
  */
 static inline int
-__mutex_fastpath_lock_retval(atomic_t *count,
-                            int fastcall (*fail_fn)(atomic_t *))
+__mutex_fastpath_lock_retval(atomic_t *count, int (*fail_fn)(atomic_t *))
 {
        if (unlikely(atomic_dec_return(count) < 0))
                return fail_fn(count);
@@ -78,7 +77,7 @@ do {                                                                  \
        unsigned int dummy;                                             \
                                                                        \
        typecheck(atomic_t *, count);                                   \
-       typecheck_fn(fastcall void (*)(atomic_t *), fail_fn);           \
+       typecheck_fn(void (*)(atomic_t *), fail_fn);            \
                                                                        \
        __asm__ __volatile__(                                           \
                LOCK_PREFIX "   incl (%%eax)    \n"                     \
index 835c1d751a9f8cdcb7270a8034c9cdbb97309e13..cc826e85323f3c1f18eb00a82123e91b76008894 100644
@@ -83,10 +83,10 @@ static inline void init_MUTEX_LOCKED (struct semaphore *sem)
        sema_init(sem, 0);
 }
 
-fastcall void __down_failed(void /* special register calling convention */);
-fastcall int  __down_failed_interruptible(void  /* params in registers */);
-fastcall int  __down_failed_trylock(void  /* params in registers */);
-fastcall void __up_wakeup(void /* special register calling convention */);
+void __down_failed(void /* special register calling convention */);
+int  __down_failed_interruptible(void  /* params in registers */);
+int  __down_failed_trylock(void  /* params in registers */);
+void __up_wakeup(void /* special register calling convention */);
 
 /*
  * This is ugly, but we want the default case to fall through.