
Commit 341d8854 authored by Harvey Harrison, committed by Ingo Molnar

x86: remove fastcall from include/asm-x86

parent 75604d7f
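For context: on 32-bit x86 the kernel's fastcall annotation historically expanded to GCC's regparm(3) attribute, and i386 kernels are now built with -mregparm=3 throughout, so the annotation no longer changes the calling convention and can simply be dropped from these headers. The sketch below is illustrative only and is not taken from the kernel tree; the add3/add3_plain names are made-up stand-ins.

/* sketch.c -- a minimal illustration, assuming a 32-bit-capable GCC.
 * Build: gcc -m32 -mregparm=3 -O2 -c sketch.c
 */

/* The kernel historically defined fastcall roughly like this: */
#define fastcall __attribute__((regparm(3)))

/* Annotated form: the first three integer arguments are passed in
 * registers (eax/edx/ecx) rather than on the stack. */
fastcall int add3(int a, int b, int c);

/* When the whole tree is already compiled with -mregparm=3, a plain
 * declaration gets the identical calling convention, so the
 * annotation is redundant -- which is what this commit removes: */
int add3_plain(int a, int b, int c);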
+3 −3
@@ -59,17 +59,17 @@ extern unsigned boot_cpu_id;
 #define setup_secondary_clock setup_secondary_APIC_clock
 #endif
 
-static inline fastcall void native_apic_write(unsigned long reg, u32 v)
+static inline void native_apic_write(unsigned long reg, u32 v)
 {
 	*((volatile u32 *)(APIC_BASE + reg)) = v;
 }
 
-static inline fastcall void native_apic_write_atomic(unsigned long reg, u32 v)
+static inline void native_apic_write_atomic(unsigned long reg, u32 v)
 {
 	(void) xchg((u32*)(APIC_BASE + reg), v);
 }
 
-static inline fastcall u32 native_apic_read(unsigned long reg)
+static inline u32 native_apic_read(unsigned long reg)
 {
 	return *((volatile u32 *)(APIC_BASE + reg));
 }
+7 −7
@@ -29,16 +29,16 @@
 extern void (*interrupt[NR_IRQS])(void);
 
 #ifdef CONFIG_SMP
-fastcall void reschedule_interrupt(void);
-fastcall void invalidate_interrupt(void);
-fastcall void call_function_interrupt(void);
+void reschedule_interrupt(void);
+void invalidate_interrupt(void);
+void call_function_interrupt(void);
 #endif
 
 #ifdef CONFIG_X86_LOCAL_APIC
-fastcall void apic_timer_interrupt(void);
-fastcall void error_interrupt(void);
-fastcall void spurious_interrupt(void);
-fastcall void thermal_interrupt(void);
+void apic_timer_interrupt(void);
+void error_interrupt(void);
+void spurious_interrupt(void);
+void thermal_interrupt(void);
 #define platform_legacy_irq(irq)	((irq) < 16)
 #endif
 
+3 −4
@@ -26,7 +26,7 @@ do { \
 	unsigned int dummy;						\
 									\
 	typecheck(atomic_t *, count);					\
-	typecheck_fn(fastcall void (*)(atomic_t *), fail_fn);		\
+	typecheck_fn(void (*)(atomic_t *), fail_fn);		\
 									\
 	__asm__ __volatile__(						\
 		LOCK_PREFIX "   decl (%%eax)	\n"			\
@@ -51,8 +51,7 @@ do { \
  * or anything the slow path function returns
  */
 static inline int
-__mutex_fastpath_lock_retval(atomic_t *count,
-			     int fastcall (*fail_fn)(atomic_t *))
+__mutex_fastpath_lock_retval(atomic_t *count, int (*fail_fn)(atomic_t *))
 {
 	if (unlikely(atomic_dec_return(count) < 0))
 		return fail_fn(count);
@@ -78,7 +77,7 @@ do { \
 	unsigned int dummy;						\
 									\
 	typecheck(atomic_t *, count);					\
-	typecheck_fn(fastcall void (*)(atomic_t *), fail_fn);		\
+	typecheck_fn(void (*)(atomic_t *), fail_fn);		\
 									\
 	__asm__ __volatile__(						\
 		LOCK_PREFIX "   incl (%%eax)	\n"			\
+4 −4
@@ -83,10 +83,10 @@ static inline void init_MUTEX_LOCKED (struct semaphore *sem)
 	sema_init(sem, 0);
 }
 
-fastcall void __down_failed(void /* special register calling convention */);
-fastcall int  __down_failed_interruptible(void  /* params in registers */);
-fastcall int  __down_failed_trylock(void  /* params in registers */);
-fastcall void __up_wakeup(void /* special register calling convention */);
+void __down_failed(void /* special register calling convention */);
+int  __down_failed_interruptible(void  /* params in registers */);
+int  __down_failed_trylock(void  /* params in registers */);
+void __up_wakeup(void /* special register calling convention */);
 
 /*
  * This is ugly, but we want the default case to fall through.