Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 12f2bbd6 authored by Linus Torvalds
Browse files

Merge branch 'x86-asmlinkage-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip

Pull x86 asmlinkage (LTO) changes from Peter Anvin:
 "This patchset adds more infrastructure for link time optimization
  (LTO).

  This patchset was pulled into my tree late because of a
  miscommunication (part of the patchset was picked up by other
  maintainers).  However, the patchset is strictly build-related and
  seems to be okay in testing"

* 'x86-asmlinkage-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip:
  x86, asmlinkage, xen: Fix type of NMI
  x86, asmlinkage, xen, kvm: Make {xen,kvm}_lock_spinning global and visible
  x86: Use inline assembler instead of global register variable to get sp
  x86, asmlinkage, paravirt: Make paravirt thunks global
  x86, asmlinkage, paravirt: Don't rely on local assembler labels
  x86, asmlinkage, lguest: Fix C functions used by inline assembler
parents 10ffe3db 07ba06d9
Loading
Loading
Loading
Loading
+1 −1
Original line number Diff line number Diff line
@@ -781,9 +781,9 @@ static __always_inline void __ticket_unlock_kick(struct arch_spinlock *lock,
 */
#define PV_CALLEE_SAVE_REGS_THUNK(func)					\
	extern typeof(func) __raw_callee_save_##func;			\
	static void *__##func##__ __used = func;			\
									\
	asm(".pushsection .text;"					\
	    ".globl __raw_callee_save_" #func " ; "			\
	    "__raw_callee_save_" #func ": "				\
	    PV_SAVE_ALL_CALLER_REGS					\
	    "call " #func ";"						\
+5 −4
Original line number Diff line number Diff line
@@ -388,10 +388,11 @@ extern struct pv_lock_ops pv_lock_ops;
	_paravirt_alt(insn_string, "%c[paravirt_typenum]", "%c[paravirt_clobber]")

/* Simple instruction patching code. */
/*
 * NATIVE_LABEL(a, x, b) - emit a *global* assembler label named "<a><x>_<b>"
 * at the current point in the asm stream (".globl" directive followed by the
 * label itself).  Per this commit ("paravirt: Don't rely on local assembler
 * labels"), global labels are used instead of local ones so the symbols stay
 * resolvable when the compiler/LTO reorders or duplicates code sections.
 */
#define NATIVE_LABEL(a,x,b) "\n\t.globl " a #x "_" #b "\n" a #x "_" #b ":\n\t"

#define DEF_NATIVE(ops, name, code)					\
	extern const char start_##ops##_##name[] __visible,		\
			  end_##ops##_##name[] __visible;		\
	asm("start_" #ops "_" #name ": " code "; end_" #ops "_" #name ":")
	__visible extern const char start_##ops##_##name[], end_##ops##_##name[];	\
	asm(NATIVE_LABEL("start_", ops, name) code NATIVE_LABEL("end_", ops, name))

unsigned paravirt_patch_nop(void);
unsigned paravirt_patch_ident_32(void *insnbuf, unsigned len);
+5 −3
Original line number Diff line number Diff line
@@ -163,9 +163,11 @@ struct thread_info {
 */
#ifndef __ASSEMBLY__


/* how to get the current stack pointer from C */
register unsigned long current_stack_pointer asm("esp") __used;
/*
 * current_stack_pointer - read the current %esp from C (x86-32).
 *
 * Implemented as a statement-expression macro that mov's %esp into an
 * ordinary local, replacing the old file-scope global register variable
 * (register unsigned long ... asm("esp")) — see the commit subject "Use
 * inline assembler instead of global register variable to get sp".
 * The "=g" constraint lets the compiler place the result in any general
 * register or memory slot.  NOTE(review): value is a snapshot at the
 * point of expansion, not a live alias of %esp.
 */
#define current_stack_pointer ({		\
	unsigned long sp;			\
	asm("mov %%esp,%0" : "=g" (sp));	\
	sp;					\
})

/* how to get the thread information struct from C */
static inline struct thread_info *current_thread_info(void)
+1 −1
Original line number Diff line number Diff line
@@ -673,7 +673,7 @@ static cpumask_t waiting_cpus;
/* Track spinlock on which a cpu is waiting */
static DEFINE_PER_CPU(struct kvm_lock_waiting, klock_waiting);

static void kvm_lock_spinning(struct arch_spinlock *lock, __ticket_t want)
__visible void kvm_lock_spinning(struct arch_spinlock *lock, __ticket_t want)
{
	struct kvm_lock_waiting *w;
	int cpu;
+4 −4
Original line number Diff line number Diff line
@@ -33,7 +33,7 @@
 * and vice versa.
 */

static unsigned long vsmp_save_fl(void)
asmlinkage unsigned long vsmp_save_fl(void)
{
	unsigned long flags = native_save_fl();

@@ -43,7 +43,7 @@ static unsigned long vsmp_save_fl(void)
}
PV_CALLEE_SAVE_REGS_THUNK(vsmp_save_fl);

static void vsmp_restore_fl(unsigned long flags)
__visible void vsmp_restore_fl(unsigned long flags)
{
	if (flags & X86_EFLAGS_IF)
		flags &= ~X86_EFLAGS_AC;
@@ -53,7 +53,7 @@ static void vsmp_restore_fl(unsigned long flags)
}
PV_CALLEE_SAVE_REGS_THUNK(vsmp_restore_fl);

static void vsmp_irq_disable(void)
asmlinkage void vsmp_irq_disable(void)
{
	unsigned long flags = native_save_fl();

@@ -61,7 +61,7 @@ static void vsmp_irq_disable(void)
}
PV_CALLEE_SAVE_REGS_THUNK(vsmp_irq_disable);

static void vsmp_irq_enable(void)
asmlinkage void vsmp_irq_enable(void)
{
	unsigned long flags = native_save_fl();

Loading