
Commit f2ab4461 authored by Zachary Amsden, committed by Linus Torvalds

[PATCH] x86: more asm cleanups



Some more assembler cleanups I noticed along the way.

Signed-off-by: Zachary Amsden <zach@vmware.com>
Cc: "H. Peter Anvin" <hpa@zytor.com>
Signed-off-by: Andrew Morton <akpm@osdl.org>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>
parent 4f0cb8d9
+3 −6
@@ -82,16 +82,13 @@ static void __devinit Intel_errata_workarounds(struct cpuinfo_x86 *c)
  */
 static int __devinit num_cpu_cores(struct cpuinfo_x86 *c)
 {
-	unsigned int eax;
+	unsigned int eax, ebx, ecx, edx;
 
 	if (c->cpuid_level < 4)
 		return 1;
 
-	__asm__("cpuid"
-		: "=a" (eax)
-		: "0" (4), "c" (0)
-		: "bx", "dx");
-
+	/* Intel has a non-standard dependency on %ecx for this CPUID level. */
+	cpuid_count(4, 0, &eax, &ebx, &ecx, &edx);
 	if (eax & 0x1f)
 		return ((eax >> 26) + 1);
 	else
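
For reference, cpuid_count() is the header helper this hunk switches to; a sketch close to the include/asm-i386/processor.h definition of this era (not quoted verbatim): it issues CPUID with the sub-leaf explicitly loaded into %ecx and hands back all four output registers, so callers no longer need to spell out clobbers by hand.

static inline void cpuid_count(int op, int count, int *eax, int *ebx,
			       int *ecx, int *edx)
{
	/* CPUID leaf in %eax, sub-leaf in %ecx; all four outputs returned */
	__asm__("cpuid"
		: "=a" (*eax), "=b" (*ebx), "=c" (*ecx), "=d" (*edx)
		: "0" (op), "c" (count));
}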
+1 −1
@@ -153,7 +153,7 @@ static int crash_nmi_callback(struct pt_regs *regs, int cpu)
 	disable_local_APIC();
 	atomic_dec(&waiting_for_crash_ipi);
 	/* Assume hlt works */
-	__asm__("hlt");
+	halt();
 	for(;;);
 
 	return 1;
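
halt() here (and again in the play_dead() hunk at the end of this commit) is a thin wrapper; a minimal sketch, assuming the include/asm-i386/system.h definition of this era rather than quoting it:

/* volatile hlt with a "memory" clobber, so the compiler neither
 * reorders memory accesses across it nor drops it */
#define halt() __asm__ __volatile__("hlt" : : : "memory")

The open-coded __asm__("hlt") it replaces carried no "memory" clobber, so the helper is, if anything, slightly more conservative as well as shorter.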
+2 −8
@@ -93,10 +93,7 @@ static void set_idt(void *newidt, __u16 limit)
 	curidt.size    = limit;
 	curidt.address = (unsigned long)newidt;
 
-	__asm__ __volatile__ (
-		"lidtl %0\n"
-		: : "m" (curidt)
-		);
+	load_idt(&curidt);
 };
 
 
@@ -108,10 +105,7 @@ static void set_gdt(void *newgdt, __u16 limit)
 	curgdt.size    = limit;
 	curgdt.address = (unsigned long)newgdt;
 
-	__asm__ __volatile__ (
-		"lgdtl %0\n"
-		: : "m" (curgdt)
-		);
+	load_gdt(&curgdt);
 };
 
 static void load_segments(void)
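
Both set_idt() and set_gdt() now go through the descriptor-table accessors introduced by this cleanup series. A rough sketch of their shape (approximating include/asm-i386/desc.h, not the exact header text): each takes a pointer to the 6-byte limit/base pseudo-descriptor built just above and loads it.

/* dtr points at the size/address pseudo-descriptor */
#define load_idt(dtr) __asm__ __volatile__("lidt %0" : : "m" (*(dtr)))
#define load_gdt(dtr) __asm__ __volatile__("lgdt %0" : : "m" (*(dtr)))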
+6 −25
@@ -46,23 +46,13 @@
 
 static struct class *msr_class;
 
-/* Note: "err" is handled in a funny way below.  Otherwise one version
-   of gcc or another breaks. */
-
 static inline int wrmsr_eio(u32 reg, u32 eax, u32 edx)
 {
 	int err;
 
-	asm volatile ("1:	wrmsr\n"
-		      "2:\n"
-		      ".section .fixup,\"ax\"\n"
-		      "3:	movl %4,%0\n"
-		      "	jmp 2b\n"
-		      ".previous\n"
-		      ".section __ex_table,\"a\"\n"
-		      "	.align 4\n" "	.long 1b,3b\n" ".previous":"=&bDS" (err)
-		      :"a"(eax), "d"(edx), "c"(reg), "i"(-EIO), "0"(0));
-
+	err = wrmsr_safe(reg, eax, edx);
+	if (err)
+		err = -EIO;
 	return err;
 }
 
@@ -70,18 +60,9 @@ static inline int rdmsr_eio(u32 reg, u32 *eax, u32 *edx)
 {
 	int err;
 
-	asm volatile ("1:	rdmsr\n"
-		      "2:\n"
-		      ".section .fixup,\"ax\"\n"
-		      "3:	movl %4,%0\n"
-		      "	jmp 2b\n"
-		      ".previous\n"
-		      ".section __ex_table,\"a\"\n"
-		      "	.align 4\n"
-		      "	.long 1b,3b\n"
-		      ".previous":"=&bDS" (err), "=a"(*eax), "=d"(*edx)
-		      :"c"(reg), "i"(-EIO), "0"(0));
-
+	err = rdmsr_safe(reg, eax, edx);
+	if (err)
+		err = -EIO;
 	return err;
 }
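
wrmsr_safe() and rdmsr_safe() package up the same .fixup/__ex_table trick that was open-coded here: they return 0 on success and a negative error when the MSR access faults, and the driver then maps any failure to -EIO so its return value to userspace is unchanged. A rough sketch of the write-side helper, assuming the asm-i386/msr.h pattern of this era (EFAULT comes from linux/errno.h) rather than quoting the header exactly:

#define wrmsr_safe(msr, a, b)						\
({									\
	int ret__;							\
	asm volatile("2: wrmsr ; xorl %0,%0\n"	/* 0 on success */	\
		     "1:\n\t"						\
		     ".section .fixup,\"ax\"\n\t"			\
		     "3:  movl %4,%0 ; jmp 1b\n\t" /* error on fault */	\
		     ".previous\n\t"					\
		     ".section __ex_table,\"a\"\n\t"			\
		     "   .align 4\n\t"					\
		     "   .long 2b,3b\n\t"				\
		     ".previous"					\
		     : "=a" (ret__)					\
		     : "c" (msr), "0" (a), "d" (b), "i" (-EFAULT));	\
	ret__;								\
})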

+1 −1
@@ -164,7 +164,7 @@ static inline void play_dead(void)
 	 */
 	local_irq_disable();
 	while (1)
-		__asm__ __volatile__("hlt":::"memory");
+		halt();
 }
 #else
 static inline void play_dead(void)