
Commit 1f7afb08 authored by Thomas Gleixner, committed by Thomas Gleixner

x86: unify include/asm-x86/edac_32/64.h



Same file, except for whitespace, comment formatting and:

32-bit:	unsigned long *virt_addr = va;
64-bit:	unsigned int *virt_addr = va;

Both can be safely replaced by:
	u32 i, *virt_addr = va;

Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
parent 327c21bc
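
Why u32 is the safe replacement: on x86-32 both unsigned long and unsigned int are 4 bytes wide, but on x86-64 unsigned long widens to 8 bytes. Had the unified header kept the 32-bit file's unsigned long *, the pointer would advance 8 bytes per iteration on 64-bit while the loop still runs size / 4 times, scrubbing only every other word and running past the end of the buffer. u32 is 32 bits on both, so the stride stays 4 bytes everywhere. A minimal userspace sketch, not from the commit, that makes the stride argument concrete (plain C, builds with gcc on either architecture):

#include <stdint.h>
#include <stdio.h>

typedef uint32_t u32;	/* stand-in for the kernel's fixed-width u32 */

int main(void)
{
	/* Pointer arithmetic advances by sizeof(pointee), so the
	 * pointee type decides the scrub stride. */
	printf("unsigned long stride: %zu\n", sizeof(unsigned long)); /* 4 on x86-32, 8 on x86-64 */
	printf("unsigned int  stride: %zu\n", sizeof(unsigned int));  /* 4 on both */
	printf("u32           stride: %zu\n", sizeof(u32));           /* 4 everywhere */
	return 0;
}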
include/asm-x86/edac.h

+17 −4
-#ifdef CONFIG_X86_32
-# include "edac_32.h"
-#else
-# include "edac_64.h"
+#ifndef _ASM_X86_EDAC_H
+#define _ASM_X86_EDAC_H
+
+/* ECC atomic, DMA, SMP and interrupt safe scrub function */
+
+static __inline__ void atomic_scrub(void *va, u32 size)
+{
+	u32 i, *virt_addr = va;
+
+	/*
+	 * Very carefully read and write to memory atomically so we
+	 * are interrupt, DMA and SMP safe.
+	 */
+	for (i = 0; i < size / 4; i++, virt_addr++)
+		__asm__ __volatile__("lock; addl $0, %0"::"m"(*virt_addr));
+}
+
 #endif
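
For context on how this helper is driven: the in-tree caller of atomic_scrub() at the time is edac_mc_scrub_block() in drivers/edac/edac_mc.c, which maps the page containing the error and hands the kernel virtual address to atomic_scrub(). A condensed sketch of that pattern, with the sanity checks and highmem IRQ handling of the real function omitted:

#include <linux/highmem.h>	/* kmap_atomic(), kunmap_atomic() */
#include <asm/edac.h>		/* atomic_scrub() */

static void scrub_block_sketch(struct page *pg, unsigned long offset, u32 size)
{
	void *virt_addr;

	virt_addr = kmap_atomic(pg, KM_BOUNCE_READ);	/* map the page */
	atomic_scrub(virt_addr + offset, size);		/* locked RMW, word by word */
	kunmap_atomic(virt_addr, KM_BOUNCE_READ);
}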

include/asm-x86/edac_32.h

deleted 100644 → 0
+0 −18
#ifndef ASM_EDAC_H
#define ASM_EDAC_H

/* ECC atomic, DMA, SMP and interrupt safe scrub function */

static __inline__ void atomic_scrub(void *va, u32 size)
{
	unsigned long *virt_addr = va;
	u32 i;

	for (i = 0; i < size / 4; i++, virt_addr++)
		/* Very carefully read and write to memory atomically
		 * so we are interrupt, DMA and SMP safe.
		 */
		__asm__ __volatile__("lock; addl $0, %0"::"m"(*virt_addr));
}

#endif

include/asm-x86/edac_64.h

deleted 100644 → 0
+0 −18
#ifndef ASM_EDAC_H
#define ASM_EDAC_H

/* ECC atomic, DMA, SMP and interrupt safe scrub function */

static __inline__ void atomic_scrub(void *va, u32 size)
{
	unsigned int *virt_addr = va;
	u32 i;

	for (i = 0; i < size / 4; i++, virt_addr++)
		/* Very carefully read and write to memory atomically
		 * so we are interrupt, DMA and SMP safe.
		 */
		__asm__ __volatile__("lock; addl $0, %0"::"m"(*virt_addr));
}

#endif
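
Finally, the primitive that both deleted files and the unified header share: "lock; addl $0, %0" is an atomic read-modify-write that leaves the value unchanged. The read fetches the word through the memory controller, which hands back ECC-corrected data even for a single-bit error, and the locked write stores the corrected word back to DRAM, clearing the error; the LOCK prefix keeps other CPUs and DMA agents from slipping in between the read and the write. A standalone userspace sketch of the same idiom (assumes GCC or Clang inline asm on x86; the "+m" constraint, unlike the kernel's input-only "m", spells out for the compiler that the word is both read and written):

#include <stdint.h>

static inline void scrub_word(volatile uint32_t *p)
{
	/* Atomic no-op: read *p, add 0, write it back, under LOCK. */
	__asm__ __volatile__("lock; addl $0, %0" : "+m" (*p));
}

int main(void)
{
	uint32_t buf[16] = { 0 };
	unsigned int i;

	for (i = 0; i < sizeof(buf) / 4; i++)	/* 4-byte stride, as above */
		scrub_word(&buf[i]);
	return 0;
}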