Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 87e29cac authored by Paul Mundt
Browse files

sh: Use L1_CACHE_BYTES for .data.cacheline_aligned.



Previously this was using a hardcoded 32, use L1_CACHE_BYTES for
cacheline alignment instead.

Signed-off-by: Paul Mundt <lethal@linux-sh.org>
parent 5c36e657
Loading
Loading
Loading
Loading
+2 −1
Original line number Diff line number Diff line
@@ -3,6 +3,7 @@
 * Written by Niibe Yutaka
 */
#include <asm/thread_info.h>
#include <asm/cache.h>
#include <asm-generic/vmlinux.lds.h>

#ifdef CONFIG_CPU_LITTLE_ENDIAN
@@ -53,7 +54,7 @@ SECTIONS
  . = ALIGN(PAGE_SIZE);
  .data.page_aligned : { *(.data.page_aligned) }

  . = ALIGN(32);
  . = ALIGN(L1_CACHE_BYTES);
  __per_cpu_start = .;
  .data.percpu : { *(.data.percpu) }
  __per_cpu_end = .;
+2 −1
Original line number Diff line number Diff line
@@ -21,6 +21,7 @@

#define L1_CACHE_ALIGN(x)	(((x)+(L1_CACHE_BYTES-1))&~(L1_CACHE_BYTES-1))

#ifndef __ASSEMBLY__
struct cache_info {
	unsigned int ways;		/* Number of cache ways */
	unsigned int sets;		/* Number of cache sets */
@@ -47,6 +48,6 @@ struct cache_info {

	unsigned long flags;
};

#endif /* __ASSEMBLY__ */
#endif /* __KERNEL__ */
#endif /* __ASM_SH_CACHE_H */