Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit d113fcd9 authored by David S. Miller, committed by David S. Miller
Browse files

[SPARC]: Merge asm-sparc{,64}/cache.h

parent f610bbc6
Loading
Loading
Loading
Loading
+4 −0
Original line number Diff line number Diff line
@@ -89,6 +89,10 @@ SECTIONS
	.data.cacheline_aligned : {
		*(.data.cacheline_aligned)
	}
	. = ALIGN(32);
	.data.read_mostly : {
		*(.data.read_mostly)
	}

	__bss_start = .;
	.sbss : {
+15 −6
Original line number Diff line number Diff line
/* $Id: cache.h,v 1.9 1999/08/14 03:51:58 anton Exp $
 * cache.h:  Cache specific code for the Sparc.  These include flushing
/* cache.h:  Cache specific code for the Sparc.  These include flushing
 *           and direct tag/data line access.
 *
 * Copyright (C) 1995 David S. Miller (davem@caip.rutgers.edu)
 * Copyright (C) 1995, 2007 David S. Miller (davem@davemloft.net)
 */

#ifndef _SPARC_CACHE_H
#define _SPARC_CACHE_H

#include <asm/asi.h>

#define L1_CACHE_SHIFT 5
#define L1_CACHE_BYTES 32
#define L1_CACHE_ALIGN(x) ((((x)+(L1_CACHE_BYTES-1))&~(L1_CACHE_BYTES-1)))

#define SMP_CACHE_BYTES 32
#ifdef CONFIG_SPARC32
#define SMP_CACHE_BYTES_SHIFT 5
#else
#define SMP_CACHE_BYTES_SHIFT 6
#endif

#define SMP_CACHE_BYTES (1 << SMP_CACHE_BYTES_SHIFT)

#define __read_mostly __attribute__((__section__(".data.read_mostly")))

#ifdef CONFIG_SPARC32
#include <asm/asi.h>

/* Direct access to the instruction cache is provided through an
 * alternate address space.  The IDC bit must be off in the ICCR on
@@ -125,5 +133,6 @@ static inline void flush_ei_user(unsigned int addr)
			     "r" (addr), "i" (ASI_M_FLUSH_USER) :
			     "memory");
}
#endif /* CONFIG_SPARC32 */

#endif /* !(_SPARC_CACHE_H) */
+1 −18
Original line number Diff line number Diff line
/*
 * include/asm-sparc64/cache.h
 */
#ifndef __ARCH_SPARC64_CACHE_H
#define __ARCH_SPARC64_CACHE_H

/* bytes per L1 cache line */
#define        L1_CACHE_SHIFT	5
#define        L1_CACHE_BYTES	32 /* Two 16-byte sub-blocks per line. */

#define        L1_CACHE_ALIGN(x)       (((x)+(L1_CACHE_BYTES-1))&~(L1_CACHE_BYTES-1))

#define        SMP_CACHE_BYTES_SHIFT	6
#define        SMP_CACHE_BYTES		(1 << SMP_CACHE_BYTES_SHIFT) /* L2 cache line size. */

#define __read_mostly __attribute__((__section__(".data.read_mostly")))

#endif
#include <asm-sparc/cache.h>