Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 79a69d34 authored by Linus Torvalds's avatar Linus Torvalds
Browse files
Pull arm64 patches from Catalin Marinas:

 - SMP support for the PSCI booting protocol (power state coordination
   interface).

 - Simple earlyprintk support.

 - Platform devices populated by default from the DT (SoC-agnostic).

 - CONTEXTIDR support (used by external trace tools).

* tag 'arm64-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/cmarinas/linux-aarch64:
  arm64: mm: update CONTEXTIDR register to contain PID of current process
  arm64: atomics: fix grossly inconsistent asm constraints for exclusives
  arm64: compat: use compat_uptr_t type for compat_ucontext.uc_link
  arm64: Select ARCH_WANT_FRAME_POINTERS
  arm64: Add kvm_para.h and xor.h generic headers
  arm64: SMP: enable PSCI boot method
  arm64: psci: add support for PSCI invocations from the kernel
  arm64: SMP: rework the SMP code to be enabling method agnostic
  arm64: perf: add guest vs host discrimination
  arm64: add COMPAT_PSR_*_BIT flags
  arm64: Add simple earlyprintk support
  arm64: Populate the platform devices
parents 6db167df ec45d1cf
Loading
Loading
Loading
Loading
+2 −0
Original line number Diff line number Diff line
@@ -35,6 +35,8 @@ ffffffbc00000000 ffffffbdffffffff 8GB vmemmap

ffffffbe00000000	ffffffbffbbfffff	  ~8GB		[guard, future vmemmap]

ffffffbffbc00000	ffffffbffbdfffff	   2MB		earlyprintk device

ffffffbffbe00000	ffffffbffbe0ffff	  64KB		PCI I/O space

ffffffbbffff0000	ffffffbcffffffff	  ~2MB		[guard]
+1 −0
Original line number Diff line number Diff line
@@ -2,6 +2,7 @@ config ARM64
	def_bool y
	select ARCH_HAS_ATOMIC64_DEC_IF_POSITIVE
	select ARCH_WANT_COMPAT_IPC_PARSE_VERSION
	select ARCH_WANT_FRAME_POINTERS
	select ARM_AMBA
	select CLONE_BACKWARDS
	select COMMON_CLK
+17 −0
Original line number Diff line number Diff line
@@ -24,4 +24,21 @@ config DEBUG_STACK_USAGE
	  Enables the display of the minimum amount of free stack which each
	  task has ever had available in the sysrq-T output.

config EARLY_PRINTK
	bool "Early printk support"
	default y
	help
	  Say Y here if you want to have an early console using the
	  earlyprintk=<name>[,<addr>][,<options>] kernel parameter. It
	  is assumed that the early console device has been initialised
	  by the boot loader prior to starting the Linux kernel.

config PID_IN_CONTEXTIDR
	bool "Write the current PID to the CONTEXTIDR register"
	help
	  Enabling this option causes the kernel to write the current PID to
	  the CONTEXTIDR register, at the expense of some additional
	  instructions during context switch. Say Y here only if you are
	  planning to use hardware trace tools with this kernel.

endmenu
+2 −0
Original line number Diff line number Diff line
@@ -19,6 +19,7 @@ generic-y += ipcbuf.h
generic-y += irq_regs.h
generic-y += kdebug.h
generic-y += kmap_types.h
generic-y += kvm_para.h
generic-y += local.h
generic-y += local64.h
generic-y += mman.h
@@ -48,3 +49,4 @@ generic-y += trace_clock.h
generic-y += types.h
generic-y += unaligned.h
generic-y += user.h
generic-y += xor.h
+66 −66
Original line number Diff line number Diff line
@@ -49,12 +49,12 @@ static inline void atomic_add(int i, atomic_t *v)
	int result;

	asm volatile("// atomic_add\n"
"1:	ldxr	%w0, [%3]\n"
"	add	%w0, %w0, %w4\n"
"	stxr	%w1, %w0, [%3]\n"
"1:	ldxr	%w0, %2\n"
"	add	%w0, %w0, %w3\n"
"	stxr	%w1, %w0, %2\n"
"	cbnz	%w1, 1b"
	: "=&r" (result), "=&r" (tmp), "+o" (v->counter)
	: "r" (&v->counter), "Ir" (i)
	: "=&r" (result), "=&r" (tmp), "+Q" (v->counter)
	: "Ir" (i)
	: "cc");
}

@@ -64,13 +64,13 @@ static inline int atomic_add_return(int i, atomic_t *v)
	int result;

	asm volatile("// atomic_add_return\n"
"1:	ldaxr	%w0, [%3]\n"
"	add	%w0, %w0, %w4\n"
"	stlxr	%w1, %w0, [%3]\n"
"1:	ldaxr	%w0, %2\n"
"	add	%w0, %w0, %w3\n"
"	stlxr	%w1, %w0, %2\n"
"	cbnz	%w1, 1b"
	: "=&r" (result), "=&r" (tmp), "+o" (v->counter)
	: "r" (&v->counter), "Ir" (i)
	: "cc");
	: "=&r" (result), "=&r" (tmp), "+Q" (v->counter)
	: "Ir" (i)
	: "cc", "memory");

	return result;
}
@@ -81,12 +81,12 @@ static inline void atomic_sub(int i, atomic_t *v)
	int result;

	asm volatile("// atomic_sub\n"
"1:	ldxr	%w0, [%3]\n"
"	sub	%w0, %w0, %w4\n"
"	stxr	%w1, %w0, [%3]\n"
"1:	ldxr	%w0, %2\n"
"	sub	%w0, %w0, %w3\n"
"	stxr	%w1, %w0, %2\n"
"	cbnz	%w1, 1b"
	: "=&r" (result), "=&r" (tmp), "+o" (v->counter)
	: "r" (&v->counter), "Ir" (i)
	: "=&r" (result), "=&r" (tmp), "+Q" (v->counter)
	: "Ir" (i)
	: "cc");
}

@@ -96,13 +96,13 @@ static inline int atomic_sub_return(int i, atomic_t *v)
	int result;

	asm volatile("// atomic_sub_return\n"
"1:	ldaxr	%w0, [%3]\n"
"	sub	%w0, %w0, %w4\n"
"	stlxr	%w1, %w0, [%3]\n"
"1:	ldaxr	%w0, %2\n"
"	sub	%w0, %w0, %w3\n"
"	stlxr	%w1, %w0, %2\n"
"	cbnz	%w1, 1b"
	: "=&r" (result), "=&r" (tmp), "+o" (v->counter)
	: "r" (&v->counter), "Ir" (i)
	: "cc");
	: "=&r" (result), "=&r" (tmp), "+Q" (v->counter)
	: "Ir" (i)
	: "cc", "memory");

	return result;
}
@@ -113,15 +113,15 @@ static inline int atomic_cmpxchg(atomic_t *ptr, int old, int new)
	int oldval;

	asm volatile("// atomic_cmpxchg\n"
"1:	ldaxr	%w1, [%3]\n"
"	cmp	%w1, %w4\n"
"1:	ldaxr	%w1, %2\n"
"	cmp	%w1, %w3\n"
"	b.ne	2f\n"
"	stlxr	%w0, %w5, [%3]\n"
"	stlxr	%w0, %w4, %2\n"
"	cbnz	%w0, 1b\n"
"2:"
	: "=&r" (tmp), "=&r" (oldval), "+o" (ptr->counter)
	: "r" (&ptr->counter), "Ir" (old), "r" (new)
	: "cc");
	: "=&r" (tmp), "=&r" (oldval), "+Q" (ptr->counter)
	: "Ir" (old), "r" (new)
	: "cc", "memory");

	return oldval;
}
@@ -131,12 +131,12 @@ static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
	unsigned long tmp, tmp2;

	asm volatile("// atomic_clear_mask\n"
"1:	ldxr	%0, [%3]\n"
"	bic	%0, %0, %4\n"
"	stxr	%w1, %0, [%3]\n"
"1:	ldxr	%0, %2\n"
"	bic	%0, %0, %3\n"
"	stxr	%w1, %0, %2\n"
"	cbnz	%w1, 1b"
	: "=&r" (tmp), "=&r" (tmp2), "+o" (*addr)
	: "r" (addr), "Ir" (mask)
	: "=&r" (tmp), "=&r" (tmp2), "+Q" (*addr)
	: "Ir" (mask)
	: "cc");
}

@@ -182,12 +182,12 @@ static inline void atomic64_add(u64 i, atomic64_t *v)
	unsigned long tmp;

	asm volatile("// atomic64_add\n"
"1:	ldxr	%0, [%3]\n"
"	add	%0, %0, %4\n"
"	stxr	%w1, %0, [%3]\n"
"1:	ldxr	%0, %2\n"
"	add	%0, %0, %3\n"
"	stxr	%w1, %0, %2\n"
"	cbnz	%w1, 1b"
	: "=&r" (result), "=&r" (tmp), "+o" (v->counter)
	: "r" (&v->counter), "Ir" (i)
	: "=&r" (result), "=&r" (tmp), "+Q" (v->counter)
	: "Ir" (i)
	: "cc");
}

@@ -197,13 +197,13 @@ static inline long atomic64_add_return(long i, atomic64_t *v)
	unsigned long tmp;

	asm volatile("// atomic64_add_return\n"
"1:	ldaxr	%0, [%3]\n"
"	add	%0, %0, %4\n"
"	stlxr	%w1, %0, [%3]\n"
"1:	ldaxr	%0, %2\n"
"	add	%0, %0, %3\n"
"	stlxr	%w1, %0, %2\n"
"	cbnz	%w1, 1b"
	: "=&r" (result), "=&r" (tmp), "+o" (v->counter)
	: "r" (&v->counter), "Ir" (i)
	: "cc");
	: "=&r" (result), "=&r" (tmp), "+Q" (v->counter)
	: "Ir" (i)
	: "cc", "memory");

	return result;
}
@@ -214,12 +214,12 @@ static inline void atomic64_sub(u64 i, atomic64_t *v)
	unsigned long tmp;

	asm volatile("// atomic64_sub\n"
"1:	ldxr	%0, [%3]\n"
"	sub	%0, %0, %4\n"
"	stxr	%w1, %0, [%3]\n"
"1:	ldxr	%0, %2\n"
"	sub	%0, %0, %3\n"
"	stxr	%w1, %0, %2\n"
"	cbnz	%w1, 1b"
	: "=&r" (result), "=&r" (tmp), "+o" (v->counter)
	: "r" (&v->counter), "Ir" (i)
	: "=&r" (result), "=&r" (tmp), "+Q" (v->counter)
	: "Ir" (i)
	: "cc");
}

@@ -229,13 +229,13 @@ static inline long atomic64_sub_return(long i, atomic64_t *v)
	unsigned long tmp;

	asm volatile("// atomic64_sub_return\n"
"1:	ldaxr	%0, [%3]\n"
"	sub	%0, %0, %4\n"
"	stlxr	%w1, %0, [%3]\n"
"1:	ldaxr	%0, %2\n"
"	sub	%0, %0, %3\n"
"	stlxr	%w1, %0, %2\n"
"	cbnz	%w1, 1b"
	: "=&r" (result), "=&r" (tmp), "+o" (v->counter)
	: "r" (&v->counter), "Ir" (i)
	: "cc");
	: "=&r" (result), "=&r" (tmp), "+Q" (v->counter)
	: "Ir" (i)
	: "cc", "memory");

	return result;
}
@@ -246,15 +246,15 @@ static inline long atomic64_cmpxchg(atomic64_t *ptr, long old, long new)
	unsigned long res;

	asm volatile("// atomic64_cmpxchg\n"
"1:	ldaxr	%1, [%3]\n"
"	cmp	%1, %4\n"
"1:	ldaxr	%1, %2\n"
"	cmp	%1, %3\n"
"	b.ne	2f\n"
"	stlxr	%w0, %5, [%3]\n"
"	stlxr	%w0, %4, %2\n"
"	cbnz	%w0, 1b\n"
"2:"
	: "=&r" (res), "=&r" (oldval), "+o" (ptr->counter)
	: "r" (&ptr->counter), "Ir" (old), "r" (new)
	: "cc");
	: "=&r" (res), "=&r" (oldval), "+Q" (ptr->counter)
	: "Ir" (old), "r" (new)
	: "cc", "memory");

	return oldval;
}
@@ -267,15 +267,15 @@ static inline long atomic64_dec_if_positive(atomic64_t *v)
	unsigned long tmp;

	asm volatile("// atomic64_dec_if_positive\n"
"1:	ldaxr	%0, [%3]\n"
"1:	ldaxr	%0, %2\n"
"	subs	%0, %0, #1\n"
"	b.mi	2f\n"
"	stlxr	%w1, %0, [%3]\n"
"	stlxr	%w1, %0, %2\n"
"	cbnz	%w1, 1b\n"
"2:"
	: "=&r" (result), "=&r" (tmp), "+o" (v->counter)
	: "r" (&v->counter)
	: "cc");
	: "=&r" (result), "=&r" (tmp), "+Q" (v->counter)
	:
	: "cc", "memory");

	return result;
}
Loading