Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit e2fd1374 authored by Max Filippov
Browse files

xtensa: introduce spill_registers_kernel macro



Most in-kernel users want registers spilled on the kernel stack and
don't require PS.EXCM to be set. That means that they don't need a
fixup routine and can reuse the regular window overflow mechanism,
which makes the spill routine very simple.

Cc: stable@vger.kernel.org
Suggested-by: Chris Zankel <chris@zankel.net>
Signed-off-by: Max Filippov <jcmvbkbc@gmail.com>
parent 45ec8860
Loading
Loading
Loading
Loading
+28 −16
Original line number Diff line number Diff line
@@ -23,25 +23,37 @@ void secondary_trap_init(void);

static inline void spill_registers(void)
{
	/*
	 * Spill live register windows to the stack.
	 *
	 * NOTE(review): the lines below are interleaved pre-/post-patch diff
	 * residue (the +/- markers were lost in extraction), so this body is
	 * NOT one coherent implementation and will not compile as shown.
	 * Per the commit message: the pre-patch variant saved a0/SAR/PS, set
	 * PS.EXCM|LOCKLEVEL and called the _spill_registers fixup routine;
	 * the post-patch variant forces regular window overflows via a
	 * call12/_entry/retw chain instead.  Consult the upstream commit for
	 * the true before/after text.
	 */

#if XCHAL_NUM_AREGS > 16
	__asm__ __volatile__ (
		/* pre-patch path: enter exception mode, call _spill_registers,
		 * then restore a0, SAR and PS */
		"movi	a14, "__stringify((1 << PS_EXCM_BIT) | LOCKLEVEL)"\n\t"
		"mov	a12, a0\n\t"
		"rsr	a13, sar\n\t"
		"xsr	a14, ps\n\t"
		"movi	a0, _spill_registers\n\t"
		"rsync\n\t"
		"callx0 a0\n\t"
		"mov	a0, a12\n\t"
		"wsr	a13, sar\n\t"
		"wsr	a14, ps\n\t"
		: :
#if defined(CONFIG_FRAME_POINTER)
		/* pre-patch clobber list (frame-pointer config) */
		: "a2", "a3", "a4",       "a11", "a12", "a13", "a14", "a15",
		/* post-patch path: spill by triggering regular window
		 * overflows with a call12/_entry/retw sequence */
		"	call12	1f\n"
		"	_j	2f\n"
		"	retw\n"
		"	.align	4\n"
		"1:\n"
		"	_entry	a1, 48\n"
		"	addi	a12, a0, 3\n"
#if XCHAL_NUM_AREGS > 32
		"	.rept	(" __stringify(XCHAL_NUM_AREGS) " - 32) / 12\n"
		"	_entry	a1, 48\n"
		"	mov	a12, a0\n"
		"	.endr\n"
#endif
		"	_entry	a1, 48\n"
#if XCHAL_NUM_AREGS % 12 == 0
		"	mov	a8, a8\n"
#elif XCHAL_NUM_AREGS % 12 == 4
		"	mov	a12, a12\n"
#elif XCHAL_NUM_AREGS % 12 == 8
		"	mov	a4, a4\n"
#endif
		"	retw\n"
		"2:\n"
		: : : "a12", "a13", "memory");
#else
		/* pre-patch clobber list (non-frame-pointer config) */
		: "a2", "a3", "a4", "a7", "a11", "a12", "a13", "a14", "a15",
	/* <= 16 physical AREGS: a single nop-sized asm is all that is
	 * needed (no extra windows to spill) */
	__asm__ __volatile__ (
		"	mov	a12, a12\n"
		: : : "memory");
#endif
		  "memory");
}

#endif /* _XTENSA_TRAPS_H */
+48 −12
Original line number Diff line number Diff line
@@ -1794,6 +1794,43 @@ ENTRY(system_call)

ENDPROC(system_call)

/*
 * Spill live registers on the kernel stack macro.
 *
 * Entry condition: ps.woe is set, ps.excm is cleared
 * Exit condition: windowstart has single bit set
 * May clobber: a12, a13
 */
	/*
	 * Spill all live register windows onto the kernel stack by running
	 * the regular window-overflow machinery, instead of calling the
	 * exception-mode _spill_registers routine.  See the header comment
	 * above for the contract: entered with ps.woe set and ps.excm
	 * cleared; exits with a single bit set in windowstart; may clobber
	 * a12 and a13.
	 */
	.macro	spill_registers_kernel

#if XCHAL_NUM_AREGS > 16
	/*
	 * Open the largest (12-register) call frame and branch into the
	 * spill chain at 1:.  When the chain's retw cascade finally returns
	 * here, skip over the chain body to 2:.
	 */
	call12	1f
	_j	2f
	retw
	.align	4
1:
	_entry	a1, 48
	/*
	 * NOTE(review): a0 holds the windowed return address from call12;
	 * the addi presumably adjusts its window-increment bits so the
	 * closing retw cascade unwinds with the correct rotation — confirm
	 * against the Xtensa ISA windowed-call description.
	 */
	addi	a12, a0, 3
#if XCHAL_NUM_AREGS > 32
	/*
	 * Walk around the rest of the physical register file, one
	 * 12-register frame per _entry, so that every live window is
	 * eventually forced out to the stack by window overflow.
	 */
	.rept	(XCHAL_NUM_AREGS - 32) / 12
	_entry	a1, 48
	mov	a12, a0
	.endr
#endif
	_entry	a1, 48
	/*
	 * XCHAL_NUM_AREGS need not be a multiple of 12; the register picked
	 * for this final nop differs per remainder (presumably encoding the
	 * size of the last partial rotation — TODO confirm).
	 */
#if XCHAL_NUM_AREGS % 12 == 0
	mov	a8, a8
#elif XCHAL_NUM_AREGS % 12 == 4
	mov	a12, a12
#elif XCHAL_NUM_AREGS % 12 == 8
	mov	a4, a4
#endif
	retw
2:
#else
	/* Only 16 physical AREGS: no extra windows exist, nop suffices. */
	mov	a12, a12
#endif
	.endm

/*
 * Task switch.
@@ -1806,21 +1843,20 @@ ENTRY(_switch_to)

	entry	a1, 16

	mov	a12, a2			# preserve 'prev' (a2)
	mov	a13, a3			# and 'next' (a3)
	mov	a10, a2			# preserve 'prev' (a2)
	mov	a11, a3			# and 'next' (a3)

	l32i	a4, a2, TASK_THREAD_INFO
	l32i	a5, a3, TASK_THREAD_INFO

	save_xtregs_user a4 a6 a8 a9 a10 a11 THREAD_XTREGS_USER
	save_xtregs_user a4 a6 a8 a9 a12 a13 THREAD_XTREGS_USER

	s32i	a0, a12, THREAD_RA	# save return address
	s32i	a1, a12, THREAD_SP	# save stack pointer
	s32i	a0, a10, THREAD_RA	# save return address
	s32i	a1, a10, THREAD_SP	# save stack pointer

	/* Disable ints while we manipulate the stack pointer. */

	movi	a14, (1 << PS_EXCM_BIT) | LOCKLEVEL
	xsr	a14, ps
	rsil	a14, LOCKLEVEL
	rsr	a3, excsave1
	rsync
	s32i	a3, a3, EXC_TABLE_FIXUP	/* enter critical section */
@@ -1835,7 +1871,7 @@ ENTRY(_switch_to)

	/* Flush register file. */

	call0	_spill_registers	# destroys a3, a4, and SAR
	spill_registers_kernel

	/* Set kernel stack (and leave critical section)
	 * Note: It's save to set it here. The stack will not be overwritten
@@ -1851,13 +1887,13 @@ ENTRY(_switch_to)

	/* restore context of the task 'next' */

	l32i	a0, a13, THREAD_RA	# restore return address
	l32i	a1, a13, THREAD_SP	# restore stack pointer
	l32i	a0, a11, THREAD_RA	# restore return address
	l32i	a1, a11, THREAD_SP	# restore stack pointer

	load_xtregs_user a5 a6 a8 a9 a10 a11 THREAD_XTREGS_USER
	load_xtregs_user a5 a6 a8 a9 a12 a13 THREAD_XTREGS_USER

	wsr	a14, ps
	mov	a2, a12			# return 'prev'
	mov	a2, a10			# return 'prev'
	rsync

	retw