Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 535c0c34 authored by H. Peter Anvin
Browse files

x86, extable: Add _ASM_EXTABLE_EX() macro



Add _ASM_EXTABLE_EX() to generate the special extable entries that are
associated with uaccess_err.  This allows us to change the protocol
associated with these special entries.

Signed-off-by: H. Peter Anvin <hpa@zytor.com>
Cc: David Daney <david.daney@cavium.com>
Link: http://lkml.kernel.org/r/CA%2B55aFyijf43qSu3N9nWHEBwaGbb7T2Oq9A=9EyR=Jtyqfq_cQ@mail.gmail.com
parent a3e859fe
Loading
Loading
Loading
Loading
+20 −8
Original line number Original line Diff line number Diff line
@@ -45,12 +45,24 @@
	_ASM_ALIGN ;						\
	_ASM_ALIGN ;						\
	_ASM_PTR from , to ;					\
	_ASM_PTR from , to ;					\
	.popsection
	.popsection

# define _ASM_EXTABLE_EX(from,to)				\
	.pushsection "__ex_table","a" ;				\
	_ASM_ALIGN ;						\
	_ASM_PTR from , (to) - (from) ;				\
	.popsection
#else
#else
# define _ASM_EXTABLE(from,to)					\
# define _ASM_EXTABLE(from,to)					\
	" .pushsection \"__ex_table\",\"a\"\n"			\
	" .pushsection \"__ex_table\",\"a\"\n"			\
	_ASM_ALIGN "\n" 					\
	_ASM_ALIGN "\n" 					\
	_ASM_PTR #from "," #to "\n" 				\
	_ASM_PTR #from "," #to "\n" 				\
	" .popsection\n"
	" .popsection\n"

# define _ASM_EXTABLE_EX(from,to)				\
	" .pushsection \"__ex_table\",\"a\"\n"			\
	_ASM_ALIGN "\n" 					\
	_ASM_PTR #from ",(" #to ")-(" #from ")\n" 		\
	" .popsection\n"
#endif
#endif


#endif /* _ASM_X86_ASM_H */
#endif /* _ASM_X86_ASM_H */
+4 −4
Original line number Original line Diff line number Diff line
@@ -202,8 +202,8 @@ extern int __get_user_bad(void);
	asm volatile("1:	movl %%eax,0(%1)\n"			\
	asm volatile("1:	movl %%eax,0(%1)\n"			\
		     "2:	movl %%edx,4(%1)\n"			\
		     "2:	movl %%edx,4(%1)\n"			\
		     "3:\n"						\
		     "3:\n"						\
		     _ASM_EXTABLE(1b, 2b - 1b)				\
		     _ASM_EXTABLE_EX(1b, 2b)				\
		     _ASM_EXTABLE(2b, 3b - 2b)				\
		     _ASM_EXTABLE_EX(2b, 3b)				\
		     : : "A" (x), "r" (addr))
		     : : "A" (x), "r" (addr))


#define __put_user_x8(x, ptr, __ret_pu)				\
#define __put_user_x8(x, ptr, __ret_pu)				\
@@ -408,7 +408,7 @@ do { \
#define __get_user_asm_ex(x, addr, itype, rtype, ltype)			\
#define __get_user_asm_ex(x, addr, itype, rtype, ltype)			\
	asm volatile("1:	mov"itype" %1,%"rtype"0\n"		\
	asm volatile("1:	mov"itype" %1,%"rtype"0\n"		\
		     "2:\n"						\
		     "2:\n"						\
		     _ASM_EXTABLE(1b, 2b - 1b)				\
		     _ASM_EXTABLE_EX(1b, 2b)				\
		     : ltype(x) : "m" (__m(addr)))
		     : ltype(x) : "m" (__m(addr)))


#define __put_user_nocheck(x, ptr, size)			\
#define __put_user_nocheck(x, ptr, size)			\
@@ -450,7 +450,7 @@ struct __large_struct { unsigned long buf[100]; };
#define __put_user_asm_ex(x, addr, itype, rtype, ltype)			\
#define __put_user_asm_ex(x, addr, itype, rtype, ltype)			\
	asm volatile("1:	mov"itype" %"rtype"0,%1\n"		\
	asm volatile("1:	mov"itype" %"rtype"0,%1\n"		\
		     "2:\n"						\
		     "2:\n"						\
		     _ASM_EXTABLE(1b, 2b - 1b)				\
		     _ASM_EXTABLE_EX(1b, 2b)				\
		     : : ltype(x), "m" (__m(addr)))
		     : : ltype(x), "m" (__m(addr)))


/*
/*