Switch to using relative exception table entries on x86. On i386, this has the advantage that the exception table entries don't need to be relocated; on x86-64 this means the exception table entries take up only half the space. In either case, a 32-bit delta is sufficient, as the range of kernel code addresses is limited. Since part of the goal is to avoid needing to adjust the entries when the kernel is relocated, the old trick of using addresses in the NULL pointer range to indicate uaccess_err no longer works (and unlike RISC architectures we can't use a flag bit); instead use a delta just below +2G to indicate these special entries. The reach is still limited to a single instruction. Signed-off-by: H. Peter Anvin <hpa@zytor.com> Cc: David Daney <david.daney@cavium.com> Link: http://lkml.kernel.org/r/CA%2B55aFyijf43qSu3N9nWHEBwaGbb7T2Oq9A=9EyR=Jtyqfq_cQ@mail.gmail.com
		
			
				
	
	
		
			72 lines
		
	
	
	
		
			1.8 KiB
			
		
	
	
	
		
			C
		
	
	
	
	
	
			
		
		
	
	
			72 lines
		
	
	
	
		
			1.8 KiB
			
		
	
	
	
		
			C
		
	
	
	
	
	
#ifndef _ASM_X86_ASM_H
#define _ASM_X86_ASM_H

/*
 * Helpers for writing assembly fragments that work both when assembled
 * directly (from .S files, where __ASSEMBLY__ is defined) and when
 * embedded in C inline-asm strings.
 *
 * __ASM_FORM(x) emits its argument verbatim in assembly files, but as a
 * quoted, space-padded string fragment when included from C, so one macro
 * definition serves both consumers.
 */
#ifdef __ASSEMBLY__
# define __ASM_FORM(x)	x
# define __ASM_FORM_COMMA(x) x,
#else
# define __ASM_FORM(x)	" " #x " "
# define __ASM_FORM_COMMA(x) " " #x ","
#endif

/* __ASM_SEL(a,b): select the i386 variant (a) or the x86-64 variant (b). */
#ifdef CONFIG_X86_32
# define __ASM_SEL(a,b) __ASM_FORM(a)
#else
# define __ASM_SEL(a,b) __ASM_FORM(b)
#endif

/*
 * __ASM_SIZE(inst, ...): paste the native operand-size suffix onto a
 * mnemonic ("l" on 32-bit, "q" on 64-bit); any extra tokens follow the
 * suffix via __VA_ARGS__.
 * __ASM_REG(reg): native-width register name (e##reg vs r##reg).
 */
#define __ASM_SIZE(inst, ...)	__ASM_SEL(inst##l##__VA_ARGS__, \
					  inst##q##__VA_ARGS__)
#define __ASM_REG(reg)		__ASM_SEL(e##reg, r##reg)

/* Pointer-sized data directive and the matching alignment directive. */
#define _ASM_PTR	__ASM_SEL(.long, .quad)
#define _ASM_ALIGN	__ASM_SEL(.balign 4, .balign 8)

/* Native-width instruction mnemonics. */
#define _ASM_MOV	__ASM_SIZE(mov)
#define _ASM_INC	__ASM_SIZE(inc)
#define _ASM_DEC	__ASM_SIZE(dec)
#define _ASM_ADD	__ASM_SIZE(add)
#define _ASM_SUB	__ASM_SIZE(sub)
#define _ASM_XADD	__ASM_SIZE(xadd)

/* Native-width register names. */
#define _ASM_AX		__ASM_REG(ax)
#define _ASM_BX		__ASM_REG(bx)
#define _ASM_CX		__ASM_REG(cx)
#define _ASM_DX		__ASM_REG(dx)
#define _ASM_SP		__ASM_REG(sp)
#define _ASM_BP		__ASM_REG(bp)
#define _ASM_SI		__ASM_REG(si)
#define _ASM_DI		__ASM_REG(di)

/*
 * Exception table entry.
 *
 * Each entry is a pair of 32-bit self-relative deltas (faulting address,
 * fixup address).  Relative entries need no relocation when the kernel is
 * moved, and on x86-64 they are half the size of absolute 64-bit pairs.
 *
 * _ASM_EXTABLE_EX marks a uaccess_err ("extended") entry by biasing the
 * fixup delta with 0x7ffffff0 — a value just below +2G that a genuine
 * single-instruction fixup delta can never reach.
 */
#ifdef __ASSEMBLY__
# define _ASM_EXTABLE(from,to)					\
	.pushsection "__ex_table","a" ;				\
	.balign 8 ;						\
	.long (from) - . ;					\
	.long (to) - . ;					\
	.popsection

# define _ASM_EXTABLE_EX(from,to)				\
	.pushsection "__ex_table","a" ;				\
	.balign 8 ;						\
	.long (from) - . ;					\
	.long (to) - . + 0x7ffffff0 ;				\
	.popsection
#else
# define _ASM_EXTABLE(from,to)					\
	" .pushsection \"__ex_table\",\"a\"\n"			\
	" .balign 8\n"						\
	" .long (" #from ") - .\n"				\
	" .long (" #to ") - .\n"				\
	" .popsection\n"

# define _ASM_EXTABLE_EX(from,to)				\
	" .pushsection \"__ex_table\",\"a\"\n"			\
	" .balign 8\n"						\
	" .long (" #from ") - .\n"				\
	" .long (" #to ") - . + 0x7ffffff0\n"			\
	" .popsection\n"
#endif

#endif /* _ASM_X86_ASM_H */
 |