#ifndef _ASM_X86_SPECIAL_INSNS_H
#define _ASM_X86_SPECIAL_INSNS_H


#ifdef __KERNEL__

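/*
 * CLTS clears the Task Switched flag (CR0.TS), so the next FPU/SSE
 * instruction will not raise a device-not-available (#NM) fault.
 */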
static inline void native_clts(void)
{
	asm volatile("clts");
}

/*
 * Volatile isn't enough to prevent the compiler from reordering the
 * read/write functions for the control registers and messing everything up.
 * A memory clobber would solve the problem, but would prevent reordering of
 * all loads/stores around it, which can hurt performance. The solution is to
 * use a variable and mimic reads and writes to it to enforce serialization.
 */
extern unsigned long __force_order;

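/*
 * The CR0-CR4 accessors below list "=m" (__force_order) as an asm output
 * on reads and "m" (__force_order) as an input on writes, giving the
 * compiler an artificial dependency that keeps these asm statements
 * ordered without a full memory clobber.
 */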
static inline unsigned long native_read_cr0(void)
{
	unsigned long val;
	asm volatile("mov %%cr0,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

static inline void native_write_cr0(unsigned long val)
{
	asm volatile("mov %0,%%cr0": : "r" (val), "m" (__force_order));
}

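/* CR2 holds the faulting linear address after a page fault. */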
static inline unsigned long native_read_cr2(void)
{
	unsigned long val;
	asm volatile("mov %%cr2,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

static inline void native_write_cr2(unsigned long val)
{
	asm volatile("mov %0,%%cr2": : "r" (val), "m" (__force_order));
}

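/* CR3 holds the physical base address of the top-level page table. */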
static inline unsigned long native_read_cr3(void)
{
	unsigned long val;
	asm volatile("mov %%cr3,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

static inline void native_write_cr3(unsigned long val)
{
	asm volatile("mov %0,%%cr3": : "r" (val), "m" (__force_order));
}

static inline unsigned long native_read_cr4(void)
{
	unsigned long val;
	asm volatile("mov %%cr4,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

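/*
 * On 32-bit, reading %cr4 can fault on CPUs that predate the register;
 * the _ASM_EXTABLE entry below makes the faulting mov resume at the
 * following label, leaving val at its preset value of 0.
 */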
static inline unsigned long native_read_cr4_safe(void)
{
	unsigned long val;
	/* This could fault if %cr4 does not exist. On x86_64, %cr4 always
	 * exists, so it will never fault. */
#ifdef CONFIG_X86_32
	asm volatile("1: mov %%cr4, %0\n"
		     "2:\n"
		     _ASM_EXTABLE(1b, 2b)
		     : "=r" (val), "=m" (__force_order) : "0" (0));
#else
	val = native_read_cr4();
#endif
	return val;
}

static inline void native_write_cr4(unsigned long val)
{
	asm volatile("mov %0,%%cr4": : "r" (val), "m" (__force_order));
}

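/* CR8 is the task-priority register (TPR); it is only addressable in 64-bit mode. */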
#ifdef CONFIG_X86_64
static inline unsigned long native_read_cr8(void)
{
	unsigned long cr8;
	asm volatile("movq %%cr8,%0" : "=r" (cr8));
	return cr8;
}

static inline void native_write_cr8(unsigned long val)
{
	asm volatile("movq %0,%%cr8" :: "r" (val) : "memory");
}
#endif

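/* WBINVD writes back and invalidates all caches; it is an expensive, serializing instruction. */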
static inline void native_wbinvd(void)
{
	asm volatile("wbinvd": : :"memory");
}

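/*
 * native_load_gs_index() is implemented in assembly: reloading the %gs
 * selector also reloads the GS base, so the kernel's per-CPU GS base has
 * to be preserved carefully across the load.
 */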
extern asmlinkage void native_load_gs_index(unsigned);

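/*
 * With CONFIG_PARAVIRT, the control-register and other wrappers below are
 * provided by <asm/paravirt.h> so a hypervisor can intercept them;
 * otherwise they map directly onto the native_* helpers above.
 */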
#ifdef CONFIG_PARAVIRT
#include <asm/paravirt.h>
#else

static inline unsigned long read_cr0(void)
{
	return native_read_cr0();
}

static inline void write_cr0(unsigned long x)
{
	native_write_cr0(x);
}

static inline unsigned long read_cr2(void)
{
	return native_read_cr2();
}

static inline void write_cr2(unsigned long x)
{
	native_write_cr2(x);
}

static inline unsigned long read_cr3(void)
{
	return native_read_cr3();
}

static inline void write_cr3(unsigned long x)
{
	native_write_cr3(x);
}

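/*
 * Note: the double-underscored CR4 accessors act on the register itself;
 * most kernel code is expected to go through the cr4_set_bits()/
 * cr4_clear_bits() helpers, which also keep a per-CPU shadow copy of CR4.
 */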
static inline unsigned long __read_cr4(void)
{
	return native_read_cr4();
}

static inline unsigned long __read_cr4_safe(void)
{
	return native_read_cr4_safe();
}

static inline void __write_cr4(unsigned long x)
{
	native_write_cr4(x);
}

static inline void wbinvd(void)
{
	native_wbinvd();
}

#ifdef CONFIG_X86_64

static inline unsigned long read_cr8(void)
{
	return native_read_cr8();
}

static inline void write_cr8(unsigned long x)
{
	native_write_cr8(x);
}

static inline void load_gs_index(unsigned selector)
{
	native_load_gs_index(selector);
}

#endif

/* Clear the 'TS' bit */
static inline void clts(void)
{
	native_clts();
}

#endif /* CONFIG_PARAVIRT */

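/* Set CR0.TS so the next FPU/SSE instruction raises #NM (device-not-available). */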
#define stts() write_cr0(read_cr0() | X86_CR0_TS)

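/* Flush the cache line containing *__p from all levels of the cache hierarchy. */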
static inline void clflush(volatile void *__p)
{
	asm volatile("clflush %0" : "+m" (*(volatile char __force *)__p));
}

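/*
 * clflushopt() uses the alternatives mechanism: CPUs without
 * X86_FEATURE_CLFLUSHOPT execute a DS-prefixed CLFLUSH (same behaviour),
 * while CPUs with the feature get the 0x66-prefixed CLFLUSHOPT encoding
 * patched in at boot.
 */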
static inline void clflushopt(volatile void *__p)
{
	alternative_io(".byte " __stringify(NOP_DS_PREFIX) "; clflush %P0",
		       ".byte 0x66; clflush %P0",
		       X86_FEATURE_CLFLUSHOPT,
		       "+m" (*(volatile char __force *)__p));
}

#define nop() asm volatile ("nop")


#endif /* __KERNEL__ */

#endif /* _ASM_X86_SPECIAL_INSNS_H */