/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_SPECIAL_INSNS_H
#define _ASM_X86_SPECIAL_INSNS_H


#ifdef __KERNEL__

#include <asm/nops.h>

/*
 * Volatile isn't enough to prevent the compiler from reordering the
 * read/write functions for the control registers and messing everything up.
 * A memory clobber would solve the problem, but would prevent reordering of
 * all loads/stores around it, which can hurt performance. The solution is to
 * use a variable and mimic reads and writes to it to enforce serialization.
 */
extern unsigned long __force_order;

static inline unsigned long native_read_cr0(void)
{
	unsigned long val;
	asm volatile("mov %%cr0,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

static inline void native_write_cr0(unsigned long val)
{
	asm volatile("mov %0,%%cr0" : : "r" (val), "m" (__force_order));
}

static inline unsigned long native_read_cr2(void)
{
	unsigned long val;
	asm volatile("mov %%cr2,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

static inline void native_write_cr2(unsigned long val)
{
	asm volatile("mov %0,%%cr2" : : "r" (val), "m" (__force_order));
}

static inline unsigned long __native_read_cr3(void)
{
	unsigned long val;
	asm volatile("mov %%cr3,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

static inline void native_write_cr3(unsigned long val)
{
	asm volatile("mov %0,%%cr3" : : "r" (val), "m" (__force_order));
}

static inline unsigned long native_read_cr4(void)
{
	unsigned long val;
#ifdef CONFIG_X86_32
	/*
	 * This could fault if CR4 does not exist. Non-existent CR4
	 * is functionally equivalent to CR4 == 0. Keep it simple and pretend
	 * that CR4 == 0 on CPUs that don't have CR4.
	 */
	asm volatile("1: mov %%cr4, %0\n"
		     "2:\n"
		     _ASM_EXTABLE(1b, 2b)
		     : "=r" (val), "=m" (__force_order) : "0" (0));
#else
	/* CR4 always exists on x86_64. */
	asm volatile("mov %%cr4,%0\n\t" : "=r" (val), "=m" (__force_order));
#endif
	return val;
}

static inline void native_write_cr4(unsigned long val)
{
	asm volatile("mov %0,%%cr4" : : "r" (val), "m" (__force_order));
}

#ifdef CONFIG_X86_64
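/*
 * CR8 maps the local APIC Task Priority Register (TPR) and is only
 * accessible in 64-bit mode.
 */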
static inline unsigned long native_read_cr8(void)
{
	unsigned long cr8;
	asm volatile("movq %%cr8,%0" : "=r" (cr8));
	return cr8;
}

static inline void native_write_cr8(unsigned long val)
{
	asm volatile("movq %0,%%cr8" :: "r" (val) : "memory");
}
#endif

#ifdef CONFIG_X86_INTEL_MEMORY_PROTECTION_KEYS
static inline u32 __read_pkru(void)
{
	u32 ecx = 0;
	u32 edx, pkru;

	/*
	 * "rdpkru" instruction. Places PKRU contents into EAX,
	 * clears EDX and requires that ECX = 0.
	 */
	asm volatile(".byte 0x0f,0x01,0xee\n\t"
		     : "=a" (pkru), "=d" (edx)
		     : "c" (ecx));
	return pkru;
}

static inline void __write_pkru(u32 pkru)
{
	u32 ecx = 0, edx = 0;

	/*
	 * "wrpkru" instruction. Loads the contents of EAX into PKRU;
	 * requires that ECX = EDX = 0.
	 */
	asm volatile(".byte 0x0f,0x01,0xef\n\t"
		     : : "a" (pkru), "c" (ecx), "d" (edx));
}
#else
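/* Without protection keys support, PKRU reads as 0 and writes are no-ops. */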
static inline u32 __read_pkru(void)
{
	return 0;
}

static inline void __write_pkru(u32 pkru)
{
}
#endif

static inline void native_wbinvd(void)
{
	asm volatile("wbinvd" : : : "memory");
}

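/*
 * native_load_gs_index() is implemented in assembly (entry_64.S); reloading
 * the GS selector needs SWAPGS handling and fault recovery around the load.
 */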
extern asmlinkage void native_load_gs_index(unsigned);

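/* CR4 reads always go through the native accessor, even under CONFIG_PARAVIRT_XXL. */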
static inline unsigned long __read_cr4(void)
{
	return native_read_cr4();
}

#ifdef CONFIG_PARAVIRT_XXL
#include <asm/paravirt.h>
#else

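/*
 * Without CONFIG_PARAVIRT_XXL the generic control register accessors map
 * directly to the native instructions above.
 */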
static inline unsigned long read_cr0(void)
{
	return native_read_cr0();
}

static inline void write_cr0(unsigned long x)
{
	native_write_cr0(x);
}

static inline unsigned long read_cr2(void)
{
	return native_read_cr2();
}

static inline void write_cr2(unsigned long x)
{
	native_write_cr2(x);
}

/*
 * Careful! CR3 contains more than just an address. You probably want
 * read_cr3_pa() instead.
 */
static inline unsigned long __read_cr3(void)
{
	return __native_read_cr3();
}

static inline void write_cr3(unsigned long x)
{
	native_write_cr3(x);
}

static inline void __write_cr4(unsigned long x)
{
	native_write_cr4(x);
}

static inline void wbinvd(void)
{
	native_wbinvd();
}

#ifdef CONFIG_X86_64

static inline unsigned long read_cr8(void)
{
	return native_read_cr8();
}

static inline void write_cr8(unsigned long x)
{
	native_write_cr8(x);
}

static inline void load_gs_index(unsigned selector)
{
	native_load_gs_index(selector);
}

#endif

#endif /* CONFIG_PARAVIRT_XXL */

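/*
 * Write back (if dirty) and invalidate the cache line containing __p from
 * every level of the cache hierarchy.
 */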
static inline void clflush(volatile void *__p)
{
	asm volatile("clflush %0" : "+m" (*(volatile char __force *)__p));
}

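/*
 * CLFLUSHOPT is a weakly ordered variant of CLFLUSH. The default is a
 * DS-prefixed CLFLUSH of the same length; alternatives patch in the
 * 0x66-prefixed CLFLUSHOPT encoding when X86_FEATURE_CLFLUSHOPT is set.
 */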
static inline void clflushopt(volatile void *__p)
{
	alternative_io(".byte " __stringify(NOP_DS_PREFIX) "; clflush %P0",
		       ".byte 0x66; clflush %P0",
		       X86_FEATURE_CLFLUSHOPT,
		       "+m" (*(volatile char __force *)__p));
}

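/*
 * CLWB writes the cache line back to memory without necessarily evicting it.
 * The dummy 64-byte struct makes the compiler treat the whole cache line as
 * the memory operand. CPUs lacking CLWB fall back to CLFLUSHOPT, and then
 * to CLFLUSH.
 */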
static inline void clwb(volatile void *__p)
{
	volatile struct { char x[64]; } *p = __p;

	asm volatile(ALTERNATIVE_2(
		".byte " __stringify(NOP_DS_PREFIX) "; clflush (%[pax])",
		".byte 0x66; clflush (%[pax])", /* clflushopt (%%rax) */
		X86_FEATURE_CLFLUSHOPT,
		".byte 0x66, 0x0f, 0xae, 0x30", /* clwb (%%rax) */
		X86_FEATURE_CLWB)
		: [p] "+m" (*p)
		: [pax] "a" (p));
}

#define nop() asm volatile ("nop")


#endif /* __KERNEL__ */

#endif /* _ASM_X86_SPECIAL_INSNS_H */