/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __SVM_H
#define __SVM_H

#include <linux/types.h>
#include <uapi/asm/svm.h>

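/*
 * Bit numbers within the 64-bit "intercept" field of struct vmcb_control_area
 * (intercept vector words 3 and 4 in the AMD APM, vol. 2).
 */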
enum {
        INTERCEPT_INTR,
        INTERCEPT_NMI,
        INTERCEPT_SMI,
        INTERCEPT_INIT,
        INTERCEPT_VINTR,
        INTERCEPT_SELECTIVE_CR0,
        INTERCEPT_STORE_IDTR,
        INTERCEPT_STORE_GDTR,
        INTERCEPT_STORE_LDTR,
        INTERCEPT_STORE_TR,
        INTERCEPT_LOAD_IDTR,
        INTERCEPT_LOAD_GDTR,
        INTERCEPT_LOAD_LDTR,
        INTERCEPT_LOAD_TR,
        INTERCEPT_RDTSC,
        INTERCEPT_RDPMC,
        INTERCEPT_PUSHF,
        INTERCEPT_POPF,
        INTERCEPT_CPUID,
        INTERCEPT_RSM,
        INTERCEPT_IRET,
        INTERCEPT_INTn,
        INTERCEPT_INVD,
        INTERCEPT_PAUSE,
        INTERCEPT_HLT,
        INTERCEPT_INVLPG,
        INTERCEPT_INVLPGA,
        INTERCEPT_IOIO_PROT,
        INTERCEPT_MSR_PROT,
        INTERCEPT_TASK_SWITCH,
        INTERCEPT_FERR_FREEZE,
        INTERCEPT_SHUTDOWN,
        INTERCEPT_VMRUN,
        INTERCEPT_VMMCALL,
        INTERCEPT_VMLOAD,
        INTERCEPT_VMSAVE,
        INTERCEPT_STGI,
        INTERCEPT_CLGI,
        INTERCEPT_SKINIT,
        INTERCEPT_RDTSCP,
        INTERCEPT_ICEBP,
        INTERCEPT_WBINVD,
        INTERCEPT_MONITOR,
        INTERCEPT_MWAIT,
        INTERCEPT_MWAIT_COND,
        INTERCEPT_XSETBV,
};

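/*
 * VMCB control area.  Field order and padding follow the hardware layout
 * (AMD APM vol. 2, appendix B), so the structure must remain packed.
 */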
struct __attribute__ ((__packed__)) vmcb_control_area {
        u32 intercept_cr;
        u32 intercept_dr;
        u32 intercept_exceptions;
        u64 intercept;
        u8 reserved_1[40];
        u16 pause_filter_thresh;
        u16 pause_filter_count;
        u64 iopm_base_pa;
        u64 msrpm_base_pa;
        u64 tsc_offset;
        u32 asid;
        u8 tlb_ctl;
        u8 reserved_2[3];
        u32 int_ctl;
        u32 int_vector;
        u32 int_state;
        u8 reserved_3[4];
        u32 exit_code;
        u32 exit_code_hi;
        u64 exit_info_1;
        u64 exit_info_2;
        u32 exit_int_info;
        u32 exit_int_info_err;
        u64 nested_ctl;
        u64 avic_vapic_bar;
        u8 reserved_4[8];
        u32 event_inj;
        u32 event_inj_err;
        u64 nested_cr3;
        u64 virt_ext;
        u32 clean;
        u32 reserved_5;
        u64 next_rip;
        u8 insn_len;
        u8 insn_bytes[15];
        u64 avic_backing_page;  /* Offset 0xe0 */
        u8 reserved_6[8];       /* Offset 0xe8 */
        u64 avic_logical_id;    /* Offset 0xf0 */
        u64 avic_physical_id;   /* Offset 0xf8 */
        u8 reserved_7[768];
};

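/* Values for the tlb_ctl field (TLB control on VMRUN). */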
#define TLB_CONTROL_DO_NOTHING 0
#define TLB_CONTROL_FLUSH_ALL_ASID 1
#define TLB_CONTROL_FLUSH_ASID 3
#define TLB_CONTROL_FLUSH_ASID_LOCAL 7

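/* Bits in the int_ctl field (virtual interrupt control). */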
#define V_TPR_MASK 0x0f

#define V_IRQ_SHIFT 8
#define V_IRQ_MASK (1 << V_IRQ_SHIFT)

#define V_GIF_SHIFT 9
#define V_GIF_MASK (1 << V_GIF_SHIFT)

#define V_INTR_PRIO_SHIFT 16
#define V_INTR_PRIO_MASK (0x0f << V_INTR_PRIO_SHIFT)

#define V_IGN_TPR_SHIFT 20
#define V_IGN_TPR_MASK (1 << V_IGN_TPR_SHIFT)

#define V_INTR_MASKING_SHIFT 24
#define V_INTR_MASKING_MASK (1 << V_INTR_MASKING_SHIFT)

#define V_GIF_ENABLE_SHIFT 25
#define V_GIF_ENABLE_MASK (1 << V_GIF_ENABLE_SHIFT)

#define AVIC_ENABLE_SHIFT 31
#define AVIC_ENABLE_MASK (1 << AVIC_ENABLE_SHIFT)

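/* Bits in the virt_ext field (LBR and VMLOAD/VMSAVE virtualization). */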
#define LBR_CTL_ENABLE_MASK BIT_ULL(0)
#define VIRTUAL_VMLOAD_VMSAVE_ENABLE_MASK BIT_ULL(1)

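/* Bit 0 of the int_state field: the guest is in an interrupt shadow. */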
#define SVM_INTERRUPT_SHADOW_MASK 1

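/* Decode of exit_info_1 for IOIO (I/O port) intercepts. */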
#define SVM_IOIO_STR_SHIFT 2
#define SVM_IOIO_REP_SHIFT 3
#define SVM_IOIO_SIZE_SHIFT 4
#define SVM_IOIO_ASIZE_SHIFT 7

#define SVM_IOIO_TYPE_MASK 1
#define SVM_IOIO_STR_MASK (1 << SVM_IOIO_STR_SHIFT)
#define SVM_IOIO_REP_MASK (1 << SVM_IOIO_REP_SHIFT)
#define SVM_IOIO_SIZE_MASK (7 << SVM_IOIO_SIZE_SHIFT)
#define SVM_IOIO_ASIZE_MASK (7 << SVM_IOIO_ASIZE_SHIFT)

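/* Bits in the VM_CR MSR (C001_0114h). */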
#define SVM_VM_CR_VALID_MASK 0x001fULL
#define SVM_VM_CR_SVM_LOCK_MASK 0x0008ULL
#define SVM_VM_CR_SVM_DIS_MASK 0x0010ULL

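/* Bits in the nested_ctl field: nested paging and SEV enables. */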
#define SVM_NESTED_CTL_NP_ENABLE BIT(0)
#define SVM_NESTED_CTL_SEV_ENABLE BIT(1)

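/* Segment register layout used in the VMCB state save area. */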
struct __attribute__ ((__packed__)) vmcb_seg {
        u16 selector;
        u16 attrib;
        u32 limit;
        u64 base;
};

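/* VMCB state save area, starting at VMCB offset 0x400. */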
struct __attribute__ ((__packed__)) vmcb_save_area {
        struct vmcb_seg es;
        struct vmcb_seg cs;
        struct vmcb_seg ss;
        struct vmcb_seg ds;
        struct vmcb_seg fs;
        struct vmcb_seg gs;
        struct vmcb_seg gdtr;
        struct vmcb_seg ldtr;
        struct vmcb_seg idtr;
        struct vmcb_seg tr;
        u8 reserved_1[43];
        u8 cpl;
        u8 reserved_2[4];
        u64 efer;
        u8 reserved_3[112];
        u64 cr4;
        u64 cr3;
        u64 cr0;
        u64 dr7;
        u64 dr6;
        u64 rflags;
        u64 rip;
        u8 reserved_4[88];
        u64 rsp;
        u8 reserved_5[24];
        u64 rax;
        u64 star;
        u64 lstar;
        u64 cstar;
        u64 sfmask;
        u64 kernel_gs_base;
        u64 sysenter_cs;
        u64 sysenter_esp;
        u64 sysenter_eip;
        u64 cr2;
        u8 reserved_6[32];
        u64 g_pat;
        u64 dbgctl;
        u64 br_from;
        u64 br_to;
        u64 last_excp_from;
        u64 last_excp_to;
};

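/* A complete VMCB: a 1KB control area followed by the state save area. */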
struct __attribute__ ((__packed__)) vmcb {
        struct vmcb_control_area control;
        struct vmcb_save_area save;
};

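/* CPUID leaf reporting SVM revision and feature information. */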
#define SVM_CPUID_FUNC 0x8000000a

#define SVM_VM_CR_SVM_DISABLE 4

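/* Bits in the vmcb_seg attrib field (segment type and descriptor attributes). */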
#define SVM_SELECTOR_S_SHIFT 4
#define SVM_SELECTOR_DPL_SHIFT 5
#define SVM_SELECTOR_P_SHIFT 7
#define SVM_SELECTOR_AVL_SHIFT 8
#define SVM_SELECTOR_L_SHIFT 9
#define SVM_SELECTOR_DB_SHIFT 10
#define SVM_SELECTOR_G_SHIFT 11

#define SVM_SELECTOR_TYPE_MASK (0xf)
#define SVM_SELECTOR_S_MASK (1 << SVM_SELECTOR_S_SHIFT)
#define SVM_SELECTOR_DPL_MASK (3 << SVM_SELECTOR_DPL_SHIFT)
#define SVM_SELECTOR_P_MASK (1 << SVM_SELECTOR_P_SHIFT)
#define SVM_SELECTOR_AVL_MASK (1 << SVM_SELECTOR_AVL_SHIFT)
#define SVM_SELECTOR_L_MASK (1 << SVM_SELECTOR_L_SHIFT)
#define SVM_SELECTOR_DB_MASK (1 << SVM_SELECTOR_DB_SHIFT)
#define SVM_SELECTOR_G_MASK (1 << SVM_SELECTOR_G_SHIFT)

#define SVM_SELECTOR_WRITE_MASK (1 << 1)
#define SVM_SELECTOR_READ_MASK SVM_SELECTOR_WRITE_MASK
#define SVM_SELECTOR_CODE_MASK (1 << 3)

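/* Bit positions in intercept_cr: CR reads in bits 0-15, CR writes in bits 16-31. */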
#define INTERCEPT_CR0_READ 0
#define INTERCEPT_CR3_READ 3
#define INTERCEPT_CR4_READ 4
#define INTERCEPT_CR8_READ 8
#define INTERCEPT_CR0_WRITE (16 + 0)
#define INTERCEPT_CR3_WRITE (16 + 3)
#define INTERCEPT_CR4_WRITE (16 + 4)
#define INTERCEPT_CR8_WRITE (16 + 8)

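/* Bit positions in intercept_dr: DR reads in bits 0-15, DR writes in bits 16-31. */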
#define INTERCEPT_DR0_READ 0
#define INTERCEPT_DR1_READ 1
#define INTERCEPT_DR2_READ 2
#define INTERCEPT_DR3_READ 3
#define INTERCEPT_DR4_READ 4
#define INTERCEPT_DR5_READ 5
#define INTERCEPT_DR6_READ 6
#define INTERCEPT_DR7_READ 7
#define INTERCEPT_DR0_WRITE (16 + 0)
#define INTERCEPT_DR1_WRITE (16 + 1)
#define INTERCEPT_DR2_WRITE (16 + 2)
#define INTERCEPT_DR3_WRITE (16 + 3)
#define INTERCEPT_DR4_WRITE (16 + 4)
#define INTERCEPT_DR5_WRITE (16 + 5)
#define INTERCEPT_DR6_WRITE (16 + 6)
#define INTERCEPT_DR7_WRITE (16 + 7)

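/* Fields of the event_inj (EVENTINJ) field, used for event injection. */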
#define SVM_EVTINJ_VEC_MASK 0xff

#define SVM_EVTINJ_TYPE_SHIFT 8
#define SVM_EVTINJ_TYPE_MASK (7 << SVM_EVTINJ_TYPE_SHIFT)

#define SVM_EVTINJ_TYPE_INTR (0 << SVM_EVTINJ_TYPE_SHIFT)
#define SVM_EVTINJ_TYPE_NMI (2 << SVM_EVTINJ_TYPE_SHIFT)
#define SVM_EVTINJ_TYPE_EXEPT (3 << SVM_EVTINJ_TYPE_SHIFT)
#define SVM_EVTINJ_TYPE_SOFT (4 << SVM_EVTINJ_TYPE_SHIFT)

#define SVM_EVTINJ_VALID (1 << 31)
#define SVM_EVTINJ_VALID_ERR (1 << 11)

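/* exit_int_info uses the same encoding as event_inj. */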
#define SVM_EXITINTINFO_VEC_MASK SVM_EVTINJ_VEC_MASK
#define SVM_EXITINTINFO_TYPE_MASK SVM_EVTINJ_TYPE_MASK

#define SVM_EXITINTINFO_TYPE_INTR SVM_EVTINJ_TYPE_INTR
#define SVM_EXITINTINFO_TYPE_NMI SVM_EVTINJ_TYPE_NMI
#define SVM_EXITINTINFO_TYPE_EXEPT SVM_EVTINJ_TYPE_EXEPT
#define SVM_EXITINTINFO_TYPE_SOFT SVM_EVTINJ_TYPE_SOFT

#define SVM_EXITINTINFO_VALID SVM_EVTINJ_VALID
#define SVM_EXITINTINFO_VALID_ERR SVM_EVTINJ_VALID_ERR

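/* Bits in exit_info_2 for task switch intercepts. */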
#define SVM_EXITINFOSHIFT_TS_REASON_IRET 36
#define SVM_EXITINFOSHIFT_TS_REASON_JMP 38
#define SVM_EXITINFOSHIFT_TS_HAS_ERROR_CODE 44

#define SVM_EXITINFO_REG_MASK 0x0F

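/* The selective CR0 write intercept fires only on changes outside this mask. */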
#define SVM_CR0_SELECTIVE_MASK (X86_CR0_TS | X86_CR0_MP)

#endif