/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_DEBUGREG_H
#define _ASM_X86_DEBUGREG_H

#include <linux/bug.h>
#include <linux/percpu.h>
#include <uapi/asm/debugreg.h>

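/*
 * Per-CPU shadow copy of DR7, maintained by the HW breakpoint code so that
 * "are any breakpoints armed?" can be answered without touching the
 * hardware register.
 */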
DECLARE_PER_CPU(unsigned long, cpu_dr7);

#ifndef CONFIG_PARAVIRT_XXL
/*
 * These special macros can be used to get or set a debugging register
 */
#define get_debugreg(var, register)				\
	(var) = native_get_debugreg(register)
#define set_debugreg(value, register)				\
	native_set_debugreg(register, value)
#endif

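/*
 * Illustrative use of the accessors above (a sketch, not code from this
 * file): read the debug status register DR6 and then clear it.
 *
 *	unsigned long dr6;
 *
 *	get_debugreg(dr6, 6);
 *	set_debugreg(0UL, 6);
 *
 * When CONFIG_PARAVIRT_XXL is enabled, get_debugreg()/set_debugreg() are
 * provided by the paravirt layer instead of the macros above.
 *
 * native_get_debugreg() reads one of the architectural debug registers:
 * DR0-DR3 (breakpoint addresses), DR6 (status) or DR7 (control).  Any
 * other index is a kernel bug.
 */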
static __always_inline unsigned long native_get_debugreg(int regno)
{
	unsigned long val = 0;	/* Damn you, gcc! */

	switch (regno) {
	case 0:
		asm("mov %%db0, %0" :"=r" (val));
		break;
	case 1:
		asm("mov %%db1, %0" :"=r" (val));
		break;
	case 2:
		asm("mov %%db2, %0" :"=r" (val));
		break;
	case 3:
		asm("mov %%db3, %0" :"=r" (val));
		break;
	case 6:
		asm("mov %%db6, %0" :"=r" (val));
		break;
	case 7:
		/*
		 * Apply __FORCE_ORDER to DR7 reads to forbid re-ordering them
		 * with other code.
		 *
		 * This is needed because a DR7 access can cause a #VC exception
		 * when running under SEV-ES. Taking a #VC exception is not a
		 * safe thing to do just anywhere in the entry code and
		 * re-ordering might place the access into an unsafe location.
		 *
		 * This happened in the NMI handler, where the DR7 read was
		 * re-ordered to happen before the call to sev_es_ist_enter(),
		 * causing stack recursion.
		 */
		asm volatile("mov %%db7, %0" : "=r" (val) : __FORCE_ORDER);
		break;
	default:
		BUG();
	}
	return val;
}

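/*
 * Counterpart to native_get_debugreg(): write @value into the selected
 * debug register with a "mov %0, %%dbN" instruction.
 */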
static __always_inline void native_set_debugreg(int regno, unsigned long value)
{
	switch (regno) {
	case 0:
		asm("mov %0, %%db0" ::"r" (value));
		break;
	case 1:
		asm("mov %0, %%db1" ::"r" (value));
		break;
	case 2:
		asm("mov %0, %%db2" ::"r" (value));
		break;
	case 3:
		asm("mov %0, %%db3" ::"r" (value));
		break;
	case 6:
		asm("mov %0, %%db6" ::"r" (value));
		break;
	case 7:
		/*
		 * Apply __FORCE_ORDER to DR7 writes to forbid re-ordering them
		 * with other code.
		 *
		 * While it didn't happen with a DR7 write (see the DR7 read
		 * comment above which explains where it happened), add the
		 * __FORCE_ORDER here too to avoid similar problems in the
		 * future.
		 */
		asm volatile("mov %0, %%db7" ::"r" (value), __FORCE_ORDER);
		break;
	default:
		BUG();
	}
}

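/*
 * Tear down all four HW breakpoints: clear the control register (DR7)
 * first, then zero the individual address registers DR0-DR3.
 */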
static inline void hw_breakpoint_disable(void)
{
	/* Zero the control register for HW Breakpoint */
	set_debugreg(0UL, 7);

	/* Zero-out the individual HW breakpoint address registers */
	set_debugreg(0UL, 0);
	set_debugreg(0UL, 1);
	set_debugreg(0UL, 2);
	set_debugreg(0UL, 3);
}

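/*
 * True if any breakpoint is globally enabled in the cached DR7 value,
 * i.e. the CPU may currently raise #DB for an armed HW breakpoint.
 */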
static __always_inline bool hw_breakpoint_active(void)
{
	return __this_cpu_read(cpu_dr7) & DR_GLOBAL_ENABLE_MASK;
}

extern void hw_breakpoint_restore(void);

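/*
 * Disable HW breakpoints around a critical section and hand the previous
 * DR7 value back for local_db_restore().  Illustrative usage (a sketch,
 * not code from this file):
 *
 *	unsigned long dr7 = local_db_save();
 *	...code that must not be interrupted by #DB...
 *	local_db_restore(dr7);
 *
 * The X86_FEATURE_HYPERVISOR fast path presumably avoids a debug-register
 * access (which a hypervisor may intercept) when no breakpoints are armed.
 */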
static __always_inline unsigned long local_db_save(void)
{
	unsigned long dr7;

	if (static_cpu_has(X86_FEATURE_HYPERVISOR) && !hw_breakpoint_active())
		return 0;

	get_debugreg(dr7, 7);
	dr7 &= ~0x400; /* architecturally set bit */
	if (dr7)
		set_debugreg(0, 7);
	/*
	 * Ensure the compiler doesn't lower the above statements into
	 * the critical section; disabling breakpoints late would not
	 * be good.
	 */
	barrier();

	return dr7;
}

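/*
 * Re-arm the breakpoints saved by local_db_save() once the critical
 * section is over; a zero value means nothing was armed and DR7 is left
 * untouched.
 */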
static __always_inline void local_db_restore(unsigned long dr7)
{
	/*
	 * Ensure the compiler doesn't raise this statement into
	 * the critical section; enabling breakpoints early would
	 * not be good.
	 */
	barrier();
	if (dr7)
		set_debugreg(dr7, 7);
}

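/*
 * AMD CPUs can mask low address bits of a breakpoint so that a single
 * DR0-DR3 breakpoint matches a range of addresses (an AMD-specific
 * extension).  These helpers access those per-breakpoint masks; the
 * stubs below are used when AMD CPU support is not built in.
 */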
#ifdef CONFIG_CPU_SUP_AMD
extern void amd_set_dr_addr_mask(unsigned long mask, unsigned int dr);
extern unsigned long amd_get_dr_addr_mask(unsigned int dr);
#else
static inline void amd_set_dr_addr_mask(unsigned long mask, unsigned int dr) { }
static inline unsigned long amd_get_dr_addr_mask(unsigned int dr)
{
	return 0;
}
#endif

#endif /* _ASM_X86_DEBUGREG_H */