1 | /* SPDX-License-Identifier: GPL-2.0-only */ |
2 | /* |
3 | * |
4 | * verify_cpu.S - Code for cpu long mode and SSE verification. This |
5 | * code has been borrowed from boot/setup.S and was introduced by |
6 | * Andi Kleen. |
7 | * |
8 | * Copyright (c) 2007 Andi Kleen (ak@suse.de) |
9 | * Copyright (c) 2007 Eric Biederman (ebiederm@xmission.com) |
10 | * Copyright (c) 2007 Vivek Goyal (vgoyal@in.ibm.com) |
11 | * Copyright (c) 2010 Kees Cook (kees.cook@canonical.com) |
12 | * |
 * This is common code for verifying whether a CPU supports
 * long mode and SSE. It is not called directly; instead, this
 * file is included at various places and compiled in that context.
 * This file is expected to run in 32-bit code. Currently:
17 | * |
18 | * arch/x86/boot/compressed/head_64.S: Boot cpu verification |
19 | * arch/x86/kernel/trampoline_64.S: secondary processor verification |
20 | * arch/x86/kernel/head_32.S: processor startup |
21 | * |
 * verify_cpu returns the status of long mode and SSE in register %eax:
 * 0: Success  1: Failure
24 | * |
25 | * On Intel, the XD_DISABLE flag will be cleared as a side-effect. |
26 | * |
27 | * The caller needs to check for the error code and take the action |
28 | * appropriately. Either display a message or halt. |
29 | */ |
30 | |
31 | #include <asm/cpufeatures.h> |
32 | #include <asm/msr-index.h> |
33 | |
# verify_cpu - verify that the CPU supports long mode and SSE.
#
# Runs in 32-bit code (see the file header).  Result is returned in
# %eax: 0 = success, 1 = failure (no cpuid, no long mode, or a missing
# required feature).  Both exit paths restore the caller's flags.
#
# Register usage: %di is a vendor flag (1 = AMD, 0 = anything else);
# it is also reused to limit the SSE-enable retry to a single attempt.
#
# Side effects: on recent Intel CPUs the XD_DISABLE bit in
# IA32_MISC_ENABLE is cleared; on AMD CPUs bit 15 of MSR_K7_HWCR may be
# cleared to enable SSE.
SYM_FUNC_START_LOCAL(verify_cpu)
	pushf				# Save caller passed flags
	push	$0			# Kill any dangerous flags
	popf

#ifndef __x86_64__
	pushfl				# standard way to check for cpuid:
	popl	%eax			# try to toggle EFLAGS.ID (bit 21)
	movl	%eax,%ebx
	xorl	$0x200000,%eax
	pushl	%eax
	popfl
	pushfl
	popl	%eax
	cmpl	%eax,%ebx		# did the ID bit stick?
	jz	.Lverify_cpu_no_longmode	# cpu has no cpuid
#endif

	movl	$0x0,%eax		# See if cpuid 1 is implemented
	cpuid
	cmpl	$0x1,%eax		# max standard leaf must be >= 1
	jb	.Lverify_cpu_no_longmode	# no cpuid 1

	xor	%di,%di			# %di = 1 iff vendor is AMD
	cmpl	$0x68747541,%ebx	# "Auth" - AuthenticAMD?
	jnz	.Lverify_cpu_noamd
	cmpl	$0x69746e65,%edx	# "enti"
	jnz	.Lverify_cpu_noamd
	cmpl	$0x444d4163,%ecx	# "cAMD"
	jnz	.Lverify_cpu_noamd
	mov	$1,%di			# cpu is from AMD
	jmp	.Lverify_cpu_check

.Lverify_cpu_noamd:
	cmpl	$0x756e6547,%ebx	# "Genu" - GenuineIntel?
	jnz	.Lverify_cpu_check
	cmpl	$0x49656e69,%edx	# "ineI"
	jnz	.Lverify_cpu_check
	cmpl	$0x6c65746e,%ecx	# "ntel"
	jnz	.Lverify_cpu_check

	# only call IA32_MISC_ENABLE when:
	# family > 6 || (family == 6 && model >= 0xd)
	# (presumably older Intel CPUs lack the XD_DISABLE bit)
	movl	$0x1, %eax		# check CPU family and model
	cpuid
	movl	%eax, %ecx		# keep a copy for the model check

	andl	$0x0ff00f00, %eax	# mask family and extended family
	shrl	$8, %eax
	cmpl	$6, %eax
	ja	.Lverify_cpu_clear_xd	# family > 6, ok
	jb	.Lverify_cpu_check	# family < 6, skip

	andl	$0x000f00f0, %ecx	# mask model and extended model
	shrl	$4, %ecx
	cmpl	$0xd, %ecx
	jb	.Lverify_cpu_check	# family == 6, model < 0xd, skip

.Lverify_cpu_clear_xd:
	# XD_DISABLE is MSR bit 34, i.e. bit 2 of the high half in %edx.
	movl	$MSR_IA32_MISC_ENABLE, %ecx
	rdmsr
	btrl	$2, %edx		# clear MSR_IA32_MISC_ENABLE_XD_DISABLE
	jnc	.Lverify_cpu_check	# only write MSR if bit was changed
	wrmsr

.Lverify_cpu_check:
	movl	$0x1,%eax		# Does the cpu have what it takes
	cpuid
	andl	$REQUIRED_MASK0,%edx	# keep only the required feature bits
	xorl	$REQUIRED_MASK0,%edx	# nonzero iff any required bit missing
	jnz	.Lverify_cpu_no_longmode

	movl	$0x80000000,%eax	# See if extended cpuid is implemented
	cpuid
	cmpl	$0x80000001,%eax
	jb	.Lverify_cpu_no_longmode	# no extended cpuid

	movl	$0x80000001,%eax	# Does the cpu have what it takes
	cpuid
	andl	$REQUIRED_MASK1,%edx	# same and/xor trick as above
	xorl	$REQUIRED_MASK1,%edx
	jnz	.Lverify_cpu_no_longmode

.Lverify_cpu_sse_test:
	movl	$1,%eax
	cpuid
	andl	$SSE_MASK,%edx		# all SSE_MASK bits present?
	cmpl	$SSE_MASK,%edx
	je	.Lverify_cpu_sse_ok
	test	%di,%di
	jz	.Lverify_cpu_no_longmode	# only try to force SSE on AMD
	movl	$MSR_K7_HWCR,%ecx
	rdmsr
	btr	$15,%eax		# enable SSE (clear the disable bit)
	wrmsr
	xor	%di,%di			# don't loop
	jmp	.Lverify_cpu_sse_test	# try again

.Lverify_cpu_no_longmode:
	popf				# Restore caller passed flags
	movl	$1,%eax			# return 1: failure
	RET
.Lverify_cpu_sse_ok:
	popf				# Restore caller passed flags
	xorl	%eax, %eax		# return 0: success
	RET
SYM_FUNC_END(verify_cpu)
141 | |