1 | // SPDX-License-Identifier: GPL-2.0-or-later |
2 | /* |
3 | * Test cases for compiler-based stack variable zeroing via |
4 | * -ftrivial-auto-var-init={zero,pattern} or CONFIG_GCC_PLUGIN_STRUCTLEAK*. |
5 | * For example, see: |
6 | * "Running tests with kunit_tool" at Documentation/dev-tools/kunit/start.rst |
7 | * ./tools/testing/kunit/kunit.py run stackinit [--raw_output] \ |
8 | * --make_option LLVM=1 \ |
9 | * --kconfig_add CONFIG_INIT_STACK_ALL_ZERO=y |
10 | * |
11 | */ |
12 | #define pr_fmt(fmt) KBUILD_MODNAME ": " fmt |
13 | |
14 | #include <kunit/test.h> |
15 | #include <linux/init.h> |
16 | #include <linux/kernel.h> |
17 | #include <linux/module.h> |
18 | #include <linux/string.h> |
19 | |
20 | /* Exfiltration buffer. */ |
21 | #define MAX_VAR_SIZE 128 |
22 | static u8 check_buf[MAX_VAR_SIZE]; |
23 | |
24 | /* Character array to trigger stack protector in all functions. */ |
25 | #define VAR_BUFFER 32 |
26 | |
27 | /* Volatile mask to convince compiler to copy memory with 0xff. */ |
28 | static volatile u8 forced_mask = 0xff; |
29 | |
30 | /* Location and size tracking to validate fill and test are colocated. */ |
31 | static void *fill_start, *target_start; |
32 | static size_t fill_size, target_size; |
33 | |
/*
 * Report whether the "needle" memory region lies entirely inside the
 * "haystack" region. Used to confirm that the fill and target stack
 * variables occupied the same stack frame location across calls.
 */
static bool stackinit_range_contains(char *haystack_start, size_t haystack_size,
				     char *needle_start, size_t needle_size)
{
	char *haystack_end = haystack_start + haystack_size;
	char *needle_end = needle_start + needle_size;

	return needle_start >= haystack_start && needle_end <= haystack_end;
}
42 | |
43 | /* Whether the test is expected to fail. */ |
44 | #define WANT_SUCCESS 0 |
45 | #define XFAIL 1 |
46 | |
/*
 * Per-shape helper plumbing, selected by token-pasting the "which"
 * argument (SCALAR, STRING, or STRUCT): scalars are returned by value
 * from do_nothing_*() and reassigned; strings and structs are passed
 * by reference and the helper returns void.
 */
#define DO_NOTHING_TYPE_SCALAR(var_type)	var_type
#define DO_NOTHING_TYPE_STRING(var_type)	void
#define DO_NOTHING_TYPE_STRUCT(var_type)	void

#define DO_NOTHING_RETURN_SCALAR(ptr)		*(ptr)
#define DO_NOTHING_RETURN_STRING(ptr)		/**/
#define DO_NOTHING_RETURN_STRUCT(ptr)		/**/

#define DO_NOTHING_CALL_SCALAR(var, name)			\
		(var) = do_nothing_ ## name(&(var))
#define DO_NOTHING_CALL_STRING(var, name)			\
		do_nothing_ ## name(var)
#define DO_NOTHING_CALL_STRUCT(var, name)			\
		do_nothing_ ## name(&(var))

/* How to pass the variable to the leaf function for each shape. */
#define FETCH_ARG_SCALAR(var)		&var
#define FETCH_ARG_STRING(var)		var
#define FETCH_ARG_STRUCT(var)		&var
65 | |
66 | /* |
67 | * On m68k, if the leaf function test variable is longer than 8 bytes, |
68 | * the start of the stack frame moves. 8 is sufficiently large to |
69 | * test m68k char arrays, but leave it at 16 for other architectures. |
70 | */ |
71 | #ifdef CONFIG_M68K |
72 | #define FILL_SIZE_STRING 8 |
73 | #else |
74 | #define FILL_SIZE_STRING 16 |
75 | #endif |
76 | |
/*
 * "zero" clone declaration suffix: only the STRING shape needs an
 * array length appended to the declaration.
 */
#define INIT_CLONE_SCALAR		/**/
#define INIT_CLONE_STRING		[FILL_SIZE_STRING]
#define INIT_CLONE_STRUCT		/**/

#define ZERO_CLONE_SCALAR(zero)	memset(&(zero), 0x00, sizeof(zero))
#define ZERO_CLONE_STRING(zero)	memset(&(zero), 0x00, sizeof(zero))
/*
 * For the struct, intentionally poison padding to see if it gets
 * copied out in direct assignments.
 */
#define ZERO_CLONE_STRUCT(zero)				\
	do {						\
		memset(&(zero), 0xFF, sizeof(zero));	\
		zero.one = 0;				\
		zero.two = 0;				\
		zero.three = 0;				\
		zero.four = 0;				\
	} while (0)

/* Declaration suffixes per init level: "none" leaves var uninitialized. */
#define INIT_SCALAR_none(var_type)	/**/
#define INIT_SCALAR_zero(var_type)	= 0

#define INIT_STRING_none(var_type)	[FILL_SIZE_STRING] /**/
#define INIT_STRING_zero(var_type)	[FILL_SIZE_STRING] = { }

#define INIT_STRUCT_none(var_type)	/**/
#define INIT_STRUCT_zero(var_type)	= { }
104 | |
105 | |
/*
 * Struct initializer bodies for each style under test. Note that the
 * "runtime" and "assigned" INIT_STRUCT_* variants begin with ";" so
 * that the variable declaration is terminated first and the
 * initialization happens as separate statements -- i.e. the variable
 * itself is declared with no initializer at all.
 */
#define __static_partial	{ .two = 0, }
#define __static_all		{ .one = 0,			\
				  .two = 0,			\
				  .three = 0,			\
				  .four = 0,			\
				}
#define __dynamic_partial	{ .two = arg->two, }
#define __dynamic_all		{ .one = arg->one,		\
				  .two = arg->two,		\
				  .three = arg->three,		\
				  .four = arg->four,		\
				}
#define __runtime_partial	var.two = 0
#define __runtime_all		var.one = 0;			\
				var.two = 0;			\
				var.three = 0;			\
				var.four = 0

#define INIT_STRUCT_static_partial(var_type)	\
	= __static_partial
#define INIT_STRUCT_static_all(var_type)	\
	= __static_all
#define INIT_STRUCT_dynamic_partial(var_type)	\
	= __dynamic_partial
#define INIT_STRUCT_dynamic_all(var_type)	\
	= __dynamic_all
#define INIT_STRUCT_runtime_partial(var_type)	\
	; __runtime_partial
#define INIT_STRUCT_runtime_all(var_type)	\
	; __runtime_all

#define INIT_STRUCT_assigned_static_partial(var_type)	\
	; var = (var_type)__static_partial
#define INIT_STRUCT_assigned_static_all(var_type)	\
	; var = (var_type)__static_all
#define INIT_STRUCT_assigned_dynamic_partial(var_type)	\
	; var = (var_type)__dynamic_partial
#define INIT_STRUCT_assigned_dynamic_all(var_type)	\
	; var = (var_type)__dynamic_all

/* Whole-struct assignment from the passed-in argument. */
#define INIT_STRUCT_assigned_copy(var_type)	\
	; var = *(arg)
149 | /* |
150 | * @name: unique string name for the test |
151 | * @var_type: type to be tested for zeroing initialization |
152 | * @which: is this a SCALAR, STRING, or STRUCT type? |
153 | * @init_level: what kind of initialization is performed |
154 | * @xfail: is this test expected to fail? |
155 | */ |
/*
 * DEFINE_TEST_DRIVER(): emit the KUnit test function test_<name>(),
 * which calls leaf_<name>() twice at (ideally) the same stack depth:
 * first with "fill" set to poison the target variable with 0xFF, then
 * again to capture what the compiler left in the variable. Any bytes
 * still 0xFF afterwards were not auto-initialized. @xfail tests report
 * leftover bytes via kunit_skip() instead of a hard failure.
 */
#define DEFINE_TEST_DRIVER(name, var_type, which, xfail)	\
/* Reports pass/fail through KUnit assertions (returns void). */ \
static noinline void test_ ## name (struct kunit *test)	\
{								\
	var_type zero INIT_CLONE_ ## which;			\
	int ignored;						\
	u8 sum = 0, i;						\
								\
	/* Notice when a new test is larger than expected. */	\
	BUILD_BUG_ON(sizeof(zero) > MAX_VAR_SIZE);		\
								\
	/* Fill clone type with zero for per-field init. */	\
	ZERO_CLONE_ ## which(zero);				\
	/* Clear entire check buffer for 0xFF overlap test. */	\
	memset(check_buf, 0x00, sizeof(check_buf));		\
	/* Fill stack with 0xFF. */				\
	ignored = leaf_ ##name((unsigned long)&ignored, 1,	\
				FETCH_ARG_ ## which(zero));	\
	/* Verify all bytes overwritten with 0xFF. */		\
	for (sum = 0, i = 0; i < target_size; i++)		\
		sum += (check_buf[i] != 0xFF);			\
	/* Clear entire check buffer for later bit tests. */	\
	memset(check_buf, 0x00, sizeof(check_buf));		\
	/* Extract stack-defined variable contents. */		\
	ignored = leaf_ ##name((unsigned long)&ignored, 0,	\
				FETCH_ARG_ ## which(zero));	\
	/*							\
	 * Delay the sum test to here to do as little as	\
	 * possible between the two leaf function calls.	\
	 */							\
	KUNIT_ASSERT_EQ_MSG(test, sum, 0,			\
			    "leaf fill was not 0xFF!?\n");	\
								\
	/* Validate that compiler lined up fill and target. */	\
	KUNIT_ASSERT_TRUE_MSG(test,				\
		stackinit_range_contains(fill_start, fill_size,	\
			    target_start, target_size),		\
		"stackframe was not the same between calls!? "	\
		"(fill %zu wide, target offset by %d)\n",	\
		fill_size,					\
		(int)((ssize_t)(uintptr_t)fill_start -		\
		      (ssize_t)(uintptr_t)target_start));	\
								\
	/* Look for any bytes still 0xFF in check region. */	\
	for (sum = 0, i = 0; i < target_size; i++)		\
		sum += (check_buf[i] == 0xFF);			\
								\
	if (sum != 0 && xfail)					\
		kunit_skip(test,				\
			   "XFAIL uninit bytes: %d\n",		\
			   sum);				\
	KUNIT_ASSERT_EQ_MSG(test, sum, 0,			\
			    "uninit bytes: %d\n", sum);		\
}
/*
 * DEFINE_TEST(): emit the full set of functions for one test case:
 * do_nothing_<name>() (defeats "unused variable" analysis so the
 * compiler cannot elide the variable), leaf_<name>() (declares the
 * variable with the requested initializer style and exfiltrates its
 * bytes into check_buf), and the test driver via DEFINE_TEST_DRIVER().
 */
#define DEFINE_TEST(name, var_type, which, init_level, xfail)	\
/* no-op to force compiler into ignoring "uninitialized" vars */\
static noinline DO_NOTHING_TYPE_ ## which(var_type)		\
do_nothing_ ## name(var_type *ptr)				\
{								\
	/* Will always be true, but compiler doesn't know. */	\
	if ((unsigned long)ptr > 0x2)				\
		return DO_NOTHING_RETURN_ ## which(ptr);	\
	else							\
		return DO_NOTHING_RETURN_ ## which(ptr + 1);	\
}								\
static noinline int leaf_ ## name(unsigned long sp, bool fill,	\
				  var_type *arg)		\
{								\
	char buf[VAR_BUFFER];					\
	var_type var						\
		INIT_ ## which ## _ ## init_level(var_type);	\
								\
	target_start = &var;					\
	target_size = sizeof(var);				\
	/*							\
	 * Keep this buffer around to make sure we've got a	\
	 * stack frame of SOME kind...				\
	 */							\
	memset(buf, (char)(sp & 0xff), sizeof(buf));		\
	/* Fill variable with 0xFF. */				\
	if (fill) {						\
		fill_start = &var;				\
		fill_size = sizeof(var);			\
		memset(fill_start,				\
		       (char)((sp & 0xff) | forced_mask),	\
		       fill_size);				\
	}							\
								\
	/* Silence "never initialized" warnings. */		\
	DO_NOTHING_CALL_ ## which(var, name);			\
								\
	/* Exfiltrate "var". */					\
	memcpy(check_buf, target_start, target_size);		\
								\
	return (int)buf[0] | (int)buf[sizeof(buf) - 1];		\
}								\
DEFINE_TEST_DRIVER(name, var_type, which, xfail)
253 | |
/* Structure with no padding: all members are register-width. */
struct test_packed {
	unsigned long one;
	unsigned long two;
	unsigned long three;
	unsigned long four;
};
261 | |
/* Simple structure with padding likely to be covered by compiler. */
struct test_small_hole {
	size_t one;
	char two;
	/* 3 byte padding hole here (assuming 32-bit int alignment). */
	int three;
	unsigned long four;
};
270 | |
/*
 * Trigger unhandled padding in a structure: the forced 64-byte
 * alignment of "four" creates a hole too large for compilers that
 * only initialize small pad regions.
 */
struct test_big_hole {
	u8 one;
	u8 two;
	u8 three;
	/* 61 byte padding hole here. */
	u8 four __aligned(64);
} __aligned(64);
279 | |
/* Padding at the end of the struct rather than in the middle. */
struct test_trailing_hole {
	char *one;
	char *two;
	char *three;
	char four;
	/* "sizeof(unsigned long) - 1" byte padding hole here. */
};
287 | |
/*
 * Test if STRUCTLEAK is clearing structs with __user fields:
 * the "three" member is what makes the USER plugin level act on it.
 */
struct test_user {
	u8 one;
	unsigned long two;
	char __user *three;
	unsigned long four;
};
295 | |
/*
 * Map the kernel's stack auto-init configuration onto per-test
 * expectations: each stronger instrumentation level is expected to
 * pass everything the weaker levels pass, plus more.
 */
#define ALWAYS_PASS	WANT_SUCCESS
#define ALWAYS_FAIL	XFAIL

#ifdef CONFIG_INIT_STACK_NONE
# define USER_PASS	XFAIL
# define BYREF_PASS	XFAIL
# define STRONG_PASS	XFAIL
#elif defined(CONFIG_GCC_PLUGIN_STRUCTLEAK_USER)
# define USER_PASS	WANT_SUCCESS
# define BYREF_PASS	XFAIL
# define STRONG_PASS	XFAIL
#elif defined(CONFIG_GCC_PLUGIN_STRUCTLEAK_BYREF)
# define USER_PASS	WANT_SUCCESS
# define BYREF_PASS	WANT_SUCCESS
# define STRONG_PASS	XFAIL
#else
# define USER_PASS	WANT_SUCCESS
# define BYREF_PASS	WANT_SUCCESS
# define STRONG_PASS	WANT_SUCCESS
#endif
316 | |
/*
 * Helpers to stamp out the full matrix of (type x init level)
 * combinations without repeating the argument lists each time.
 */
#define DEFINE_SCALAR_TEST(name, init, xfail)			\
		DEFINE_TEST(name ## _ ## init, name, SCALAR,	\
			    init, xfail)

#define DEFINE_SCALAR_TESTS(init, xfail)			\
		DEFINE_SCALAR_TEST(u8, init, xfail);		\
		DEFINE_SCALAR_TEST(u16, init, xfail);		\
		DEFINE_SCALAR_TEST(u32, init, xfail);		\
		DEFINE_SCALAR_TEST(u64, init, xfail);		\
		DEFINE_TEST(char_array_ ## init, unsigned char,	\
			    STRING, init, xfail)

#define DEFINE_STRUCT_TEST(name, init, xfail)			\
		DEFINE_TEST(name ## _ ## init,			\
			    struct test_ ## name, STRUCT, init,	\
			    xfail)

#define DEFINE_STRUCT_TESTS(init, xfail)			\
		DEFINE_STRUCT_TEST(small_hole, init, xfail);	\
		DEFINE_STRUCT_TEST(big_hole, init, xfail);	\
		DEFINE_STRUCT_TEST(trailing_hole, init, xfail);	\
		DEFINE_STRUCT_TEST(packed, init, xfail)

#define DEFINE_STRUCT_INITIALIZER_TESTS(base, xfail)		\
		DEFINE_STRUCT_TESTS(base ## _ ## partial,	\
				    xfail);			\
		DEFINE_STRUCT_TESTS(base ## _ ## all, xfail)
344 | |
/* These should be fully initialized all the time! */
DEFINE_SCALAR_TESTS(zero, ALWAYS_PASS);
DEFINE_STRUCT_TESTS(zero, ALWAYS_PASS);
/* Struct initializers: padding may be left uninitialized. */
DEFINE_STRUCT_INITIALIZER_TESTS(static, STRONG_PASS);
DEFINE_STRUCT_INITIALIZER_TESTS(dynamic, STRONG_PASS);
DEFINE_STRUCT_INITIALIZER_TESTS(runtime, STRONG_PASS);
DEFINE_STRUCT_INITIALIZER_TESTS(assigned_static, STRONG_PASS);
DEFINE_STRUCT_INITIALIZER_TESTS(assigned_dynamic, STRONG_PASS);
/* Whole-struct assignment copies padding bytes too, so never passes. */
DEFINE_STRUCT_TESTS(assigned_copy, ALWAYS_FAIL);
/* No initialization without compiler instrumentation. */
DEFINE_SCALAR_TESTS(none, STRONG_PASS);
DEFINE_STRUCT_TESTS(none, BYREF_PASS);
/* Initialization of members with __user attribute. */
DEFINE_TEST(user, struct test_user, STRUCT, none, USER_PASS);
360 | |
361 | /* |
362 | * Check two uses through a variable declaration outside either path, |
363 | * which was noticed as a special case in porting earlier stack init |
364 | * compiler logic. |
365 | */ |
/*
 * Shared leaf body for the switch tests. "var" is deliberately
 * declared between "switch" and the first "case" label, so any
 * initializer the compiler might attach to the declaration is in an
 * unreachable position -- this probes whether stack auto-init covers
 * such variables anyway.
 */
static int noinline __leaf_switch_none(int path, bool fill)
{
	switch (path) {
	/*
	 * This is intentionally unreachable. To silence the
	 * warning, build with -Wno-switch-unreachable
	 */
	uint64_t var[10];

	case 1:
		target_start = &var;
		target_size = sizeof(var);
		if (fill) {
			fill_start = &var;
			fill_size = sizeof(var);

			memset(fill_start, forced_mask | 0x55, fill_size);
		}
		memcpy(check_buf, target_start, target_size);
		break;
	case 2:
		target_start = &var;
		target_size = sizeof(var);
		if (fill) {
			fill_start = &var;
			fill_size = sizeof(var);

			memset(fill_start, forced_mask | 0xaa, fill_size);
		}
		memcpy(check_buf, target_start, target_size);
		break;
	default:
		/* Only reached for an unexpected path; keeps "var" used. */
		var[1] = 5;
		return var[1] & forced_mask;
	}
	return 0;
}
403 | |
404 | static noinline int leaf_switch_1_none(unsigned long sp, bool fill, |
405 | uint64_t *arg) |
406 | { |
407 | return __leaf_switch_none(path: 1, fill); |
408 | } |
409 | |
410 | static noinline int leaf_switch_2_none(unsigned long sp, bool fill, |
411 | uint64_t *arg) |
412 | { |
413 | return __leaf_switch_none(path: 2, fill); |
414 | } |
415 | |
416 | /* |
417 | * These are expected to fail for most configurations because neither |
418 | * GCC nor Clang have a way to perform initialization of variables in |
419 | * non-code areas (i.e. in a switch statement before the first "case"). |
420 | * https://llvm.org/pr44916 |
421 | */ |
422 | DEFINE_TEST_DRIVER(switch_1_none, uint64_t, SCALAR, ALWAYS_FAIL); |
423 | DEFINE_TEST_DRIVER(switch_2_none, uint64_t, SCALAR, ALWAYS_FAIL); |
424 | |
/* Expand one KUNIT_CASE() entry per scalar/struct test of an init level. */
#define KUNIT_test_scalars(init)			\
		KUNIT_CASE(test_u8_ ## init),		\
		KUNIT_CASE(test_u16_ ## init),		\
		KUNIT_CASE(test_u32_ ## init),		\
		KUNIT_CASE(test_u64_ ## init),		\
		KUNIT_CASE(test_char_array_ ## init)

#define KUNIT_test_structs(init)			\
		KUNIT_CASE(test_small_hole_ ## init),	\
		KUNIT_CASE(test_big_hole_ ## init),	\
		KUNIT_CASE(test_trailing_hole_ ## init),\
		KUNIT_CASE(test_packed_ ## init)	\
437 | |
/* Registration table for all test cases, grouped by expectation tier. */
static struct kunit_case stackinit_test_cases[] = {
	/* These are explicitly initialized and should always pass. */
	KUNIT_test_scalars(zero),
	KUNIT_test_structs(zero),
	/* Padding here appears to be accidentally always initialized? */
	KUNIT_test_structs(dynamic_partial),
	KUNIT_test_structs(assigned_dynamic_partial),
	/* Padding initialization depends on compiler behaviors. */
	KUNIT_test_structs(static_partial),
	KUNIT_test_structs(static_all),
	KUNIT_test_structs(dynamic_all),
	KUNIT_test_structs(runtime_partial),
	KUNIT_test_structs(runtime_all),
	KUNIT_test_structs(assigned_static_partial),
	KUNIT_test_structs(assigned_static_all),
	KUNIT_test_structs(assigned_dynamic_all),
	/* Everything fails this since it effectively performs a memcpy(). */
	KUNIT_test_structs(assigned_copy),
	/* STRUCTLEAK_BYREF_ALL should cover everything from here down. */
	KUNIT_test_scalars(none),
	KUNIT_CASE(test_switch_1_none),
	KUNIT_CASE(test_switch_2_none),
	/* STRUCTLEAK_BYREF should cover from here down. */
	KUNIT_test_structs(none),
	/* STRUCTLEAK will only cover this. */
	KUNIT_CASE(test_user),
	{}  /* Terminating sentinel entry. */
};
466 | |
467 | static struct kunit_suite stackinit_test_suite = { |
468 | .name = "stackinit" , |
469 | .test_cases = stackinit_test_cases, |
470 | }; |
471 | |
472 | kunit_test_suites(&stackinit_test_suite); |
473 | |
474 | MODULE_LICENSE("GPL" ); |
475 | |