1 | /* Tree lowering pass. This pass gimplifies the tree representation built |
2 | by the C-based front ends. The structure of gimplified, or |
3 | language-independent, trees is dictated by the grammar described in this |
4 | file. |
5 | Copyright (C) 2002-2023 Free Software Foundation, Inc. |
6 | Lowering of expressions contributed by Sebastian Pop <s.pop@laposte.net> |
7 | Re-written to support lowering of whole function trees, documentation |
8 | and miscellaneous cleanups by Diego Novillo <dnovillo@redhat.com> |
9 | |
10 | This file is part of GCC. |
11 | |
12 | GCC is free software; you can redistribute it and/or modify it under |
13 | the terms of the GNU General Public License as published by the Free |
14 | Software Foundation; either version 3, or (at your option) any later |
15 | version. |
16 | |
17 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
18 | WARRANTY; without even the implied warranty of MERCHANTABILITY or |
19 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
20 | for more details. |
21 | |
22 | You should have received a copy of the GNU General Public License |
23 | along with GCC; see the file COPYING3. If not see |
24 | <http://www.gnu.org/licenses/>. */ |
25 | |
26 | #include "config.h" |
27 | #include "system.h" |
28 | #include "coretypes.h" |
29 | #include "tm.h" |
30 | #include "function.h" |
31 | #include "basic-block.h" |
32 | #include "tree.h" |
33 | #include "tree-iterator.h" |
34 | #include "predict.h" |
35 | #include "gimple.h" |
36 | #include "cgraph.h" |
37 | #include "c-pretty-print.h" |
38 | #include "gimplify.h" |
39 | #include "langhooks.h" |
40 | #include "dumpfile.h" |
41 | #include "c-ubsan.h" |
42 | #include "tree-nested.h" |
43 | #include "context.h" |
44 | #include "tree-pass.h" |
45 | #include "internal-fn.h" |
46 | |
47 | /* The gimplification pass converts the language-dependent trees |
48 | (ld-trees) emitted by the parser into language-independent trees |
49 | (li-trees) that are the target of SSA analysis and transformations. |
50 | |
51 | Language-independent trees are based on the SIMPLE intermediate |
52 | representation used in the McCAT compiler framework: |
53 | |
54 | "Designing the McCAT Compiler Based on a Family of Structured |
55 | Intermediate Representations," |
56 | L. Hendren, C. Donawa, M. Emami, G. Gao, Justiani, and B. Sridharan, |
57 | Proceedings of the 5th International Workshop on Languages and |
58 | Compilers for Parallel Computing, no. 757 in Lecture Notes in |
59 | Computer Science, New Haven, Connecticut, pp. 406-420, |
60 | Springer-Verlag, August 3-5, 1992. |
61 | |
62 | http://www-acaps.cs.mcgill.ca/info/McCAT/McCAT.html |
63 | |
64 | Basically, we walk down gimplifying the nodes that we encounter. As we |
65 | walk back up, we check that they fit our constraints, and copy them |
66 | into temporaries if not. */ |
67 | |
/* Callback for c_genericize, used when -fsanitize=bounds is enabled.
   Walks the tree rooted at *TP and instruments ARRAY_REF accesses via
   ubsan_maybe_instrument_array_ref.  DATA is the same hash_set<tree>
   that is passed to walk_tree as its PSET argument, so adding a node
   to it prevents walk_tree from visiting that node again.  Always
   returns NULL_TREE, i.e. never terminates the walk early.  */

static tree
ubsan_walk_array_refs_r (tree *tp, int *walk_subtrees, void *data)
{
  hash_set<tree> *pset = (hash_set<tree> *) data;

  if (TREE_CODE (*tp) == BIND_EXPR)
    {
      /* Since walk_tree doesn't call the callback function on the decls
	 in BIND_EXPR_VARS, we have to walk them manually, so we can avoid
	 instrumenting DECL_INITIAL of TREE_STATIC vars.  */
      *walk_subtrees = 0;
      for (tree decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
	{
	  /* Skip static vars: their initializers must not be
	     instrumented.  */
	  if (TREE_STATIC (decl))
	    continue;
	  walk_tree (&DECL_INITIAL (decl), ubsan_walk_array_refs_r, pset,
		     pset);
	  walk_tree (&DECL_SIZE (decl), ubsan_walk_array_refs_r, pset, pset);
	  walk_tree (&DECL_SIZE_UNIT (decl), ubsan_walk_array_refs_r, pset,
		     pset);
	}
      walk_tree (&BIND_EXPR_BODY (*tp), ubsan_walk_array_refs_r, pset, pset);
    }
  else if (TREE_CODE (*tp) == ADDR_EXPR
	   && TREE_CODE (TREE_OPERAND (*tp, 0)) == ARRAY_REF)
    {
      /* &a[i]: instrument the inner ARRAY_REF, allowing the
	 one-past-the-end index (second argument true).  */
      ubsan_maybe_instrument_array_ref (&TREE_OPERAND (*tp, 0), true);
      /* Make sure ubsan_maybe_instrument_array_ref is not called again
	 on the ARRAY_REF, the above call might not instrument anything
	 as the index might be constant or masked, so ensure it is not
	 walked again and walk its subtrees manually.  */
      tree aref = TREE_OPERAND (*tp, 0);
      pset->add (k: aref);
      *walk_subtrees = 0;
      walk_tree (&TREE_OPERAND (aref, 0), ubsan_walk_array_refs_r, pset, pset);
      walk_tree (&TREE_OPERAND (aref, 1), ubsan_walk_array_refs_r, pset, pset);
      walk_tree (&TREE_OPERAND (aref, 2), ubsan_walk_array_refs_r, pset, pset);
      walk_tree (&TREE_OPERAND (aref, 3), ubsan_walk_array_refs_r, pset, pset);
    }
  else if (TREE_CODE (*tp) == ARRAY_REF)
    ubsan_maybe_instrument_array_ref (tp, false);
  else if (TREE_CODE (*tp) == MODIFY_EXPR)
    {
      /* Since r7-1900, we gimplify RHS before LHS.  Consider
	   a[b] |= c;
	 wherein we can have a single shared tree a[b] in both LHS and RHS.
	 If we only instrument the LHS and the access is invalid, the program
	 could crash before emitting a UBSan error.  So instrument the RHS
	 first.  */
      *walk_subtrees = 0;
      walk_tree (&TREE_OPERAND (*tp, 1), ubsan_walk_array_refs_r, pset, pset);
      walk_tree (&TREE_OPERAND (*tp, 0), ubsan_walk_array_refs_r, pset, pset);
    }
  return NULL_TREE;
}
125 | |
/* Gimplification of statement trees.  */

/* Local declarations.  */

/* Index into the break/continue label stacks below: one slot for each
   kind of jump target.  */
enum bc_t { bc_break = 0, bc_continue = 1 };

/* Stack of labels which are targets for "break" or "continue",
   linked through TREE_CHAIN (DECL_CHAIN on these LABEL_DECLs).  */
static tree bc_label[2];
135 | |
136 | /* Begin a scope which can be exited by a break or continue statement. BC |
137 | indicates which. |
138 | |
139 | Just creates a label with location LOCATION and pushes it into the current |
140 | context. */ |
141 | |
142 | static tree |
143 | begin_bc_block (enum bc_t bc, location_t location) |
144 | { |
145 | tree label = create_artificial_label (location); |
146 | DECL_CHAIN (label) = bc_label[bc]; |
147 | bc_label[bc] = label; |
148 | if (bc == bc_break) |
149 | LABEL_DECL_BREAK (label) = true; |
150 | else |
151 | LABEL_DECL_CONTINUE (label) = true; |
152 | return label; |
153 | } |
154 | |
155 | /* Finish a scope which can be exited by a break or continue statement. |
156 | LABEL was returned from the most recent call to begin_bc_block. BLOCK is |
157 | an expression for the contents of the scope. |
158 | |
159 | If we saw a break (or continue) in the scope, append a LABEL_EXPR to |
160 | BLOCK. Otherwise, just forget the label. */ |
161 | |
162 | static void |
163 | finish_bc_block (tree *block, enum bc_t bc, tree label) |
164 | { |
165 | gcc_assert (label == bc_label[bc]); |
166 | |
167 | if (TREE_USED (label)) |
168 | append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label), |
169 | block); |
170 | |
171 | bc_label[bc] = DECL_CHAIN (label); |
172 | DECL_CHAIN (label) = NULL_TREE; |
173 | } |
174 | |
175 | /* Allow saving and restoring break/continue state. */ |
176 | |
177 | void |
178 | save_bc_state (bc_state_t *state) |
179 | { |
180 | state->bc_label[bc_break] = bc_label[bc_break]; |
181 | state->bc_label[bc_continue] = bc_label[bc_continue]; |
182 | bc_label[bc_break] = NULL_TREE; |
183 | bc_label[bc_continue] = NULL_TREE; |
184 | } |
185 | |
186 | void |
187 | restore_bc_state (bc_state_t *state) |
188 | { |
189 | gcc_assert (bc_label[bc_break] == NULL); |
190 | gcc_assert (bc_label[bc_continue] == NULL); |
191 | bc_label[bc_break] = state->bc_label[bc_break]; |
192 | bc_label[bc_continue] = state->bc_label[bc_continue]; |
193 | } |
194 | |
195 | /* Get the LABEL_EXPR to represent a break or continue statement |
196 | in the current block scope. BC indicates which. */ |
197 | |
198 | static tree |
199 | get_bc_label (enum bc_t bc) |
200 | { |
201 | tree label = bc_label[bc]; |
202 | gcc_assert (label); |
203 | |
204 | /* Mark the label used for finish_bc_block. */ |
205 | TREE_USED (label) = 1; |
206 | return label; |
207 | } |
208 | |
209 | /* Return the location from EXPR, or OR_LOC if the former is unknown. */ |
210 | |
211 | location_t |
212 | expr_loc_or_loc (const_tree expr, location_t or_loc) |
213 | { |
214 | tree t = CONST_CAST_TREE (expr); |
215 | location_t loc = UNKNOWN_LOCATION; |
216 | if (t) |
217 | loc = EXPR_LOCATION (t); |
218 | if (loc == UNKNOWN_LOCATION) |
219 | loc = or_loc; |
220 | return loc; |
221 | } |
222 | |
/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.

   The lowered form -- a statement list of labels, gotos and a COND_EXPR,
   with the condition canonicalized to the end of the loop -- is stored in
   *STMT_P.  START_LOCUS is the location of the loop statement itself.
   WALK_SUBTREES, DATA, FUNC and LH are as for walk_tree_1; the loop's
   sub-expressions are walked here manually and *WALK_SUBTREES is cleared
   so the caller's walk does not visit them again.  */

static void
genericize_c_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
		   tree incr, bool cond_is_first, int *walk_subtrees,
		   void *data, walk_tree_fn func, walk_tree_lh lh)
{
  tree blab, clab;
  tree entry = NULL, exit = NULL, t;
  tree stmt_list = NULL;
  location_t cond_locus = expr_loc_or_loc (expr: cond, or_loc: start_locus);
  location_t incr_locus = expr_loc_or_loc (expr: incr, or_loc: start_locus);

  protected_set_expr_location_if_unset (incr, start_locus);

  walk_tree_1 (&cond, func, data, NULL, lh);
  walk_tree_1 (&incr, func, data, NULL, lh);

  /* Open break/continue scopes before walking the body, so break and
     continue statements inside it resolve to these labels.  */
  blab = begin_bc_block (bc: bc_break, location: start_locus);
  clab = begin_bc_block (bc: bc_continue, location: start_locus);

  walk_tree_1 (&body, func, data, NULL, lh);
  *walk_subtrees = 0;

  /* If condition is zero don't generate a loop construct.  */
  if (cond && integer_zerop (cond))
    {
      /* For while (0) / for (;0;) the body is dead except as a goto
	 target; a do-while body still runs once, so fall through and
	 emit it with no branch around it.  */
      if (cond_is_first)
	{
	  t = build1_loc (loc: start_locus, code: GOTO_EXPR, void_type_node,
			  arg1: get_bc_label (bc: bc_break));
	  append_to_statement_list (t, &stmt_list);
	}
    }
  else
    {
      /* Expand to gotos.  */
      tree top = build1 (LABEL_EXPR, void_type_node,
			 create_artificial_label (start_locus));

      /* If we have an exit condition, then we build an IF with gotos either
	 out of the loop, or to the top of it.  If there's no exit condition,
	 then we just build a jump back to the top.  */
      exit = build1 (GOTO_EXPR, void_type_node, LABEL_EXPR_LABEL (top));

      if (cond && !integer_nonzerop (cond))
	{
	  /* Canonicalize the loop condition to the end.  This means
	     generating a branch to the loop condition.  Reuse the
	     continue label, if there is no incr expression.  */
	  if (cond_is_first)
	    {
	      if (incr)
		{
		  entry = build1 (LABEL_EXPR, void_type_node,
				  create_artificial_label (start_locus));
		  t = build1_loc (loc: start_locus, code: GOTO_EXPR, void_type_node,
				  LABEL_EXPR_LABEL (entry));
		}
	      else
		t = build1_loc (loc: start_locus, code: GOTO_EXPR, void_type_node,
				arg1: get_bc_label (bc: bc_continue));
	      append_to_statement_list (t, &stmt_list);
	    }

	  t = build1 (GOTO_EXPR, void_type_node, get_bc_label (bc: bc_break));
	  exit = fold_build3_loc (cond_locus,
				  COND_EXPR, void_type_node, cond, exit, t);
	}
      else
	{
	  /* For the backward-goto's location of an unconditional loop
	     use the beginning of the body, or, if there is none, the
	     top of the loop.  */
	  location_t loc = expr_loc_or_loc (expr: expr_first (body),
					    or_loc: start_locus);
	  SET_EXPR_LOCATION (exit, loc);
	}
      append_to_statement_list (top, &stmt_list);
    }

  append_to_statement_list (body, &stmt_list);
  /* Close the continue scope: a continue jumps here, before INCR and
     the condition test.  */
  finish_bc_block (block: &stmt_list, bc: bc_continue, label: clab);
  if (incr)
    {
      if (MAY_HAVE_DEBUG_MARKER_STMTS && incr_locus != UNKNOWN_LOCATION)
	{
	  tree d = build0 (DEBUG_BEGIN_STMT, void_type_node);
	  SET_EXPR_LOCATION (d, expr_loc_or_loc (incr, start_locus));
	  append_to_statement_list (d, &stmt_list);
	}
      append_to_statement_list (incr, &stmt_list);
    }
  /* ENTRY is NULL_TREE unless we created the once-only entry label
     above (cond_is_first with an INCR expression).  */
  append_to_statement_list (entry, &stmt_list);

  if (MAY_HAVE_DEBUG_MARKER_STMTS && cond_locus != UNKNOWN_LOCATION)
    {
      tree d = build0 (DEBUG_BEGIN_STMT, void_type_node);
      SET_EXPR_LOCATION (d, cond_locus);
      append_to_statement_list (d, &stmt_list);
    }
  append_to_statement_list (exit, &stmt_list);
  finish_bc_block (block: &stmt_list, bc: bc_break, label: blab);
  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  *stmt_p = stmt_list;
}
336 | |
/* Genericize a FOR_STMT node *STMT_P: replace it with the genericized
   init statement (if any) followed by the lowered loop.  WALK_SUBTREES,
   DATA, FUNC and LH are as for c_genericize_control_stmt and are
   forwarded to genericize_c_loop.  */

static void
genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data,
		     walk_tree_fn func, walk_tree_lh lh)
{
  tree stmt = *stmt_p;
  tree expr = NULL;
  tree loop;
  tree init = FOR_INIT_STMT (stmt);

  if (init)
    {
      walk_tree_1 (&init, func, data, NULL, lh);
      append_to_statement_list (init, &expr);
    }

  genericize_c_loop (stmt_p: &loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
		     FOR_BODY (stmt), FOR_EXPR (stmt), cond_is_first: 1, walk_subtrees,
		     data, func, lh);
  append_to_statement_list (loop, &expr);
  /* NOTE(review): EXPR can still be NULL_TREE here if neither append
     created a statement list; fall back to the lowered loop itself.  */
  if (expr == NULL_TREE)
    expr = loop;
  *stmt_p = expr;
}
362 | |
/* Genericize a WHILE_STMT node *STMT_P by lowering it in place with
   genericize_c_loop (condition evaluated before the body, no increment
   expression).  WALK_SUBTREES, DATA, FUNC and LH are as for
   c_genericize_control_stmt.  */

static void
genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data,
		       walk_tree_fn func, walk_tree_lh lh)
{
  tree stmt = *stmt_p;
  genericize_c_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
		     WHILE_BODY (stmt), NULL_TREE, cond_is_first: 1, walk_subtrees,
		     data, func, lh);
}
374 | |
/* Genericize a DO_STMT node *STMT_P by lowering it in place with
   genericize_c_loop (condition evaluated after the body, no increment
   expression).  WALK_SUBTREES, DATA, FUNC and LH are as for
   c_genericize_control_stmt.  */

static void
genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data,
		    walk_tree_fn func, walk_tree_lh lh)
{
  tree stmt = *stmt_p;
  genericize_c_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
		     DO_BODY (stmt), NULL_TREE, cond_is_first: 0, walk_subtrees,
		     data, func, lh);
}
386 | |
/* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR.
   A break scope is opened around the body so break statements inside it
   lower to gotos targeting a label appended after the body.
   WALK_SUBTREES, DATA, FUNC and LH are as for c_genericize_control_stmt;
   the cond/body/type sub-trees are walked here manually.  */

static void
genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data,
			walk_tree_fn func, walk_tree_lh lh)
{
  tree stmt = *stmt_p;
  tree break_block, body, cond, type;
  location_t stmt_locus = EXPR_LOCATION (stmt);

  body = SWITCH_STMT_BODY (stmt);
  if (!body)
    body = build_empty_stmt (stmt_locus);
  cond = SWITCH_STMT_COND (stmt);
  type = SWITCH_STMT_TYPE (stmt);

  walk_tree_1 (&cond, func, data, NULL, lh);

  break_block = begin_bc_block (bc: bc_break, location: stmt_locus);

  walk_tree_1 (&body, func, data, NULL, lh);
  walk_tree_1 (&type, func, data, NULL, lh);
  *walk_subtrees = 0;

  /* If any break targeted the label, mark it as the switch's break
     label before finish_bc_block appends it to the body.  */
  if (TREE_USED (break_block))
    SWITCH_BREAK_LABEL_P (break_block) = 1;
  finish_bc_block (block: &body, bc: bc_break, label: break_block);
  *stmt_p = build2_loc (loc: stmt_locus, code: SWITCH_EXPR, type, arg0: cond, arg1: body);
  SWITCH_ALL_CASES_P (*stmt_p) = SWITCH_STMT_ALL_CASES_P (stmt);
  /* A switch the front end claimed has no break must not have used the
     break label.  */
  gcc_checking_assert (!SWITCH_STMT_NO_BREAK_P (stmt)
		       || !TREE_USED (break_block));
}
419 | |
/* Genericize a CONTINUE_STMT node *STMT_P: replace it with a
   PREDICT_EXPR (hinting the continue edge is not taken) followed by a
   GOTO_EXPR to the innermost continue label.  */

static void
genericize_continue_stmt (tree *stmt_p)
{
  tree stmt_list = NULL;
  tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
  tree label = get_bc_label (bc: bc_continue);
  location_t location = EXPR_LOCATION (*stmt_p);
  tree jump = build1_loc (loc: location, code: GOTO_EXPR, void_type_node, arg1: label);
  /* Force the predictor onto the list even though it has no side
     effects of its own.  */
  append_to_statement_list_force (pred, &stmt_list);
  append_to_statement_list (jump, &stmt_list);
  *stmt_p = stmt_list;
}
434 | |
/* Genericize a BREAK_STMT node *STMT_P: replace it with a GOTO_EXPR
   targeting the innermost break label.  */

static void
genericize_break_stmt (tree *stmt_p)
{
  tree label = get_bc_label (bc: bc_break);
  location_t location = EXPR_LOCATION (*stmt_p);
  *stmt_p = build1_loc (loc: location, code: GOTO_EXPR, void_type_node, arg1: label);
}
444 | |
/* Genericize an OMP_FOR (or OMP_SIMD/OMP_DISTRIBUTE/OMP_LOOP/
   OMP_TASKLOOP/OACC_LOOP) node *STMT_P.  Only a continue scope is
   opened here, around the loop body; its label is appended to the body
   by finish_bc_block.  WALK_SUBTREES, DATA, FUNC and LH are as for
   c_genericize_control_stmt; the operands are walked manually.  Note
   that for OMP_TASKLOOP the clauses are deliberately not walked here.  */

static void
genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data,
			 walk_tree_fn func, walk_tree_lh lh)
{
  tree stmt = *stmt_p;
  location_t locus = EXPR_LOCATION (stmt);
  tree clab = begin_bc_block (bc: bc_continue, location: locus);

  walk_tree_1 (&OMP_FOR_BODY (stmt), func, data, NULL, lh);
  if (TREE_CODE (stmt) != OMP_TASKLOOP)
    walk_tree_1 (&OMP_FOR_CLAUSES (stmt), func, data, NULL, lh);
  walk_tree_1 (&OMP_FOR_INIT (stmt), func, data, NULL, lh);
  walk_tree_1 (&OMP_FOR_COND (stmt), func, data, NULL, lh);
  walk_tree_1 (&OMP_FOR_INCR (stmt), func, data, NULL, lh);
  walk_tree_1 (&OMP_FOR_PRE_BODY (stmt), func, data, NULL, lh);
  *walk_subtrees = 0;

  finish_bc_block (block: &OMP_FOR_BODY (stmt), bc: bc_continue, label: clab);
}
466 | |
467 | |
/* Lower structured control flow tree nodes, such as loops.  The
   STMT_P, WALK_SUBTREES, and DATA arguments are as for the walk_tree_fn
   type.  FUNC and LH are language-specific functions passed to walk_tree_1
   for node visiting and traversal, respectively; they are used to do
   subtree processing in a language-dependent way.  Always returns NULL,
   i.e. never terminates the enclosing walk early.  */

tree
c_genericize_control_stmt (tree *stmt_p, int *walk_subtrees, void *data,
			   walk_tree_fn func, walk_tree_lh lh)
{
  tree stmt = *stmt_p;

  switch (TREE_CODE (stmt))
    {
    case FOR_STMT:
      genericize_for_stmt (stmt_p, walk_subtrees, data, func, lh);
      break;

    case WHILE_STMT:
      genericize_while_stmt (stmt_p, walk_subtrees, data, func, lh);
      break;

    case DO_STMT:
      genericize_do_stmt (stmt_p, walk_subtrees, data, func, lh);
      break;

    case SWITCH_STMT:
      genericize_switch_stmt (stmt_p, walk_subtrees, data, func, lh);
      break;

    case CONTINUE_STMT:
      genericize_continue_stmt (stmt_p);
      break;

    case BREAK_STMT:
      genericize_break_stmt (stmt_p);
      break;

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OMP_TASKLOOP:
    case OACC_LOOP:
      genericize_omp_for_stmt (stmt_p, walk_subtrees, data, func, lh);
      break;

    case STATEMENT_LIST:
      if (TREE_SIDE_EFFECTS (stmt))
	{
	  tree_stmt_iterator i;
	  int nondebug_stmts = 0;
	  bool clear_side_effects = true;
	  /* Genericization can clear TREE_SIDE_EFFECTS, e.g. when
	     transforming an IF_STMT into COND_EXPR.  If such stmt
	     appears in a STATEMENT_LIST that contains only that
	     stmt and some DEBUG_BEGIN_STMTs, without -g where the
	     STATEMENT_LIST wouldn't be present at all the resulting
	     expression wouldn't have TREE_SIDE_EFFECTS set, so make sure
	     to clear it even on the STATEMENT_LIST in such cases.  */
	  hash_set<tree> *pset = (c_dialect_cxx ()
				  ? nullptr
				  : static_cast<hash_set<tree> *>(data));
	  for (i = tsi_start (t: stmt); !tsi_end_p (i); tsi_next (i: &i))
	    {
	      tree t = tsi_stmt (i);
	      /* Count non-debug statements, saturating at 2: only "zero,
		 one or more" matters below.  */
	      if (TREE_CODE (t) != DEBUG_BEGIN_STMT && nondebug_stmts < 2)
		nondebug_stmts++;
	      walk_tree_1 (tsi_stmt_ptr (i), func, data, pset, lh);
	      /* Re-fetch the statement after the walk: it may have been
		 replaced, and its side-effect flag may have changed.  */
	      if (TREE_CODE (t) != DEBUG_BEGIN_STMT
		  && (nondebug_stmts > 1 || TREE_SIDE_EFFECTS (tsi_stmt (i))))
		clear_side_effects = false;
	    }
	  if (clear_side_effects)
	    TREE_SIDE_EFFECTS (stmt) = 0;
	  *walk_subtrees = 0;
	}
      break;

    default:
      break;
    }

  return NULL;
}
553 | |
554 | |
/* Wrapper for c_genericize_control_stmt to allow it to be used as a walk_tree
   callback.  This is appropriate for C; C++ calls c_genericize_control_stmt
   directly.  Always returns NULL so the walk continues.  */

static tree
c_genericize_control_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  c_genericize_control_stmt (stmt_p, walk_subtrees, data,
			     func: c_genericize_control_r, NULL);
  return NULL;
}
566 | |
/* Convert the tree representation of FNDECL from C frontend trees to
   GENERIC.  This instruments array accesses for -fsanitize=bounds,
   lowers structured control flow (for C only; C++ has already done so),
   warns about duplicated branches if requested, emits the "original"
   dump, and recurses into any nested functions.  */

void
c_genericize (tree fndecl)
{
  dump_file_info *dfi;
  FILE *dump_orig;
  dump_flags_t local_dump_flags;
  struct cgraph_node *cgn;

  if (flag_sanitize & SANITIZE_BOUNDS)
    {
      /* The hash_set serves both as walk_tree's pset and as the
	 callback's DATA argument.  */
      hash_set<tree> pset;
      walk_tree (&DECL_SAVED_TREE (fndecl), ubsan_walk_array_refs_r, &pset,
		 &pset);
    }

  /* Genericize loops and other structured control constructs.  The C++
     front end has already done this in lang-specific code.  */
  if (!c_dialect_cxx ())
    {
      bc_state_t save_state;
      push_cfun (DECL_STRUCT_FUNCTION (fndecl));
      /* Clear any break/continue state left by an enclosing context.  */
      save_bc_state (state: &save_state);
      hash_set<tree> pset;
      walk_tree (&DECL_SAVED_TREE (fndecl), c_genericize_control_r, &pset,
		 &pset);
      restore_bc_state (state: &save_state);
      pop_cfun ();
    }

  if (warn_duplicated_branches)
    walk_tree_without_duplicates (&DECL_SAVED_TREE (fndecl),
				  do_warn_duplicated_branches_r, NULL);

  /* Dump the C-specific tree IR.  */
  dfi = g->get_dumps ()->get_dump_file_info (phase: TDI_original);
  dump_orig = dfi->pstream;
  local_dump_flags = dfi->pflags;
  if (dump_orig)
    {
      fprintf (stream: dump_orig, format: "\n;; Function %s",
	       lang_hooks.decl_printable_name (fndecl, 2));
      fprintf (stream: dump_orig, format: " (%s)\n",
	       (!DECL_ASSEMBLER_NAME_SET_P (fndecl) ? "null"
		: IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fndecl))));
      fprintf (stream: dump_orig, format: ";; enabled by -%s\n", dump_flag_name (TDI_original));
      fprintf (stream: dump_orig, format: "\n");

      if (local_dump_flags & TDF_RAW)
	dump_node (DECL_SAVED_TREE (fndecl),
		   TDF_SLIM | local_dump_flags, dump_orig);
      else
	print_c_tree (file: dump_orig, DECL_SAVED_TREE (fndecl));
      fprintf (stream: dump_orig, format: "\n");
    }

  /* Dump all nested functions now.  */
  cgn = cgraph_node::get_create (fndecl);
  for (cgn = first_nested_function (node: cgn);
       cgn; cgn = next_nested_function (node: cgn))
    c_genericize (fndecl: cgn->decl);
}
631 | |
/* Chain BLOCK onto the BLOCK_SUBBLOCKS of the innermost BLOCK found on
   the gimplifier's stack of enclosing BIND_EXPRs.  NOTE(review): this
   assumes at least one gbind on the stack has an associated BLOCK; if
   none did, the use of BIND after the loop would be invalid -- presumably
   guaranteed by the callers.  */

static void
add_block_to_enclosing (tree block)
{
  unsigned i;
  tree enclosing;
  gbind *bind;
  vec<gbind *> stack = gimple_bind_expr_stack ();

  /* Find the innermost bind that carries a BLOCK.  */
  FOR_EACH_VEC_ELT (stack, i, bind)
    if (gimple_bind_block (bind_stmt: bind))
      break;

  enclosing = gimple_bind_block (bind_stmt: bind);
  BLOCK_SUBBLOCKS (enclosing) = chainon (BLOCK_SUBBLOCKS (enclosing), block);
}
647 | |
648 | /* Genericize a scope by creating a new BIND_EXPR. |
649 | BLOCK is either a BLOCK representing the scope or a chain of _DECLs. |
650 | In the latter case, we need to create a new BLOCK and add it to the |
651 | BLOCK_SUBBLOCKS of the enclosing block. |
652 | BODY is a chain of C _STMT nodes for the contents of the scope, to be |
653 | genericized. */ |
654 | |
655 | tree |
656 | c_build_bind_expr (location_t loc, tree block, tree body) |
657 | { |
658 | tree decls, bind; |
659 | |
660 | if (block == NULL_TREE) |
661 | decls = NULL_TREE; |
662 | else if (TREE_CODE (block) == BLOCK) |
663 | decls = BLOCK_VARS (block); |
664 | else |
665 | { |
666 | decls = block; |
667 | if (DECL_ARTIFICIAL (decls)) |
668 | block = NULL_TREE; |
669 | else |
670 | { |
671 | block = make_node (BLOCK); |
672 | BLOCK_VARS (block) = decls; |
673 | add_block_to_enclosing (block); |
674 | } |
675 | } |
676 | |
677 | if (!body) |
678 | body = build_empty_stmt (loc); |
679 | if (decls || block) |
680 | { |
681 | bind = build3 (BIND_EXPR, void_type_node, decls, body, block); |
682 | TREE_SIDE_EFFECTS (bind) = 1; |
683 | SET_EXPR_LOCATION (bind, loc); |
684 | } |
685 | else |
686 | bind = body; |
687 | |
688 | return bind; |
689 | } |
690 | |
/* Helper for c_gimplify_expr: return true if the target directly
   supports the fma-like internal function FN for TYPE, queried with
   OPTIMIZE_FOR_BOTH (i.e. regardless of the speed/size preference).  */

static bool
fma_supported_p (enum internal_fn fn, tree type)
{
  return direct_internal_fn_supported_p (fn, type, OPTIMIZE_FOR_BOTH);
}
698 | |
/* Gimplification of expression trees.  */

/* Do C-specific gimplification on *EXPR_P.  PRE_P and POST_P are as in
   gimplify_expr.  Returns a gimplify_status: GS_ALL_DONE when the
   expression was fully lowered here, GS_ERROR on failure, and
   GS_UNHANDLED to let the language-independent gimplifier handle it
   (possibly after *EXPR_P was adjusted in place).  */

int
c_gimplify_expr (tree *expr_p, gimple_seq *pre_p ATTRIBUTE_UNUSED,
		 gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  enum tree_code code = TREE_CODE (*expr_p);

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      {
	/* We used to convert the right operand of a shift-expression
	   to an integer_type_node in the FEs.  But it is unnecessary
	   and not desirable for diagnostics and sanitizers.  We keep
	   this here to not pessimize the code, but we convert to an
	   unsigned type, because negative shift counts are undefined
	   anyway.
	   We should get rid of this conversion when we have a proper
	   type demotion/promotion pass.  */
	tree *op1_p = &TREE_OPERAND (*expr_p, 1);
	if (!VECTOR_TYPE_P (TREE_TYPE (*op1_p))
	    && !types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (*op1_p)),
				    unsigned_type_node)
	    && !types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (*op1_p)),
				    integer_type_node))
	  /* Make sure to unshare the result, tree sharing is invalid
	     during gimplification.  */
	  *op1_p = unshare_expr (convert (unsigned_type_node, *op1_p));
	break;
      }

    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
      {
	/* For promoting integer types, do the arithmetic in a type that
	   cannot trigger signed overflow.  */
	tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
	if (INTEGRAL_TYPE_P (type) && c_promoting_integer_type_p (type))
	  {
	    if (!TYPE_OVERFLOW_WRAPS (type))
	      type = unsigned_type_for (type);
	    return gimplify_self_mod_expr (expr_p, pre_p, post_p, 1, type);
	  }
	break;
      }

    case PLUS_EXPR:
    case MINUS_EXPR:
      {
	tree type = TREE_TYPE (*expr_p);
	/* For -ffp-contract=on we need to attempt FMA contraction only
	   during initial gimplification.  Late contraction across statement
	   boundaries would violate language semantics.  */
	if (SCALAR_FLOAT_TYPE_P (type)
	    && flag_fp_contract_mode == FP_CONTRACT_ON
	    && cfun && !(cfun->curr_properties & PROP_gimple_any)
	    && fma_supported_p (fn: IFN_FMA, type))
	  {
	    /* NEG_MUL: the multiplication result is negated;
	       NEG_ADD: the addend is negated (i.e. a MINUS_EXPR).  */
	    bool neg_mul = false, neg_add = code == MINUS_EXPR;

	    tree *op0_p = &TREE_OPERAND (*expr_p, 0);
	    tree *op1_p = &TREE_OPERAND (*expr_p, 1);

	    /* Look for ±(x * y) ± z, swapping operands if necessary.  */
	    if (TREE_CODE (*op0_p) == NEGATE_EXPR
		&& TREE_CODE (TREE_OPERAND (*op0_p, 0)) == MULT_EXPR)
	      /* '*EXPR_P' is '-(x * y) ± z'.  This is fine.  */;
	    else if (TREE_CODE (*op0_p) != MULT_EXPR)
	      {
		std::swap (a&: op0_p, b&: op1_p);
		std::swap (a&: neg_mul, b&: neg_add);
	      }
	    /* Strip a negation off the multiplication, remembering it.  */
	    if (TREE_CODE (*op0_p) == NEGATE_EXPR)
	      {
		op0_p = &TREE_OPERAND (*op0_p, 0);
		neg_mul = !neg_mul;
	      }
	    /* No multiplication on either side: nothing to contract.  */
	    if (TREE_CODE (*op0_p) != MULT_EXPR)
	      break;
	    /* OPS are the fma operands: x, y and the addend z.  */
	    auto_vec<tree, 3> ops (3);
	    ops.quick_push (TREE_OPERAND (*op0_p, 0));
	    ops.quick_push (TREE_OPERAND (*op0_p, 1));
	    ops.quick_push (obj: *op1_p);

	    /* Pick the internal function matching the sign pattern,
	       falling back to explicit negation when the fused variant
	       is not supported.  */
	    enum internal_fn ifn = IFN_FMA;
	    if (neg_mul)
	      {
		if (fma_supported_p (fn: IFN_FNMA, type))
		  ifn = IFN_FNMA;
		else
		  ops[0] = build1 (NEGATE_EXPR, type, ops[0]);
	      }
	    if (neg_add)
	      {
		enum internal_fn ifn2 = ifn == IFN_FMA ? IFN_FMS : IFN_FNMS;
		if (fma_supported_p (fn: ifn2, type))
		  ifn = ifn2;
		else
		  ops[2] = build1 (NEGATE_EXPR, type, ops[2]);
	      }
	    /* Avoid gimplify_arg: it emits all side effects into *PRE_P.  */
	    for (auto &&op : ops)
	      if (gimplify_expr (&op, pre_p, post_p, is_gimple_val, fb_rvalue)
		  == GS_ERROR)
		return GS_ERROR;

	    /* Emit the call and replace the expression with its result.  */
	    gcall *call = gimple_build_call_internal_vec (ifn, ops);
	    gimple_seq_add_stmt_without_update (pre_p, call);
	    *expr_p = create_tmp_var (type);
	    gimple_call_set_lhs (gs: call, lhs: *expr_p);
	    return GS_ALL_DONE;
	  }
	break;
      }

    default:;
    }

  return GS_UNHANDLED;
}
826 | |