/* C++-specific tree lowering bits; see also c-gimplify.c and gimplify.c.
2 | |
3 | Copyright (C) 2002-2017 Free Software Foundation, Inc. |
4 | Contributed by Jason Merrill <jason@redhat.com> |
5 | |
6 | This file is part of GCC. |
7 | |
8 | GCC is free software; you can redistribute it and/or modify it under |
9 | the terms of the GNU General Public License as published by the Free |
10 | Software Foundation; either version 3, or (at your option) any later |
11 | version. |
12 | |
13 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
14 | WARRANTY; without even the implied warranty of MERCHANTABILITY or |
15 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
16 | for more details. |
17 | |
18 | You should have received a copy of the GNU General Public License |
19 | along with GCC; see the file COPYING3. If not see |
20 | <http://www.gnu.org/licenses/>. */ |
21 | |
22 | #include "config.h" |
23 | #include "system.h" |
24 | #include "coretypes.h" |
25 | #include "target.h" |
26 | #include "basic-block.h" |
27 | #include "cp-tree.h" |
28 | #include "gimple.h" |
29 | #include "predict.h" |
30 | #include "stor-layout.h" |
31 | #include "tree-iterator.h" |
32 | #include "gimplify.h" |
33 | #include "c-family/c-ubsan.h" |
34 | #include "stringpool.h" |
35 | #include "attribs.h" |
36 | #include "asan.h" |
37 | |
38 | /* Forward declarations. */ |
39 | |
40 | static tree cp_genericize_r (tree *, int *, void *); |
41 | static tree cp_fold_r (tree *, int *, void *); |
42 | static void cp_genericize_tree (tree*, bool); |
43 | static tree cp_fold (tree); |
44 | |
45 | /* Local declarations. */ |
46 | |
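/* The two kinds of jump out of a loop or switch that we lower here:
   "break" and "continue".  Used to index bc_label below.  */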
47 | enum bc_t { bc_break = 0, bc_continue = 1 }; |
48 | |
49 | /* Stack of labels which are targets for "break" or "continue", |
50 | linked through TREE_CHAIN. */ |
51 | static tree bc_label[2]; |
52 | |
53 | /* Begin a scope which can be exited by a break or continue statement. BC |
54 | indicates which. |
55 | |
56 | Just creates a label with location LOCATION and pushes it into the current |
57 | context. */ |
58 | |
59 | static tree |
60 | begin_bc_block (enum bc_t bc, location_t location) |
61 | { |
62 | tree label = create_artificial_label (location); |
63 | DECL_CHAIN (label) = bc_label[bc]; |
64 | bc_label[bc] = label; |
65 | if (bc == bc_break) |
66 | LABEL_DECL_BREAK (label) = true; |
67 | else |
68 | LABEL_DECL_CONTINUE (label) = true; |
69 | return label; |
70 | } |
71 | |
72 | /* Finish a scope which can be exited by a break or continue statement. |
73 | LABEL was returned from the most recent call to begin_bc_block. BLOCK is |
74 | an expression for the contents of the scope. |
75 | |
76 | If we saw a break (or continue) in the scope, append a LABEL_EXPR to |
77 | BLOCK. Otherwise, just forget the label. */ |
78 | |
79 | static void |
80 | finish_bc_block (tree *block, enum bc_t bc, tree label) |
81 | { |
82 | gcc_assert (label == bc_label[bc]); |
83 | |
84 | if (TREE_USED (label)) |
85 | append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label), |
86 | block); |
87 | |
88 | bc_label[bc] = DECL_CHAIN (label); |
89 | DECL_CHAIN (label) = NULL_TREE; |
90 | } |
91 | |
/* Get the LABEL_DECL to jump to for a break or continue statement in the
   current block scope.  BC indicates which.  */
94 | |
95 | static tree |
96 | get_bc_label (enum bc_t bc) |
97 | { |
98 | tree label = bc_label[bc]; |
99 | |
100 | /* Mark the label used for finish_bc_block. */ |
101 | TREE_USED (label) = 1; |
102 | return label; |
103 | } |
104 | |
105 | /* Genericize a TRY_BLOCK. */ |
106 | |
107 | static void |
108 | genericize_try_block (tree *stmt_p) |
109 | { |
110 | tree body = TRY_STMTS (*stmt_p); |
111 | tree cleanup = TRY_HANDLERS (*stmt_p); |
112 | |
113 | *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup); |
114 | } |
115 | |
116 | /* Genericize a HANDLER by converting to a CATCH_EXPR. */ |
117 | |
118 | static void |
119 | genericize_catch_block (tree *stmt_p) |
120 | { |
121 | tree type = HANDLER_TYPE (*stmt_p); |
122 | tree body = HANDLER_BODY (*stmt_p); |
123 | |
124 | /* FIXME should the caught type go in TREE_TYPE? */ |
125 | *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body); |
126 | } |
127 | |
128 | /* A terser interface for building a representation of an exception |
129 | specification. */ |
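/* The result has the form

     TRY_CATCH_EXPR
       BODY
       EH_FILTER_EXPR <ALLOWED>
	 FAILURE

   i.e. BODY runs under an exception filter that allows only the types in
   ALLOWED and executes FAILURE for anything else.  */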
130 | |
131 | static tree |
132 | build_gimple_eh_filter_tree (tree body, tree allowed, tree failure) |
133 | { |
134 | tree t; |
135 | |
136 | /* FIXME should the allowed types go in TREE_TYPE? */ |
137 | t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE); |
138 | append_to_statement_list (failure, &EH_FILTER_FAILURE (t)); |
139 | |
140 | t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t); |
141 | append_to_statement_list (body, &TREE_OPERAND (t, 0)); |
142 | |
143 | return t; |
144 | } |
145 | |
146 | /* Genericize an EH_SPEC_BLOCK by converting it to a |
147 | TRY_CATCH_EXPR/EH_FILTER_EXPR pair. */ |
148 | |
149 | static void |
150 | genericize_eh_spec_block (tree *stmt_p) |
151 | { |
152 | tree body = EH_SPEC_STMTS (*stmt_p); |
153 | tree allowed = EH_SPEC_RAISES (*stmt_p); |
154 | tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ()); |
155 | |
156 | *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure); |
157 | TREE_NO_WARNING (*stmt_p) = true; |
158 | TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true; |
159 | } |
160 | |
161 | /* Genericize an IF_STMT by turning it into a COND_EXPR. */ |
162 | |
163 | static void |
164 | genericize_if_stmt (tree *stmt_p) |
165 | { |
166 | tree stmt, cond, then_, else_; |
167 | location_t locus = EXPR_LOCATION (*stmt_p); |
168 | |
169 | stmt = *stmt_p; |
170 | cond = IF_COND (stmt); |
171 | then_ = THEN_CLAUSE (stmt); |
172 | else_ = ELSE_CLAUSE (stmt); |
173 | |
174 | if (!then_) |
175 | then_ = build_empty_stmt (locus); |
176 | if (!else_) |
177 | else_ = build_empty_stmt (locus); |
178 | |
179 | if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_)) |
180 | stmt = then_; |
181 | else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_)) |
182 | stmt = else_; |
183 | else |
184 | stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_); |
185 | if (!EXPR_HAS_LOCATION (stmt)) |
186 | protected_set_expr_location (stmt, locus); |
187 | *stmt_p = stmt; |
188 | } |
189 | |
190 | /* Build a generic representation of one of the C loop forms. COND is the |
191 | loop condition or NULL_TREE. BODY is the (possibly compound) statement |
192 | controlled by the loop. INCR is the increment expression of a for-loop, |
193 | or NULL_TREE. COND_IS_FIRST indicates whether the condition is |
194 | evaluated before the loop body as in while and for loops, or after the |
195 | loop body as in do-while loops. */ |
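/* For example, "while (cond) body" becomes, roughly,

     LOOP_EXPR
       if (cond) ; else goto break_lab;
       body
       continue_lab:;
     break_lab:;

   The exit test is omitted when the condition is a constant (and for
   do-while loops it follows the body instead), and each label is emitted
   only if the body actually uses break or continue.  */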
196 | |
197 | static void |
198 | genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body, |
199 | tree incr, bool cond_is_first, int *walk_subtrees, |
200 | void *data) |
201 | { |
202 | tree blab, clab; |
203 | tree exit = NULL; |
204 | tree stmt_list = NULL; |
205 | |
206 | blab = begin_bc_block (bc_break, start_locus); |
207 | clab = begin_bc_block (bc_continue, start_locus); |
208 | |
209 | protected_set_expr_location (incr, start_locus); |
210 | |
211 | cp_walk_tree (&cond, cp_genericize_r, data, NULL); |
212 | cp_walk_tree (&body, cp_genericize_r, data, NULL); |
213 | cp_walk_tree (&incr, cp_genericize_r, data, NULL); |
214 | *walk_subtrees = 0; |
215 | |
216 | if (cond && TREE_CODE (cond) != INTEGER_CST) |
217 | { |
218 | /* If COND is constant, don't bother building an exit. If it's false, |
219 | we won't build a loop. If it's true, any exits are in the body. */ |
220 | location_t cloc = EXPR_LOC_OR_LOC (cond, start_locus); |
221 | exit = build1_loc (cloc, GOTO_EXPR, void_type_node, |
222 | get_bc_label (bc_break)); |
223 | exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond, |
224 | build_empty_stmt (cloc), exit); |
225 | } |
226 | |
227 | if (exit && cond_is_first) |
228 | append_to_statement_list (exit, &stmt_list); |
229 | append_to_statement_list (body, &stmt_list); |
230 | finish_bc_block (&stmt_list, bc_continue, clab); |
231 | append_to_statement_list (incr, &stmt_list); |
232 | if (exit && !cond_is_first) |
233 | append_to_statement_list (exit, &stmt_list); |
234 | |
235 | if (!stmt_list) |
236 | stmt_list = build_empty_stmt (start_locus); |
237 | |
238 | tree loop; |
239 | if (cond && integer_zerop (cond)) |
240 | { |
241 | if (cond_is_first) |
242 | loop = fold_build3_loc (start_locus, COND_EXPR, |
243 | void_type_node, cond, stmt_list, |
244 | build_empty_stmt (start_locus)); |
245 | else |
246 | loop = stmt_list; |
247 | } |
248 | else |
249 | { |
250 | location_t loc = start_locus; |
251 | if (!cond || integer_nonzerop (cond)) |
252 | loc = EXPR_LOCATION (expr_first (body)); |
253 | if (loc == UNKNOWN_LOCATION) |
254 | loc = start_locus; |
255 | loop = build1_loc (loc, LOOP_EXPR, void_type_node, stmt_list); |
256 | } |
257 | |
258 | stmt_list = NULL; |
259 | append_to_statement_list (loop, &stmt_list); |
260 | finish_bc_block (&stmt_list, bc_break, blab); |
261 | if (!stmt_list) |
262 | stmt_list = build_empty_stmt (start_locus); |
263 | |
264 | *stmt_p = stmt_list; |
265 | } |
266 | |
267 | /* Genericize a FOR_STMT node *STMT_P. */ |
268 | |
269 | static void |
270 | genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data) |
271 | { |
272 | tree stmt = *stmt_p; |
273 | tree expr = NULL; |
274 | tree loop; |
275 | tree init = FOR_INIT_STMT (stmt); |
276 | |
277 | if (init) |
278 | { |
279 | cp_walk_tree (&init, cp_genericize_r, data, NULL); |
280 | append_to_statement_list (init, &expr); |
281 | } |
282 | |
283 | genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt), |
284 | FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data); |
285 | append_to_statement_list (loop, &expr); |
286 | if (expr == NULL_TREE) |
287 | expr = loop; |
288 | *stmt_p = expr; |
289 | } |
290 | |
291 | /* Genericize a WHILE_STMT node *STMT_P. */ |
292 | |
293 | static void |
294 | genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data) |
295 | { |
296 | tree stmt = *stmt_p; |
297 | genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt), |
298 | WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data); |
299 | } |
300 | |
301 | /* Genericize a DO_STMT node *STMT_P. */ |
302 | |
303 | static void |
304 | genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data) |
305 | { |
306 | tree stmt = *stmt_p; |
307 | genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt), |
308 | DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data); |
309 | } |
310 | |
311 | /* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR. */ |
312 | |
313 | static void |
314 | genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data) |
315 | { |
316 | tree stmt = *stmt_p; |
317 | tree break_block, body, cond, type; |
318 | location_t stmt_locus = EXPR_LOCATION (stmt); |
319 | |
320 | break_block = begin_bc_block (bc_break, stmt_locus); |
321 | |
322 | body = SWITCH_STMT_BODY (stmt); |
323 | if (!body) |
324 | body = build_empty_stmt (stmt_locus); |
325 | cond = SWITCH_STMT_COND (stmt); |
326 | type = SWITCH_STMT_TYPE (stmt); |
327 | |
328 | cp_walk_tree (&body, cp_genericize_r, data, NULL); |
329 | cp_walk_tree (&cond, cp_genericize_r, data, NULL); |
330 | cp_walk_tree (&type, cp_genericize_r, data, NULL); |
331 | *walk_subtrees = 0; |
332 | |
333 | if (TREE_USED (break_block)) |
334 | SWITCH_BREAK_LABEL_P (break_block) = 1; |
335 | finish_bc_block (&body, bc_break, break_block); |
336 | *stmt_p = build2_loc (stmt_locus, SWITCH_EXPR, type, cond, body); |
337 | SWITCH_ALL_CASES_P (*stmt_p) = SWITCH_STMT_ALL_CASES_P (stmt); |
338 | gcc_checking_assert (!SWITCH_STMT_NO_BREAK_P (stmt) |
339 | || !TREE_USED (break_block)); |
340 | } |
341 | |
342 | /* Genericize a CONTINUE_STMT node *STMT_P. */ |
343 | |
344 | static void |
345 | genericize_continue_stmt (tree *stmt_p) |
346 | { |
347 | tree stmt_list = NULL; |
348 | tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN); |
349 | tree label = get_bc_label (bc_continue); |
350 | location_t location = EXPR_LOCATION (*stmt_p); |
351 | tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label); |
352 | append_to_statement_list_force (pred, &stmt_list); |
353 | append_to_statement_list (jump, &stmt_list); |
354 | *stmt_p = stmt_list; |
355 | } |
356 | |
357 | /* Genericize a BREAK_STMT node *STMT_P. */ |
358 | |
359 | static void |
360 | genericize_break_stmt (tree *stmt_p) |
361 | { |
362 | tree label = get_bc_label (bc_break); |
363 | location_t location = EXPR_LOCATION (*stmt_p); |
364 | *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label); |
365 | } |
366 | |
/* Genericize an OMP_FOR node *STMT_P.  */
368 | |
369 | static void |
370 | genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data) |
371 | { |
372 | tree stmt = *stmt_p; |
373 | location_t locus = EXPR_LOCATION (stmt); |
374 | tree clab = begin_bc_block (bc_continue, locus); |
375 | |
376 | cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL); |
377 | if (TREE_CODE (stmt) != OMP_TASKLOOP) |
378 | cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL); |
379 | cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL); |
380 | cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL); |
381 | cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL); |
382 | cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL); |
383 | *walk_subtrees = 0; |
384 | |
385 | finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab); |
386 | } |
387 | |
388 | /* Hook into the middle of gimplifying an OMP_FOR node. */ |
389 | |
390 | static enum gimplify_status |
391 | cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p) |
392 | { |
393 | tree for_stmt = *expr_p; |
394 | gimple_seq seq = NULL; |
395 | |
396 | /* Protect ourselves from recursion. */ |
397 | if (OMP_FOR_GIMPLIFYING_P (for_stmt)) |
398 | return GS_UNHANDLED; |
399 | OMP_FOR_GIMPLIFYING_P (for_stmt) = 1; |
400 | |
401 | gimplify_and_add (for_stmt, &seq); |
402 | gimple_seq_add_seq (pre_p, seq); |
403 | |
404 | OMP_FOR_GIMPLIFYING_P (for_stmt) = 0; |
405 | |
406 | return GS_ALL_DONE; |
407 | } |
408 | |
409 | /* Gimplify an EXPR_STMT node. */ |
410 | |
411 | static void |
412 | gimplify_expr_stmt (tree *stmt_p) |
413 | { |
414 | tree stmt = EXPR_STMT_EXPR (*stmt_p); |
415 | |
416 | if (stmt == error_mark_node) |
417 | stmt = NULL; |
418 | |
419 | /* Gimplification of a statement expression will nullify the |
420 | statement if all its side effects are moved to *PRE_P and *POST_P. |
421 | |
422 | In this case we will not want to emit the gimplified statement. |
423 | However, we may still want to emit a warning, so we do that before |
424 | gimplification. */ |
425 | if (stmt && warn_unused_value) |
426 | { |
427 | if (!TREE_SIDE_EFFECTS (stmt)) |
428 | { |
429 | if (!IS_EMPTY_STMT (stmt) |
430 | && !VOID_TYPE_P (TREE_TYPE (stmt)) |
431 | && !TREE_NO_WARNING (stmt)) |
            warning (OPT_Wunused_value, "statement with no effect");
433 | } |
434 | else |
435 | warn_if_unused_value (stmt, input_location); |
436 | } |
437 | |
438 | if (stmt == NULL_TREE) |
439 | stmt = alloc_stmt_list (); |
440 | |
441 | *stmt_p = stmt; |
442 | } |
443 | |
444 | /* Gimplify initialization from an AGGR_INIT_EXPR. */ |
445 | |
446 | static void |
447 | cp_gimplify_init_expr (tree *expr_p) |
448 | { |
449 | tree from = TREE_OPERAND (*expr_p, 1); |
450 | tree to = TREE_OPERAND (*expr_p, 0); |
451 | tree t; |
452 | |
453 | /* What about code that pulls out the temp and uses it elsewhere? I |
454 | think that such code never uses the TARGET_EXPR as an initializer. If |
455 | I'm wrong, we'll abort because the temp won't have any RTL. In that |
456 | case, I guess we'll need to replace references somehow. */ |
457 | if (TREE_CODE (from) == TARGET_EXPR) |
458 | from = TARGET_EXPR_INITIAL (from); |
459 | |
460 | /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them |
461 | inside the TARGET_EXPR. */ |
462 | for (t = from; t; ) |
463 | { |
464 | tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t; |
465 | |
466 | /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and |
467 | replace the slot operand with our target. |
468 | |
469 | Should we add a target parm to gimplify_expr instead? No, as in this |
470 | case we want to replace the INIT_EXPR. */ |
471 | if (TREE_CODE (sub) == AGGR_INIT_EXPR |
472 | || TREE_CODE (sub) == VEC_INIT_EXPR) |
473 | { |
474 | if (TREE_CODE (sub) == AGGR_INIT_EXPR) |
475 | AGGR_INIT_EXPR_SLOT (sub) = to; |
476 | else |
477 | VEC_INIT_EXPR_SLOT (sub) = to; |
478 | *expr_p = from; |
479 | |
480 | /* The initialization is now a side-effect, so the container can |
481 | become void. */ |
482 | if (from != sub) |
483 | TREE_TYPE (from) = void_type_node; |
484 | } |
485 | |
486 | /* Handle aggregate NSDMI. */ |
487 | replace_placeholders (sub, to); |
488 | |
489 | if (t == sub) |
490 | break; |
491 | else |
492 | t = TREE_OPERAND (t, 1); |
493 | } |
494 | |
495 | } |
496 | |
497 | /* Gimplify a MUST_NOT_THROW_EXPR. */ |
498 | |
499 | static enum gimplify_status |
500 | gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p) |
501 | { |
502 | tree stmt = *expr_p; |
503 | tree temp = voidify_wrapper_expr (stmt, NULL); |
504 | tree body = TREE_OPERAND (stmt, 0); |
505 | gimple_seq try_ = NULL; |
506 | gimple_seq catch_ = NULL; |
507 | gimple *mnt; |
508 | |
509 | gimplify_and_add (body, &try_); |
510 | mnt = gimple_build_eh_must_not_throw (terminate_fn); |
511 | gimple_seq_add_stmt_without_update (&catch_, mnt); |
512 | mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH); |
513 | |
514 | gimple_seq_add_stmt_without_update (pre_p, mnt); |
515 | if (temp) |
516 | { |
517 | *expr_p = temp; |
518 | return GS_OK; |
519 | } |
520 | |
521 | *expr_p = NULL; |
522 | return GS_ALL_DONE; |
523 | } |
524 | |
525 | /* Return TRUE if an operand (OP) of a given TYPE being copied is |
526 | really just an empty class copy. |
527 | |
528 | Check that the operand has a simple form so that TARGET_EXPRs and |
529 | non-empty CONSTRUCTORs get reduced properly, and we leave the |
530 | return slot optimization alone because it isn't a copy. */ |
531 | |
532 | static bool |
533 | simple_empty_class_p (tree type, tree op) |
534 | { |
535 | return |
536 | ((TREE_CODE (op) == COMPOUND_EXPR |
537 | && simple_empty_class_p (type, TREE_OPERAND (op, 1))) |
538 | || TREE_CODE (op) == EMPTY_CLASS_EXPR |
539 | || is_gimple_lvalue (op) |
540 | || INDIRECT_REF_P (op) |
541 | || (TREE_CODE (op) == CONSTRUCTOR |
542 | && CONSTRUCTOR_NELTS (op) == 0 |
543 | && !TREE_CLOBBER_P (op)) |
544 | || (TREE_CODE (op) == CALL_EXPR |
545 | && !CALL_EXPR_RETURN_SLOT_OPT (op))) |
546 | && is_really_empty_class (type); |
547 | } |
548 | |
549 | /* Returns true if evaluating E as an lvalue has side-effects; |
550 | specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really |
551 | have side-effects until there is a read or write through it. */ |
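/* For example, merely naming a volatile variable or dereferencing a plain
   pointer counts as having no side-effects here, but an ARRAY_REF whose
   index expression has side-effects (e.g. a[i++]) does.  */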
552 | |
553 | static bool |
554 | lvalue_has_side_effects (tree e) |
555 | { |
556 | if (!TREE_SIDE_EFFECTS (e)) |
557 | return false; |
558 | while (handled_component_p (e)) |
559 | { |
560 | if (TREE_CODE (e) == ARRAY_REF |
561 | && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1))) |
562 | return true; |
563 | e = TREE_OPERAND (e, 0); |
564 | } |
565 | if (DECL_P (e)) |
566 | /* Just naming a variable has no side-effects. */ |
567 | return false; |
568 | else if (INDIRECT_REF_P (e)) |
569 | /* Similarly, indirection has no side-effects. */ |
570 | return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0)); |
571 | else |
572 | /* For anything else, trust TREE_SIDE_EFFECTS. */ |
573 | return TREE_SIDE_EFFECTS (e); |
574 | } |
575 | |
576 | /* Do C++-specific gimplification. Args are as for gimplify_expr. */ |
577 | |
578 | int |
579 | cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p) |
580 | { |
581 | int saved_stmts_are_full_exprs_p = 0; |
582 | location_t loc = EXPR_LOC_OR_LOC (*expr_p, input_location); |
583 | enum tree_code code = TREE_CODE (*expr_p); |
584 | enum gimplify_status ret; |
585 | |
586 | if (STATEMENT_CODE_P (code)) |
587 | { |
588 | saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p (); |
589 | current_stmt_tree ()->stmts_are_full_exprs_p |
590 | = STMT_IS_FULL_EXPR_P (*expr_p); |
591 | } |
592 | |
593 | switch (code) |
594 | { |
595 | case AGGR_INIT_EXPR: |
596 | simplify_aggr_init_expr (expr_p); |
597 | ret = GS_OK; |
598 | break; |
599 | |
600 | case VEC_INIT_EXPR: |
601 | { |
602 | location_t loc = input_location; |
603 | tree init = VEC_INIT_EXPR_INIT (*expr_p); |
604 | int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE); |
605 | gcc_assert (EXPR_HAS_LOCATION (*expr_p)); |
606 | input_location = EXPR_LOCATION (*expr_p); |
607 | *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE, |
608 | init, VEC_INIT_EXPR_VALUE_INIT (*expr_p), |
609 | from_array, |
610 | tf_warning_or_error); |
611 | hash_set<tree> pset; |
612 | cp_walk_tree (expr_p, cp_fold_r, &pset, NULL); |
613 | cp_genericize_tree (expr_p, false); |
614 | ret = GS_OK; |
615 | input_location = loc; |
616 | } |
617 | break; |
618 | |
619 | case THROW_EXPR: |
620 | /* FIXME communicate throw type to back end, probably by moving |
621 | THROW_EXPR into ../tree.def. */ |
622 | *expr_p = TREE_OPERAND (*expr_p, 0); |
623 | ret = GS_OK; |
624 | break; |
625 | |
626 | case MUST_NOT_THROW_EXPR: |
627 | ret = gimplify_must_not_throw_expr (expr_p, pre_p); |
628 | break; |
629 | |
630 | /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the |
631 | LHS of an assignment might also be involved in the RHS, as in bug |
632 | 25979. */ |
633 | case INIT_EXPR: |
634 | cp_gimplify_init_expr (expr_p); |
635 | if (TREE_CODE (*expr_p) != INIT_EXPR) |
636 | return GS_OK; |
637 | /* Fall through. */ |
638 | case MODIFY_EXPR: |
639 | modify_expr_case: |
640 | { |
641 | /* If the back end isn't clever enough to know that the lhs and rhs |
642 | types are the same, add an explicit conversion. */ |
643 | tree op0 = TREE_OPERAND (*expr_p, 0); |
644 | tree op1 = TREE_OPERAND (*expr_p, 1); |
645 | |
646 | if (!error_operand_p (op0) |
647 | && !error_operand_p (op1) |
648 | && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0)) |
649 | || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1))) |
650 | && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0))) |
651 | TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR, |
652 | TREE_TYPE (op0), op1); |
653 | |
654 | else if (simple_empty_class_p (TREE_TYPE (op0), op1)) |
655 | { |
656 | /* Remove any copies of empty classes. Also drop volatile |
657 | variables on the RHS to avoid infinite recursion from |
658 | gimplify_expr trying to load the value. */ |
659 | if (TREE_SIDE_EFFECTS (op1)) |
660 | { |
661 | if (TREE_THIS_VOLATILE (op1) |
662 | && (REFERENCE_CLASS_P (op1) || DECL_P (op1))) |
663 | op1 = build_fold_addr_expr (op1); |
664 | |
665 | gimplify_and_add (op1, pre_p); |
666 | } |
667 | gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p, |
668 | is_gimple_lvalue, fb_lvalue); |
669 | *expr_p = TREE_OPERAND (*expr_p, 0); |
670 | } |
671 | /* P0145 says that the RHS is sequenced before the LHS. |
672 | gimplify_modify_expr gimplifies the RHS before the LHS, but that |
673 | isn't quite strong enough in two cases: |
674 | |
675 | 1) gimplify.c wants to leave a CALL_EXPR on the RHS, which would |
676 | mean it's evaluated after the LHS. |
677 | |
678 | 2) the value calculation of the RHS is also sequenced before the |
679 | LHS, so for scalar assignment we need to preevaluate if the |
680 | RHS could be affected by LHS side-effects even if it has no |
681 | side-effects of its own. We don't need this for classes because |
682 | class assignment takes its RHS by reference. */ |
683 | else if (flag_strong_eval_order > 1 |
684 | && TREE_CODE (*expr_p) == MODIFY_EXPR |
685 | && lvalue_has_side_effects (op0) |
686 | && (TREE_CODE (op1) == CALL_EXPR |
687 | || (SCALAR_TYPE_P (TREE_TYPE (op1)) |
688 | && !TREE_CONSTANT (op1)))) |
689 | TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (op1, pre_p); |
690 | } |
691 | ret = GS_OK; |
692 | break; |
693 | |
694 | case EMPTY_CLASS_EXPR: |
695 | /* We create an empty CONSTRUCTOR with RECORD_TYPE. */ |
696 | *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL); |
697 | ret = GS_OK; |
698 | break; |
699 | |
700 | case BASELINK: |
701 | *expr_p = BASELINK_FUNCTIONS (*expr_p); |
702 | ret = GS_OK; |
703 | break; |
704 | |
705 | case TRY_BLOCK: |
706 | genericize_try_block (expr_p); |
707 | ret = GS_OK; |
708 | break; |
709 | |
710 | case HANDLER: |
711 | genericize_catch_block (expr_p); |
712 | ret = GS_OK; |
713 | break; |
714 | |
715 | case EH_SPEC_BLOCK: |
716 | genericize_eh_spec_block (expr_p); |
717 | ret = GS_OK; |
718 | break; |
719 | |
720 | case USING_STMT: |
721 | gcc_unreachable (); |
722 | |
723 | case FOR_STMT: |
724 | case WHILE_STMT: |
725 | case DO_STMT: |
726 | case SWITCH_STMT: |
727 | case CONTINUE_STMT: |
728 | case BREAK_STMT: |
729 | gcc_unreachable (); |
730 | |
731 | case OMP_FOR: |
732 | case OMP_SIMD: |
733 | case OMP_DISTRIBUTE: |
734 | case OMP_TASKLOOP: |
735 | ret = cp_gimplify_omp_for (expr_p, pre_p); |
736 | break; |
737 | |
738 | case EXPR_STMT: |
739 | gimplify_expr_stmt (expr_p); |
740 | ret = GS_OK; |
741 | break; |
742 | |
743 | case UNARY_PLUS_EXPR: |
744 | { |
745 | tree arg = TREE_OPERAND (*expr_p, 0); |
746 | tree type = TREE_TYPE (*expr_p); |
747 | *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg) |
748 | : arg; |
749 | ret = GS_OK; |
750 | } |
751 | break; |
752 | |
753 | case CALL_EXPR: |
754 | ret = GS_OK; |
755 | if (!CALL_EXPR_FN (*expr_p)) |
756 | /* Internal function call. */; |
757 | else if (CALL_EXPR_REVERSE_ARGS (*expr_p)) |
758 | { |
759 | /* This is a call to a (compound) assignment operator that used |
760 | the operator syntax; gimplify the RHS first. */ |
761 | gcc_assert (call_expr_nargs (*expr_p) == 2); |
762 | gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p)); |
763 | enum gimplify_status t |
764 | = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc); |
765 | if (t == GS_ERROR) |
766 | ret = GS_ERROR; |
767 | } |
768 | else if (CALL_EXPR_ORDERED_ARGS (*expr_p)) |
769 | { |
770 | /* Leave the last argument for gimplify_call_expr, to avoid problems |
771 | with __builtin_va_arg_pack(). */ |
772 | int nargs = call_expr_nargs (*expr_p) - 1; |
773 | for (int i = 0; i < nargs; ++i) |
774 | { |
775 | enum gimplify_status t |
776 | = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc); |
777 | if (t == GS_ERROR) |
778 | ret = GS_ERROR; |
779 | } |
780 | } |
781 | else if (flag_strong_eval_order |
782 | && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p)) |
783 | { |
784 | /* If flag_strong_eval_order, evaluate the object argument first. */ |
785 | tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p)); |
786 | if (POINTER_TYPE_P (fntype)) |
787 | fntype = TREE_TYPE (fntype); |
788 | if (TREE_CODE (fntype) == METHOD_TYPE) |
789 | { |
790 | enum gimplify_status t |
791 | = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc); |
792 | if (t == GS_ERROR) |
793 | ret = GS_ERROR; |
794 | } |
795 | } |
796 | break; |
797 | |
798 | case RETURN_EXPR: |
799 | if (TREE_OPERAND (*expr_p, 0) |
800 | && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR |
801 | || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR)) |
802 | { |
803 | expr_p = &TREE_OPERAND (*expr_p, 0); |
804 | code = TREE_CODE (*expr_p); |
805 | /* Avoid going through the INIT_EXPR case, which can |
806 | degrade INIT_EXPRs into AGGR_INIT_EXPRs. */ |
807 | goto modify_expr_case; |
808 | } |
809 | /* Fall through. */ |
810 | |
811 | default: |
812 | ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p); |
813 | break; |
814 | } |
815 | |
816 | /* Restore saved state. */ |
817 | if (STATEMENT_CODE_P (code)) |
818 | current_stmt_tree ()->stmts_are_full_exprs_p |
819 | = saved_stmts_are_full_exprs_p; |
820 | |
821 | return ret; |
822 | } |
823 | |
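/* Return true if T is a PARM_DECL or RESULT_DECL that is passed or
   returned by invisible reference.  */
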
824 | static inline bool |
825 | is_invisiref_parm (const_tree t) |
826 | { |
827 | return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL) |
828 | && DECL_BY_REFERENCE (t)); |
829 | } |
830 | |
/* Return true if the uids in both int tree maps are equal.  */
832 | |
833 | bool |
834 | cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b) |
835 | { |
836 | return (a->uid == b->uid); |
837 | } |
838 | |
839 | /* Hash a UID in a cxx_int_tree_map. */ |
840 | |
841 | unsigned int |
842 | cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item) |
843 | { |
844 | return item->uid; |
845 | } |
846 | |
847 | /* A stable comparison routine for use with splay trees and DECLs. */ |
848 | |
849 | static int |
850 | splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb) |
851 | { |
852 | tree a = (tree) xa; |
853 | tree b = (tree) xb; |
854 | |
855 | return DECL_UID (a) - DECL_UID (b); |
856 | } |
857 | |
858 | /* OpenMP context during genericization. */ |
859 | |
struct cp_genericize_omp_taskreg
{
  /* True for a parallel region, false for a task region.  */
  bool is_parallel;
  /* True if variables referenced in this region default to shared.  */
  bool default_shared;
  /* The enclosing task or parallel region, if any.  */
  struct cp_genericize_omp_taskreg *outer;
  /* Map from variable to its implicit OMP_CLAUSE_DEFAULT_* sharing.  */
  splay_tree variables;
};
867 | |
868 | /* Return true if genericization should try to determine if |
869 | DECL is firstprivate or shared within task regions. */ |
870 | |
871 | static bool |
872 | omp_var_to_track (tree decl) |
873 | { |
874 | tree type = TREE_TYPE (decl); |
875 | if (is_invisiref_parm (decl)) |
876 | type = TREE_TYPE (type); |
877 | else if (TREE_CODE (type) == REFERENCE_TYPE) |
878 | type = TREE_TYPE (type); |
879 | while (TREE_CODE (type) == ARRAY_TYPE) |
880 | type = TREE_TYPE (type); |
881 | if (type == error_mark_node || !CLASS_TYPE_P (type)) |
882 | return false; |
883 | if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl)) |
884 | return false; |
885 | if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED) |
886 | return false; |
887 | return true; |
888 | } |
889 | |
890 | /* Note DECL use in OpenMP region OMP_CTX during genericization. */ |
891 | |
892 | static void |
893 | omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl) |
894 | { |
895 | splay_tree_node n = splay_tree_lookup (omp_ctx->variables, |
896 | (splay_tree_key) decl); |
897 | if (n == NULL) |
898 | { |
899 | int flags = OMP_CLAUSE_DEFAULT_SHARED; |
900 | if (omp_ctx->outer) |
901 | omp_cxx_notice_variable (omp_ctx->outer, decl); |
902 | if (!omp_ctx->default_shared) |
903 | { |
904 | struct cp_genericize_omp_taskreg *octx; |
905 | |
906 | for (octx = omp_ctx->outer; octx; octx = octx->outer) |
907 | { |
908 | n = splay_tree_lookup (octx->variables, (splay_tree_key) decl); |
909 | if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED) |
910 | { |
911 | flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE; |
912 | break; |
913 | } |
914 | if (octx->is_parallel) |
915 | break; |
916 | } |
917 | if (octx == NULL |
918 | && (TREE_CODE (decl) == PARM_DECL |
919 | || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl)) |
920 | && DECL_CONTEXT (decl) == current_function_decl))) |
921 | flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE; |
922 | if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE) |
923 | { |
924 | /* DECL is implicitly determined firstprivate in |
925 | the current task construct. Ensure copy ctor and |
926 | dtor are instantiated, because during gimplification |
927 | it will be already too late. */ |
928 | tree type = TREE_TYPE (decl); |
929 | if (is_invisiref_parm (decl)) |
930 | type = TREE_TYPE (type); |
931 | else if (TREE_CODE (type) == REFERENCE_TYPE) |
932 | type = TREE_TYPE (type); |
933 | while (TREE_CODE (type) == ARRAY_TYPE) |
934 | type = TREE_TYPE (type); |
935 | get_copy_ctor (type, tf_none); |
936 | get_dtor (type, tf_none); |
937 | } |
938 | } |
939 | splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags); |
940 | } |
941 | } |
942 | |
943 | /* Genericization context. */ |
944 | |
struct cp_genericize_data
{
  /* Set of trees already visited, so we don't walk them twice.  */
  hash_set<tree> *p_set;
  /* Stack of enclosing BIND_EXPRs, innermost last.  */
  vec<tree> bind_expr_stack;
  /* Innermost enclosing OpenMP task or parallel region, if any.  */
  struct cp_genericize_omp_taskreg *omp_ctx;
  /* Innermost enclosing TRY_BLOCK or MUST_NOT_THROW_EXPR, if any.  */
  tree try_block;
  /* True while walking a static initializer, which we don't sanitize.  */
  bool no_sanitize_p;
  /* True if invisible reference parms should be dereferenced.  */
  bool handle_invisiref_parm_p;
};
954 | |
/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note: The folding of non-OMP cases is something that should eventually
   move into the middle end.  For now most folding is done only on GENERIC
   in fold-const, so we need to perform it before the transformation to
   GIMPLE form.  */
961 | |
962 | static tree |
963 | cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data) |
964 | { |
965 | tree stmt; |
966 | enum tree_code code; |
967 | |
968 | *stmt_p = stmt = cp_fold (*stmt_p); |
969 | |
970 | if (((hash_set<tree> *) data)->add (stmt)) |
971 | { |
      /* Don't walk subtrees of stmts we've already walked once; otherwise
         we can have exponential complexity with e.g. lots of nested
         SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and always
         returns the same tree, whose subtrees were already walked the
         first time cp_fold_r was called on it.  */
977 | *walk_subtrees = 0; |
978 | return NULL; |
979 | } |
980 | |
981 | code = TREE_CODE (stmt); |
982 | if (code == OMP_FOR || code == OMP_SIMD || code == OMP_DISTRIBUTE |
983 | || code == OMP_TASKLOOP || code == OACC_LOOP) |
984 | { |
985 | tree x; |
986 | int i, n; |
987 | |
988 | cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL); |
989 | cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL); |
990 | cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL); |
991 | x = OMP_FOR_COND (stmt); |
992 | if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison) |
993 | { |
994 | cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL); |
995 | cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL); |
996 | } |
997 | else if (x && TREE_CODE (x) == TREE_VEC) |
998 | { |
999 | n = TREE_VEC_LENGTH (x); |
1000 | for (i = 0; i < n; i++) |
1001 | { |
1002 | tree o = TREE_VEC_ELT (x, i); |
1003 | if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison) |
1004 | cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL); |
1005 | } |
1006 | } |
1007 | x = OMP_FOR_INCR (stmt); |
1008 | if (x && TREE_CODE (x) == TREE_VEC) |
1009 | { |
1010 | n = TREE_VEC_LENGTH (x); |
1011 | for (i = 0; i < n; i++) |
1012 | { |
1013 | tree o = TREE_VEC_ELT (x, i); |
1014 | if (o && TREE_CODE (o) == MODIFY_EXPR) |
1015 | o = TREE_OPERAND (o, 1); |
1016 | if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR |
1017 | || TREE_CODE (o) == POINTER_PLUS_EXPR)) |
1018 | { |
1019 | cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL); |
1020 | cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL); |
1021 | } |
1022 | } |
1023 | } |
1024 | cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL); |
1025 | *walk_subtrees = 0; |
1026 | } |
1027 | |
1028 | return NULL; |
1029 | } |
1030 | |
1031 | /* Fold ALL the trees! FIXME we should be able to remove this, but |
1032 | apparently that still causes optimization regressions. */ |
1033 | |
1034 | void |
1035 | cp_fold_function (tree fndecl) |
1036 | { |
1037 | hash_set<tree> pset; |
1038 | cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &pset, NULL); |
1039 | } |
1040 | |
1041 | /* Perform any pre-gimplification lowering of C++ front end trees to |
1042 | GENERIC. */ |
1043 | |
1044 | static tree |
1045 | cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data) |
1046 | { |
1047 | tree stmt = *stmt_p; |
1048 | struct cp_genericize_data *wtd = (struct cp_genericize_data *) data; |
1049 | hash_set<tree> *p_set = wtd->p_set; |
1050 | |
1051 | /* If in an OpenMP context, note var uses. */ |
1052 | if (__builtin_expect (wtd->omp_ctx != NULL, 0) |
1053 | && (VAR_P (stmt) |
1054 | || TREE_CODE (stmt) == PARM_DECL |
1055 | || TREE_CODE (stmt) == RESULT_DECL) |
1056 | && omp_var_to_track (stmt)) |
1057 | omp_cxx_notice_variable (wtd->omp_ctx, stmt); |
1058 | |
1059 | /* Don't dereference parms in a thunk, pass the references through. */ |
1060 | if ((TREE_CODE (stmt) == CALL_EXPR && CALL_FROM_THUNK_P (stmt)) |
1061 | || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt))) |
1062 | { |
1063 | *walk_subtrees = 0; |
1064 | return NULL; |
1065 | } |
1066 | |
1067 | /* Dereference invisible reference parms. */ |
1068 | if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt)) |
1069 | { |
1070 | *stmt_p = convert_from_reference (stmt); |
1071 | p_set->add (*stmt_p); |
1072 | *walk_subtrees = 0; |
1073 | return NULL; |
1074 | } |
1075 | |
1076 | /* Map block scope extern declarations to visible declarations with the |
1077 | same name and type in outer scopes if any. */ |
1078 | if (cp_function_chain->extern_decl_map |
1079 | && VAR_OR_FUNCTION_DECL_P (stmt) |
1080 | && DECL_EXTERNAL (stmt)) |
1081 | { |
1082 | struct cxx_int_tree_map *h, in; |
1083 | in.uid = DECL_UID (stmt); |
1084 | h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid); |
1085 | if (h) |
1086 | { |
1087 | *stmt_p = h->to; |
1088 | *walk_subtrees = 0; |
1089 | return NULL; |
1090 | } |
1091 | } |
1092 | |
1093 | if (TREE_CODE (stmt) == INTEGER_CST |
1094 | && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE |
1095 | && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT)) |
1096 | && !wtd->no_sanitize_p) |
1097 | { |
1098 | ubsan_maybe_instrument_reference (stmt_p); |
1099 | if (*stmt_p != stmt) |
1100 | { |
1101 | *walk_subtrees = 0; |
1102 | return NULL_TREE; |
1103 | } |
1104 | } |
1105 | |
1106 | /* Other than invisiref parms, don't walk the same tree twice. */ |
1107 | if (p_set->contains (stmt)) |
1108 | { |
1109 | *walk_subtrees = 0; |
1110 | return NULL_TREE; |
1111 | } |
1112 | |
1113 | switch (TREE_CODE (stmt)) |
1114 | { |
1115 | case ADDR_EXPR: |
1116 | if (is_invisiref_parm (TREE_OPERAND (stmt, 0))) |
1117 | { |
1118 | /* If in an OpenMP context, note var uses. */ |
1119 | if (__builtin_expect (wtd->omp_ctx != NULL, 0) |
1120 | && omp_var_to_track (TREE_OPERAND (stmt, 0))) |
1121 | omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0)); |
1122 | *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0)); |
1123 | *walk_subtrees = 0; |
1124 | } |
1125 | break; |
1126 | |
1127 | case RETURN_EXPR: |
1128 | if (TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0))) |
1129 | /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR. */ |
1130 | *walk_subtrees = 0; |
1131 | break; |
1132 | |
1133 | case OMP_CLAUSE: |
1134 | switch (OMP_CLAUSE_CODE (stmt)) |
1135 | { |
1136 | case OMP_CLAUSE_LASTPRIVATE: |
1137 | /* Don't dereference an invisiref in OpenMP clauses. */ |
1138 | if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt))) |
1139 | { |
1140 | *walk_subtrees = 0; |
1141 | if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt)) |
1142 | cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt), |
1143 | cp_genericize_r, data, NULL); |
1144 | } |
1145 | break; |
1146 | case OMP_CLAUSE_PRIVATE: |
1147 | /* Don't dereference an invisiref in OpenMP clauses. */ |
1148 | if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt))) |
1149 | *walk_subtrees = 0; |
1150 | else if (wtd->omp_ctx != NULL) |
1151 | { |
1152 | /* Private clause doesn't cause any references to the |
1153 | var in outer contexts, avoid calling |
1154 | omp_cxx_notice_variable for it. */ |
1155 | struct cp_genericize_omp_taskreg *old = wtd->omp_ctx; |
1156 | wtd->omp_ctx = NULL; |
1157 | cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r, |
1158 | data, NULL); |
1159 | wtd->omp_ctx = old; |
1160 | *walk_subtrees = 0; |
1161 | } |
1162 | break; |
1163 | case OMP_CLAUSE_SHARED: |
1164 | case OMP_CLAUSE_FIRSTPRIVATE: |
1165 | case OMP_CLAUSE_COPYIN: |
1166 | case OMP_CLAUSE_COPYPRIVATE: |
1167 | /* Don't dereference an invisiref in OpenMP clauses. */ |
1168 | if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt))) |
1169 | *walk_subtrees = 0; |
1170 | break; |
1171 | case OMP_CLAUSE_REDUCTION: |
1172 | /* Don't dereference an invisiref in reduction clause's |
1173 | OMP_CLAUSE_DECL either. OMP_CLAUSE_REDUCTION_{INIT,MERGE} |
1174 | still needs to be genericized. */ |
1175 | if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt))) |
1176 | { |
1177 | *walk_subtrees = 0; |
1178 | if (OMP_CLAUSE_REDUCTION_INIT (stmt)) |
1179 | cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt), |
1180 | cp_genericize_r, data, NULL); |
1181 | if (OMP_CLAUSE_REDUCTION_MERGE (stmt)) |
1182 | cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt), |
1183 | cp_genericize_r, data, NULL); |
1184 | } |
1185 | break; |
1186 | default: |
1187 | break; |
1188 | } |
1189 | break; |
1190 | |
1191 | /* Due to the way voidify_wrapper_expr is written, we don't get a chance |
1192 | to lower this construct before scanning it, so we need to lower these |
1193 | before doing anything else. */ |
1194 | case CLEANUP_STMT: |
1195 | *stmt_p = build2_loc (EXPR_LOCATION (stmt), |
1196 | CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR |
1197 | : TRY_FINALLY_EXPR, |
1198 | void_type_node, |
1199 | CLEANUP_BODY (stmt), |
1200 | CLEANUP_EXPR (stmt)); |
1201 | break; |
1202 | |
1203 | case IF_STMT: |
1204 | genericize_if_stmt (stmt_p); |
1205 | /* *stmt_p has changed, tail recurse to handle it again. */ |
1206 | return cp_genericize_r (stmt_p, walk_subtrees, data); |
1207 | |
1208 | /* COND_EXPR might have incompatible types in branches if one or both |
1209 | arms are bitfields. Fix it up now. */ |
1210 | case COND_EXPR: |
1211 | { |
1212 | tree type_left |
1213 | = (TREE_OPERAND (stmt, 1) |
1214 | ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1)) |
1215 | : NULL_TREE); |
1216 | tree type_right |
1217 | = (TREE_OPERAND (stmt, 2) |
1218 | ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2)) |
1219 | : NULL_TREE); |
1220 | if (type_left |
1221 | && !useless_type_conversion_p (TREE_TYPE (stmt), |
1222 | TREE_TYPE (TREE_OPERAND (stmt, 1)))) |
1223 | { |
1224 | TREE_OPERAND (stmt, 1) |
1225 | = fold_convert (type_left, TREE_OPERAND (stmt, 1)); |
1226 | gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt), |
1227 | type_left)); |
1228 | } |
1229 | if (type_right |
1230 | && !useless_type_conversion_p (TREE_TYPE (stmt), |
1231 | TREE_TYPE (TREE_OPERAND (stmt, 2)))) |
1232 | { |
1233 | TREE_OPERAND (stmt, 2) |
1234 | = fold_convert (type_right, TREE_OPERAND (stmt, 2)); |
1235 | gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt), |
1236 | type_right)); |
1237 | } |
1238 | } |
1239 | break; |
1240 | |
1241 | case BIND_EXPR: |
1242 | if (__builtin_expect (wtd->omp_ctx != NULL, 0)) |
1243 | { |
1244 | tree decl; |
1245 | for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl)) |
1246 | if (VAR_P (decl) |
1247 | && !DECL_EXTERNAL (decl) |
1248 | && omp_var_to_track (decl)) |
1249 | { |
1250 | splay_tree_node n |
1251 | = splay_tree_lookup (wtd->omp_ctx->variables, |
1252 | (splay_tree_key) decl); |
1253 | if (n == NULL) |
1254 | splay_tree_insert (wtd->omp_ctx->variables, |
1255 | (splay_tree_key) decl, |
1256 | TREE_STATIC (decl) |
1257 | ? OMP_CLAUSE_DEFAULT_SHARED |
1258 | : OMP_CLAUSE_DEFAULT_PRIVATE); |
1259 | } |
1260 | } |
1261 | if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR)) |
1262 | { |
1263 | /* The point here is to not sanitize static initializers. */ |
1264 | bool no_sanitize_p = wtd->no_sanitize_p; |
1265 | wtd->no_sanitize_p = true; |
1266 | for (tree decl = BIND_EXPR_VARS (stmt); |
1267 | decl; |
1268 | decl = DECL_CHAIN (decl)) |
1269 | if (VAR_P (decl) |
1270 | && TREE_STATIC (decl) |
1271 | && DECL_INITIAL (decl)) |
1272 | cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL); |
1273 | wtd->no_sanitize_p = no_sanitize_p; |
1274 | } |
1275 | wtd->bind_expr_stack.safe_push (stmt); |
1276 | cp_walk_tree (&BIND_EXPR_BODY (stmt), |
1277 | cp_genericize_r, data, NULL); |
1278 | wtd->bind_expr_stack.pop (); |
1279 | break; |
1280 | |
1281 | case USING_STMT: |
1282 | { |
1283 | tree block = NULL_TREE; |
1284 | |
        /* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
           BLOCK, and append an IMPORTED_DECL to its BLOCK_VARS
           chained list.  */
1288 | if (wtd->bind_expr_stack.exists ()) |
1289 | { |
1290 | int i; |
1291 | for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--) |
1292 | if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i]))) |
1293 | break; |
1294 | } |
1295 | if (block) |
1296 | { |
1297 | tree using_directive; |
1298 | gcc_assert (TREE_OPERAND (stmt, 0)); |
1299 | |
1300 | using_directive = make_node (IMPORTED_DECL); |
1301 | TREE_TYPE (using_directive) = void_type_node; |
1302 | |
1303 | IMPORTED_DECL_ASSOCIATED_DECL (using_directive) |
1304 | = TREE_OPERAND (stmt, 0); |
1305 | DECL_CHAIN (using_directive) = BLOCK_VARS (block); |
1306 | BLOCK_VARS (block) = using_directive; |
1307 | } |
1308 | /* The USING_STMT won't appear in GENERIC. */ |
1309 | *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node); |
1310 | *walk_subtrees = 0; |
1311 | } |
1312 | break; |
1313 | |
1314 | case DECL_EXPR: |
1315 | if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL) |
1316 | { |
1317 | /* Using decls inside DECL_EXPRs are just dropped on the floor. */ |
1318 | *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node); |
1319 | *walk_subtrees = 0; |
1320 | } |
1321 | else |
1322 | { |
1323 | tree d = DECL_EXPR_DECL (stmt); |
1324 | if (VAR_P (d)) |
1325 | gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d)); |
1326 | } |
1327 | break; |
1328 | |
1329 | case OMP_PARALLEL: |
1330 | case OMP_TASK: |
1331 | case OMP_TASKLOOP: |
1332 | { |
1333 | struct cp_genericize_omp_taskreg omp_ctx; |
1334 | tree c, decl; |
1335 | splay_tree_node n; |
1336 | |
1337 | *walk_subtrees = 0; |
1338 | cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL); |
1339 | omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL; |
1340 | omp_ctx.default_shared = omp_ctx.is_parallel; |
1341 | omp_ctx.outer = wtd->omp_ctx; |
1342 | omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0); |
1343 | wtd->omp_ctx = &omp_ctx; |
1344 | for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c)) |
1345 | switch (OMP_CLAUSE_CODE (c)) |
1346 | { |
1347 | case OMP_CLAUSE_SHARED: |
1348 | case OMP_CLAUSE_PRIVATE: |
1349 | case OMP_CLAUSE_FIRSTPRIVATE: |
1350 | case OMP_CLAUSE_LASTPRIVATE: |
1351 | decl = OMP_CLAUSE_DECL (c); |
1352 | if (decl == error_mark_node || !omp_var_to_track (decl)) |
1353 | break; |
1354 | n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl); |
1355 | if (n != NULL) |
1356 | break; |
1357 | splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl, |
1358 | OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED |
1359 | ? OMP_CLAUSE_DEFAULT_SHARED |
1360 | : OMP_CLAUSE_DEFAULT_PRIVATE); |
1361 | if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer) |
1362 | omp_cxx_notice_variable (omp_ctx.outer, decl); |
1363 | break; |
1364 | case OMP_CLAUSE_DEFAULT: |
1365 | if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED) |
1366 | omp_ctx.default_shared = true; |
1367 | default: |
1368 | break; |
1369 | } |
1370 | if (TREE_CODE (stmt) == OMP_TASKLOOP) |
1371 | genericize_omp_for_stmt (stmt_p, walk_subtrees, data); |
1372 | else |
1373 | cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL); |
1374 | wtd->omp_ctx = omp_ctx.outer; |
1375 | splay_tree_delete (omp_ctx.variables); |
1376 | } |
1377 | break; |
1378 | |
1379 | case TRY_BLOCK: |
1380 | { |
1381 | *walk_subtrees = 0; |
1382 | tree try_block = wtd->try_block; |
1383 | wtd->try_block = stmt; |
1384 | cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL); |
1385 | wtd->try_block = try_block; |
1386 | cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL); |
1387 | } |
1388 | break; |
1389 | |
1390 | case MUST_NOT_THROW_EXPR: |
1391 | /* MUST_NOT_THROW_COND might be something else with TM. */ |
1392 | if (MUST_NOT_THROW_COND (stmt) == NULL_TREE) |
1393 | { |
1394 | *walk_subtrees = 0; |
1395 | tree try_block = wtd->try_block; |
1396 | wtd->try_block = stmt; |
1397 | cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL); |
1398 | wtd->try_block = try_block; |
1399 | } |
1400 | break; |
1401 | |
1402 | case THROW_EXPR: |
1403 | { |
1404 | location_t loc = location_of (stmt); |
1405 | if (TREE_NO_WARNING (stmt)) |
1406 | /* Never mind. */; |
1407 | else if (wtd->try_block) |
1408 | { |
1409 | if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR |
1410 | && warning_at (loc, OPT_Wterminate, |
                             "throw will always call terminate()")
1412 | && cxx_dialect >= cxx11 |
1413 | && DECL_DESTRUCTOR_P (current_function_decl)) |
            inform (loc, "in C++11 destructors default to noexcept");
1415 | } |
1416 | else |
1417 | { |
1418 | if (warn_cxx11_compat && cxx_dialect < cxx11 |
1419 | && DECL_DESTRUCTOR_P (current_function_decl) |
1420 | && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl)) |
1421 | == NULL_TREE) |
1422 | && (get_defaulted_eh_spec (current_function_decl) |
1423 | == empty_except_spec)) |
1424 | warning_at (loc, OPT_Wc__11_compat, |
1425 | "in C++11 this throw will terminate because " |
                        "destructors default to noexcept");
1427 | } |
1428 | } |
1429 | break; |
1430 | |
1431 | case CONVERT_EXPR: |
1432 | gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt)); |
1433 | break; |
1434 | |
1435 | case FOR_STMT: |
1436 | genericize_for_stmt (stmt_p, walk_subtrees, data); |
1437 | break; |
1438 | |
1439 | case WHILE_STMT: |
1440 | genericize_while_stmt (stmt_p, walk_subtrees, data); |
1441 | break; |
1442 | |
1443 | case DO_STMT: |
1444 | genericize_do_stmt (stmt_p, walk_subtrees, data); |
1445 | break; |
1446 | |
1447 | case SWITCH_STMT: |
1448 | genericize_switch_stmt (stmt_p, walk_subtrees, data); |
1449 | break; |
1450 | |
1451 | case CONTINUE_STMT: |
1452 | genericize_continue_stmt (stmt_p); |
1453 | break; |
1454 | |
1455 | case BREAK_STMT: |
1456 | genericize_break_stmt (stmt_p); |
1457 | break; |
1458 | |
1459 | case OMP_FOR: |
1460 | case OMP_SIMD: |
1461 | case OMP_DISTRIBUTE: |
1462 | genericize_omp_for_stmt (stmt_p, walk_subtrees, data); |
1463 | break; |
1464 | |
1465 | case PTRMEM_CST: |
1466 | /* By the time we get here we're handing off to the back end, so we don't |
1467 | need or want to preserve PTRMEM_CST anymore. */ |
1468 | *stmt_p = cplus_expand_constant (stmt); |
1469 | *walk_subtrees = 0; |
1470 | break; |
1471 | |
1472 | case MEM_REF: |
1473 | /* For MEM_REF, make sure not to sanitize the second operand even |
1474 | if it has reference type. It is just an offset with a type |
1475 | holding other information. There is no other processing we |
1476 | need to do for INTEGER_CSTs, so just ignore the second argument |
1477 | unconditionally. */ |
1478 | cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL); |
1479 | *walk_subtrees = 0; |
1480 | break; |
1481 | |
1482 | case NOP_EXPR: |
1483 | if (!wtd->no_sanitize_p |
1484 | && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT) |
1485 | && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE) |
1486 | ubsan_maybe_instrument_reference (stmt_p); |
1487 | break; |
1488 | |
1489 | case CALL_EXPR: |
1490 | if (!wtd->no_sanitize_p |
1491 | && sanitize_flags_p ((SANITIZE_NULL |
1492 | | SANITIZE_ALIGNMENT | SANITIZE_VPTR))) |
1493 | { |
1494 | tree fn = CALL_EXPR_FN (stmt); |
1495 | if (fn != NULL_TREE |
1496 | && !error_operand_p (fn) |
1497 | && POINTER_TYPE_P (TREE_TYPE (fn)) |
1498 | && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE) |
1499 | { |
1500 | bool is_ctor |
1501 | = TREE_CODE (fn) == ADDR_EXPR |
1502 | && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL |
1503 | && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0)); |
1504 | if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)) |
1505 | ubsan_maybe_instrument_member_call (stmt, is_ctor); |
1506 | if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor) |
1507 | cp_ubsan_maybe_instrument_member_call (stmt); |
1508 | } |
1509 | } |
1510 | break; |
1511 | |
1512 | default: |
1513 | if (IS_TYPE_OR_DECL_P (stmt)) |
1514 | *walk_subtrees = 0; |
1515 | break; |
1516 | } |
1517 | |
1518 | p_set->add (*stmt_p); |
1519 | |
1520 | return NULL; |
1521 | } |
1522 | |
1523 | /* Lower C++ front end trees to GENERIC in T_P. */ |
1524 | |
1525 | static void |
1526 | cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p) |
1527 | { |
1528 | struct cp_genericize_data wtd; |
1529 | |
1530 | wtd.p_set = new hash_set<tree>; |
1531 | wtd.bind_expr_stack.create (0); |
1532 | wtd.omp_ctx = NULL; |
1533 | wtd.try_block = NULL_TREE; |
1534 | wtd.no_sanitize_p = false; |
1535 | wtd.handle_invisiref_parm_p = handle_invisiref_parm_p; |
1536 | cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL); |
1537 | delete wtd.p_set; |
1538 | wtd.bind_expr_stack.release (); |
1539 | if (sanitize_flags_p (SANITIZE_VPTR)) |
1540 | cp_ubsan_instrument_member_accesses (t_p); |
1541 | } |
1542 | |
/* If a non-void function doesn't obviously end with a return statement,
   add ubsan instrumentation code to verify it at runtime.  If
   -fsanitize=return is not enabled, instrument __builtin_unreachable
   instead.  */
1547 | |
1548 | static void |
1549 | cp_maybe_instrument_return (tree fndecl) |
1550 | { |
1551 | if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl))) |
1552 | || DECL_CONSTRUCTOR_P (fndecl) |
1553 | || DECL_DESTRUCTOR_P (fndecl) |
1554 | || !targetm.warn_func_return (fndecl)) |
1555 | return; |
1556 | |
1557 | if (!sanitize_flags_p (SANITIZE_RETURN, fndecl) |
      /* Don't add __builtin_unreachable () if not optimizing; it will not
         improve any optimizations in that case, and will just break UB
         code.  Don't add it for -fsanitize=unreachable -fno-sanitize=return
         either: in that case UBSan covers it via ubsan_instrument_return,
         which provides sufficient location information, whereas the
         __builtin_unreachable () emitted when return sanitization is
         disabled would just produce a hard to understand runtime error
         with no location.  */
1565 | && (!optimize |
1566 | || sanitize_flags_p (SANITIZE_UNREACHABLE, fndecl))) |
1567 | return; |
1568 | |
1569 | tree t = DECL_SAVED_TREE (fndecl); |
1570 | while (t) |
1571 | { |
1572 | switch (TREE_CODE (t)) |
1573 | { |
1574 | case BIND_EXPR: |
1575 | t = BIND_EXPR_BODY (t); |
1576 | continue; |
1577 | case TRY_FINALLY_EXPR: |
1578 | t = TREE_OPERAND (t, 0); |
1579 | continue; |
1580 | case STATEMENT_LIST: |
1581 | { |
1582 | tree_stmt_iterator i = tsi_last (t); |
1583 | if (!tsi_end_p (i)) |
1584 | { |
1585 | t = tsi_stmt (i); |
1586 | continue; |
1587 | } |
1588 | } |
1589 | break; |
1590 | case RETURN_EXPR: |
1591 | return; |
1592 | default: |
1593 | break; |
1594 | } |
1595 | break; |
1596 | } |
1597 | if (t == NULL_TREE) |
1598 | return; |
1599 | tree *p = &DECL_SAVED_TREE (fndecl); |
1600 | if (TREE_CODE (*p) == BIND_EXPR) |
1601 | p = &BIND_EXPR_BODY (*p); |
1602 | |
1603 | location_t loc = DECL_SOURCE_LOCATION (fndecl); |
1604 | if (sanitize_flags_p (SANITIZE_RETURN, fndecl)) |
1605 | t = ubsan_instrument_return (loc); |
1606 | else |
1607 | { |
1608 | tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE); |
1609 | t = build_call_expr_loc (BUILTINS_LOCATION, fndecl, 0); |
1610 | } |
1611 | |
1612 | append_to_statement_list (t, p); |
1613 | } |
1614 | |
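/* Genericize the body of FNDECL: fix up parameters and the return value
   that are passed or returned by invisible reference, lower the remaining
   C++ front end trees to GENERIC, and add any return instrumentation.  */
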
1615 | void |
1616 | cp_genericize (tree fndecl) |
1617 | { |
1618 | tree t; |
1619 | |
1620 | /* Fix up the types of parms passed by invisible reference. */ |
1621 | for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t)) |
1622 | if (TREE_ADDRESSABLE (TREE_TYPE (t))) |
1623 | { |
1624 | /* If a function's arguments are copied to create a thunk, |
1625 | then DECL_BY_REFERENCE will be set -- but the type of the |
1626 | argument will be a pointer type, so we will never get |
1627 | here. */ |
1628 | gcc_assert (!DECL_BY_REFERENCE (t)); |
1629 | gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t)); |
1630 | TREE_TYPE (t) = DECL_ARG_TYPE (t); |
1631 | DECL_BY_REFERENCE (t) = 1; |
1632 | TREE_ADDRESSABLE (t) = 0; |
1633 | relayout_decl (t); |
1634 | } |
1635 | |
1636 | /* Do the same for the return value. */ |
1637 | if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl)))) |
1638 | { |
1639 | t = DECL_RESULT (fndecl); |
1640 | TREE_TYPE (t) = build_reference_type (TREE_TYPE (t)); |
1641 | DECL_BY_REFERENCE (t) = 1; |
1642 | TREE_ADDRESSABLE (t) = 0; |
1643 | relayout_decl (t); |
1644 | if (DECL_NAME (t)) |
1645 | { |
1646 | /* Adjust DECL_VALUE_EXPR of the original var. */ |
1647 | tree outer = outer_curly_brace_block (current_function_decl); |
1648 | tree var; |
1649 | |
1650 | if (outer) |
1651 | for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var)) |
1652 | if (VAR_P (var) |
1653 | && DECL_NAME (t) == DECL_NAME (var) |
1654 | && DECL_HAS_VALUE_EXPR_P (var) |
1655 | && DECL_VALUE_EXPR (var) == t) |
1656 | { |
1657 | tree val = convert_from_reference (t); |
1658 | SET_DECL_VALUE_EXPR (var, val); |
1659 | break; |
1660 | } |
1661 | } |
1662 | } |
1663 | |
1664 | /* If we're a clone, the body is already GIMPLE. */ |
1665 | if (DECL_CLONED_FUNCTION_P (fndecl)) |
1666 | return; |
1667 | |
1668 | /* Allow cp_genericize calls to be nested. */ |
1669 | tree save_bc_label[2]; |
1670 | save_bc_label[bc_break] = bc_label[bc_break]; |
1671 | save_bc_label[bc_continue] = bc_label[bc_continue]; |
1672 | bc_label[bc_break] = NULL_TREE; |
1673 | bc_label[bc_continue] = NULL_TREE; |
1674 | |
1675 | /* We do want to see every occurrence of the parms, so we can't just use |
1676 | walk_tree's hash functionality. */ |
1677 | cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true); |
1678 | |
1679 | cp_maybe_instrument_return (fndecl); |
1680 | |
1681 | /* Do everything else. */ |
1682 | c_genericize (fndecl); |
1683 | |
1684 | gcc_assert (bc_label[bc_break] == NULL); |
1685 | gcc_assert (bc_label[bc_continue] == NULL); |
1686 | bc_label[bc_break] = save_bc_label[bc_break]; |
1687 | bc_label[bc_continue] = save_bc_label[bc_continue]; |
1688 | } |
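
/* Illustration of the invisible-reference fixups above (a sketch of the
   intent, not the exact trees): for a class whose objects must be passed
   and returned in memory, e.g.

     struct S { S (const S &); ~S (); int i; };
     S g (S s) { S r; ...; return r; }

   the PARM_DECL for s and the RESULT_DECL are given reference type with
   DECL_BY_REFERENCE set, so the genericized body behaves roughly like

     void g (S &<retval>, S &s) { ... construct <retval> ...; }

   with uses of the named return value r redirected through its
   DECL_VALUE_EXPR.  The actual calling convention is chosen by the target;
   this only shows the shape of the tree-level rewrite.  */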
1689 | |
1690 | /* Build code to apply FN to each member of ARG1 and ARG2. FN may be |
1691 | NULL if there is in fact nothing to do. ARG2 may be null if FN |
1692 | actually only takes one argument. */ |
1693 | |
1694 | static tree |
1695 | cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2) |
1696 | { |
1697 | tree defparm, parm, t; |
1698 | int i = 0; |
1699 | int nargs; |
1700 | tree *argarray; |
1701 | |
1702 | if (fn == NULL) |
1703 | return NULL; |
1704 | |
1705 | nargs = list_length (DECL_ARGUMENTS (fn)); |
1706 | argarray = XALLOCAVEC (tree, nargs); |
1707 | |
1708 | defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn))); |
1709 | if (arg2) |
1710 | defparm = TREE_CHAIN (defparm); |
1711 | |
1712 | bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE; |
1713 | if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE) |
1714 | { |
1715 | tree inner_type = TREE_TYPE (arg1); |
1716 | tree start1, end1, p1; |
1717 | tree start2 = NULL, p2 = NULL; |
1718 | tree ret = NULL, lab; |
1719 | |
1720 | start1 = arg1; |
1721 | start2 = arg2; |
1722 | do |
1723 | { |
1724 | inner_type = TREE_TYPE (inner_type); |
1725 | start1 = build4 (ARRAY_REF, inner_type, start1, |
1726 | size_zero_node, NULL, NULL); |
1727 | if (arg2) |
1728 | start2 = build4 (ARRAY_REF, inner_type, start2, |
1729 | size_zero_node, NULL, NULL); |
1730 | } |
1731 | while (TREE_CODE (inner_type) == ARRAY_TYPE); |
1732 | start1 = build_fold_addr_expr_loc (input_location, start1); |
1733 | if (arg2) |
1734 | start2 = build_fold_addr_expr_loc (input_location, start2); |
1735 | |
1736 | end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1)); |
1737 | end1 = fold_build_pointer_plus (start1, end1); |
1738 | |
1739 | p1 = create_tmp_var (TREE_TYPE (start1)); |
1740 | t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1); |
1741 | append_to_statement_list (t, &ret); |
1742 | |
1743 | if (arg2) |
1744 | { |
1745 | p2 = create_tmp_var (TREE_TYPE (start2)); |
1746 | t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2); |
1747 | append_to_statement_list (t, &ret); |
1748 | } |
1749 | |
1750 | lab = create_artificial_label (input_location); |
1751 | t = build1 (LABEL_EXPR, void_type_node, lab); |
1752 | append_to_statement_list (t, &ret); |
1753 | |
1754 | argarray[i++] = p1; |
1755 | if (arg2) |
1756 | argarray[i++] = p2; |
1757 | /* Handle default arguments. */ |
1758 | for (parm = defparm; parm && parm != void_list_node; |
1759 | parm = TREE_CHAIN (parm), i++) |
1760 | argarray[i] = convert_default_arg (TREE_VALUE (parm), |
1761 | TREE_PURPOSE (parm), fn, |
1762 | i - is_method, tf_warning_or_error); |
1763 | t = build_call_a (fn, i, argarray); |
1764 | t = fold_convert (void_type_node, t); |
1765 | t = fold_build_cleanup_point_expr (TREE_TYPE (t), t); |
1766 | append_to_statement_list (t, &ret); |
1767 | |
1768 | t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type)); |
1769 | t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t); |
1770 | append_to_statement_list (t, &ret); |
1771 | |
1772 | if (arg2) |
1773 | { |
1774 | t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type)); |
1775 | t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t); |
1776 | append_to_statement_list (t, &ret); |
1777 | } |
1778 | |
1779 | t = build2 (NE_EXPR, boolean_type_node, p1, end1); |
1780 | t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL); |
1781 | append_to_statement_list (t, &ret); |
1782 | |
1783 | return ret; |
1784 | } |
1785 | else |
1786 | { |
1787 | argarray[i++] = build_fold_addr_expr_loc (input_location, arg1); |
1788 | if (arg2) |
1789 | argarray[i++] = build_fold_addr_expr_loc (input_location, arg2); |
1790 | /* Handle default arguments. */ |
1791 | for (parm = defparm; parm && parm != void_list_node; |
1792 | parm = TREE_CHAIN (parm), i++) |
1793 | argarray[i] = convert_default_arg (TREE_VALUE (parm), |
1794 | TREE_PURPOSE (parm), fn, |
1795 | i - is_method, tf_warning_or_error); |
1796 | t = build_call_a (fn, i, argarray); |
1797 | t = fold_convert (void_type_node, t); |
1798 | return fold_build_cleanup_point_expr (TREE_TYPE (t), t); |
1799 | } |
1800 | } |
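
/* For the array case above, the statement list built is roughly equivalent
   to the following pseudo code (illustrative only):

     p1 = &arg1[0]...[0];
     p2 = &arg2[0]...[0];                   // only when ARG2 is given
   lab:
     fn (p1, p2, <default arguments>);
     p1 += sizeof (element);
     p2 += sizeof (element);                // only when ARG2 is given
     if (p1 != &arg1[0]...[0] + sizeof (arg1)) goto lab;

   i.e. FN is applied once to every innermost element of the array.  */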
1801 | |
1802 | /* Return code to initialize DECL with its default constructor, or |
1803 | NULL if there's nothing to do. */ |
1804 | |
1805 | tree |
1806 | cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/) |
1807 | { |
1808 | tree info = CP_OMP_CLAUSE_INFO (clause); |
1809 | tree ret = NULL; |
1810 | |
1811 | if (info) |
1812 | ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL); |
1813 | |
1814 | return ret; |
1815 | } |
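
/* In these cxx_omp_clause_* hooks, CP_OMP_CLAUSE_INFO is a TREE_VEC set up
   by cxx_omp_create_clause_info for the clause's class type: element 0
   holds the (default or copy) constructor, element 1 the destructor and
   element 2 the copy assignment operator.  Any of them may be NULL when
   there is nothing to call, in which case the copy/assignment hooks fall
   back to a plain MODIFY_EXPR and the others return NULL.  */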
1816 | |
1817 | /* Return code to initialize DST with a copy constructor from SRC. */ |
1818 | |
1819 | tree |
1820 | cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src) |
1821 | { |
1822 | tree info = CP_OMP_CLAUSE_INFO (clause); |
1823 | tree ret = NULL; |
1824 | |
1825 | if (info) |
1826 | ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src); |
1827 | if (ret == NULL) |
1828 | ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src); |
1829 | |
1830 | return ret; |
1831 | } |
1832 | |
1833 | /* Similarly, except use an assignment operator instead. */ |
1834 | |
1835 | tree |
1836 | cxx_omp_clause_assign_op (tree clause, tree dst, tree src) |
1837 | { |
1838 | tree info = CP_OMP_CLAUSE_INFO (clause); |
1839 | tree ret = NULL; |
1840 | |
1841 | if (info) |
1842 | ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src); |
1843 | if (ret == NULL) |
1844 | ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src); |
1845 | |
1846 | return ret; |
1847 | } |
1848 | |
1849 | /* Return code to destroy DECL. */ |
1850 | |
1851 | tree |
1852 | cxx_omp_clause_dtor (tree clause, tree decl) |
1853 | { |
1854 | tree info = CP_OMP_CLAUSE_INFO (clause); |
1855 | tree ret = NULL; |
1856 | |
1857 | if (info) |
1858 | ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL); |
1859 | |
1860 | return ret; |
1861 | } |
1862 | |
1863 | /* True if OpenMP should privatize what this DECL points to rather |
1864 | than the DECL itself. */ |
1865 | |
1866 | bool |
1867 | cxx_omp_privatize_by_reference (const_tree decl) |
1868 | { |
1869 | return (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE |
1870 | || is_invisiref_parm (decl)); |
1871 | } |
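
/* For example, at the source level, in

     void f (T &r)
     {
     #pragma omp parallel firstprivate (r)
       use (r);
     }

   it is the referenced T object, not the reference itself, that gets the
   per-thread copy, which is why reference-typed decls (and parms passed by
   invisible reference) answer true here.  */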
1872 | |
1873 | /* Return true if DECL is a const-qualified var having no mutable member.  */
1874 | bool |
1875 | cxx_omp_const_qual_no_mutable (tree decl) |
1876 | { |
1877 | tree type = TREE_TYPE (decl); |
1878 | if (TREE_CODE (type) == REFERENCE_TYPE) |
1879 | { |
1880 | if (!is_invisiref_parm (decl)) |
1881 | return false; |
1882 | type = TREE_TYPE (type); |
1883 | |
1884 | if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl)) |
1885 | { |
1886 |           /* NRV (the named return value optimization) doesn't preserve
1887 |              the const qualification of the variable's type.  */
1888 | tree outer = outer_curly_brace_block (current_function_decl); |
1889 | tree var; |
1890 | |
1891 | if (outer) |
1892 | for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var)) |
1893 | if (VAR_P (var) |
1894 | && DECL_NAME (decl) == DECL_NAME (var) |
1895 | && (TYPE_MAIN_VARIANT (type) |
1896 | == TYPE_MAIN_VARIANT (TREE_TYPE (var)))) |
1897 | { |
1898 | if (TYPE_READONLY (TREE_TYPE (var))) |
1899 | type = TREE_TYPE (var); |
1900 | break; |
1901 | } |
1902 | } |
1903 | } |
1904 | |
1905 | if (type == error_mark_node) |
1906 | return false; |
1907 | |
1908 | /* Variables with const-qualified type having no mutable member |
1909 | are predetermined shared. */ |
1910 | if (TYPE_READONLY (type) && !cp_has_mutable_p (type)) |
1911 | return true; |
1912 | |
1913 | return false; |
1914 | } |
1915 | |
1916 | /* Return the predetermined OpenMP data-sharing attribute of DECL, if any.  */
1917 | |
1918 | enum omp_clause_default_kind |
1919 | cxx_omp_predetermined_sharing (tree decl) |
1920 | { |
1921 | /* Static data members are predetermined shared. */ |
1922 | if (TREE_STATIC (decl)) |
1923 | { |
1924 | tree ctx = CP_DECL_CONTEXT (decl); |
1925 | if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx)) |
1926 | return OMP_CLAUSE_DEFAULT_SHARED; |
1927 | } |
1928 | |
1929 | /* Const qualified vars having no mutable member are predetermined |
1930 | shared. */ |
1931 | if (cxx_omp_const_qual_no_mutable (decl)) |
1932 | return OMP_CLAUSE_DEFAULT_SHARED; |
1933 | |
1934 | return OMP_CLAUSE_DEFAULT_UNSPECIFIED; |
1935 | } |
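
/* Source-level illustration of the two predetermined cases above:

     struct A { static int s; };   // static data member
     const int n = 42;             // const, no mutable members

     void f ()
     {
     #pragma omp parallel default(none)
       use (A::s, n);
     }

   Both A::s and n are predetermined shared, so default(none) does not force
   them to appear in an explicit data-sharing clause.  */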
1936 | |
1937 | /* Finalize an implicitly determined clause. */ |
1938 | |
1939 | void |
1940 | cxx_omp_finish_clause (tree c, gimple_seq *) |
1941 | { |
1942 | tree decl, inner_type; |
1943 | bool make_shared = false; |
1944 | |
1945 | if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE) |
1946 | return; |
1947 | |
1948 | decl = OMP_CLAUSE_DECL (c); |
1949 | decl = require_complete_type (decl); |
1950 | inner_type = TREE_TYPE (decl); |
1951 | if (decl == error_mark_node) |
1952 | make_shared = true; |
1953 | else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE) |
1954 | inner_type = TREE_TYPE (inner_type); |
1955 | |
1956 | /* We're interested in the base element, not arrays. */ |
1957 | while (TREE_CODE (inner_type) == ARRAY_TYPE) |
1958 | inner_type = TREE_TYPE (inner_type); |
1959 | |
1960 | /* Check for special function availability by building a call to one. |
1961 | Save the results, because later we won't be in the right context |
1962 | for making these queries. */ |
1963 | if (!make_shared |
1964 | && CLASS_TYPE_P (inner_type) |
1965 | && cxx_omp_create_clause_info (c, inner_type, false, true, false, true)) |
1966 | make_shared = true; |
1967 | |
1968 | if (make_shared) |
1969 | { |
1970 | OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED; |
1971 | OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0; |
1972 | OMP_CLAUSE_SHARED_READONLY (c) = 0; |
1973 | } |
1974 | } |
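
/* E.g. for a task region that implicitly firstprivatizes a class object:

     struct NC { NC (const NC &) = delete; int i; };
     NC nc;
     #pragma omp task     // nc referenced in the task body
       use (nc);

   the copy constructor needed to create the private copy is unusable, so
   the availability check above fails and the implicit firstprivate clause
   is turned into a shared one.  This is a hypothetical sketch; the actual
   lookup and bookkeeping happen in cxx_omp_create_clause_info.  */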
1975 | |
1976 | /* Return true if DECL's DECL_VALUE_EXPR (if any) should be |
1977 | disregarded in OpenMP construct, because it is going to be |
1978 | remapped during OpenMP lowering. SHARED is true if DECL |
1979 | is going to be shared, false if it is going to be privatized. */ |
1980 | |
1981 | bool |
1982 | cxx_omp_disregard_value_expr (tree decl, bool shared) |
1983 | { |
1984 | return !shared |
1985 | && VAR_P (decl) |
1986 | && DECL_HAS_VALUE_EXPR_P (decl) |
1987 | && DECL_ARTIFICIAL (decl) |
1988 | && DECL_LANG_SPECIFIC (decl) |
1989 | && DECL_OMP_PRIVATIZED_MEMBER (decl); |
1990 | } |
1991 | |
1992 | /* Fold expression X which is used as an rvalue if RVAL is true. */ |
1993 | |
1994 | static tree |
1995 | cp_fold_maybe_rvalue (tree x, bool rval) |
1996 | { |
1997 | while (true) |
1998 | { |
1999 | x = cp_fold (x); |
2000 | if (rval && DECL_P (x) |
2001 | && TREE_CODE (TREE_TYPE (x)) != REFERENCE_TYPE) |
2002 | { |
2003 | tree v = decl_constant_value (x); |
2004 | if (v != x && v != error_mark_node) |
2005 | { |
2006 | x = v; |
2007 | continue; |
2008 | } |
2009 | } |
2010 | break; |
2011 | } |
2012 | return x; |
2013 | } |
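
/* A small illustration: given

     const int n = 4;
     ... n + 1 ...

   folding the use of n as an rvalue substitutes its constant initializer
   via decl_constant_value, after which the enclosing PLUS_EXPR folds to 5.
   Reference-typed decls are deliberately left alone.  */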
2014 | |
2015 | /* Fold expression X which is used as an rvalue. */ |
2016 | |
2017 | static tree |
2018 | cp_fold_rvalue (tree x) |
2019 | { |
2020 | return cp_fold_maybe_rvalue (x, true); |
2021 | } |
2022 | |
2023 | /* Perform folding on expression X. */ |
2024 | |
2025 | tree |
2026 | cp_fully_fold (tree x) |
2027 | { |
2028 | if (processing_template_decl) |
2029 | return x; |
2030 | /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't |
2031 | have to call both. */ |
2032 | if (cxx_dialect >= cxx11) |
2033 | x = maybe_constant_value (x); |
2034 | return cp_fold_rvalue (x); |
2035 | } |
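
/* Illustration: for an expression such as

     constexpr int sq (int x) { return x * x; }
     ... sq (3) ...

   maybe_constant_value performs the constexpr evaluation (yielding 9) and
   cp_fold_rvalue then applies the ordinary tree foldings to whatever
   remains; the FIXME above notes that ideally a single cp_fold pass would
   cover both.  */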
2036 | |
2037 | /* c-common interface to cp_fold. If IN_INIT, this is in a static initializer |
2038 | and certain changes are made to the folding done. Or should be (FIXME). We |
2039 | never touch maybe_const, as it is only used for the C front-end |
2040 | C_MAYBE_CONST_EXPR. */ |
2041 | |
2042 | tree |
2043 | c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval) |
2044 | { |
2045 | return cp_fold_maybe_rvalue (x, !lval); |
2046 | } |
2047 | |
2048 | static GTY((deletable)) hash_map<tree, tree> *fold_cache; |
2049 | |
2050 | /* Dispose of the whole FOLD_CACHE. */ |
2051 | |
2052 | void |
2053 | clear_fold_cache (void) |
2054 | { |
2055 | if (fold_cache != NULL) |
2056 | fold_cache->empty (); |
2057 | } |
2058 | |
2059 | /* This function tries to fold an expression X. |
2060 | To avoid combinatorial explosion, folding results are kept in fold_cache. |
2061 | If we are processing a template or X is invalid, we don't fold at all. |
2062 | For performance reasons we don't cache expressions representing a |
2063 | declaration or constant. |
2064 |    Return X or its folded variant.  */
2065 | |
2066 | static tree |
2067 | cp_fold (tree x) |
2068 | { |
2069 | tree op0, op1, op2, op3; |
2070 | tree org_x = x, r = NULL_TREE; |
2071 | enum tree_code code; |
2072 | location_t loc; |
2073 | bool rval_ops = true; |
2074 | |
2075 | if (!x || x == error_mark_node) |
2076 | return x; |
2077 | |
2078 | if (processing_template_decl |
2079 | || (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node))) |
2080 | return x; |
2081 | |
2082 | /* Don't bother to cache DECLs or constants. */ |
2083 | if (DECL_P (x) || CONSTANT_CLASS_P (x)) |
2084 | return x; |
2085 | |
2086 | if (fold_cache == NULL) |
2087 | fold_cache = hash_map<tree, tree>::create_ggc (101); |
2088 | |
2089 | if (tree *cached = fold_cache->get (x)) |
2090 | return *cached; |
2091 | |
2092 | code = TREE_CODE (x); |
2093 | switch (code) |
2094 | { |
2095 | case CLEANUP_POINT_EXPR: |
2096 | /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side |
2097 | effects. */ |
2098 | r = cp_fold_rvalue (TREE_OPERAND (x, 0)); |
2099 | if (!TREE_SIDE_EFFECTS (r)) |
2100 | x = r; |
2101 | break; |
2102 | |
2103 | case SIZEOF_EXPR: |
2104 | x = fold_sizeof_expr (x); |
2105 | break; |
2106 | |
2107 | case VIEW_CONVERT_EXPR: |
2108 | rval_ops = false; |
2109 | /* FALLTHRU */ |
2110 | case CONVERT_EXPR: |
2111 | case NOP_EXPR: |
2112 | case NON_LVALUE_EXPR: |
2113 | |
2114 | if (VOID_TYPE_P (TREE_TYPE (x))) |
2115 | return x; |
2116 | |
2117 | loc = EXPR_LOCATION (x); |
2118 | op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops); |
2119 | |
2120 | if (code == CONVERT_EXPR |
2121 | && SCALAR_TYPE_P (TREE_TYPE (x)) |
2122 | && op0 != void_node) |
2123 | /* During parsing we used convert_to_*_nofold; re-convert now using the |
2124 | folding variants, since fold() doesn't do those transformations. */ |
2125 | x = fold (convert (TREE_TYPE (x), op0)); |
2126 | else if (op0 != TREE_OPERAND (x, 0)) |
2127 | { |
2128 | if (op0 == error_mark_node) |
2129 | x = error_mark_node; |
2130 | else |
2131 | x = fold_build1_loc (loc, code, TREE_TYPE (x), op0); |
2132 | } |
2133 | else |
2134 | x = fold (x); |
2135 | |
2136 | /* Conversion of an out-of-range value has implementation-defined |
2137 | behavior; the language considers it different from arithmetic |
2138 | overflow, which is undefined. */ |
2139 | if (TREE_CODE (op0) == INTEGER_CST |
2140 | && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0)) |
2141 | TREE_OVERFLOW (x) = false; |
2142 | |
2143 | break; |
2144 | |
2145 | case INDIRECT_REF: |
2146 | /* We don't need the decltype(auto) obfuscation anymore. */ |
2147 | if (REF_PARENTHESIZED_P (x)) |
2148 | { |
2149 | tree p = maybe_undo_parenthesized_ref (x); |
2150 | return cp_fold (p); |
2151 | } |
2152 | goto unary; |
2153 | |
2154 | case ADDR_EXPR: |
2155 | case REALPART_EXPR: |
2156 | case IMAGPART_EXPR: |
2157 | rval_ops = false; |
2158 | /* FALLTHRU */ |
2159 | case CONJ_EXPR: |
2160 | case FIX_TRUNC_EXPR: |
2161 | case FLOAT_EXPR: |
2162 | case NEGATE_EXPR: |
2163 | case ABS_EXPR: |
2164 | case BIT_NOT_EXPR: |
2165 | case TRUTH_NOT_EXPR: |
2166 | case FIXED_CONVERT_EXPR: |
2167 | unary: |
2168 | |
2169 | loc = EXPR_LOCATION (x); |
2170 | op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops); |
2171 | |
2172 | if (op0 != TREE_OPERAND (x, 0)) |
2173 | { |
2174 | if (op0 == error_mark_node) |
2175 | x = error_mark_node; |
2176 | else |
2177 | { |
2178 | x = fold_build1_loc (loc, code, TREE_TYPE (x), op0); |
2179 | if (code == INDIRECT_REF |
2180 | && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF)) |
2181 | { |
2182 | TREE_READONLY (x) = TREE_READONLY (org_x); |
2183 | TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x); |
2184 | TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x); |
2185 | } |
2186 | } |
2187 | } |
2188 | else |
2189 | x = fold (x); |
2190 | |
2191 | gcc_assert (TREE_CODE (x) != COND_EXPR |
2192 | || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0)))); |
2193 | break; |
2194 | |
2195 | case UNARY_PLUS_EXPR: |
2196 | op0 = cp_fold_rvalue (TREE_OPERAND (x, 0)); |
2197 | if (op0 == error_mark_node) |
2198 | x = error_mark_node; |
2199 | else |
2200 | x = fold_convert (TREE_TYPE (x), op0); |
2201 | break; |
2202 | |
2203 | case POSTDECREMENT_EXPR: |
2204 | case POSTINCREMENT_EXPR: |
2205 | case INIT_EXPR: |
2206 | case PREDECREMENT_EXPR: |
2207 | case PREINCREMENT_EXPR: |
2208 | case COMPOUND_EXPR: |
2209 | case MODIFY_EXPR: |
2210 | rval_ops = false; |
2211 | /* FALLTHRU */ |
2212 | case POINTER_PLUS_EXPR: |
2213 | case PLUS_EXPR: |
2214 | case POINTER_DIFF_EXPR: |
2215 | case MINUS_EXPR: |
2216 | case MULT_EXPR: |
2217 | case TRUNC_DIV_EXPR: |
2218 | case CEIL_DIV_EXPR: |
2219 | case FLOOR_DIV_EXPR: |
2220 | case ROUND_DIV_EXPR: |
2221 | case TRUNC_MOD_EXPR: |
2222 | case CEIL_MOD_EXPR: |
2223 | case ROUND_MOD_EXPR: |
2224 | case RDIV_EXPR: |
2225 | case EXACT_DIV_EXPR: |
2226 | case MIN_EXPR: |
2227 | case MAX_EXPR: |
2228 | case LSHIFT_EXPR: |
2229 | case RSHIFT_EXPR: |
2230 | case LROTATE_EXPR: |
2231 | case RROTATE_EXPR: |
2232 | case BIT_AND_EXPR: |
2233 | case BIT_IOR_EXPR: |
2234 | case BIT_XOR_EXPR: |
2235 | case TRUTH_AND_EXPR: |
2236 | case TRUTH_ANDIF_EXPR: |
2237 | case TRUTH_OR_EXPR: |
2238 | case TRUTH_ORIF_EXPR: |
2239 | case TRUTH_XOR_EXPR: |
2240 | case LT_EXPR: case LE_EXPR: |
2241 | case GT_EXPR: case GE_EXPR: |
2242 | case EQ_EXPR: case NE_EXPR: |
2243 | case UNORDERED_EXPR: case ORDERED_EXPR: |
2244 | case UNLT_EXPR: case UNLE_EXPR: |
2245 | case UNGT_EXPR: case UNGE_EXPR: |
2246 | case UNEQ_EXPR: case LTGT_EXPR: |
2247 | case RANGE_EXPR: case COMPLEX_EXPR: |
2248 | |
2249 | loc = EXPR_LOCATION (x); |
2250 | op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops); |
2251 | op1 = cp_fold_rvalue (TREE_OPERAND (x, 1)); |
2252 | |
2253 | if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1)) |
2254 | { |
2255 | if (op0 == error_mark_node || op1 == error_mark_node) |
2256 | x = error_mark_node; |
2257 | else |
2258 | x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1); |
2259 | } |
2260 | else |
2261 | x = fold (x); |
2262 | |
2263 | if (TREE_NO_WARNING (org_x) |
2264 | && warn_nonnull_compare |
2265 | && COMPARISON_CLASS_P (org_x)) |
2266 | { |
2267 | if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST) |
2268 | ; |
2269 | else if (COMPARISON_CLASS_P (x)) |
2270 | TREE_NO_WARNING (x) = 1; |
2271 |           /* Otherwise give up on optimizing these here and let the GIMPLE
2272 |              folders optimize them later on.  */
2273 | else if (op0 != TREE_OPERAND (org_x, 0) |
2274 | || op1 != TREE_OPERAND (org_x, 1)) |
2275 | { |
2276 | x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1); |
2277 | TREE_NO_WARNING (x) = 1; |
2278 | } |
2279 | else |
2280 | x = org_x; |
2281 | } |
2282 | break; |
2283 | |
2284 | case VEC_COND_EXPR: |
2285 | case COND_EXPR: |
2286 | loc = EXPR_LOCATION (x); |
2287 | op0 = cp_fold_rvalue (TREE_OPERAND (x, 0)); |
2288 | op1 = cp_fold (TREE_OPERAND (x, 1)); |
2289 | op2 = cp_fold (TREE_OPERAND (x, 2)); |
2290 | |
2291 | if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE) |
2292 | { |
2293 | warning_sentinel s (warn_int_in_bool_context); |
2294 | if (!VOID_TYPE_P (TREE_TYPE (op1))) |
2295 | op1 = cp_truthvalue_conversion (op1); |
2296 | if (!VOID_TYPE_P (TREE_TYPE (op2))) |
2297 | op2 = cp_truthvalue_conversion (op2); |
2298 | } |
2299 | else if (VOID_TYPE_P (TREE_TYPE (x))) |
2300 | { |
2301 | if (TREE_CODE (op0) == INTEGER_CST) |
2302 | { |
2303 |               /* If the condition is constant, fold can fold away the COND_EXPR.
2304 |                  Some statement-level COND_EXPRs have a NULL branch; build empty
2305 |                  statements for those so that folding doesn't crash.  */
2306 | if (!op1) |
2307 | op1 = build_empty_stmt (loc); |
2308 | if (!op2) |
2309 | op2 = build_empty_stmt (loc); |
2310 | } |
2311 | else |
2312 | { |
2313 |               /* Otherwise, don't bother folding a void COND_EXPR whose
2314 |                  condition isn't constant; it can't produce a constant value.  */
2315 | if (op0 != TREE_OPERAND (x, 0) |
2316 | || op1 != TREE_OPERAND (x, 1) |
2317 | || op2 != TREE_OPERAND (x, 2)) |
2318 | x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2); |
2319 | break; |
2320 | } |
2321 | } |
2322 | |
2323 | if (op0 != TREE_OPERAND (x, 0) |
2324 | || op1 != TREE_OPERAND (x, 1) |
2325 | || op2 != TREE_OPERAND (x, 2)) |
2326 | { |
2327 | if (op0 == error_mark_node |
2328 | || op1 == error_mark_node |
2329 | || op2 == error_mark_node) |
2330 | x = error_mark_node; |
2331 | else |
2332 | x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2); |
2333 | } |
2334 | else |
2335 | x = fold (x); |
2336 | |
2337 | /* A COND_EXPR might have incompatible types in branches if one or both |
2338 | arms are bitfields. If folding exposed such a branch, fix it up. */ |
2339 | if (TREE_CODE (x) != code |
2340 | && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x))) |
2341 | x = fold_convert (TREE_TYPE (org_x), x); |
2342 | |
2343 | break; |
2344 | |
2345 | case CALL_EXPR: |
2346 | { |
2347 | int i, m, sv = optimize, nw = sv, changed = 0; |
2348 | tree callee = get_callee_fndecl (x); |
2349 | |
2350 | /* Some built-in function calls will be evaluated at compile-time in |
2351 | fold (). Set optimize to 1 when folding __builtin_constant_p inside |
2352 | a constexpr function so that fold_builtin_1 doesn't fold it to 0. */ |
2353 | if (callee && DECL_BUILT_IN (callee) && !optimize |
2354 | && DECL_IS_BUILTIN_CONSTANT_P (callee) |
2355 | && current_function_decl |
2356 | && DECL_DECLARED_CONSTEXPR_P (current_function_decl)) |
2357 | nw = 1; |
2358 | |
2359 | x = copy_node (x); |
2360 | |
2361 | m = call_expr_nargs (x); |
2362 | for (i = 0; i < m; i++) |
2363 | { |
2364 | r = cp_fold (CALL_EXPR_ARG (x, i)); |
2365 | if (r != CALL_EXPR_ARG (x, i)) |
2366 | { |
2367 | if (r == error_mark_node) |
2368 | { |
2369 | x = error_mark_node; |
2370 | break; |
2371 | } |
2372 | changed = 1; |
2373 | } |
2374 | CALL_EXPR_ARG (x, i) = r; |
2375 | } |
2376 | if (x == error_mark_node) |
2377 | break; |
2378 | |
2379 | optimize = nw; |
2380 | r = fold (x); |
2381 | optimize = sv; |
2382 | |
2383 | if (TREE_CODE (r) != CALL_EXPR) |
2384 | { |
2385 | x = cp_fold (r); |
2386 | break; |
2387 | } |
2388 | |
2389 | optimize = nw; |
2390 | |
2391 | /* Invoke maybe_constant_value for functions declared |
2392 | constexpr and not called with AGGR_INIT_EXPRs. |
2393 | TODO: |
2394 | Do constexpr expansion of expressions where the call itself is not |
2395 | constant, but the call followed by an INDIRECT_REF is. */ |
2396 | if (callee && DECL_DECLARED_CONSTEXPR_P (callee) |
2397 | && !flag_no_inline) |
2398 | r = maybe_constant_value (x); |
2399 | optimize = sv; |
2400 | |
2401 | if (TREE_CODE (r) != CALL_EXPR) |
2402 | { |
2403 | if (DECL_CONSTRUCTOR_P (callee)) |
2404 | { |
2405 | loc = EXPR_LOCATION (x); |
2406 | tree s = build_fold_indirect_ref_loc (loc, |
2407 | CALL_EXPR_ARG (x, 0)); |
2408 | r = build2_loc (loc, INIT_EXPR, TREE_TYPE (s), s, r); |
2409 | } |
2410 | x = r; |
2411 | break; |
2412 | } |
2413 | |
2414 | if (!changed) |
2415 | x = org_x; |
2416 | break; |
2417 | } |
2418 | |
2419 | case CONSTRUCTOR: |
2420 | { |
2421 | unsigned i; |
2422 | constructor_elt *p; |
2423 | vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x); |
2424 | vec<constructor_elt, va_gc> *nelts = NULL; |
2425 | FOR_EACH_VEC_SAFE_ELT (elts, i, p) |
2426 | { |
2427 | tree op = cp_fold (p->value); |
2428 | if (op != p->value) |
2429 | { |
2430 | if (op == error_mark_node) |
2431 | { |
2432 | x = error_mark_node; |
2433 | vec_free (nelts); |
2434 | break; |
2435 | } |
2436 | if (nelts == NULL) |
2437 | nelts = elts->copy (); |
2438 | (*nelts)[i].value = op; |
2439 | } |
2440 | } |
2441 | if (nelts) |
2442 | x = build_constructor (TREE_TYPE (x), nelts); |
2443 | break; |
2444 | } |
2445 | case TREE_VEC: |
2446 | { |
2447 | bool changed = false; |
2448 | vec<tree, va_gc> *vec = make_tree_vector (); |
2449 | int i, n = TREE_VEC_LENGTH (x); |
2450 | vec_safe_reserve (vec, n); |
2451 | |
2452 | for (i = 0; i < n; i++) |
2453 | { |
2454 | tree op = cp_fold (TREE_VEC_ELT (x, i)); |
2455 | vec->quick_push (op); |
2456 | if (op != TREE_VEC_ELT (x, i)) |
2457 | changed = true; |
2458 | } |
2459 | |
2460 | if (changed) |
2461 | { |
2462 | r = copy_node (x); |
2463 | for (i = 0; i < n; i++) |
2464 | TREE_VEC_ELT (r, i) = (*vec)[i]; |
2465 | x = r; |
2466 | } |
2467 | |
2468 | release_tree_vector (vec); |
2469 | } |
2470 | |
2471 | break; |
2472 | |
2473 | case ARRAY_REF: |
2474 | case ARRAY_RANGE_REF: |
2475 | |
2476 | loc = EXPR_LOCATION (x); |
2477 | op0 = cp_fold (TREE_OPERAND (x, 0)); |
2478 | op1 = cp_fold (TREE_OPERAND (x, 1)); |
2479 | op2 = cp_fold (TREE_OPERAND (x, 2)); |
2480 | op3 = cp_fold (TREE_OPERAND (x, 3)); |
2481 | |
2482 | if (op0 != TREE_OPERAND (x, 0) |
2483 | || op1 != TREE_OPERAND (x, 1) |
2484 | || op2 != TREE_OPERAND (x, 2) |
2485 | || op3 != TREE_OPERAND (x, 3)) |
2486 | { |
2487 | if (op0 == error_mark_node |
2488 | || op1 == error_mark_node |
2489 | || op2 == error_mark_node |
2490 | || op3 == error_mark_node) |
2491 | x = error_mark_node; |
2492 | else |
2493 | { |
2494 | x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3); |
2495 | TREE_READONLY (x) = TREE_READONLY (org_x); |
2496 | TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x); |
2497 | TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x); |
2498 | } |
2499 | } |
2500 | |
2501 | x = fold (x); |
2502 | break; |
2503 | |
2504 | case SAVE_EXPR: |
2505 |       /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
2506 |          folding, evaluates to an invariant.  In that case there is no need
2507 |          to wrap the folded tree in a SAVE_EXPR.  */
2508 | r = cp_fold (TREE_OPERAND (x, 0)); |
2509 | if (tree_invariant_p (r)) |
2510 | x = r; |
2511 | break; |
2512 | |
2513 | default: |
2514 | return org_x; |
2515 | } |
2516 | |
2517 | fold_cache->put (org_x, x); |
2518 |   /* Make sure we don't try to fold an already folded result again.  */
2519 | if (x != org_x) |
2520 | fold_cache->put (x, x); |
2521 | |
2522 | return x; |
2523 | } |
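
/* Two schematic examples of what the switch in cp_fold achieves:

     sizeof (int) * n    -> the SIZEOF_EXPR case replaces the sizeof with a
                            constant, after which the enclosing MULT_EXPR is
                            rebuilt (or folded further if n is constant too);
     p != 0 ? *p : 0     -> the operands are folded recursively and the
                            COND_EXPR is rebuilt or folded via fold_build3_loc.

   Results are memoized in fold_cache keyed on the original tree, so folding
   the same subexpression repeatedly during genericization stays cheap.  */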
2524 | |
2525 | #include "gt-cp-cp-gimplify.h" |
2526 | |