1 | /* Tree lowering pass. This pass converts the GENERIC functions-as-trees |
2 | tree representation into the GIMPLE form. |
3 | Copyright (C) 2002-2017 Free Software Foundation, Inc. |
4 | Major work done by Sebastian Pop <s.pop@laposte.net>, |
5 | Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>. |
6 | |
7 | This file is part of GCC. |
8 | |
9 | GCC is free software; you can redistribute it and/or modify it under |
10 | the terms of the GNU General Public License as published by the Free |
11 | Software Foundation; either version 3, or (at your option) any later |
12 | version. |
13 | |
14 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
15 | WARRANTY; without even the implied warranty of MERCHANTABILITY or |
16 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
17 | for more details. |
18 | |
19 | You should have received a copy of the GNU General Public License |
20 | along with GCC; see the file COPYING3. If not see |
21 | <http://www.gnu.org/licenses/>. */ |
22 | |
23 | #include "config.h" |
24 | #include "system.h" |
25 | #include "coretypes.h" |
26 | #include "backend.h" |
27 | #include "target.h" |
28 | #include "rtl.h" |
29 | #include "tree.h" |
30 | #include "memmodel.h" |
31 | #include "tm_p.h" |
32 | #include "gimple.h" |
33 | #include "gimple-predict.h" |
34 | #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */ |
35 | #include "ssa.h" |
36 | #include "cgraph.h" |
37 | #include "tree-pretty-print.h" |
38 | #include "diagnostic-core.h" |
39 | #include "alias.h" |
40 | #include "fold-const.h" |
41 | #include "calls.h" |
42 | #include "varasm.h" |
43 | #include "stmt.h" |
44 | #include "expr.h" |
45 | #include "gimple-fold.h" |
46 | #include "tree-eh.h" |
47 | #include "gimplify.h" |
48 | #include "gimple-iterator.h" |
49 | #include "stor-layout.h" |
50 | #include "print-tree.h" |
51 | #include "tree-iterator.h" |
52 | #include "tree-inline.h" |
53 | #include "langhooks.h" |
54 | #include "tree-cfg.h" |
55 | #include "tree-ssa.h" |
56 | #include "omp-general.h" |
57 | #include "omp-low.h" |
58 | #include "gimple-low.h" |
59 | #include "gomp-constants.h" |
60 | #include "splay-tree.h" |
61 | #include "gimple-walk.h" |
62 | #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */ |
63 | #include "builtins.h" |
64 | #include "stringpool.h" |
65 | #include "attribs.h" |
66 | #include "asan.h" |
67 | #include "dbgcnt.h" |
68 | |
69 | /* Hash set of poisoned variables in a bind expr. */ |
70 | static hash_set<tree> *asan_poisoned_variables = NULL; |
71 | |
72 | enum gimplify_omp_var_data |
73 | { |
74 | GOVD_SEEN = 1, |
75 | GOVD_EXPLICIT = 2, |
76 | GOVD_SHARED = 4, |
77 | GOVD_PRIVATE = 8, |
78 | GOVD_FIRSTPRIVATE = 16, |
79 | GOVD_LASTPRIVATE = 32, |
80 | GOVD_REDUCTION = 64, |
81 | GOVD_LOCAL = 128, |
82 | GOVD_MAP = 256, |
83 | GOVD_DEBUG_PRIVATE = 512, |
84 | GOVD_PRIVATE_OUTER_REF = 1024, |
85 | GOVD_LINEAR = 2048, |
86 | GOVD_ALIGNED = 4096, |
87 | |
88 | /* Flag for GOVD_MAP: don't copy back. */ |
89 | GOVD_MAP_TO_ONLY = 8192, |
90 | |
91 | /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference. */ |
92 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 16384, |
93 | |
94 | GOVD_MAP_0LEN_ARRAY = 32768, |
95 | |
  /* Flag for GOVD_MAP, if it is an always,to or always,tofrom mapping.  */
97 | GOVD_MAP_ALWAYS_TO = 65536, |
98 | |
99 | /* Flag for shared vars that are or might be stored to in the region. */ |
100 | GOVD_WRITTEN = 131072, |
101 | |
102 | /* Flag for GOVD_MAP, if it is a forced mapping. */ |
103 | GOVD_MAP_FORCE = 262144, |
104 | |
105 | /* Flag for GOVD_MAP: must be present already. */ |
106 | GOVD_MAP_FORCE_PRESENT = 524288, |
107 | |
108 | GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE |
109 | | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR |
110 | | GOVD_LOCAL) |
111 | }; |
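
/* As an illustrative example (the exact combination depends on the
   construct and clauses), a variable listed in an OpenMP
   "map (always, to: x)" clause would be entered in the context's
   splay tree with GOVD_MAP | GOVD_MAP_ALWAYS_TO | GOVD_EXPLICIT,
   and GOVD_SEEN is ORed in once the variable is actually referenced
   in the region.  */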
112 | |
113 | |
114 | enum omp_region_type |
115 | { |
116 | ORT_WORKSHARE = 0x00, |
117 | ORT_SIMD = 0x01, |
118 | |
119 | ORT_PARALLEL = 0x02, |
120 | ORT_COMBINED_PARALLEL = 0x03, |
121 | |
122 | ORT_TASK = 0x04, |
123 | ORT_UNTIED_TASK = 0x05, |
124 | |
125 | ORT_TEAMS = 0x08, |
126 | ORT_COMBINED_TEAMS = 0x09, |
127 | |
128 | /* Data region. */ |
129 | ORT_TARGET_DATA = 0x10, |
130 | |
131 | /* Data region with offloading. */ |
132 | ORT_TARGET = 0x20, |
133 | ORT_COMBINED_TARGET = 0x21, |
134 | |
135 | /* OpenACC variants. */ |
136 | ORT_ACC = 0x40, /* A generic OpenACC region. */ |
137 | ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA, /* Data construct. */ |
138 | ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET, /* Parallel construct */ |
139 | ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 0x80, /* Kernels construct. */ |
140 | ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 0x80, /* Host data. */ |
141 | |
142 | /* Dummy OpenMP region, used to disable expansion of |
143 | DECL_VALUE_EXPRs in taskloop pre body. */ |
144 | ORT_NONE = 0x100 |
145 | }; |
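
/* For instance (illustrative), the low bit of these values marks
   combined constructs: "#pragma omp target parallel" gimplifies its
   target part in an ORT_COMBINED_TARGET (ORT_TARGET | 1) region and
   its parallel part in an ORT_COMBINED_PARALLEL (ORT_PARALLEL | 1)
   region, letting clause handling distinguish combined from
   standalone directives.  */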
146 | |
147 | /* Gimplify hashtable helper. */ |
148 | |
149 | struct gimplify_hasher : free_ptr_hash <elt_t> |
150 | { |
151 | static inline hashval_t hash (const elt_t *); |
152 | static inline bool equal (const elt_t *, const elt_t *); |
153 | }; |
154 | |
155 | struct gimplify_ctx |
156 | { |
157 | struct gimplify_ctx *prev_context; |
158 | |
159 | vec<gbind *> bind_expr_stack; |
160 | tree temps; |
161 | gimple_seq conditional_cleanups; |
162 | tree exit_label; |
163 | tree return_temp; |
164 | |
165 | vec<tree> case_labels; |
166 | hash_set<tree> *live_switch_vars; |
167 | /* The formal temporary table. Should this be persistent? */ |
168 | hash_table<gimplify_hasher> *temp_htab; |
169 | |
170 | int conditions; |
171 | unsigned into_ssa : 1; |
172 | unsigned allow_rhs_cond_expr : 1; |
173 | unsigned in_cleanup_point_expr : 1; |
174 | unsigned keep_stack : 1; |
175 | unsigned save_stack : 1; |
176 | unsigned in_switch_expr : 1; |
177 | }; |
178 | |
179 | struct gimplify_omp_ctx |
180 | { |
181 | struct gimplify_omp_ctx *outer_context; |
182 | splay_tree variables; |
183 | hash_set<tree> *privatized_types; |
184 | /* Iteration variables in an OMP_FOR. */ |
185 | vec<tree> loop_iter_var; |
186 | location_t location; |
187 | enum omp_clause_default_kind default_kind; |
188 | enum omp_region_type region_type; |
189 | bool combined_loop; |
190 | bool distribute; |
191 | bool target_map_scalars_firstprivate; |
192 | bool target_map_pointers_as_0len_arrays; |
193 | bool target_firstprivatize_array_bases; |
194 | }; |
195 | |
196 | static struct gimplify_ctx *gimplify_ctxp; |
197 | static struct gimplify_omp_ctx *gimplify_omp_ctxp; |
198 | |
/* Forward declarations.  */
200 | static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool); |
201 | static hash_map<tree, tree> *oacc_declare_returns; |
202 | static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *, |
203 | bool (*) (tree), fallback_t, bool); |
204 | |
/* Shorter alias for gimple_seq_add_stmt_without_update, for use in
   gimplify.c only.  */
207 | |
208 | static inline void |
209 | gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs) |
210 | { |
211 | gimple_seq_add_stmt_without_update (seq_p, gs); |
212 | } |
213 | |
214 | /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is |
215 | NULL, a new sequence is allocated. This function is |
216 | similar to gimple_seq_add_seq, but does not scan the operands. |
217 | During gimplification, we need to manipulate statement sequences |
218 | before the def/use vectors have been constructed. */ |
219 | |
220 | static void |
221 | gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src) |
222 | { |
223 | gimple_stmt_iterator si; |
224 | |
225 | if (src == NULL) |
226 | return; |
227 | |
228 | si = gsi_last (*dst_p); |
229 | gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT); |
230 | } |
231 | |
232 | |
233 | /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing |
234 | and popping gimplify contexts. */ |
235 | |
236 | static struct gimplify_ctx *ctx_pool = NULL; |
237 | |
238 | /* Return a gimplify context struct from the pool. */ |
239 | |
240 | static inline struct gimplify_ctx * |
241 | ctx_alloc (void) |
242 | { |
243 | struct gimplify_ctx * c = ctx_pool; |
244 | |
245 | if (c) |
246 | ctx_pool = c->prev_context; |
247 | else |
248 | c = XNEW (struct gimplify_ctx); |
249 | |
250 | memset (c, '\0', sizeof (*c)); |
251 | return c; |
252 | } |
253 | |
254 | /* Put gimplify context C back into the pool. */ |
255 | |
256 | static inline void |
257 | ctx_free (struct gimplify_ctx *c) |
258 | { |
259 | c->prev_context = ctx_pool; |
260 | ctx_pool = c; |
261 | } |
262 | |
263 | /* Free allocated ctx stack memory. */ |
264 | |
265 | void |
266 | free_gimplify_stack (void) |
267 | { |
268 | struct gimplify_ctx *c; |
269 | |
270 | while ((c = ctx_pool)) |
271 | { |
272 | ctx_pool = c->prev_context; |
273 | free (c); |
274 | } |
275 | } |
276 | |
277 | |
278 | /* Set up a context for the gimplifier. */ |
279 | |
280 | void |
281 | push_gimplify_context (bool in_ssa, bool rhs_cond_ok) |
282 | { |
283 | struct gimplify_ctx *c = ctx_alloc (); |
284 | |
285 | c->prev_context = gimplify_ctxp; |
286 | gimplify_ctxp = c; |
287 | gimplify_ctxp->into_ssa = in_ssa; |
288 | gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok; |
289 | } |
290 | |
291 | /* Tear down a context for the gimplifier. If BODY is non-null, then |
292 | put the temporaries into the outer BIND_EXPR. Otherwise, put them |
293 | in the local_decls. |
294 | |
295 | BODY is not a sequence, but the first tuple in a sequence. */ |
296 | |
297 | void |
298 | pop_gimplify_context (gimple *body) |
299 | { |
300 | struct gimplify_ctx *c = gimplify_ctxp; |
301 | |
302 | gcc_assert (c |
303 | && (!c->bind_expr_stack.exists () |
304 | || c->bind_expr_stack.is_empty ())); |
305 | c->bind_expr_stack.release (); |
306 | gimplify_ctxp = c->prev_context; |
307 | |
308 | if (body) |
309 | declare_vars (c->temps, body, false); |
310 | else |
311 | record_vars (c->temps); |
312 | |
313 | delete c->temp_htab; |
314 | c->temp_htab = NULL; |
315 | ctx_free (c); |
316 | } |
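
/* A typical usage pattern for the above pair (an illustrative sketch;
   see e.g. gimplify_body for a real caller) is:

     push_gimplify_context ();
     ... gimplify a body, creating temporaries along the way ...
     pop_gimplify_context (outer_bind);

   where OUTER_BIND is the outermost GIMPLE_BIND of the gimplified
   body, so that the temporaries created in between are declared in
   its variable list.  */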
317 | |
318 | /* Push a GIMPLE_BIND tuple onto the stack of bindings. */ |
319 | |
320 | static void |
321 | gimple_push_bind_expr (gbind *bind_stmt) |
322 | { |
323 | gimplify_ctxp->bind_expr_stack.reserve (8); |
324 | gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt); |
325 | } |
326 | |
/* Pop the topmost element off the stack of bindings.  */
328 | |
329 | static void |
330 | gimple_pop_bind_expr (void) |
331 | { |
332 | gimplify_ctxp->bind_expr_stack.pop (); |
333 | } |
334 | |
/* Return the topmost element of the stack of bindings.  */
336 | |
337 | gbind * |
338 | gimple_current_bind_expr (void) |
339 | { |
340 | return gimplify_ctxp->bind_expr_stack.last (); |
341 | } |
342 | |
343 | /* Return the stack of bindings created during gimplification. */ |
344 | |
345 | vec<gbind *> |
346 | gimple_bind_expr_stack (void) |
347 | { |
348 | return gimplify_ctxp->bind_expr_stack; |
349 | } |
350 | |
351 | /* Return true iff there is a COND_EXPR between us and the innermost |
352 | CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */ |
353 | |
354 | static bool |
355 | gimple_conditional_context (void) |
356 | { |
357 | return gimplify_ctxp->conditions > 0; |
358 | } |
359 | |
360 | /* Note that we've entered a COND_EXPR. */ |
361 | |
362 | static void |
363 | gimple_push_condition (void) |
364 | { |
365 | #ifdef ENABLE_GIMPLE_CHECKING |
366 | if (gimplify_ctxp->conditions == 0) |
367 | gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups)); |
368 | #endif |
369 | ++(gimplify_ctxp->conditions); |
370 | } |
371 | |
372 | /* Note that we've left a COND_EXPR. If we're back at unconditional scope |
373 | now, add any conditional cleanups we've seen to the prequeue. */ |
374 | |
375 | static void |
376 | gimple_pop_condition (gimple_seq *pre_p) |
377 | { |
378 | int conds = --(gimplify_ctxp->conditions); |
379 | |
380 | gcc_assert (conds >= 0); |
381 | if (conds == 0) |
382 | { |
383 | gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups); |
384 | gimplify_ctxp->conditional_cleanups = NULL; |
385 | } |
386 | } |
387 | |
388 | /* A stable comparison routine for use with splay trees and DECLs. */ |
389 | |
390 | static int |
391 | splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb) |
392 | { |
393 | tree a = (tree) xa; |
394 | tree b = (tree) xb; |
395 | |
396 | return DECL_UID (a) - DECL_UID (b); |
397 | } |
398 | |
/* Create a new omp context that deals with variable remapping.  */
400 | |
401 | static struct gimplify_omp_ctx * |
402 | new_omp_context (enum omp_region_type region_type) |
403 | { |
404 | struct gimplify_omp_ctx *c; |
405 | |
406 | c = XCNEW (struct gimplify_omp_ctx); |
407 | c->outer_context = gimplify_omp_ctxp; |
408 | c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0); |
409 | c->privatized_types = new hash_set<tree>; |
410 | c->location = input_location; |
411 | c->region_type = region_type; |
412 | if ((region_type & ORT_TASK) == 0) |
413 | c->default_kind = OMP_CLAUSE_DEFAULT_SHARED; |
414 | else |
415 | c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED; |
416 | |
417 | return c; |
418 | } |
419 | |
/* Destroy an omp context that deals with variable remapping.  */
421 | |
422 | static void |
423 | delete_omp_context (struct gimplify_omp_ctx *c) |
424 | { |
425 | splay_tree_delete (c->variables); |
426 | delete c->privatized_types; |
427 | c->loop_iter_var.release (); |
428 | XDELETE (c); |
429 | } |
430 | |
431 | static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int); |
432 | static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool); |
433 | |
434 | /* Both gimplify the statement T and append it to *SEQ_P. This function |
435 | behaves exactly as gimplify_stmt, but you don't have to pass T as a |
436 | reference. */ |
437 | |
438 | void |
439 | gimplify_and_add (tree t, gimple_seq *seq_p) |
440 | { |
441 | gimplify_stmt (&t, seq_p); |
442 | } |
443 | |
444 | /* Gimplify statement T into sequence *SEQ_P, and return the first |
445 | tuple in the sequence of generated tuples for this statement. |
446 | Return NULL if gimplifying T produced no tuples. */ |
447 | |
448 | static gimple * |
449 | gimplify_and_return_first (tree t, gimple_seq *seq_p) |
450 | { |
451 | gimple_stmt_iterator last = gsi_last (*seq_p); |
452 | |
453 | gimplify_and_add (t, seq_p); |
454 | |
455 | if (!gsi_end_p (last)) |
456 | { |
457 | gsi_next (&last); |
458 | return gsi_stmt (last); |
459 | } |
460 | else |
461 | return gimple_seq_first_stmt (*seq_p); |
462 | } |
463 | |
464 | /* Returns true iff T is a valid RHS for an assignment to an un-renamed |
465 | LHS, or for a call argument. */ |
466 | |
467 | static bool |
468 | is_gimple_mem_rhs (tree t) |
469 | { |
470 | /* If we're dealing with a renamable type, either source or dest must be |
471 | a renamed variable. */ |
472 | if (is_gimple_reg_type (TREE_TYPE (t))) |
473 | return is_gimple_val (t); |
474 | else |
475 | return is_gimple_val (t) || is_gimple_lvalue (t); |
476 | } |
477 | |
478 | /* Return true if T is a CALL_EXPR or an expression that can be |
479 | assigned to a temporary. Note that this predicate should only be |
480 | used during gimplification. See the rationale for this in |
481 | gimplify_modify_expr. */ |
482 | |
483 | static bool |
484 | is_gimple_reg_rhs_or_call (tree t) |
485 | { |
486 | return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS |
487 | || TREE_CODE (t) == CALL_EXPR); |
488 | } |
489 | |
490 | /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that |
491 | this predicate should only be used during gimplification. See the |
492 | rationale for this in gimplify_modify_expr. */ |
493 | |
494 | static bool |
495 | is_gimple_mem_rhs_or_call (tree t) |
496 | { |
497 | /* If we're dealing with a renamable type, either source or dest must be |
498 | a renamed variable. */ |
499 | if (is_gimple_reg_type (TREE_TYPE (t))) |
500 | return is_gimple_val (t); |
501 | else |
502 | return (is_gimple_val (t) |
503 | || is_gimple_lvalue (t) |
504 | || TREE_CLOBBER_P (t) |
505 | || TREE_CODE (t) == CALL_EXPR); |
506 | } |
507 | |
508 | /* Create a temporary with a name derived from VAL. Subroutine of |
509 | lookup_tmp_var; nobody else should call this function. */ |
510 | |
511 | static inline tree |
512 | create_tmp_from_val (tree val) |
513 | { |
514 | /* Drop all qualifiers and address-space information from the value type. */ |
515 | tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val)); |
516 | tree var = create_tmp_var (type, get_name (val)); |
517 | if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE |
518 | || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE) |
519 | DECL_GIMPLE_REG_P (var) = 1; |
520 | return var; |
521 | } |
522 | |
523 | /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse |
524 | an existing expression temporary. */ |
525 | |
526 | static tree |
527 | lookup_tmp_var (tree val, bool is_formal) |
528 | { |
529 | tree ret; |
530 | |
531 | /* If not optimizing, never really reuse a temporary. local-alloc |
532 | won't allocate any variable that is used in more than one basic |
533 | block, which means it will go into memory, causing much extra |
534 | work in reload and final and poorer code generation, outweighing |
535 | the extra memory allocation here. */ |
536 | if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val)) |
537 | ret = create_tmp_from_val (val); |
538 | else |
539 | { |
540 | elt_t elt, *elt_p; |
541 | elt_t **slot; |
542 | |
543 | elt.val = val; |
544 | if (!gimplify_ctxp->temp_htab) |
545 | gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000); |
546 | slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT); |
547 | if (*slot == NULL) |
548 | { |
549 | elt_p = XNEW (elt_t); |
550 | elt_p->val = val; |
551 | elt_p->temp = ret = create_tmp_from_val (val); |
552 | *slot = elt_p; |
553 | } |
554 | else |
555 | { |
556 | elt_p = *slot; |
557 | ret = elt_p->temp; |
558 | } |
559 | } |
560 | |
561 | return ret; |
562 | } |
563 | |
564 | /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */ |
565 | |
566 | static tree |
567 | internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p, |
568 | bool is_formal, bool allow_ssa) |
569 | { |
570 | tree t, mod; |
571 | |
572 | /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we |
573 | can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */ |
574 | gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call, |
575 | fb_rvalue); |
576 | |
577 | if (allow_ssa |
578 | && gimplify_ctxp->into_ssa |
579 | && is_gimple_reg_type (TREE_TYPE (val))) |
580 | { |
581 | t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val))); |
582 | if (! gimple_in_ssa_p (cfun)) |
583 | { |
584 | const char *name = get_name (val); |
585 | if (name) |
586 | SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name)); |
587 | } |
588 | } |
589 | else |
590 | t = lookup_tmp_var (val, is_formal); |
591 | |
592 | mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val)); |
593 | |
594 | SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location)); |
595 | |
596 | /* gimplify_modify_expr might want to reduce this further. */ |
597 | gimplify_and_add (mod, pre_p); |
598 | ggc_free (mod); |
599 | |
600 | return t; |
601 | } |
602 | |
603 | /* Return a formal temporary variable initialized with VAL. PRE_P is as |
604 | in gimplify_expr. Only use this function if: |
605 | |
606 | 1) The value of the unfactored expression represented by VAL will not |
607 | change between the initialization and use of the temporary, and |
608 | 2) The temporary will not be otherwise modified. |
609 | |
610 | For instance, #1 means that this is inappropriate for SAVE_EXPR temps, |
611 | and #2 means it is inappropriate for && temps. |
612 | |
613 | For other cases, use get_initialized_tmp_var instead. */ |
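
/* For example (temporary name illustrative), when optimizing, two
   formal-temporary requests for structurally equal expressions
   "a + b" in the same context return the same temporary D.1, each
   emitting its own initialization

     D.1 = a + b;

   whereas at -O0, or if the expression has side effects, each request
   creates a fresh temporary (see lookup_tmp_var above).  */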
614 | |
615 | tree |
616 | get_formal_tmp_var (tree val, gimple_seq *pre_p) |
617 | { |
618 | return internal_get_tmp_var (val, pre_p, NULL, true, true); |
619 | } |
620 | |
621 | /* Return a temporary variable initialized with VAL. PRE_P and POST_P |
622 | are as in gimplify_expr. */ |
623 | |
624 | tree |
625 | get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p, |
626 | bool allow_ssa) |
627 | { |
628 | return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa); |
629 | } |
630 | |
/* Declare all the variables in VARS inside the GIMPLE_BIND GS.  If
   DEBUG_INFO is true, generate debug info for them; otherwise don't.  */
633 | |
634 | void |
635 | declare_vars (tree vars, gimple *gs, bool debug_info) |
636 | { |
637 | tree last = vars; |
638 | if (last) |
639 | { |
640 | tree temps, block; |
641 | |
642 | gbind *scope = as_a <gbind *> (gs); |
643 | |
644 | temps = nreverse (last); |
645 | |
646 | block = gimple_bind_block (scope); |
647 | gcc_assert (!block || TREE_CODE (block) == BLOCK); |
648 | if (!block || !debug_info) |
649 | { |
650 | DECL_CHAIN (last) = gimple_bind_vars (scope); |
651 | gimple_bind_set_vars (scope, temps); |
652 | } |
653 | else |
654 | { |
655 | /* We need to attach the nodes both to the BIND_EXPR and to its |
656 | associated BLOCK for debugging purposes. The key point here |
657 | is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR |
658 | is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */ |
659 | if (BLOCK_VARS (block)) |
660 | BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps); |
661 | else |
662 | { |
663 | gimple_bind_set_vars (scope, |
664 | chainon (gimple_bind_vars (scope), temps)); |
665 | BLOCK_VARS (block) = temps; |
666 | } |
667 | } |
668 | } |
669 | } |
670 | |
671 | /* For VAR a VAR_DECL of variable size, try to find a constant upper bound |
672 | for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if |
673 | no such upper bound can be obtained. */ |
674 | |
675 | static void |
676 | force_constant_size (tree var) |
677 | { |
678 | /* The only attempt we make is by querying the maximum size of objects |
679 | of the variable's type. */ |
680 | |
681 | HOST_WIDE_INT max_size; |
682 | |
683 | gcc_assert (VAR_P (var)); |
684 | |
685 | max_size = max_int_size_in_bytes (TREE_TYPE (var)); |
686 | |
687 | gcc_assert (max_size >= 0); |
688 | |
689 | DECL_SIZE_UNIT (var) |
690 | = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size); |
691 | DECL_SIZE (var) |
692 | = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT); |
693 | } |
694 | |
/* Push the temporary variable TMP into the bindings of function FN.  */
696 | |
697 | void |
698 | gimple_add_tmp_var_fn (struct function *fn, tree tmp) |
699 | { |
700 | gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp)); |
701 | |
702 | /* Later processing assumes that the object size is constant, which might |
703 | not be true at this point. Force the use of a constant upper bound in |
704 | this case. */ |
705 | if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp))) |
706 | force_constant_size (tmp); |
707 | |
708 | DECL_CONTEXT (tmp) = fn->decl; |
709 | DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1; |
710 | |
711 | record_vars_into (tmp, fn->decl); |
712 | } |
713 | |
714 | /* Push the temporary variable TMP into the current binding. */ |
715 | |
716 | void |
717 | gimple_add_tmp_var (tree tmp) |
718 | { |
719 | gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp)); |
720 | |
721 | /* Later processing assumes that the object size is constant, which might |
722 | not be true at this point. Force the use of a constant upper bound in |
723 | this case. */ |
724 | if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp))) |
725 | force_constant_size (tmp); |
726 | |
727 | DECL_CONTEXT (tmp) = current_function_decl; |
728 | DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1; |
729 | |
730 | if (gimplify_ctxp) |
731 | { |
732 | DECL_CHAIN (tmp) = gimplify_ctxp->temps; |
733 | gimplify_ctxp->temps = tmp; |
734 | |
735 | /* Mark temporaries local within the nearest enclosing parallel. */ |
736 | if (gimplify_omp_ctxp) |
737 | { |
738 | struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; |
739 | while (ctx |
740 | && (ctx->region_type == ORT_WORKSHARE |
741 | || ctx->region_type == ORT_SIMD |
742 | || ctx->region_type == ORT_ACC)) |
743 | ctx = ctx->outer_context; |
744 | if (ctx) |
745 | omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN); |
746 | } |
747 | } |
748 | else if (cfun) |
749 | record_vars (tmp); |
750 | else |
751 | { |
752 | gimple_seq body_seq; |
753 | |
754 | /* This case is for nested functions. We need to expose the locals |
755 | they create. */ |
756 | body_seq = gimple_body (current_function_decl); |
757 | declare_vars (tmp, gimple_seq_first_stmt (body_seq), false); |
758 | } |
759 | } |
760 | |
761 | |
762 | |
763 | /* This page contains routines to unshare tree nodes, i.e. to duplicate tree |
764 | nodes that are referenced more than once in GENERIC functions. This is |
765 | necessary because gimplification (translation into GIMPLE) is performed |
   by modifying tree nodes in-place, so gimplification of a shared node in a
767 | first context could generate an invalid GIMPLE form in a second context. |
768 | |
769 | This is achieved with a simple mark/copy/unmark algorithm that walks the |
770 | GENERIC representation top-down, marks nodes with TREE_VISITED the first |
771 | time it encounters them, duplicates them if they already have TREE_VISITED |
772 | set, and finally removes the TREE_VISITED marks it has set. |
773 | |
774 | The algorithm works only at the function level, i.e. it generates a GENERIC |
775 | representation of a function with no nodes shared within the function when |
776 | passed a GENERIC function (except for nodes that are allowed to be shared). |
777 | |
778 | At the global level, it is also necessary to unshare tree nodes that are |
779 | referenced in more than one function, for the same aforementioned reason. |
780 | This requires some cooperation from the front-end. There are 2 strategies: |
781 | |
782 | 1. Manual unsharing. The front-end needs to call unshare_expr on every |
783 | expression that might end up being shared across functions. |
784 | |
785 | 2. Deep unsharing. This is an extension of regular unsharing. Instead |
786 | of calling unshare_expr on expressions that might be shared across |
787 | functions, the front-end pre-marks them with TREE_VISITED. This will |
788 | ensure that they are unshared on the first reference within functions |
789 | when the regular unsharing algorithm runs. The counterpart is that |
790 | this algorithm must look deeper than for manual unsharing, which is |
791 | specified by LANG_HOOKS_DEEP_UNSHARING. |
792 | |
   If there are only a few specific cases of node sharing across functions, it is
794 | probably easier for a front-end to unshare the expressions manually. On the |
795 | contrary, if the expressions generated at the global level are as widespread |
796 | as expressions generated within functions, deep unsharing is very likely the |
797 | way to go. */ |
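
/* As a simplified example, if a front end builds PLUS_EXPR <t, t>
   with both operands pointing to the same node t, the walk leaves the
   first reference alone and marks t with TREE_VISITED; on reaching
   the second reference, copy_if_shared_r sees the mark and replaces
   that reference with an unshared copy, so each gimplification site
   ends up owning its operand.  */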
798 | |
799 | /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes. |
800 | These nodes model computations that must be done once. If we were to |
801 | unshare something like SAVE_EXPR(i++), the gimplification process would |
802 | create wrong code. However, if DATA is non-null, it must hold a pointer |
803 | set that is used to unshare the subtrees of these nodes. */ |
804 | |
805 | static tree |
806 | mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data) |
807 | { |
808 | tree t = *tp; |
809 | enum tree_code code = TREE_CODE (t); |
810 | |
811 | /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but |
812 | copy their subtrees if we can make sure to do it only once. */ |
813 | if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR) |
814 | { |
815 | if (data && !((hash_set<tree> *)data)->add (t)) |
816 | ; |
817 | else |
818 | *walk_subtrees = 0; |
819 | } |
820 | |
  /* Stop at types, decls and constants, like copy_tree_r.  */
822 | else if (TREE_CODE_CLASS (code) == tcc_type |
823 | || TREE_CODE_CLASS (code) == tcc_declaration |
824 | || TREE_CODE_CLASS (code) == tcc_constant) |
825 | *walk_subtrees = 0; |
826 | |
827 | /* Cope with the statement expression extension. */ |
828 | else if (code == STATEMENT_LIST) |
829 | ; |
830 | |
831 | /* Leave the bulk of the work to copy_tree_r itself. */ |
832 | else |
833 | copy_tree_r (tp, walk_subtrees, NULL); |
834 | |
835 | return NULL_TREE; |
836 | } |
837 | |
838 | /* Callback for walk_tree to unshare most of the shared trees rooted at *TP. |
839 | If *TP has been visited already, then *TP is deeply copied by calling |
840 | mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */ |
841 | |
842 | static tree |
843 | copy_if_shared_r (tree *tp, int *walk_subtrees, void *data) |
844 | { |
845 | tree t = *tp; |
846 | enum tree_code code = TREE_CODE (t); |
847 | |
848 | /* Skip types, decls, and constants. But we do want to look at their |
849 | types and the bounds of types. Mark them as visited so we properly |
850 | unmark their subtrees on the unmark pass. If we've already seen them, |
851 | don't look down further. */ |
852 | if (TREE_CODE_CLASS (code) == tcc_type |
853 | || TREE_CODE_CLASS (code) == tcc_declaration |
854 | || TREE_CODE_CLASS (code) == tcc_constant) |
855 | { |
856 | if (TREE_VISITED (t)) |
857 | *walk_subtrees = 0; |
858 | else |
859 | TREE_VISITED (t) = 1; |
860 | } |
861 | |
862 | /* If this node has been visited already, unshare it and don't look |
863 | any deeper. */ |
864 | else if (TREE_VISITED (t)) |
865 | { |
866 | walk_tree (tp, mostly_copy_tree_r, data, NULL); |
867 | *walk_subtrees = 0; |
868 | } |
869 | |
870 | /* Otherwise, mark the node as visited and keep looking. */ |
871 | else |
872 | TREE_VISITED (t) = 1; |
873 | |
874 | return NULL_TREE; |
875 | } |
876 | |
877 | /* Unshare most of the shared trees rooted at *TP. DATA is passed to the |
878 | copy_if_shared_r callback unmodified. */ |
879 | |
880 | static inline void |
881 | copy_if_shared (tree *tp, void *data) |
882 | { |
883 | walk_tree (tp, copy_if_shared_r, data, NULL); |
884 | } |
885 | |
886 | /* Unshare all the trees in the body of FNDECL, as well as in the bodies of |
887 | any nested functions. */ |
888 | |
889 | static void |
890 | unshare_body (tree fndecl) |
891 | { |
892 | struct cgraph_node *cgn = cgraph_node::get (fndecl); |
893 | /* If the language requires deep unsharing, we need a pointer set to make |
894 | sure we don't repeatedly unshare subtrees of unshareable nodes. */ |
895 | hash_set<tree> *visited |
896 | = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL; |
897 | |
898 | copy_if_shared (&DECL_SAVED_TREE (fndecl), visited); |
899 | copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited); |
900 | copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited); |
901 | |
902 | delete visited; |
903 | |
904 | if (cgn) |
905 | for (cgn = cgn->nested; cgn; cgn = cgn->next_nested) |
906 | unshare_body (cgn->decl); |
907 | } |
908 | |
909 | /* Callback for walk_tree to unmark the visited trees rooted at *TP. |
910 | Subtrees are walked until the first unvisited node is encountered. */ |
911 | |
912 | static tree |
913 | unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED) |
914 | { |
915 | tree t = *tp; |
916 | |
917 | /* If this node has been visited, unmark it and keep looking. */ |
918 | if (TREE_VISITED (t)) |
919 | TREE_VISITED (t) = 0; |
920 | |
921 | /* Otherwise, don't look any deeper. */ |
922 | else |
923 | *walk_subtrees = 0; |
924 | |
925 | return NULL_TREE; |
926 | } |
927 | |
928 | /* Unmark the visited trees rooted at *TP. */ |
929 | |
930 | static inline void |
931 | unmark_visited (tree *tp) |
932 | { |
933 | walk_tree (tp, unmark_visited_r, NULL, NULL); |
934 | } |
935 | |
936 | /* Likewise, but mark all trees as not visited. */ |
937 | |
938 | static void |
939 | unvisit_body (tree fndecl) |
940 | { |
941 | struct cgraph_node *cgn = cgraph_node::get (fndecl); |
942 | |
943 | unmark_visited (&DECL_SAVED_TREE (fndecl)); |
944 | unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl))); |
945 | unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl))); |
946 | |
947 | if (cgn) |
948 | for (cgn = cgn->nested; cgn; cgn = cgn->next_nested) |
949 | unvisit_body (cgn->decl); |
950 | } |
951 | |
/* Unconditionally make an unshared copy of EXPR.  This is used on
   stored expressions which span multiple functions, such as
   BINFO_VTABLE, since the normal unsharing process can't tell that
   they're shared.  */
955 | |
956 | tree |
957 | unshare_expr (tree expr) |
958 | { |
959 | walk_tree (&expr, mostly_copy_tree_r, NULL, NULL); |
960 | return expr; |
961 | } |
962 | |
963 | /* Worker for unshare_expr_without_location. */ |
964 | |
965 | static tree |
966 | prune_expr_location (tree *tp, int *walk_subtrees, void *) |
967 | { |
968 | if (EXPR_P (*tp)) |
969 | SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION); |
970 | else |
971 | *walk_subtrees = 0; |
972 | return NULL_TREE; |
973 | } |
974 | |
975 | /* Similar to unshare_expr but also prune all expression locations |
976 | from EXPR. */ |
977 | |
978 | tree |
979 | unshare_expr_without_location (tree expr) |
980 | { |
981 | walk_tree (&expr, mostly_copy_tree_r, NULL, NULL); |
982 | if (EXPR_P (expr)) |
983 | walk_tree (&expr, prune_expr_location, NULL, NULL); |
984 | return expr; |
985 | } |
986 | |
/* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
   one, OR_ELSE otherwise.  The location of a STATEMENT_LIST
   comprising at least one DEBUG_BEGIN_STMT followed by exactly one
   EXPR is taken to be the location of that EXPR.  */
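
/* For example, a STATEMENT_LIST of the form

     DEBUG_BEGIN_STMT; DEBUG_BEGIN_STMT; x = 1;

   has, for rexpr_location's purposes, the location of "x = 1".  */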
991 | |
992 | static location_t |
993 | rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION) |
994 | { |
995 | if (!expr) |
996 | return or_else; |
997 | |
998 | if (EXPR_HAS_LOCATION (expr)) |
999 | return EXPR_LOCATION (expr); |
1000 | |
1001 | if (TREE_CODE (expr) != STATEMENT_LIST) |
1002 | return or_else; |
1003 | |
1004 | tree_stmt_iterator i = tsi_start (expr); |
1005 | |
1006 | bool found = false; |
1007 | while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT) |
1008 | { |
1009 | found = true; |
1010 | tsi_next (&i); |
1011 | } |
1012 | |
1013 | if (!found || !tsi_one_before_end_p (i)) |
1014 | return or_else; |
1015 | |
1016 | return rexpr_location (tsi_stmt (i), or_else); |
1017 | } |
1018 | |
1019 | /* Return TRUE iff EXPR (maybe recursively) has a location; see |
1020 | rexpr_location for the potential recursion. */ |
1021 | |
1022 | static inline bool |
1023 | rexpr_has_location (tree expr) |
1024 | { |
1025 | return rexpr_location (expr) != UNKNOWN_LOCATION; |
1026 | } |
1027 | |
1028 | |
1029 | /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both |
1030 | contain statements and have a value. Assign its value to a temporary |
1031 | and give it void_type_node. Return the temporary, or NULL_TREE if |
1032 | WRAPPER was already void. */ |
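
/* As a sketch (temporary name illustrative), a GNU statement
   expression used as an rvalue, e.g.

     x = ({ int i = f (); i + 1; });

   reaches the gimplifier with a value-yielding BIND_EXPR on the
   right-hand side; voidify_wrapper_expr gives the wrapper void type,
   rewrites its final value expression as "retval.1 = i + 1", and
   returns retval.1 for the caller to use in place of the wrapper.  */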
1033 | |
1034 | tree |
1035 | voidify_wrapper_expr (tree wrapper, tree temp) |
1036 | { |
1037 | tree type = TREE_TYPE (wrapper); |
1038 | if (type && !VOID_TYPE_P (type)) |
1039 | { |
1040 | tree *p; |
1041 | |
1042 | /* Set p to point to the body of the wrapper. Loop until we find |
1043 | something that isn't a wrapper. */ |
1044 | for (p = &wrapper; p && *p; ) |
1045 | { |
1046 | switch (TREE_CODE (*p)) |
1047 | { |
1048 | case BIND_EXPR: |
1049 | TREE_SIDE_EFFECTS (*p) = 1; |
1050 | TREE_TYPE (*p) = void_type_node; |
1051 | /* For a BIND_EXPR, the body is operand 1. */ |
1052 | p = &BIND_EXPR_BODY (*p); |
1053 | break; |
1054 | |
1055 | case CLEANUP_POINT_EXPR: |
1056 | case TRY_FINALLY_EXPR: |
1057 | case TRY_CATCH_EXPR: |
1058 | TREE_SIDE_EFFECTS (*p) = 1; |
1059 | TREE_TYPE (*p) = void_type_node; |
1060 | p = &TREE_OPERAND (*p, 0); |
1061 | break; |
1062 | |
1063 | case STATEMENT_LIST: |
1064 | { |
1065 | tree_stmt_iterator i = tsi_last (*p); |
1066 | TREE_SIDE_EFFECTS (*p) = 1; |
1067 | TREE_TYPE (*p) = void_type_node; |
1068 | p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i); |
1069 | } |
1070 | break; |
1071 | |
1072 | case COMPOUND_EXPR: |
1073 | /* Advance to the last statement. Set all container types to |
1074 | void. */ |
1075 | for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1)) |
1076 | { |
1077 | TREE_SIDE_EFFECTS (*p) = 1; |
1078 | TREE_TYPE (*p) = void_type_node; |
1079 | } |
1080 | break; |
1081 | |
1082 | case TRANSACTION_EXPR: |
1083 | TREE_SIDE_EFFECTS (*p) = 1; |
1084 | TREE_TYPE (*p) = void_type_node; |
1085 | p = &TRANSACTION_EXPR_BODY (*p); |
1086 | break; |
1087 | |
1088 | default: |
1089 | /* Assume that any tree upon which voidify_wrapper_expr is |
1090 | directly called is a wrapper, and that its body is op0. */ |
1091 | if (p == &wrapper) |
1092 | { |
1093 | TREE_SIDE_EFFECTS (*p) = 1; |
1094 | TREE_TYPE (*p) = void_type_node; |
1095 | p = &TREE_OPERAND (*p, 0); |
1096 | break; |
1097 | } |
1098 | goto out; |
1099 | } |
1100 | } |
1101 | |
1102 | out: |
1103 | if (p == NULL || IS_EMPTY_STMT (*p)) |
1104 | temp = NULL_TREE; |
1105 | else if (temp) |
1106 | { |
1107 | /* The wrapper is on the RHS of an assignment that we're pushing |
1108 | down. */ |
1109 | gcc_assert (TREE_CODE (temp) == INIT_EXPR |
1110 | || TREE_CODE (temp) == MODIFY_EXPR); |
1111 | TREE_OPERAND (temp, 1) = *p; |
1112 | *p = temp; |
1113 | } |
1114 | else |
1115 | { |
	  temp = create_tmp_var (type, "retval");
1117 | *p = build2 (INIT_EXPR, type, temp, *p); |
1118 | } |
1119 | |
1120 | return temp; |
1121 | } |
1122 | |
1123 | return NULL_TREE; |
1124 | } |
1125 | |
1126 | /* Prepare calls to builtins to SAVE and RESTORE the stack as well as |
1127 | a temporary through which they communicate. */ |
1128 | |
1129 | static void |
1130 | build_stack_save_restore (gcall **save, gcall **restore) |
1131 | { |
1132 | tree tmp_var; |
1133 | |
1134 | *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0); |
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1136 | gimple_call_set_lhs (*save, tmp_var); |
1137 | |
1138 | *restore |
1139 | = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE), |
1140 | 1, tmp_var); |
1141 | } |
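
/* The resulting GIMPLE looks roughly like (temporary name
   illustrative):

     saved_stack.1 = __builtin_stack_save ();
     ... code that allocates variable-sized stack objects ...
     __builtin_stack_restore (saved_stack.1);  */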
1142 | |
/* Generate an IFN_ASAN_MARK call that poisons the shadow memory of
   variable DECL.  */
1144 | |
1145 | static tree |
1146 | build_asan_poison_call_expr (tree decl) |
1147 | { |
1148 | /* Do not poison variables that have size equal to zero. */ |
1149 | tree unit_size = DECL_SIZE_UNIT (decl); |
1150 | if (zerop (unit_size)) |
1151 | return NULL_TREE; |
1152 | |
1153 | tree base = build_fold_addr_expr (decl); |
1154 | |
1155 | return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK, |
1156 | void_type_node, 3, |
1157 | build_int_cst (integer_type_node, |
1158 | ASAN_MARK_POISON), |
1159 | base, unit_size); |
1160 | } |
1161 | |
/* Generate an IFN_ASAN_MARK call that poisons or unpoisons, depending
   on the POISON flag, the shadow memory of variable DECL.  The call is
   inserted at the position identified by iterator IT; the BEFORE flag
   selects whether it goes before or after that position.  */
1166 | |
1167 | static void |
1168 | asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it, |
1169 | bool before) |
1170 | { |
1171 | /* When within an OMP context, do not emit ASAN_MARK internal fns. */ |
1172 | if (gimplify_omp_ctxp) |
1173 | return; |
1174 | |
1175 | tree unit_size = DECL_SIZE_UNIT (decl); |
1176 | tree base = build_fold_addr_expr (decl); |
1177 | |
1178 | /* Do not poison variables that have size equal to zero. */ |
1179 | if (zerop (unit_size)) |
1180 | return; |
1181 | |
  /* All stack variables must be aligned to at least the ASAN shadow
     granularity.  */
1184 | if (DECL_ALIGN_UNIT (decl) <= ASAN_SHADOW_GRANULARITY) |
1185 | SET_DECL_ALIGN (decl, BITS_PER_UNIT * ASAN_SHADOW_GRANULARITY); |
1186 | |
1187 | HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON; |
1188 | |
1189 | gimple *g |
1190 | = gimple_build_call_internal (IFN_ASAN_MARK, 3, |
1191 | build_int_cst (integer_type_node, flags), |
1192 | base, unit_size); |
1193 | |
1194 | if (before) |
1195 | gsi_insert_before (it, g, GSI_NEW_STMT); |
1196 | else |
1197 | gsi_insert_after (it, g, GSI_NEW_STMT); |
1198 | } |
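
/* In GIMPLE dumps the call emitted above appears roughly as

     .ASAN_MARK (POISON, &x, 8);

   where the variable and size are illustrative, and UNPOISON is used
   when the POISON argument is false.  */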
1199 | |
/* Generate an IFN_ASAN_MARK internal call that, depending on the
   POISON flag, either poisons or unpoisons DECL.  The created
   statement is appended to the SEQ_P gimple sequence.  */
1203 | |
1204 | static void |
1205 | asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p) |
1206 | { |
1207 | gimple_stmt_iterator it = gsi_last (*seq_p); |
1208 | bool before = false; |
1209 | |
1210 | if (gsi_end_p (it)) |
1211 | before = true; |
1212 | |
1213 | asan_poison_variable (decl, poison, &it, before); |
1214 | } |
1215 | |
/* qsort comparison routine: order VAR_DECLs A and B by DECL_UID.  */
1217 | |
1218 | static int |
1219 | sort_by_decl_uid (const void *a, const void *b) |
1220 | { |
1221 | const tree *t1 = (const tree *)a; |
1222 | const tree *t2 = (const tree *)b; |
1223 | |
1224 | int uid1 = DECL_UID (*t1); |
1225 | int uid2 = DECL_UID (*t2); |
1226 | |
1227 | if (uid1 < uid2) |
1228 | return -1; |
1229 | else if (uid1 > uid2) |
1230 | return 1; |
1231 | else |
1232 | return 0; |
1233 | } |
1234 | |
/* Generate an IFN_ASAN_MARK internal call for every variable in
   VARIABLES, poisoning or unpoisoning according to the POISON flag.
   The created statements are appended to the SEQ_P gimple sequence.  */
1238 | |
1239 | static void |
1240 | asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p) |
1241 | { |
1242 | unsigned c = variables->elements (); |
1243 | if (c == 0) |
1244 | return; |
1245 | |
1246 | auto_vec<tree> sorted_variables (c); |
1247 | |
1248 | for (hash_set<tree>::iterator it = variables->begin (); |
1249 | it != variables->end (); ++it) |
1250 | sorted_variables.safe_push (*it); |
1251 | |
1252 | sorted_variables.qsort (sort_by_decl_uid); |
1253 | |
1254 | unsigned i; |
1255 | tree var; |
1256 | FOR_EACH_VEC_ELT (sorted_variables, i, var) |
1257 | { |
1258 | asan_poison_variable (var, poison, seq_p); |
1259 | |
      /* Add the use_after_scope_memory attribute to the variable in
	 order to prevent it from being rewritten into SSA form.  */
1262 | if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE, |
1263 | DECL_ATTRIBUTES (var))) |
1264 | DECL_ATTRIBUTES (var) |
1265 | = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE), |
1266 | integer_one_node, |
1267 | DECL_ATTRIBUTES (var)); |
1268 | } |
1269 | } |
1270 | |
1271 | /* Gimplify a BIND_EXPR. Just voidify and recurse. */ |
1272 | |
1273 | static enum gimplify_status |
1274 | gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p) |
1275 | { |
1276 | tree bind_expr = *expr_p; |
1277 | bool old_keep_stack = gimplify_ctxp->keep_stack; |
1278 | bool old_save_stack = gimplify_ctxp->save_stack; |
1279 | tree t; |
1280 | gbind *bind_stmt; |
1281 | gimple_seq body, cleanup; |
1282 | gcall *stack_save; |
1283 | location_t start_locus = 0, end_locus = 0; |
1284 | tree ret_clauses = NULL; |
1285 | |
1286 | tree temp = voidify_wrapper_expr (bind_expr, NULL); |
1287 | |
1288 | /* Mark variables seen in this bind expr. */ |
1289 | for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t)) |
1290 | { |
1291 | if (VAR_P (t)) |
1292 | { |
1293 | struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; |
1294 | |
1295 | /* Mark variable as local. */ |
1296 | if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t) |
1297 | && (! DECL_SEEN_IN_BIND_EXPR_P (t) |
1298 | || splay_tree_lookup (ctx->variables, |
1299 | (splay_tree_key) t) == NULL)) |
1300 | { |
1301 | if (ctx->region_type == ORT_SIMD |
1302 | && TREE_ADDRESSABLE (t) |
1303 | && !TREE_STATIC (t)) |
1304 | omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN); |
1305 | else |
1306 | omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN); |
1307 | } |
1308 | |
1309 | DECL_SEEN_IN_BIND_EXPR_P (t) = 1; |
1310 | |
1311 | if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun) |
1312 | cfun->has_local_explicit_reg_vars = true; |
1313 | } |
1314 | |
1315 | /* Preliminarily mark non-addressed complex variables as eligible |
1316 | for promotion to gimple registers. We'll transform their uses |
1317 | as we find them. */ |
1318 | if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE |
1319 | || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE) |
1320 | && !TREE_THIS_VOLATILE (t) |
1321 | && (VAR_P (t) && !DECL_HARD_REGISTER (t)) |
1322 | && !needs_to_live_in_memory (t)) |
1323 | DECL_GIMPLE_REG_P (t) = 1; |
1324 | } |
1325 | |
1326 | bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL, |
1327 | BIND_EXPR_BLOCK (bind_expr)); |
1328 | gimple_push_bind_expr (bind_stmt); |
1329 | |
1330 | gimplify_ctxp->keep_stack = false; |
1331 | gimplify_ctxp->save_stack = false; |
1332 | |
1333 | /* Gimplify the body into the GIMPLE_BIND tuple's body. */ |
1334 | body = NULL; |
1335 | gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body); |
1336 | gimple_bind_set_body (bind_stmt, body); |
1337 | |
1338 | /* Source location wise, the cleanup code (stack_restore and clobbers) |
1339 | belongs to the end of the block, so propagate what we have. The |
1340 | stack_save operation belongs to the beginning of block, which we can |
1341 | infer from the bind_expr directly if the block has no explicit |
1342 | assignment. */ |
1343 | if (BIND_EXPR_BLOCK (bind_expr)) |
1344 | { |
1345 | end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr)); |
1346 | start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr)); |
1347 | } |
1348 | if (start_locus == 0) |
1349 | start_locus = EXPR_LOCATION (bind_expr); |
1350 | |
1351 | cleanup = NULL; |
1352 | stack_save = NULL; |
1353 | |
1354 | /* If the code both contains VLAs and calls alloca, then we cannot reclaim |
1355 | the stack space allocated to the VLAs. */ |
1356 | if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack) |
1357 | { |
1358 | gcall *stack_restore; |
1359 | |
1360 | /* Save stack on entry and restore it on exit. Add a try_finally |
1361 | block to achieve this. */ |
1362 | build_stack_save_restore (&stack_save, &stack_restore); |
1363 | |
1364 | gimple_set_location (stack_save, start_locus); |
1365 | gimple_set_location (stack_restore, end_locus); |
1366 | |
1367 | gimplify_seq_add_stmt (&cleanup, stack_restore); |
1368 | } |
1369 | |
1370 | /* Add clobbers for all variables that go out of scope. */ |
1371 | for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t)) |
1372 | { |
1373 | if (VAR_P (t) |
1374 | && !is_global_var (t) |
1375 | && DECL_CONTEXT (t) == current_function_decl) |
1376 | { |
1377 | if (!DECL_HARD_REGISTER (t) |
1378 | && !TREE_THIS_VOLATILE (t) |
1379 | && !DECL_HAS_VALUE_EXPR_P (t) |
1380 | /* Only care for variables that have to be in memory. Others |
1381 | will be rewritten into SSA names, hence moved to the |
1382 | top-level. */ |
1383 | && !is_gimple_reg (t) |
1384 | && flag_stack_reuse != SR_NONE) |
1385 | { |
1386 | tree clobber = build_constructor (TREE_TYPE (t), NULL); |
1387 | gimple *clobber_stmt; |
1388 | TREE_THIS_VOLATILE (clobber) = 1; |
1389 | clobber_stmt = gimple_build_assign (t, clobber); |
1390 | gimple_set_location (clobber_stmt, end_locus); |
1391 | gimplify_seq_add_stmt (&cleanup, clobber_stmt); |
1392 | } |
1393 | |
1394 | if (flag_openacc && oacc_declare_returns != NULL) |
1395 | { |
1396 | tree *c = oacc_declare_returns->get (t); |
1397 | if (c != NULL) |
1398 | { |
1399 | if (ret_clauses) |
1400 | OMP_CLAUSE_CHAIN (*c) = ret_clauses; |
1401 | |
1402 | ret_clauses = *c; |
1403 | |
1404 | oacc_declare_returns->remove (t); |
1405 | |
1406 | if (oacc_declare_returns->elements () == 0) |
1407 | { |
1408 | delete oacc_declare_returns; |
1409 | oacc_declare_returns = NULL; |
1410 | } |
1411 | } |
1412 | } |
1413 | } |
1414 | |
1415 | if (asan_poisoned_variables != NULL |
1416 | && asan_poisoned_variables->contains (t)) |
1417 | { |
1418 | asan_poisoned_variables->remove (t); |
1419 | asan_poison_variable (t, true, &cleanup); |
1420 | } |
1421 | |
1422 | if (gimplify_ctxp->live_switch_vars != NULL |
1423 | && gimplify_ctxp->live_switch_vars->contains (t)) |
1424 | gimplify_ctxp->live_switch_vars->remove (t); |
1425 | } |
1426 | |
1427 | if (ret_clauses) |
1428 | { |
1429 | gomp_target *stmt; |
1430 | gimple_stmt_iterator si = gsi_start (cleanup); |
1431 | |
1432 | stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE, |
1433 | ret_clauses); |
1434 | gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT); |
1435 | } |
1436 | |
1437 | if (cleanup) |
1438 | { |
1439 | gtry *gs; |
1440 | gimple_seq new_body; |
1441 | |
1442 | new_body = NULL; |
1443 | gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup, |
1444 | GIMPLE_TRY_FINALLY); |
1445 | |
1446 | if (stack_save) |
1447 | gimplify_seq_add_stmt (&new_body, stack_save); |
1448 | gimplify_seq_add_stmt (&new_body, gs); |
1449 | gimple_bind_set_body (bind_stmt, new_body); |
1450 | } |
1451 | |
1452 | /* keep_stack propagates all the way up to the outermost BIND_EXPR. */ |
1453 | if (!gimplify_ctxp->keep_stack) |
1454 | gimplify_ctxp->keep_stack = old_keep_stack; |
1455 | gimplify_ctxp->save_stack = old_save_stack; |
1456 | |
1457 | gimple_pop_bind_expr (); |
1458 | |
1459 | gimplify_seq_add_stmt (pre_p, bind_stmt); |
1460 | |
1461 | if (temp) |
1462 | { |
1463 | *expr_p = temp; |
1464 | return GS_OK; |
1465 | } |
1466 | |
1467 | *expr_p = NULL_TREE; |
1468 | return GS_ALL_DONE; |
1469 | } |
1470 | |
/* Maybe add an early-return PREDICT statement to the PRE_P sequence.  */
1472 | |
1473 | static void |
1474 | maybe_add_early_return_predict_stmt (gimple_seq *pre_p) |
1475 | { |
  /* If we are within a conditional context, add a PREDICT statement
     predicting the early return as not taken.  */
1477 | if (gimple_conditional_context ()) |
1478 | { |
1479 | gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN, |
1480 | NOT_TAKEN); |
1481 | gimplify_seq_add_stmt (pre_p, predict); |
1482 | } |
1483 | } |
1484 | |
1485 | /* Gimplify a RETURN_EXPR. If the expression to be returned is not a |
1486 | GIMPLE value, it is assigned to a new temporary and the statement is |
1487 | re-written to return the temporary. |
1488 | |
1489 | PRE_P points to the sequence where side effects that must happen before |
1490 | STMT should be stored. */ |
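
/* For example (temporary name illustrative), in a function returning
   int by value, "return a + b;" gimplifies roughly to:

     D.1234 = a + b;
     return D.1234;

   with the assignment emitted into PRE_P.  */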
1491 | |
1492 | static enum gimplify_status |
1493 | gimplify_return_expr (tree stmt, gimple_seq *pre_p) |
1494 | { |
1495 | greturn *ret; |
1496 | tree ret_expr = TREE_OPERAND (stmt, 0); |
1497 | tree result_decl, result; |
1498 | |
1499 | if (ret_expr == error_mark_node) |
1500 | return GS_ERROR; |
1501 | |
1502 | if (!ret_expr |
1503 | || TREE_CODE (ret_expr) == RESULT_DECL |
1504 | || ret_expr == error_mark_node) |
1505 | { |
1506 | maybe_add_early_return_predict_stmt (pre_p); |
1507 | greturn *ret = gimple_build_return (ret_expr); |
1508 | gimple_set_no_warning (ret, TREE_NO_WARNING (stmt)); |
1509 | gimplify_seq_add_stmt (pre_p, ret); |
1510 | return GS_ALL_DONE; |
1511 | } |
1512 | |
1513 | if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))) |
1514 | result_decl = NULL_TREE; |
1515 | else |
1516 | { |
1517 | result_decl = TREE_OPERAND (ret_expr, 0); |
1518 | |
1519 | /* See through a return by reference. */ |
1520 | if (TREE_CODE (result_decl) == INDIRECT_REF) |
1521 | result_decl = TREE_OPERAND (result_decl, 0); |
1522 | |
1523 | gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR |
1524 | || TREE_CODE (ret_expr) == INIT_EXPR) |
1525 | && TREE_CODE (result_decl) == RESULT_DECL); |
1526 | } |
1527 | |
1528 | /* If aggregate_value_p is true, then we can return the bare RESULT_DECL. |
1529 | Recall that aggregate_value_p is FALSE for any aggregate type that is |
1530 | returned in registers. If we're returning values in registers, then |
1531 | we don't want to extend the lifetime of the RESULT_DECL, particularly |
1532 | across another call. In addition, for those aggregates for which |
1533 | hard_function_value generates a PARALLEL, we'll die during normal |
1534 | expansion of structure assignments; there's special code in expand_return |
1535 | to handle this case that does not exist in expand_expr. */ |
1536 | if (!result_decl) |
1537 | result = NULL_TREE; |
1538 | else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl))) |
1539 | { |
1540 | if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST) |
1541 | { |
1542 | if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl))) |
1543 | gimplify_type_sizes (TREE_TYPE (result_decl), pre_p); |
1544 | /* Note that we don't use gimplify_vla_decl because the RESULT_DECL |
1545 | should be effectively allocated by the caller, i.e. all calls to |
1546 | this function must be subject to the Return Slot Optimization. */ |
1547 | gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p); |
1548 | gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p); |
1549 | } |
1550 | result = result_decl; |
1551 | } |
1552 | else if (gimplify_ctxp->return_temp) |
1553 | result = gimplify_ctxp->return_temp; |
1554 | else |
1555 | { |
1556 | result = create_tmp_reg (TREE_TYPE (result_decl)); |
1557 | |
1558 | /* ??? With complex control flow (usually involving abnormal edges), |
1559 | we can wind up warning about an uninitialized value for this. Due |
1560 | to how this variable is constructed and initialized, this is never |
1561 | true. Give up and never warn. */ |
1562 | TREE_NO_WARNING (result) = 1; |
1563 | |
1564 | gimplify_ctxp->return_temp = result; |
1565 | } |
1566 | |
1567 | /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use. |
1568 | Then gimplify the whole thing. */ |
1569 | if (result != result_decl) |
1570 | TREE_OPERAND (ret_expr, 0) = result; |
1571 | |
1572 | gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p); |
1573 | |
1574 | maybe_add_early_return_predict_stmt (pre_p); |
1575 | ret = gimple_build_return (result); |
1576 | gimple_set_no_warning (ret, TREE_NO_WARNING (stmt)); |
1577 | gimplify_seq_add_stmt (pre_p, ret); |
1578 | |
1579 | return GS_ALL_DONE; |
1580 | } |
1581 | |
1582 | /* Gimplify a variable-length array DECL. */ |
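
/* As a sketch (names and the exact builtin variant are illustrative),
   for "int a[n];" this arranges roughly

     a.1 = __builtin_alloca_with_align (D.size, align);

   and sets DECL_VALUE_EXPR (a) to *a.1, so all later references to
   the array are rewritten as indirections through the pointer
   temporary.  */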
1583 | |
1584 | static void |
1585 | gimplify_vla_decl (tree decl, gimple_seq *seq_p) |
1586 | { |
1587 | /* This is a variable-sized decl. Simplify its size and mark it |
1588 | for deferred expansion. */ |
1589 | tree t, addr, ptr_type; |
1590 | |
1591 | gimplify_one_sizepos (&DECL_SIZE (decl), seq_p); |
1592 | gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p); |
1593 | |
1594 | /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */ |
1595 | if (DECL_HAS_VALUE_EXPR_P (decl)) |
1596 | return; |
1597 | |
1598 | /* All occurrences of this decl in final gimplified code will be |
1599 | replaced by indirection. Setting DECL_VALUE_EXPR does two |
1600 | things: First, it lets the rest of the gimplifier know what |
1601 | replacement to use. Second, it lets the debug info know |
1602 | where to find the value. */ |
1603 | ptr_type = build_pointer_type (TREE_TYPE (decl)); |
1604 | addr = create_tmp_var (ptr_type, get_name (decl)); |
1605 | DECL_IGNORED_P (addr) = 0; |
1606 | t = build_fold_indirect_ref (addr); |
1607 | TREE_THIS_NOTRAP (t) = 1; |
1608 | SET_DECL_VALUE_EXPR (decl, t); |
1609 | DECL_HAS_VALUE_EXPR_P (decl) = 1; |
1610 | |
1611 | t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl), |
1612 | max_int_size_in_bytes (TREE_TYPE (decl))); |
1613 | /* The call has been built for a variable-sized object. */ |
1614 | CALL_ALLOCA_FOR_VAR_P (t) = 1; |
1615 | t = fold_convert (ptr_type, t); |
1616 | t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t); |
1617 | |
1618 | gimplify_and_add (t, seq_p); |
1619 | } |
1620 | |
1621 | /* A helper function to be called via walk_tree. Mark all labels under *TP |
1622 | as being forced. To be called for DECL_INITIAL of static variables. */ |
1623 | |
1624 | static tree |
1625 | force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED) |
1626 | { |
1627 | if (TYPE_P (*tp)) |
1628 | *walk_subtrees = 0; |
1629 | if (TREE_CODE (*tp) == LABEL_DECL) |
1630 | { |
1631 | FORCED_LABEL (*tp) = 1; |
1632 | cfun->has_forced_label_in_static = 1; |
1633 | } |
1634 | |
1635 | return NULL_TREE; |
1636 | } |
1637 | |
1638 | /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation |
1639 | and initialization explicit. */ |
1640 | |
1641 | static enum gimplify_status |
1642 | gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p) |
1643 | { |
1644 | tree stmt = *stmt_p; |
1645 | tree decl = DECL_EXPR_DECL (stmt); |
1646 | |
1647 | *stmt_p = NULL_TREE; |
1648 | |
1649 | if (TREE_TYPE (decl) == error_mark_node) |
1650 | return GS_ERROR; |
1651 | |
1652 | if ((TREE_CODE (decl) == TYPE_DECL |
1653 | || VAR_P (decl)) |
1654 | && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl))) |
1655 | { |
1656 | gimplify_type_sizes (TREE_TYPE (decl), seq_p); |
1657 | if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE) |
1658 | gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p); |
1659 | } |
1660 | |
1661 | /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified |
1662 | in case its size expressions contain problematic nodes like CALL_EXPR. */ |
1663 | if (TREE_CODE (decl) == TYPE_DECL |
1664 | && DECL_ORIGINAL_TYPE (decl) |
1665 | && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl))) |
1666 | { |
1667 | gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p); |
1668 | if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE) |
1669 | gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p); |
1670 | } |
1671 | |
1672 | if (VAR_P (decl) && !DECL_EXTERNAL (decl)) |
1673 | { |
1674 | tree init = DECL_INITIAL (decl); |
1675 | bool is_vla = false; |
1676 | |
1677 | if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST |
1678 | || (!TREE_STATIC (decl) |
1679 | && flag_stack_check == GENERIC_STACK_CHECK |
1680 | && compare_tree_int (DECL_SIZE_UNIT (decl), |
1681 | STACK_CHECK_MAX_VAR_SIZE) > 0)) |
1682 | { |
1683 | gimplify_vla_decl (decl, seq_p); |
1684 | is_vla = true; |
1685 | } |
1686 | |
1687 | if (asan_poisoned_variables |
1688 | && !is_vla |
1689 | && TREE_ADDRESSABLE (decl) |
1690 | && !TREE_STATIC (decl) |
1691 | && !DECL_HAS_VALUE_EXPR_P (decl) |
1692 | && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT |
1693 | && dbg_cnt (asan_use_after_scope)) |
1694 | { |
1695 | asan_poisoned_variables->add (decl); |
1696 | asan_poison_variable (decl, false, seq_p); |
1697 | if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars) |
1698 | gimplify_ctxp->live_switch_vars->add (decl); |
1699 | } |
1700 | |
1701 | /* Some front ends do not explicitly declare all anonymous |
1702 | artificial variables. We compensate here by declaring the |
1703 | variables, though it would be better if the front ends would |
1704 | explicitly declare them. */ |
1705 | if (!DECL_SEEN_IN_BIND_EXPR_P (decl) |
1706 | && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE) |
1707 | gimple_add_tmp_var (decl); |
1708 | |
1709 | if (init && init != error_mark_node) |
1710 | { |
1711 | if (!TREE_STATIC (decl)) |
1712 | { |
1713 | DECL_INITIAL (decl) = NULL_TREE; |
1714 | init = build2 (INIT_EXPR, void_type_node, decl, init); |
1715 | gimplify_and_add (init, seq_p); |
1716 | ggc_free (init); |
1717 | } |
1718 | else |
1719 | /* We must still examine initializers for static variables |
1720 | as they may contain a label address. */ |
1721 | walk_tree (&init, force_labels_r, NULL, NULL); |
1722 | } |
1723 | } |
1724 | |
1725 | return GS_ALL_DONE; |
1726 | } |
1727 | |
1728 | /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body |
1729 | and replacing the LOOP_EXPR with goto, but if the loop contains an |
1730 | EXIT_EXPR, we need to append a label for it to jump to. */ |
1731 | |
1732 | static enum gimplify_status |
1733 | gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p) |
1734 | { |
1735 | tree saved_label = gimplify_ctxp->exit_label; |
1736 | tree start_label = create_artificial_label (UNKNOWN_LOCATION); |
1737 | |
1738 | gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label)); |
1739 | |
1740 | gimplify_ctxp->exit_label = NULL_TREE; |
1741 | |
1742 | gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p); |
1743 | |
1744 | gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label)); |
1745 | |
1746 | if (gimplify_ctxp->exit_label) |
1747 | gimplify_seq_add_stmt (pre_p, |
1748 | gimple_build_label (gimplify_ctxp->exit_label)); |
1749 | |
1750 | gimplify_ctxp->exit_label = saved_label; |
1751 | |
1752 | *expr_p = NULL; |
1753 | return GS_ALL_DONE; |
1754 | } |
1755 | |
1756 | /* Gimplify a statement list onto a sequence. These may be created either |
1757 | by an enlightened front-end, or by shortcut_cond_expr. */ |
1758 | |
1759 | static enum gimplify_status |
1760 | gimplify_statement_list (tree *expr_p, gimple_seq *pre_p) |
1761 | { |
1762 | tree temp = voidify_wrapper_expr (*expr_p, NULL); |
1763 | |
1764 | tree_stmt_iterator i = tsi_start (*expr_p); |
1765 | |
1766 | while (!tsi_end_p (i)) |
1767 | { |
1768 | gimplify_stmt (tsi_stmt_ptr (i), pre_p); |
1769 | tsi_delink (&i); |
1770 | } |
1771 | |
1772 | if (temp) |
1773 | { |
1774 | *expr_p = temp; |
1775 | return GS_OK; |
1776 | } |
1777 | |
1778 | return GS_ALL_DONE; |
1779 | } |
1780 | |
1781 | /* Callback for walk_gimple_seq. */ |
1782 | |
1783 | static tree |
1784 | warn_switch_unreachable_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p, |
1785 | struct walk_stmt_info *wi) |
1786 | { |
1787 | gimple *stmt = gsi_stmt (*gsi_p); |
1788 | |
1789 | *handled_ops_p = true; |
1790 | switch (gimple_code (stmt)) |
1791 | { |
1792 | case GIMPLE_TRY: |
1793 | /* A compiler-generated cleanup or a user-written try block. |
1794 | If it's empty, don't dive into it--that would result in |
1795 | worse location info. */ |
1796 | if (gimple_try_eval (stmt) == NULL) |
1797 | { |
1798 | wi->info = stmt; |
1799 | return integer_zero_node; |
1800 | } |
1801 | /* Fall through. */ |
1802 | case GIMPLE_BIND: |
1803 | case GIMPLE_CATCH: |
1804 | case GIMPLE_EH_FILTER: |
1805 | case GIMPLE_TRANSACTION: |
1806 | /* Walk the sub-statements. */ |
1807 | *handled_ops_p = false; |
1808 | break; |
1809 | |
1810 | case GIMPLE_DEBUG: |
1811 | /* Ignore these. We may generate them before declarations that |
1812 | are never executed. If there's something to warn about, |
1813 | there will be non-debug stmts too, and we'll catch those. */ |
1814 | break; |
1815 | |
1816 | case GIMPLE_CALL: |
1817 | if (gimple_call_internal_p (stmt, IFN_ASAN_MARK)) |
1818 | { |
1819 | *handled_ops_p = false; |
1820 | break; |
1821 | } |
1822 | /* Fall through. */ |
1823 | default: |
1824 | /* Save the first "real" statement (not a decl/lexical scope/...). */ |
1825 | wi->info = stmt; |
1826 | return integer_zero_node; |
1827 | } |
1828 | return NULL_TREE; |
1829 | } |
1830 | |
1831 | /* Possibly warn about unreachable statements between switch's controlling |
1832 | expression and the first case. SEQ is the body of a switch expression. */ |
1833 | |
1834 | static void |
1835 | maybe_warn_switch_unreachable (gimple_seq seq) |
1836 | { |
1837 | if (!warn_switch_unreachable |
1838 | /* This warning doesn't play well with Fortran when optimizations |
1839 | are on. */ |
1840 | || lang_GNU_Fortran () |
1841 | || seq == NULL) |
1842 | return; |
1843 | |
1844 | struct walk_stmt_info wi; |
1845 | memset (&wi, 0, sizeof (wi)); |
1846 | walk_gimple_seq (seq, warn_switch_unreachable_r, NULL, &wi); |
1847 | gimple *stmt = (gimple *) wi.info; |
1848 | |
1849 | if (stmt && gimple_code (stmt) != GIMPLE_LABEL) |
1850 | { |
1851 | if (gimple_code (stmt) == GIMPLE_GOTO |
1852 | && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL |
1853 | && DECL_ARTIFICIAL (gimple_goto_dest (stmt))) |
1854 | /* Don't warn for compiler-generated gotos. These occur |
1855 | in Duff's devices, for example. */; |
1856 | else |
1857 | warning_at (gimple_location (stmt), OPT_Wswitch_unreachable, |
1858 | "statement will never be executed" ); |
1859 | } |
1860 | } |
1861 | |
1862 | |
1863 | /* A label entry that pairs label and a location. */ |
1864 | struct label_entry |
1865 | { |
1866 | tree label; |
1867 | location_t loc; |
1868 | }; |
1869 | |
1870 | /* Find LABEL in vector of label entries VEC. */ |
1871 | |
1872 | static struct label_entry * |
1873 | find_label_entry (const auto_vec<struct label_entry> *vec, tree label) |
1874 | { |
1875 | unsigned int i; |
1876 | struct label_entry *l; |
1877 | |
1878 | FOR_EACH_VEC_ELT (*vec, i, l) |
1879 | if (l->label == label) |
1880 | return l; |
1881 | return NULL; |
1882 | } |
1883 | |
1884 | /* Return true if LABEL, a LABEL_DECL, represents a case label |
1885 | in a vector of labels CASES. */ |
1886 | |
1887 | static bool |
1888 | case_label_p (const vec<tree> *cases, tree label) |
1889 | { |
1890 | unsigned int i; |
1891 | tree l; |
1892 | |
1893 | FOR_EACH_VEC_ELT (*cases, i, l) |
1894 | if (CASE_LABEL (l) == label) |
1895 | return true; |
1896 | return false; |
1897 | } |
1898 | |
1899 | /* Find the last nondebug statement in a scope STMT. */ |
1900 | |
1901 | static gimple * |
1902 | last_stmt_in_scope (gimple *stmt) |
1903 | { |
1904 | if (!stmt) |
1905 | return NULL; |
1906 | |
1907 | switch (gimple_code (stmt)) |
1908 | { |
1909 | case GIMPLE_BIND: |
1910 | { |
1911 | gbind *bind = as_a <gbind *> (stmt); |
1912 | stmt = gimple_seq_last_nondebug_stmt (gimple_bind_body (bind)); |
1913 | return last_stmt_in_scope (stmt); |
1914 | } |
1915 | |
1916 | case GIMPLE_TRY: |
1917 | { |
1918 | gtry *try_stmt = as_a <gtry *> (stmt); |
1919 | stmt = gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt)); |
1920 | gimple *last_eval = last_stmt_in_scope (stmt); |
1921 | if (gimple_stmt_may_fallthru (last_eval) |
1922 | && (last_eval == NULL |
1923 | || !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH)) |
1924 | && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY) |
1925 | { |
1926 | stmt = gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt)); |
1927 | return last_stmt_in_scope (stmt); |
1928 | } |
1929 | else |
1930 | return last_eval; |
1931 | } |
1932 | |
1933 | case GIMPLE_DEBUG: |
1934 | gcc_unreachable (); |
1935 | |
1936 | default: |
1937 | return stmt; |
1938 | } |
1939 | } |
1940 | |
1941 | /* Collect interesting labels in LABELS and return the statement preceding |
1942 | another case label, or a user-defined label. */ |
1943 | |
1944 | static gimple * |
1945 | collect_fallthrough_labels (gimple_stmt_iterator *gsi_p, |
1946 | auto_vec <struct label_entry> *labels) |
1947 | { |
1948 | gimple *prev = NULL; |
1949 | |
1950 | do |
1951 | { |
1952 | if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND) |
1953 | { |
1954 | /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr, |
1955 | which starts on a GIMPLE_SWITCH and ends with a break label. |
1956 | Handle that as a single statement that can fall through. */ |
1957 | gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p)); |
1958 | gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind)); |
1959 | gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind)); |
1960 | if (last |
1961 | && gimple_code (first) == GIMPLE_SWITCH |
1962 | && gimple_code (last) == GIMPLE_LABEL) |
1963 | { |
1964 | tree label = gimple_label_label (as_a <glabel *> (last)); |
1965 | if (SWITCH_BREAK_LABEL_P (label)) |
1966 | { |
1967 | prev = bind; |
1968 | gsi_next (gsi_p); |
1969 | continue; |
1970 | } |
1971 | } |
1972 | } |
1973 | if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND |
1974 | || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY) |
1975 | { |
1976 | /* Nested scope. Only look at the last statement of |
1977 | the innermost scope. */ |
1978 | location_t bind_loc = gimple_location (gsi_stmt (*gsi_p)); |
1979 | gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p)); |
1980 | if (last) |
1981 | { |
1982 | prev = last; |
1983 | /* It might be a label without a location. Use the |
1984 | location of the scope then. */ |
1985 | if (!gimple_has_location (prev)) |
1986 | gimple_set_location (prev, bind_loc); |
1987 | } |
1988 | gsi_next (gsi_p); |
1989 | continue; |
1990 | } |
1991 | |
1992 | /* Ifs are tricky. */ |
1993 | if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND) |
1994 | { |
1995 | gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p)); |
1996 | tree false_lab = gimple_cond_false_label (cond_stmt); |
1997 | location_t if_loc = gimple_location (cond_stmt); |
1998 | |
1999 | /* If we have e.g. |
2000 | if (i > 1) goto <D.2259>; else goto D; |
2001 | we can't do much with the else-branch. */ |
2002 | if (!DECL_ARTIFICIAL (false_lab)) |
2003 | break; |
2004 | |
2005 | /* Go on until the false label, then one step back. */ |
2006 | for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p)) |
2007 | { |
2008 | gimple *stmt = gsi_stmt (*gsi_p); |
2009 | if (gimple_code (stmt) == GIMPLE_LABEL |
2010 | && gimple_label_label (as_a <glabel *> (stmt)) == false_lab) |
2011 | break; |
2012 | } |
2013 | |
2014 | /* Not found? Oops. */ |
2015 | if (gsi_end_p (*gsi_p)) |
2016 | break; |
2017 | |
2018 | struct label_entry l = { false_lab, if_loc }; |
2019 | labels->safe_push (l); |
2020 | |
2021 | /* Go to the last statement of the then branch. */ |
2022 | gsi_prev (gsi_p); |
2023 | |
2024 | /* if (i != 0) goto <D.1759>; else goto <D.1760>; |
2025 | <D.1759>: |
2026 | <stmt>; |
2027 | goto <D.1761>; |
2028 | <D.1760>: |
2029 | */ |
2030 | if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO |
2031 | && !gimple_has_location (gsi_stmt (*gsi_p))) |
2032 | { |
2033 | /* Look at the statement before, it might be |
2034 | attribute fallthrough, in which case don't warn. */ |
2035 | gsi_prev (gsi_p); |
2036 | bool fallthru_before_dest |
2037 | = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH); |
2038 | gsi_next (gsi_p); |
2039 | tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p)); |
2040 | if (!fallthru_before_dest) |
2041 | { |
2042 | struct label_entry l = { goto_dest, if_loc }; |
2043 | labels->safe_push (l); |
2044 | } |
2045 | } |
2046 | /* And move back. */ |
2047 | gsi_next (gsi_p); |
2048 | } |
2049 | |
2050 | /* Remember the last statement. Skip labels that are of no interest |
2051 | to us. */ |
2052 | if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL) |
2053 | { |
2054 | tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p))); |
2055 | if (find_label_entry (labels, label)) |
2056 | prev = gsi_stmt (*gsi_p); |
2057 | } |
2058 | else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK)) |
2059 | ; |
2060 | else if (!is_gimple_debug (gsi_stmt (*gsi_p))) |
2061 | prev = gsi_stmt (*gsi_p); |
2062 | gsi_next (gsi_p); |
2063 | } |
2064 | while (!gsi_end_p (*gsi_p) |
2065 | /* Stop if we find a case or a user-defined label. */ |
2066 | && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL |
2067 | || !gimple_has_location (gsi_stmt (*gsi_p)))); |
2068 | |
2069 | return prev; |
2070 | } |
2071 | |
/* Return true if the switch fallthrough warning should occur.  LABEL is
2073 | the label statement that we're falling through to. */ |
2074 | |
2075 | static bool |
2076 | should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label) |
2077 | { |
2078 | gimple_stmt_iterator gsi = *gsi_p; |
2079 | |
2080 | /* Don't warn if the label is marked with a "falls through" comment. */ |
2081 | if (FALLTHROUGH_LABEL_P (label)) |
2082 | return false; |
2083 | |
2084 | /* Don't warn for non-case labels followed by a statement: |
2085 | case 0: |
2086 | foo (); |
2087 | label: |
2088 | bar (); |
2089 | as these are likely intentional. */ |
2090 | if (!case_label_p (&gimplify_ctxp->case_labels, label)) |
2091 | { |
2092 | tree l; |
2093 | while (!gsi_end_p (gsi) |
2094 | && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL |
2095 | && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi)))) |
2096 | && !case_label_p (&gimplify_ctxp->case_labels, l)) |
2097 | gsi_next_nondebug (&gsi); |
2098 | if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL) |
2099 | return false; |
2100 | } |
2101 | |
  /* Don't warn for terminated branches, i.e. when the code at the
     subsequent case labels immediately breaks, jumps, or returns.  */
2104 | gsi = *gsi_p; |
2105 | |
2106 | /* Skip all immediately following labels. */ |
2107 | while (!gsi_end_p (gsi) |
2108 | && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL |
2109 | || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT)) |
2110 | gsi_next_nondebug (&gsi); |
2111 | |
2112 | /* { ... something; default:; } */ |
2113 | if (gsi_end_p (gsi) |
2114 | /* { ... something; default: break; } or |
2115 | { ... something; default: goto L; } */ |
2116 | || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO |
2117 | /* { ... something; default: return; } */ |
2118 | || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN) |
2119 | return false; |
2120 | |
2121 | return true; |
2122 | } |
2123 | |
2124 | /* Callback for walk_gimple_seq. */ |
2125 | |
2126 | static tree |
2127 | warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p, |
2128 | struct walk_stmt_info *) |
2129 | { |
2130 | gimple *stmt = gsi_stmt (*gsi_p); |
2131 | |
2132 | *handled_ops_p = true; |
2133 | switch (gimple_code (stmt)) |
2134 | { |
2135 | case GIMPLE_TRY: |
2136 | case GIMPLE_BIND: |
2137 | case GIMPLE_CATCH: |
2138 | case GIMPLE_EH_FILTER: |
2139 | case GIMPLE_TRANSACTION: |
2140 | /* Walk the sub-statements. */ |
2141 | *handled_ops_p = false; |
2142 | break; |
2143 | |
    /* Find a sequence of the form:
2145 | |
2146 | GIMPLE_LABEL |
2147 | [...] |
2148 | <may fallthru stmt> |
2149 | GIMPLE_LABEL |
2150 | |
2151 | and possibly warn. */ |
2152 | case GIMPLE_LABEL: |
2153 | { |
2154 | /* Found a label. Skip all immediately following labels. */ |
2155 | while (!gsi_end_p (*gsi_p) |
2156 | && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL) |
2157 | gsi_next_nondebug (gsi_p); |
2158 | |
2159 | /* There might be no more statements. */ |
2160 | if (gsi_end_p (*gsi_p)) |
2161 | return integer_zero_node; |
2162 | |
2163 | /* Vector of labels that fall through. */ |
2164 | auto_vec <struct label_entry> labels; |
2165 | gimple *prev = collect_fallthrough_labels (gsi_p, &labels); |
2166 | |
2167 | /* There might be no more statements. */ |
2168 | if (gsi_end_p (*gsi_p)) |
2169 | return integer_zero_node; |
2170 | |
2171 | gimple *next = gsi_stmt (*gsi_p); |
2172 | tree label; |
2173 | /* If what follows is a label, then we may have a fallthrough. */ |
2174 | if (gimple_code (next) == GIMPLE_LABEL |
2175 | && gimple_has_location (next) |
2176 | && (label = gimple_label_label (as_a <glabel *> (next))) |
2177 | && prev != NULL) |
2178 | { |
2179 | struct label_entry *l; |
2180 | bool warned_p = false; |
2181 | if (!should_warn_for_implicit_fallthrough (gsi_p, label)) |
2182 | /* Quiet. */; |
2183 | else if (gimple_code (prev) == GIMPLE_LABEL |
2184 | && (label = gimple_label_label (as_a <glabel *> (prev))) |
2185 | && (l = find_label_entry (&labels, label))) |
2186 | warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_, |
2187 | "this statement may fall through" ); |
2188 | else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH) |
2189 | /* Try to be clever and don't warn when the statement |
2190 | can't actually fall through. */ |
2191 | && gimple_stmt_may_fallthru (prev) |
2192 | && gimple_has_location (prev)) |
2193 | warned_p = warning_at (gimple_location (prev), |
2194 | OPT_Wimplicit_fallthrough_, |
2195 | "this statement may fall through" ); |
2196 | if (warned_p) |
2197 | inform (gimple_location (next), "here" ); |
2198 | |
2199 | /* Mark this label as processed so as to prevent multiple |
2200 | warnings in nested switches. */ |
2201 | FALLTHROUGH_LABEL_P (label) = true; |
2202 | |
2203 | /* So that next warn_implicit_fallthrough_r will start looking for |
2204 | a new sequence starting with this label. */ |
2205 | gsi_prev (gsi_p); |
2206 | } |
2207 | } |
2208 | break; |
2209 | default: |
2210 | break; |
2211 | } |
2212 | return NULL_TREE; |
2213 | } |
2214 | |
2215 | /* Warn when a switch case falls through. */ |
2216 | |
2217 | static void |
2218 | maybe_warn_implicit_fallthrough (gimple_seq seq) |
2219 | { |
2220 | if (!warn_implicit_fallthrough) |
2221 | return; |
2222 | |
2223 | /* This warning is meant for C/C++/ObjC/ObjC++ only. */ |
2224 | if (!(lang_GNU_C () |
2225 | || lang_GNU_CXX () |
2226 | || lang_GNU_OBJC ())) |
2227 | return; |
2228 | |
2229 | struct walk_stmt_info wi; |
2230 | memset (&wi, 0, sizeof (wi)); |
2231 | walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi); |
2232 | } |
2233 | |
2234 | /* Callback for walk_gimple_seq. */ |
2235 | |
2236 | static tree |
2237 | expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p, |
2238 | struct walk_stmt_info *) |
2239 | { |
2240 | gimple *stmt = gsi_stmt (*gsi_p); |
2241 | |
2242 | *handled_ops_p = true; |
2243 | switch (gimple_code (stmt)) |
2244 | { |
2245 | case GIMPLE_TRY: |
2246 | case GIMPLE_BIND: |
2247 | case GIMPLE_CATCH: |
2248 | case GIMPLE_EH_FILTER: |
2249 | case GIMPLE_TRANSACTION: |
2250 | /* Walk the sub-statements. */ |
2251 | *handled_ops_p = false; |
2252 | break; |
2253 | case GIMPLE_CALL: |
2254 | if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH)) |
2255 | { |
2256 | gsi_remove (gsi_p, true); |
2257 | if (gsi_end_p (*gsi_p)) |
2258 | return integer_zero_node; |
2259 | |
2260 | bool found = false; |
2261 | location_t loc = gimple_location (stmt); |
2262 | |
2263 | gimple_stmt_iterator gsi2 = *gsi_p; |
2264 | stmt = gsi_stmt (gsi2); |
2265 | if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt)) |
2266 | { |
2267 | /* Go on until the artificial label. */ |
2268 | tree goto_dest = gimple_goto_dest (stmt); |
2269 | for (; !gsi_end_p (gsi2); gsi_next (&gsi2)) |
2270 | { |
2271 | if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL |
2272 | && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2))) |
2273 | == goto_dest) |
2274 | break; |
2275 | } |
2276 | |
2277 | /* Not found? Stop. */ |
2278 | if (gsi_end_p (gsi2)) |
2279 | break; |
2280 | |
2281 | /* Look one past it. */ |
2282 | gsi_next (&gsi2); |
2283 | } |
2284 | |
2285 | /* We're looking for a case label or default label here. */ |
2286 | while (!gsi_end_p (gsi2)) |
2287 | { |
2288 | stmt = gsi_stmt (gsi2); |
2289 | if (gimple_code (stmt) == GIMPLE_LABEL) |
2290 | { |
2291 | tree label = gimple_label_label (as_a <glabel *> (stmt)); |
2292 | if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label)) |
2293 | { |
2294 | found = true; |
2295 | break; |
2296 | } |
2297 | } |
2298 | else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK)) |
2299 | ; |
2300 | else if (!is_gimple_debug (stmt)) |
2301 | /* Anything else is not expected. */ |
2302 | break; |
2303 | gsi_next (&gsi2); |
2304 | } |
2305 | if (!found) |
2306 | warning_at (loc, 0, "attribute %<fallthrough%> not preceding " |
2307 | "a case label or default label" ); |
2308 | } |
2309 | break; |
2310 | default: |
2311 | break; |
2312 | } |
2313 | return NULL_TREE; |
2314 | } |
2315 | |
2316 | /* Expand all FALLTHROUGH () calls in SEQ. */ |
2317 | |
2318 | static void |
2319 | expand_FALLTHROUGH (gimple_seq *seq_p) |
2320 | { |
2321 | struct walk_stmt_info wi; |
2322 | memset (&wi, 0, sizeof (wi)); |
2323 | walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi); |
2324 | } |
2325 | |
2326 | |
2327 | /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can |
2328 | branch to. */ |
2329 | |
2330 | static enum gimplify_status |
2331 | gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p) |
2332 | { |
2333 | tree switch_expr = *expr_p; |
2334 | gimple_seq switch_body_seq = NULL; |
2335 | enum gimplify_status ret; |
2336 | tree index_type = TREE_TYPE (switch_expr); |
2337 | if (index_type == NULL_TREE) |
2338 | index_type = TREE_TYPE (SWITCH_COND (switch_expr)); |
2339 | |
2340 | ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val, |
2341 | fb_rvalue); |
2342 | if (ret == GS_ERROR || ret == GS_UNHANDLED) |
2343 | return ret; |
2344 | |
2345 | if (SWITCH_BODY (switch_expr)) |
2346 | { |
2347 | vec<tree> labels; |
2348 | vec<tree> saved_labels; |
2349 | hash_set<tree> *saved_live_switch_vars = NULL; |
2350 | tree default_case = NULL_TREE; |
2351 | gswitch *switch_stmt; |
2352 | |
2353 | /* Save old labels, get new ones from body, then restore the old |
2354 | labels. Save all the things from the switch body to append after. */ |
2355 | saved_labels = gimplify_ctxp->case_labels; |
2356 | gimplify_ctxp->case_labels.create (8); |
2357 | |
      /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR
	 or a STATEMENT_LIST.  */
2359 | saved_live_switch_vars = gimplify_ctxp->live_switch_vars; |
2360 | tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr)); |
2361 | if (body_type == BIND_EXPR || body_type == STATEMENT_LIST) |
2362 | gimplify_ctxp->live_switch_vars = new hash_set<tree> (4); |
2363 | else |
2364 | gimplify_ctxp->live_switch_vars = NULL; |
2365 | |
2366 | bool old_in_switch_expr = gimplify_ctxp->in_switch_expr; |
2367 | gimplify_ctxp->in_switch_expr = true; |
2368 | |
2369 | gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq); |
2370 | |
2371 | gimplify_ctxp->in_switch_expr = old_in_switch_expr; |
2372 | maybe_warn_switch_unreachable (switch_body_seq); |
2373 | maybe_warn_implicit_fallthrough (switch_body_seq); |
2374 | /* Only do this for the outermost GIMPLE_SWITCH. */ |
2375 | if (!gimplify_ctxp->in_switch_expr) |
2376 | expand_FALLTHROUGH (&switch_body_seq); |
2377 | |
2378 | labels = gimplify_ctxp->case_labels; |
2379 | gimplify_ctxp->case_labels = saved_labels; |
2380 | |
2381 | if (gimplify_ctxp->live_switch_vars) |
2382 | { |
2383 | gcc_assert (gimplify_ctxp->live_switch_vars->elements () == 0); |
2384 | delete gimplify_ctxp->live_switch_vars; |
2385 | } |
2386 | gimplify_ctxp->live_switch_vars = saved_live_switch_vars; |
2387 | |
2388 | preprocess_case_label_vec_for_gimple (labels, index_type, |
2389 | &default_case); |
2390 | |
2391 | bool add_bind = false; |
2392 | if (!default_case) |
2393 | { |
2394 | glabel *new_default; |
2395 | |
2396 | default_case |
2397 | = build_case_label (NULL_TREE, NULL_TREE, |
2398 | create_artificial_label (UNKNOWN_LOCATION)); |
2399 | if (old_in_switch_expr) |
2400 | { |
2401 | SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1; |
2402 | add_bind = true; |
2403 | } |
2404 | new_default = gimple_build_label (CASE_LABEL (default_case)); |
2405 | gimplify_seq_add_stmt (&switch_body_seq, new_default); |
2406 | } |
2407 | else if (old_in_switch_expr) |
2408 | { |
2409 | gimple *last = gimple_seq_last_stmt (switch_body_seq); |
2410 | if (last && gimple_code (last) == GIMPLE_LABEL) |
2411 | { |
2412 | tree label = gimple_label_label (as_a <glabel *> (last)); |
2413 | if (SWITCH_BREAK_LABEL_P (label)) |
2414 | add_bind = true; |
2415 | } |
2416 | } |
2417 | |
2418 | switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr), |
2419 | default_case, labels); |
2420 | /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq |
2421 | ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL, |
2422 | wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND, |
2423 | so that we can easily find the start and end of the switch |
2424 | statement. */ |
2425 | if (add_bind) |
2426 | { |
2427 | gimple_seq bind_body = NULL; |
2428 | gimplify_seq_add_stmt (&bind_body, switch_stmt); |
2429 | gimple_seq_add_seq (&bind_body, switch_body_seq); |
2430 | gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE); |
2431 | gimple_set_location (bind, EXPR_LOCATION (switch_expr)); |
2432 | gimplify_seq_add_stmt (pre_p, bind); |
2433 | } |
2434 | else |
2435 | { |
2436 | gimplify_seq_add_stmt (pre_p, switch_stmt); |
2437 | gimplify_seq_add_seq (pre_p, switch_body_seq); |
2438 | } |
2439 | labels.release (); |
2440 | } |
2441 | else |
2442 | gcc_unreachable (); |
2443 | |
2444 | return GS_ALL_DONE; |
2445 | } |
2446 | |
2447 | /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */ |
2448 | |
2449 | static enum gimplify_status |
2450 | gimplify_label_expr (tree *expr_p, gimple_seq *pre_p) |
2451 | { |
2452 | gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p)) |
2453 | == current_function_decl); |
2454 | |
2455 | tree label = LABEL_EXPR_LABEL (*expr_p); |
2456 | glabel *label_stmt = gimple_build_label (label); |
2457 | gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p)); |
2458 | gimplify_seq_add_stmt (pre_p, label_stmt); |
2459 | |
2460 | if (lookup_attribute ("cold" , DECL_ATTRIBUTES (label))) |
2461 | gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL, |
2462 | NOT_TAKEN)); |
  else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2464 | gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL, |
2465 | TAKEN)); |
2466 | |
2467 | return GS_ALL_DONE; |
2468 | } |
2469 | |
2470 | /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */ |
2471 | |
2472 | static enum gimplify_status |
2473 | gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p) |
2474 | { |
2475 | struct gimplify_ctx *ctxp; |
2476 | glabel *label_stmt; |
2477 | |
2478 | /* Invalid programs can play Duff's Device type games with, for example, |
2479 | #pragma omp parallel. At least in the C front end, we don't |
2480 | detect such invalid branches until after gimplification, in the |
2481 | diagnose_omp_blocks pass. */ |
2482 | for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context) |
2483 | if (ctxp->case_labels.exists ()) |
2484 | break; |
2485 | |
2486 | label_stmt = gimple_build_label (CASE_LABEL (*expr_p)); |
2487 | gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p)); |
2488 | ctxp->case_labels.safe_push (*expr_p); |
2489 | gimplify_seq_add_stmt (pre_p, label_stmt); |
2490 | |
2491 | return GS_ALL_DONE; |
2492 | } |
2493 | |
2494 | /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first |
2495 | if necessary. */ |
2496 | |
2497 | tree |
2498 | build_and_jump (tree *label_p) |
2499 | { |
2500 | if (label_p == NULL) |
2501 | /* If there's nowhere to jump, just fall through. */ |
2502 | return NULL_TREE; |
2503 | |
2504 | if (*label_p == NULL_TREE) |
2505 | { |
2506 | tree label = create_artificial_label (UNKNOWN_LOCATION); |
2507 | *label_p = label; |
2508 | } |
2509 | |
2510 | return build1 (GOTO_EXPR, void_type_node, *label_p); |
2511 | } |
2512 | |
2513 | /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR. |
2514 | This also involves building a label to jump to and communicating it to |
2515 | gimplify_loop_expr through gimplify_ctxp->exit_label. */ |
2516 | |
2517 | static enum gimplify_status |
2518 | gimplify_exit_expr (tree *expr_p) |
2519 | { |
2520 | tree cond = TREE_OPERAND (*expr_p, 0); |
2521 | tree expr; |
2522 | |
2523 | expr = build_and_jump (&gimplify_ctxp->exit_label); |
2524 | expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE); |
2525 | *expr_p = expr; |
2526 | |
2527 | return GS_OK; |
2528 | } |
2529 | |
2530 | /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is |
2531 | different from its canonical type, wrap the whole thing inside a |
2532 | NOP_EXPR and force the type of the COMPONENT_REF to be the canonical |
2533 | type. |
2534 | |
2535 | The canonical type of a COMPONENT_REF is the type of the field being |
2536 | referenced--unless the field is a bit-field which can be read directly |
2537 | in a smaller mode, in which case the canonical type is the |
2538 | sign-appropriate type corresponding to that mode. */ |
2539 | |
2540 | static void |
2541 | canonicalize_component_ref (tree *expr_p) |
2542 | { |
2543 | tree expr = *expr_p; |
2544 | tree type; |
2545 | |
2546 | gcc_assert (TREE_CODE (expr) == COMPONENT_REF); |
2547 | |
2548 | if (INTEGRAL_TYPE_P (TREE_TYPE (expr))) |
2549 | type = TREE_TYPE (get_unwidened (expr, NULL_TREE)); |
2550 | else |
2551 | type = TREE_TYPE (TREE_OPERAND (expr, 1)); |
2552 | |
2553 | /* One could argue that all the stuff below is not necessary for |
2554 | the non-bitfield case and declare it a FE error if type |
2555 | adjustment would be needed. */ |
2556 | if (TREE_TYPE (expr) != type) |
2557 | { |
2558 | #ifdef ENABLE_TYPES_CHECKING |
2559 | tree old_type = TREE_TYPE (expr); |
2560 | #endif |
2561 | int type_quals; |
2562 | |
2563 | /* We need to preserve qualifiers and propagate them from |
2564 | operand 0. */ |
2565 | type_quals = TYPE_QUALS (type) |
2566 | | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0))); |
2567 | if (TYPE_QUALS (type) != type_quals) |
2568 | type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals); |
2569 | |
2570 | /* Set the type of the COMPONENT_REF to the underlying type. */ |
2571 | TREE_TYPE (expr) = type; |
2572 | |
2573 | #ifdef ENABLE_TYPES_CHECKING |
2574 | /* It is now a FE error, if the conversion from the canonical |
2575 | type to the original expression type is not useless. */ |
2576 | gcc_assert (useless_type_conversion_p (old_type, type)); |
2577 | #endif |
2578 | } |
2579 | } |
2580 | |
2581 | /* If a NOP conversion is changing a pointer to array of foo to a pointer |
2582 | to foo, embed that change in the ADDR_EXPR by converting |
2583 | T array[U]; |
2584 | (T *)&array |
2585 | ==> |
2586 | &array[L] |
2587 | where L is the lower bound. For simplicity, only do this for constant |
2588 | lower bound. |
2589 | The constraint is that the type of &array[L] is trivially convertible |
2590 | to T *. */ |
2591 | |
2592 | static void |
2593 | canonicalize_addr_expr (tree *expr_p) |
2594 | { |
2595 | tree expr = *expr_p; |
2596 | tree addr_expr = TREE_OPERAND (expr, 0); |
2597 | tree datype, ddatype, pddatype; |
2598 | |
2599 | /* We simplify only conversions from an ADDR_EXPR to a pointer type. */ |
2600 | if (!POINTER_TYPE_P (TREE_TYPE (expr)) |
2601 | || TREE_CODE (addr_expr) != ADDR_EXPR) |
2602 | return; |
2603 | |
2604 | /* The addr_expr type should be a pointer to an array. */ |
2605 | datype = TREE_TYPE (TREE_TYPE (addr_expr)); |
2606 | if (TREE_CODE (datype) != ARRAY_TYPE) |
2607 | return; |
2608 | |
2609 | /* The pointer to element type shall be trivially convertible to |
2610 | the expression pointer type. */ |
2611 | ddatype = TREE_TYPE (datype); |
2612 | pddatype = build_pointer_type (ddatype); |
2613 | if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)), |
2614 | pddatype)) |
2615 | return; |
2616 | |
2617 | /* The lower bound and element sizes must be constant. */ |
2618 | if (!TYPE_SIZE_UNIT (ddatype) |
2619 | || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST |
2620 | || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype)) |
2621 | || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST) |
2622 | return; |
2623 | |
2624 | /* All checks succeeded. Build a new node to merge the cast. */ |
2625 | *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0), |
2626 | TYPE_MIN_VALUE (TYPE_DOMAIN (datype)), |
2627 | NULL_TREE, NULL_TREE); |
2628 | *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p); |
2629 | |
2630 | /* We can have stripped a required restrict qualifier above. */ |
2631 | if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p))) |
2632 | *expr_p = fold_convert (TREE_TYPE (expr), *expr_p); |
2633 | } |
2634 | |
2635 | /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions |
2636 | underneath as appropriate. */ |
2637 | |
2638 | static enum gimplify_status |
2639 | gimplify_conversion (tree *expr_p) |
2640 | { |
2641 | location_t loc = EXPR_LOCATION (*expr_p); |
2642 | gcc_assert (CONVERT_EXPR_P (*expr_p)); |
2643 | |
2644 | /* Then strip away all but the outermost conversion. */ |
2645 | STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0)); |
2646 | |
2647 | /* And remove the outermost conversion if it's useless. */ |
2648 | if (tree_ssa_useless_type_conversion (*expr_p)) |
2649 | *expr_p = TREE_OPERAND (*expr_p, 0); |
2650 | |
2651 | /* If we still have a conversion at the toplevel, |
2652 | then canonicalize some constructs. */ |
2653 | if (CONVERT_EXPR_P (*expr_p)) |
2654 | { |
2655 | tree sub = TREE_OPERAND (*expr_p, 0); |
2656 | |
2657 | /* If a NOP conversion is changing the type of a COMPONENT_REF |
2658 | expression, then canonicalize its type now in order to expose more |
2659 | redundant conversions. */ |
2660 | if (TREE_CODE (sub) == COMPONENT_REF) |
2661 | canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0)); |
2662 | |
2663 | /* If a NOP conversion is changing a pointer to array of foo |
2664 | to a pointer to foo, embed that change in the ADDR_EXPR. */ |
2665 | else if (TREE_CODE (sub) == ADDR_EXPR) |
2666 | canonicalize_addr_expr (expr_p); |
2667 | } |
2668 | |
2669 | /* If we have a conversion to a non-register type force the |
2670 | use of a VIEW_CONVERT_EXPR instead. */ |
2671 | if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p))) |
2672 | *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p), |
2673 | TREE_OPERAND (*expr_p, 0)); |
2674 | |
2675 | /* Canonicalize CONVERT_EXPR to NOP_EXPR. */ |
2676 | if (TREE_CODE (*expr_p) == CONVERT_EXPR) |
2677 | TREE_SET_CODE (*expr_p, NOP_EXPR); |
2678 | |
2679 | return GS_OK; |
2680 | } |
2681 | |
2682 | /* Nonlocal VLAs seen in the current function. */ |
2683 | static hash_set<tree> *nonlocal_vlas; |
2684 | |
2685 | /* The VAR_DECLs created for nonlocal VLAs for debug info purposes. */ |
2686 | static tree nonlocal_vla_vars; |
2687 | |
2688 | /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a |
2689 | DECL_VALUE_EXPR, and it's worth re-examining things. */ |
2690 | |
2691 | static enum gimplify_status |
2692 | gimplify_var_or_parm_decl (tree *expr_p) |
2693 | { |
2694 | tree decl = *expr_p; |
2695 | |
2696 | /* ??? If this is a local variable, and it has not been seen in any |
2697 | outer BIND_EXPR, then it's probably the result of a duplicate |
2698 | declaration, for which we've already issued an error. It would |
2699 | be really nice if the front end wouldn't leak these at all. |
2700 | Currently the only known culprit is C++ destructors, as seen |
2701 | in g++.old-deja/g++.jason/binding.C. */ |
2702 | if (VAR_P (decl) |
2703 | && !DECL_SEEN_IN_BIND_EXPR_P (decl) |
2704 | && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl) |
2705 | && decl_function_context (decl) == current_function_decl) |
2706 | { |
2707 | gcc_assert (seen_error ()); |
2708 | return GS_ERROR; |
2709 | } |
2710 | |
2711 | /* When within an OMP context, notice uses of variables. */ |
2712 | if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true)) |
2713 | return GS_ALL_DONE; |
2714 | |
2715 | /* If the decl is an alias for another expression, substitute it now. */ |
2716 | if (DECL_HAS_VALUE_EXPR_P (decl)) |
2717 | { |
2718 | tree value_expr = DECL_VALUE_EXPR (decl); |
2719 | |
2720 | /* For referenced nonlocal VLAs add a decl for debugging purposes |
2721 | to the current function. */ |
2722 | if (VAR_P (decl) |
2723 | && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST |
2724 | && nonlocal_vlas != NULL |
2725 | && TREE_CODE (value_expr) == INDIRECT_REF |
2726 | && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL |
2727 | && decl_function_context (decl) != current_function_decl) |
2728 | { |
2729 | struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; |
2730 | while (ctx |
2731 | && (ctx->region_type == ORT_WORKSHARE |
2732 | || ctx->region_type == ORT_SIMD |
2733 | || ctx->region_type == ORT_ACC)) |
2734 | ctx = ctx->outer_context; |
2735 | if (!ctx && !nonlocal_vlas->add (decl)) |
2736 | { |
2737 | tree copy = copy_node (decl); |
2738 | |
2739 | lang_hooks.dup_lang_specific_decl (copy); |
2740 | SET_DECL_RTL (copy, 0); |
2741 | TREE_USED (copy) = 1; |
2742 | DECL_CHAIN (copy) = nonlocal_vla_vars; |
2743 | nonlocal_vla_vars = copy; |
2744 | SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr)); |
2745 | DECL_HAS_VALUE_EXPR_P (copy) = 1; |
2746 | } |
2747 | } |
2748 | |
2749 | *expr_p = unshare_expr (value_expr); |
2750 | return GS_OK; |
2751 | } |
2752 | |
2753 | return GS_ALL_DONE; |
2754 | } |
2755 | |
2756 | /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */ |
2757 | |
2758 | static void |
2759 | recalculate_side_effects (tree t) |
2760 | { |
2761 | enum tree_code code = TREE_CODE (t); |
2762 | int len = TREE_OPERAND_LENGTH (t); |
2763 | int i; |
2764 | |
2765 | switch (TREE_CODE_CLASS (code)) |
2766 | { |
2767 | case tcc_expression: |
2768 | switch (code) |
2769 | { |
2770 | case INIT_EXPR: |
2771 | case MODIFY_EXPR: |
2772 | case VA_ARG_EXPR: |
2773 | case PREDECREMENT_EXPR: |
2774 | case PREINCREMENT_EXPR: |
2775 | case POSTDECREMENT_EXPR: |
2776 | case POSTINCREMENT_EXPR: |
2777 | /* All of these have side-effects, no matter what their |
2778 | operands are. */ |
2779 | return; |
2780 | |
2781 | default: |
2782 | break; |
2783 | } |
2784 | /* Fall through. */ |
2785 | |
2786 | case tcc_comparison: /* a comparison expression */ |
2787 | case tcc_unary: /* a unary arithmetic expression */ |
2788 | case tcc_binary: /* a binary arithmetic expression */ |
2789 | case tcc_reference: /* a reference */ |
2790 | case tcc_vl_exp: /* a function call */ |
2791 | TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t); |
2792 | for (i = 0; i < len; ++i) |
2793 | { |
2794 | tree op = TREE_OPERAND (t, i); |
2795 | if (op && TREE_SIDE_EFFECTS (op)) |
2796 | TREE_SIDE_EFFECTS (t) = 1; |
2797 | } |
2798 | break; |
2799 | |
2800 | case tcc_constant: |
2801 | /* No side-effects. */ |
2802 | return; |
2803 | |
2804 | default: |
2805 | gcc_unreachable (); |
2806 | } |
2807 | } |
2808 | |
2809 | /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR |
2810 | node *EXPR_P. |
2811 | |
2812 | compound_lval |
2813 | : min_lval '[' val ']' |
2814 | | min_lval '.' ID |
2815 | | compound_lval '[' val ']' |
2816 | | compound_lval '.' ID |
2817 | |
2818 | This is not part of the original SIMPLE definition, which separates |
2819 | array and member references, but it seems reasonable to handle them |
2820 | together. Also, this way we don't run into problems with union |
2821 | aliasing; gcc requires that for accesses through a union to alias, the |
2822 | union reference must be explicit, which was not always the case when we |
2823 | were splitting up array and member refs. |
2824 | |
2825 | PRE_P points to the sequence where side effects that must happen before |
2826 | *EXPR_P should be stored. |
2827 | |
2828 | POST_P points to the sequence where side effects that must happen after |
2829 | *EXPR_P should be stored. */ |
2830 | |
2831 | static enum gimplify_status |
2832 | gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p, |
2833 | fallback_t fallback) |
2834 | { |
2835 | tree *p; |
2836 | enum gimplify_status ret = GS_ALL_DONE, tret; |
2837 | int i; |
2838 | location_t loc = EXPR_LOCATION (*expr_p); |
2839 | tree expr = *expr_p; |
2840 | |
2841 | /* Create a stack of the subexpressions so later we can walk them in |
2842 | order from inner to outer. */ |
2843 | auto_vec<tree, 10> expr_stack; |
2844 | |
2845 | /* We can handle anything that get_inner_reference can deal with. */ |
2846 | for (p = expr_p; ; p = &TREE_OPERAND (*p, 0)) |
2847 | { |
2848 | restart: |
2849 | /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */ |
2850 | if (TREE_CODE (*p) == INDIRECT_REF) |
2851 | *p = fold_indirect_ref_loc (loc, *p); |
2852 | |
2853 | if (handled_component_p (*p)) |
2854 | ; |
2855 | /* Expand DECL_VALUE_EXPR now. In some cases that may expose |
2856 | additional COMPONENT_REFs. */ |
2857 | else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL) |
2858 | && gimplify_var_or_parm_decl (p) == GS_OK) |
2859 | goto restart; |
2860 | else |
2861 | break; |
2862 | |
2863 | expr_stack.safe_push (*p); |
2864 | } |
2865 | |
2866 | gcc_assert (expr_stack.length ()); |
2867 | |
2868 | /* Now EXPR_STACK is a stack of pointers to all the refs we've |
2869 | walked through and P points to the innermost expression. |
2870 | |
     Java requires that we elaborate nodes in source order.  That
2872 | means we must gimplify the inner expression followed by each of |
2873 | the indices, in order. But we can't gimplify the inner |
2874 | expression until we deal with any variable bounds, sizes, or |
2875 | positions in order to deal with PLACEHOLDER_EXPRs. |
2876 | |
2877 | So we do this in three steps. First we deal with the annotations |
2878 | for any variables in the components, then we gimplify the base, |
2879 | then we gimplify any indices, from left to right. */ |
2880 | for (i = expr_stack.length () - 1; i >= 0; i--) |
2881 | { |
2882 | tree t = expr_stack[i]; |
2883 | |
2884 | if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF) |
2885 | { |
2886 | /* Gimplify the low bound and element type size and put them into |
2887 | the ARRAY_REF. If these values are set, they have already been |
2888 | gimplified. */ |
2889 | if (TREE_OPERAND (t, 2) == NULL_TREE) |
2890 | { |
2891 | tree low = unshare_expr (array_ref_low_bound (t)); |
2892 | if (!is_gimple_min_invariant (low)) |
2893 | { |
2894 | TREE_OPERAND (t, 2) = low; |
2895 | tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, |
2896 | post_p, is_gimple_reg, |
2897 | fb_rvalue); |
2898 | ret = MIN (ret, tret); |
2899 | } |
2900 | } |
2901 | else |
2902 | { |
2903 | tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p, |
2904 | is_gimple_reg, fb_rvalue); |
2905 | ret = MIN (ret, tret); |
2906 | } |
2907 | |
2908 | if (TREE_OPERAND (t, 3) == NULL_TREE) |
2909 | { |
2910 | tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0))); |
2911 | tree elmt_size = unshare_expr (array_ref_element_size (t)); |
2912 | tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type)); |
2913 | |
2914 | /* Divide the element size by the alignment of the element |
2915 | type (above). */ |
2916 | elmt_size |
2917 | = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor); |
2918 | |
2919 | if (!is_gimple_min_invariant (elmt_size)) |
2920 | { |
2921 | TREE_OPERAND (t, 3) = elmt_size; |
2922 | tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, |
2923 | post_p, is_gimple_reg, |
2924 | fb_rvalue); |
2925 | ret = MIN (ret, tret); |
2926 | } |
2927 | } |
2928 | else |
2929 | { |
2930 | tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p, |
2931 | is_gimple_reg, fb_rvalue); |
2932 | ret = MIN (ret, tret); |
2933 | } |
2934 | } |
2935 | else if (TREE_CODE (t) == COMPONENT_REF) |
2936 | { |
2937 | /* Set the field offset into T and gimplify it. */ |
2938 | if (TREE_OPERAND (t, 2) == NULL_TREE) |
2939 | { |
2940 | tree offset = unshare_expr (component_ref_field_offset (t)); |
2941 | tree field = TREE_OPERAND (t, 1); |
2942 | tree factor |
2943 | = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT); |
2944 | |
2945 | /* Divide the offset by its alignment. */ |
2946 | offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor); |
2947 | |
2948 | if (!is_gimple_min_invariant (offset)) |
2949 | { |
2950 | TREE_OPERAND (t, 2) = offset; |
2951 | tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, |
2952 | post_p, is_gimple_reg, |
2953 | fb_rvalue); |
2954 | ret = MIN (ret, tret); |
2955 | } |
2956 | } |
2957 | else |
2958 | { |
2959 | tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p, |
2960 | is_gimple_reg, fb_rvalue); |
2961 | ret = MIN (ret, tret); |
2962 | } |
2963 | } |
2964 | } |
2965 | |
2966 | /* Step 2 is to gimplify the base expression. Make sure lvalue is set |
2967 | so as to match the min_lval predicate. Failure to do so may result |
2968 | in the creation of large aggregate temporaries. */ |
2969 | tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval, |
2970 | fallback | fb_lvalue); |
2971 | ret = MIN (ret, tret); |
2972 | |
2973 | /* And finally, the indices and operands of ARRAY_REF. During this |
2974 | loop we also remove any useless conversions. */ |
2975 | for (; expr_stack.length () > 0; ) |
2976 | { |
2977 | tree t = expr_stack.pop (); |
2978 | |
2979 | if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF) |
2980 | { |
2981 | /* Gimplify the dimension. */ |
2982 | if (!is_gimple_min_invariant (TREE_OPERAND (t, 1))) |
2983 | { |
2984 | tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p, |
2985 | is_gimple_val, fb_rvalue); |
2986 | ret = MIN (ret, tret); |
2987 | } |
2988 | } |
2989 | |
2990 | STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0)); |
2991 | |
2992 | /* The innermost expression P may have originally had |
2993 | TREE_SIDE_EFFECTS set which would have caused all the outer |
2994 | expressions in *EXPR_P leading to P to also have had |
2995 | TREE_SIDE_EFFECTS set. */ |
2996 | recalculate_side_effects (t); |
2997 | } |
2998 | |
2999 | /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */ |
3000 | if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF) |
3001 | { |
3002 | canonicalize_component_ref (expr_p); |
3003 | } |
3004 | |
3005 | expr_stack.release (); |
3006 | |
3007 | gcc_assert (*expr_p == expr || ret != GS_ALL_DONE); |
3008 | |
3009 | return ret; |
3010 | } |
3011 | |
3012 | /* Gimplify the self modifying expression pointed to by EXPR_P |
3013 | (++, --, +=, -=). |
3014 | |
3015 | PRE_P points to the list where side effects that must happen before |
3016 | *EXPR_P should be stored. |
3017 | |
3018 | POST_P points to the list where side effects that must happen after |
3019 | *EXPR_P should be stored. |
3020 | |
3021 | WANT_VALUE is nonzero iff we want to use the value of this expression |
3022 | in another expression. |
3023 | |
3024 | ARITH_TYPE is the type the computation should be performed in. */ |
3025 | |
3026 | enum gimplify_status |
3027 | gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p, |
3028 | bool want_value, tree arith_type) |
3029 | { |
3030 | enum tree_code code; |
3031 | tree lhs, lvalue, rhs, t1; |
3032 | gimple_seq post = NULL, *orig_post_p = post_p; |
3033 | bool postfix; |
3034 | enum tree_code arith_code; |
3035 | enum gimplify_status ret; |
3036 | location_t loc = EXPR_LOCATION (*expr_p); |
3037 | |
3038 | code = TREE_CODE (*expr_p); |
3039 | |
3040 | gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR |
3041 | || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR); |
3042 | |
3043 | /* Prefix or postfix? */ |
3044 | if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR) |
3045 | /* Faster to treat as prefix if result is not used. */ |
3046 | postfix = want_value; |
3047 | else |
3048 | postfix = false; |
3049 | |
3050 | /* For postfix, make sure the inner expression's post side effects |
3051 | are executed after side effects from this expression. */ |
3052 | if (postfix) |
3053 | post_p = &post; |
3054 | |
3055 | /* Add or subtract? */ |
3056 | if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR) |
3057 | arith_code = PLUS_EXPR; |
3058 | else |
3059 | arith_code = MINUS_EXPR; |
3060 | |
3061 | /* Gimplify the LHS into a GIMPLE lvalue. */ |
3062 | lvalue = TREE_OPERAND (*expr_p, 0); |
3063 | ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue); |
3064 | if (ret == GS_ERROR) |
3065 | return ret; |
3066 | |
3067 | /* Extract the operands to the arithmetic operation. */ |
3068 | lhs = lvalue; |
3069 | rhs = TREE_OPERAND (*expr_p, 1); |
3070 | |
3071 | /* For postfix operator, we evaluate the LHS to an rvalue and then use |
3072 | that as the result value and in the postqueue operation. */ |
3073 | if (postfix) |
3074 | { |
3075 | ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue); |
3076 | if (ret == GS_ERROR) |
3077 | return ret; |
3078 | |
3079 | lhs = get_initialized_tmp_var (lhs, pre_p, NULL); |
3080 | } |
3081 | |
  /* For pointer increment and decrement, use POINTER_PLUS_EXPR.  */
3083 | if (POINTER_TYPE_P (TREE_TYPE (lhs))) |
3084 | { |
3085 | rhs = convert_to_ptrofftype_loc (loc, rhs); |
3086 | if (arith_code == MINUS_EXPR) |
3087 | rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs); |
3088 | t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs); |
3089 | } |
3090 | else |
3091 | t1 = fold_convert (TREE_TYPE (*expr_p), |
3092 | fold_build2 (arith_code, arith_type, |
3093 | fold_convert (arith_type, lhs), |
3094 | fold_convert (arith_type, rhs))); |
3095 | |
3096 | if (postfix) |
3097 | { |
3098 | gimplify_assign (lvalue, t1, pre_p); |
3099 | gimplify_seq_add_seq (orig_post_p, post); |
3100 | *expr_p = lhs; |
3101 | return GS_ALL_DONE; |
3102 | } |
3103 | else |
3104 | { |
3105 | *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1); |
3106 | return GS_OK; |
3107 | } |
3108 | } |
3109 | |
3110 | /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */ |
3111 | |
3112 | static void |
3113 | maybe_with_size_expr (tree *expr_p) |
3114 | { |
3115 | tree expr = *expr_p; |
3116 | tree type = TREE_TYPE (expr); |
3117 | tree size; |
3118 | |
3119 | /* If we've already wrapped this or the type is error_mark_node, we can't do |
3120 | anything. */ |
3121 | if (TREE_CODE (expr) == WITH_SIZE_EXPR |
3122 | || type == error_mark_node) |
3123 | return; |
3124 | |
3125 | /* If the size isn't known or is a constant, we have nothing to do. */ |
3126 | size = TYPE_SIZE_UNIT (type); |
3127 | if (!size || TREE_CODE (size) == INTEGER_CST) |
3128 | return; |
3129 | |
3130 | /* Otherwise, make a WITH_SIZE_EXPR. */ |
3131 | size = unshare_expr (size); |
3132 | size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr); |
3133 | *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size); |
3134 | } |
3135 | |
/* Helper for gimplify_call_expr.  Gimplify a single argument *ARG_P.
3137 | Store any side-effects in PRE_P. CALL_LOCATION is the location of |
3138 | the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be |
3139 | gimplified to an SSA name. */ |
3140 | |
3141 | enum gimplify_status |
3142 | gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location, |
3143 | bool allow_ssa) |
3144 | { |
3145 | bool (*test) (tree); |
3146 | fallback_t fb; |
3147 | |
3148 | /* In general, we allow lvalues for function arguments to avoid |
3149 | extra overhead of copying large aggregates out of even larger |
3150 | aggregates into temporaries only to copy the temporaries to |
3151 | the argument list. Make optimizers happy by pulling out to |
3152 | temporaries those types that fit in registers. */ |
3153 | if (is_gimple_reg_type (TREE_TYPE (*arg_p))) |
3154 | test = is_gimple_val, fb = fb_rvalue; |
3155 | else |
3156 | { |
3157 | test = is_gimple_lvalue, fb = fb_either; |
3158 | /* Also strip a TARGET_EXPR that would force an extra copy. */ |
3159 | if (TREE_CODE (*arg_p) == TARGET_EXPR) |
3160 | { |
3161 | tree init = TARGET_EXPR_INITIAL (*arg_p); |
3162 | if (init |
3163 | && !VOID_TYPE_P (TREE_TYPE (init))) |
3164 | *arg_p = init; |
3165 | } |
3166 | } |
3167 | |
3168 | /* If this is a variable sized type, we must remember the size. */ |
3169 | maybe_with_size_expr (arg_p); |
3170 | |
3171 | /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */ |
3172 | /* Make sure arguments have the same location as the function call |
3173 | itself. */ |
3174 | protected_set_expr_location (*arg_p, call_location); |
3175 | |
3176 | /* There is a sequence point before a function call. Side effects in |
3177 | the argument list must occur before the actual call. So, when |
3178 | gimplifying arguments, force gimplify_expr to use an internal |
3179 | post queue which is then appended to the end of PRE_P. */ |
3180 | return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa); |
3181 | } |
3182 | |
3183 | /* Don't fold inside offloading or taskreg regions: it can break code by |
3184 | adding decl references that weren't in the source. We'll do it during |
3185 | omplower pass instead. */ |
3186 | |
3187 | static bool |
3188 | maybe_fold_stmt (gimple_stmt_iterator *gsi) |
3189 | { |
3190 | struct gimplify_omp_ctx *ctx; |
3191 | for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context) |
3192 | if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0) |
3193 | return false; |
3194 | return fold_stmt (gsi); |
3195 | } |
3196 | |
3197 | /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P. |
3198 | WANT_VALUE is true if the result of the call is desired. */ |
3199 | |
3200 | static enum gimplify_status |
3201 | gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value) |
3202 | { |
3203 | tree fndecl, parms, p, fnptrtype; |
3204 | enum gimplify_status ret; |
3205 | int i, nargs; |
3206 | gcall *call; |
3207 | bool builtin_va_start_p = false; |
3208 | location_t loc = EXPR_LOCATION (*expr_p); |
3209 | |
3210 | gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR); |
3211 | |
3212 | /* For reliable diagnostics during inlining, it is necessary that |
3213 | every call_expr be annotated with file and line. */ |
3214 | if (! EXPR_HAS_LOCATION (*expr_p)) |
3215 | SET_EXPR_LOCATION (*expr_p, input_location); |
3216 | |
3217 | /* Gimplify internal functions created in the FEs. */ |
3218 | if (CALL_EXPR_FN (*expr_p) == NULL_TREE) |
3219 | { |
3220 | if (want_value) |
3221 | return GS_ALL_DONE; |
3222 | |
3223 | nargs = call_expr_nargs (*expr_p); |
3224 | enum internal_fn ifn = CALL_EXPR_IFN (*expr_p); |
3225 | auto_vec<tree> vargs (nargs); |
3226 | |
3227 | for (i = 0; i < nargs; i++) |
3228 | { |
3229 | gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, |
3230 | EXPR_LOCATION (*expr_p)); |
3231 | vargs.quick_push (CALL_EXPR_ARG (*expr_p, i)); |
3232 | } |
3233 | |
3234 | gcall *call = gimple_build_call_internal_vec (ifn, vargs); |
3235 | gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p)); |
3236 | gimplify_seq_add_stmt (pre_p, call); |
3237 | return GS_ALL_DONE; |
3238 | } |
3239 | |
3240 | /* This may be a call to a builtin function. |
3241 | |
3242 | Builtin function calls may be transformed into different |
3243 | (and more efficient) builtin function calls under certain |
3244 | circumstances. Unfortunately, gimplification can muck things |
3245 | up enough that the builtin expanders are not aware that certain |
3246 | transformations are still valid. |
3247 | |
3248 | So we attempt transformation/gimplification of the call before |
3249 | we gimplify the CALL_EXPR. At this time we do not manage to |
3250 | transform all calls in the same manner as the expanders do, but |
3251 | we do transform most of them. */ |
3252 | fndecl = get_callee_fndecl (*expr_p); |
3253 | if (fndecl |
3254 | && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL) |
3255 | switch (DECL_FUNCTION_CODE (fndecl)) |
3256 | { |
3257 | CASE_BUILT_IN_ALLOCA: |
3258 | /* If the call has been built for a variable-sized object, then we |
3259 | want to restore the stack level when the enclosing BIND_EXPR is |
3260 | exited to reclaim the allocated space; otherwise, we precisely |
3261 | need to do the opposite and preserve the latest stack level. */ |
3262 | if (CALL_ALLOCA_FOR_VAR_P (*expr_p)) |
3263 | gimplify_ctxp->save_stack = true; |
3264 | else |
3265 | gimplify_ctxp->keep_stack = true; |
3266 | break; |
3267 | |
3268 | case BUILT_IN_VA_START: |
3269 | { |
	  builtin_va_start_p = true;
3271 | if (call_expr_nargs (*expr_p) < 2) |
3272 | { |
	      error ("too few arguments to function %<va_start%>");
3274 | *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p)); |
3275 | return GS_OK; |
3276 | } |
3277 | |
3278 | if (fold_builtin_next_arg (*expr_p, true)) |
3279 | { |
3280 | *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p)); |
3281 | return GS_OK; |
3282 | } |
3283 | break; |
3284 | } |
3285 | |
3286 | default: |
3287 | ; |
3288 | } |
3289 | if (fndecl && DECL_BUILT_IN (fndecl)) |
3290 | { |
3291 | tree new_tree = fold_call_expr (input_location, *expr_p, !want_value); |
3292 | if (new_tree && new_tree != *expr_p) |
3293 | { |
3294 | /* There was a transformation of this call which computes the |
3295 | same value, but in a more efficient way. Return and try |
3296 | again. */ |
3297 | *expr_p = new_tree; |
3298 | return GS_OK; |
3299 | } |
3300 | } |
3301 | |
3302 | /* Remember the original function pointer type. */ |
3303 | fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p)); |
3304 | |
3305 | /* There is a sequence point before the call, so any side effects in |
3306 | the calling expression must occur before the actual call. Force |
3307 | gimplify_expr to use an internal post queue. */ |
3308 | ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL, |
3309 | is_gimple_call_addr, fb_rvalue); |
3310 | |
3311 | nargs = call_expr_nargs (*expr_p); |
3312 | |
3313 | /* Get argument types for verification. */ |
3314 | fndecl = get_callee_fndecl (*expr_p); |
3315 | parms = NULL_TREE; |
3316 | if (fndecl) |
3317 | parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl)); |
3318 | else |
3319 | parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype)); |
3320 | |
3321 | if (fndecl && DECL_ARGUMENTS (fndecl)) |
3322 | p = DECL_ARGUMENTS (fndecl); |
3323 | else if (parms) |
3324 | p = parms; |
3325 | else |
3326 | p = NULL_TREE; |
3327 | for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p)) |
3328 | ; |
3329 | |
3330 | /* If the last argument is __builtin_va_arg_pack () and it is not |
3331 | passed as a named argument, decrease the number of CALL_EXPR |
3332 | arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */ |
3333 | if (!p |
3334 | && i < nargs |
3335 | && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR) |
3336 | { |
3337 | tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1); |
3338 | tree last_arg_fndecl = get_callee_fndecl (last_arg); |
3339 | |
3340 | if (last_arg_fndecl |
3341 | && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL |
3342 | && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL |
3343 | && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK) |
3344 | { |
3345 | tree call = *expr_p; |
3346 | |
3347 | --nargs; |
3348 | *expr_p = build_call_array_loc (loc, TREE_TYPE (call), |
3349 | CALL_EXPR_FN (call), |
3350 | nargs, CALL_EXPR_ARGP (call)); |
3351 | |
3352 | /* Copy all CALL_EXPR flags, location and block, except |
3353 | CALL_EXPR_VA_ARG_PACK flag. */ |
3354 | CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call); |
3355 | CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call); |
3356 | CALL_EXPR_RETURN_SLOT_OPT (*expr_p) |
3357 | = CALL_EXPR_RETURN_SLOT_OPT (call); |
3358 | CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call); |
3359 | SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call)); |
3360 | |
3361 | /* Set CALL_EXPR_VA_ARG_PACK. */ |
3362 | CALL_EXPR_VA_ARG_PACK (*expr_p) = 1; |
3363 | } |
3364 | } |
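
  /* For reference, __builtin_va_arg_pack is documented for use as the
     last argument inside always_inline variadic wrappers, along the
     lines of (an illustrative sketch, not code from this file):

       extern inline __attribute__ ((always_inline)) int
       my_fprintf (FILE *f, const char *fmt, ...)
       {
         return fprintf (f, fmt, __builtin_va_arg_pack ());
       }

     When such a wrapper is inlined, the pack is replaced by the
     caller's remaining arguments, which is what the flag set above
     records.  */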
3365 | |
3366 | /* If the call returns twice then after building the CFG the call |
3367 | argument computations will no longer dominate the call because |
3368 | we add an abnormal incoming edge to the call. So do not use SSA |
3369 | vars there. */ |
3370 | bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE; |
3371 | |
3372 | /* Gimplify the function arguments. */ |
3373 | if (nargs > 0) |
3374 | { |
3375 | for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0); |
3376 | PUSH_ARGS_REVERSED ? i >= 0 : i < nargs; |
3377 | PUSH_ARGS_REVERSED ? i-- : i++) |
3378 | { |
3379 | enum gimplify_status t; |
3380 | |
3381 | /* Avoid gimplifying the second argument to va_start, which needs to |
3382 | be the plain PARM_DECL. */ |
3383 | if ((i != 1) || !builtin_va_start_p) |
3384 | { |
3385 | t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, |
3386 | EXPR_LOCATION (*expr_p), ! returns_twice); |
3387 | |
3388 | if (t == GS_ERROR) |
3389 | ret = GS_ERROR; |
3390 | } |
3391 | } |
3392 | } |
3393 | |
3394 | /* Gimplify the static chain. */ |
3395 | if (CALL_EXPR_STATIC_CHAIN (*expr_p)) |
3396 | { |
3397 | if (fndecl && !DECL_STATIC_CHAIN (fndecl)) |
3398 | CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL; |
3399 | else |
3400 | { |
3401 | enum gimplify_status t; |
3402 | t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p, |
3403 | EXPR_LOCATION (*expr_p), ! returns_twice); |
3404 | if (t == GS_ERROR) |
3405 | ret = GS_ERROR; |
3406 | } |
3407 | } |
3408 | |
3409 | /* Verify the function result. */ |
3410 | if (want_value && fndecl |
3411 | && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype)))) |
3412 | { |
      error_at (loc, "using result of function returning %<void%>");
3414 | ret = GS_ERROR; |
3415 | } |
3416 | |
3417 | /* Try this again in case gimplification exposed something. */ |
3418 | if (ret != GS_ERROR) |
3419 | { |
3420 | tree new_tree = fold_call_expr (input_location, *expr_p, !want_value); |
3421 | |
3422 | if (new_tree && new_tree != *expr_p) |
3423 | { |
3424 | /* There was a transformation of this call which computes the |
3425 | same value, but in a more efficient way. Return and try |
3426 | again. */ |
3427 | *expr_p = new_tree; |
3428 | return GS_OK; |
3429 | } |
3430 | } |
3431 | else |
3432 | { |
3433 | *expr_p = error_mark_node; |
3434 | return GS_ERROR; |
3435 | } |
3436 | |
  /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS
     on the call expression itself.  This allows us to eliminate
     redundant or useless calls to "const" functions.  */
3440 | if (TREE_CODE (*expr_p) == CALL_EXPR) |
3441 | { |
3442 | int flags = call_expr_flags (*expr_p); |
3443 | if (flags & (ECF_CONST | ECF_PURE) |
3444 | /* An infinite loop is considered a side effect. */ |
3445 | && !(flags & (ECF_LOOPING_CONST_OR_PURE))) |
3446 | TREE_SIDE_EFFECTS (*expr_p) = 0; |
3447 | } |
3448 | |
  /* If the value is not needed by the caller, emit a new GIMPLE_CALL
     and clear *EXPR_P.  Otherwise, leave *EXPR_P in its gimplified
     form and delegate the creation of a GIMPLE_CALL to
     gimplify_modify_expr.  This is always possible because when
     WANT_VALUE is true, the caller wants the result of this call in
     a temporary, which means that we will emit an INIT_EXPR in
     internal_get_tmp_var which will then be handled by
     gimplify_modify_expr.  */
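  /* For illustration (our example, not from this pass): given

       x = foo (a + b);

     the argument was reduced to a temporary above, and since the value
     is wanted the CALL_EXPR is left in place; gimplify_modify_expr
     later turns the assignment into a GIMPLE_CALL with an lhs:

       t1 = a + b;
       x = foo (t1);  */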
3457 | if (!want_value) |
3458 | { |
3459 | /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we |
3460 | have to do is replicate it as a GIMPLE_CALL tuple. */ |
3461 | gimple_stmt_iterator gsi; |
3462 | call = gimple_build_call_from_tree (*expr_p, fnptrtype); |
3463 | notice_special_calls (call); |
3464 | gimplify_seq_add_stmt (pre_p, call); |
3465 | gsi = gsi_last (*pre_p); |
3466 | maybe_fold_stmt (&gsi); |
3467 | *expr_p = NULL_TREE; |
3468 | } |
3469 | else |
3470 | /* Remember the original function type. */ |
3471 | CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype, |
3472 | CALL_EXPR_FN (*expr_p)); |
3473 | |
3474 | return ret; |
3475 | } |
3476 | |
3477 | /* Handle shortcut semantics in the predicate operand of a COND_EXPR by |
3478 | rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs. |
3479 | |
3480 | TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the |
3481 | condition is true or false, respectively. If null, we should generate |
3482 | our own to skip over the evaluation of this specific expression. |
3483 | |
3484 | LOCUS is the source location of the COND_EXPR. |
3485 | |
3486 | This function is the tree equivalent of do_jump. |
3487 | |
3488 | shortcut_cond_r should only be called by shortcut_cond_expr. */ |
3489 | |
3490 | static tree |
3491 | shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p, |
3492 | location_t locus) |
3493 | { |
3494 | tree local_label = NULL_TREE; |
3495 | tree t, expr = NULL; |
3496 | |
3497 | /* OK, it's not a simple case; we need to pull apart the COND_EXPR to |
3498 | retain the shortcut semantics. Just insert the gotos here; |
3499 | shortcut_cond_expr will append the real blocks later. */ |
3500 | if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR) |
3501 | { |
3502 | location_t new_locus; |
3503 | |
3504 | /* Turn if (a && b) into |
3505 | |
3506 | if (a); else goto no; |
3507 | if (b) goto yes; else goto no; |
3508 | (no:) */ |
3509 | |
3510 | if (false_label_p == NULL) |
3511 | false_label_p = &local_label; |
3512 | |
3513 | /* Keep the original source location on the first 'if'. */ |
3514 | t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus); |
3515 | append_to_statement_list (t, &expr); |
3516 | |
3517 | /* Set the source location of the && on the second 'if'. */ |
3518 | new_locus = rexpr_location (pred, locus); |
3519 | t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p, |
3520 | new_locus); |
3521 | append_to_statement_list (t, &expr); |
3522 | } |
3523 | else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR) |
3524 | { |
3525 | location_t new_locus; |
3526 | |
3527 | /* Turn if (a || b) into |
3528 | |
3529 | if (a) goto yes; |
3530 | if (b) goto yes; else goto no; |
3531 | (yes:) */ |
3532 | |
3533 | if (true_label_p == NULL) |
3534 | true_label_p = &local_label; |
3535 | |
3536 | /* Keep the original source location on the first 'if'. */ |
3537 | t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus); |
3538 | append_to_statement_list (t, &expr); |
3539 | |
3540 | /* Set the source location of the || on the second 'if'. */ |
3541 | new_locus = rexpr_location (pred, locus); |
3542 | t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p, |
3543 | new_locus); |
3544 | append_to_statement_list (t, &expr); |
3545 | } |
3546 | else if (TREE_CODE (pred) == COND_EXPR |
3547 | && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1))) |
3548 | && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2)))) |
3549 | { |
3550 | location_t new_locus; |
3551 | |
3552 | /* As long as we're messing with gotos, turn if (a ? b : c) into |
3553 | if (a) |
3554 | if (b) goto yes; else goto no; |
3555 | else |
3556 | if (c) goto yes; else goto no; |
3557 | |
	 Don't do this if one of the arms has void type, which can happen
	 in C++ when the arm is a throw.  */
3560 | |
3561 | /* Keep the original source location on the first 'if'. Set the source |
3562 | location of the ? on the second 'if'. */ |
3563 | new_locus = rexpr_location (pred, locus); |
3564 | expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0), |
3565 | shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, |
3566 | false_label_p, locus), |
3567 | shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p, |
3568 | false_label_p, new_locus)); |
3569 | } |
3570 | else |
3571 | { |
3572 | expr = build3 (COND_EXPR, void_type_node, pred, |
3573 | build_and_jump (true_label_p), |
3574 | build_and_jump (false_label_p)); |
3575 | SET_EXPR_LOCATION (expr, locus); |
3576 | } |
3577 | |
3578 | if (local_label) |
3579 | { |
3580 | t = build1 (LABEL_EXPR, void_type_node, local_label); |
3581 | append_to_statement_list (t, &expr); |
3582 | } |
3583 | |
3584 | return expr; |
3585 | } |
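
/* Putting the cases above together (a sketch, with labels named only
   for readability): for the predicate of

     if (a && b) goto yes; else goto no;

   shortcut_cond_r produces the equivalent of

     if (a) ; else goto no;
     if (b) goto yes; else goto no;

   so that the evaluation of b is skipped whenever a is false.  */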
3586 | |
3587 | /* If EXPR is a GOTO_EXPR, return it. If it is a STATEMENT_LIST, skip |
3588 | any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent |
3589 | statement, if it is the last one. Otherwise, return NULL. */ |
3590 | |
3591 | static tree |
3592 | find_goto (tree expr) |
3593 | { |
3594 | if (!expr) |
3595 | return NULL_TREE; |
3596 | |
3597 | if (TREE_CODE (expr) == GOTO_EXPR) |
3598 | return expr; |
3599 | |
3600 | if (TREE_CODE (expr) != STATEMENT_LIST) |
3601 | return NULL_TREE; |
3602 | |
3603 | tree_stmt_iterator i = tsi_start (expr); |
3604 | |
3605 | while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT) |
3606 | tsi_next (&i); |
3607 | |
3608 | if (!tsi_one_before_end_p (i)) |
3609 | return NULL_TREE; |
3610 | |
3611 | return find_goto (tsi_stmt (i)); |
3612 | } |
3613 | |
3614 | /* Same as find_goto, except that it returns NULL if the destination |
3615 | is not a LABEL_DECL. */ |
3616 | |
3617 | static inline tree |
3618 | find_goto_label (tree expr) |
3619 | { |
3620 | tree dest = find_goto (expr); |
3621 | if (dest && TREE_CODE (GOTO_DESTINATION (dest)) == LABEL_DECL) |
3622 | return dest; |
3623 | return NULL_TREE; |
3624 | } |
3625 | |
3626 | /* Given a conditional expression EXPR with short-circuit boolean |
3627 | predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the |
3628 | predicate apart into the equivalent sequence of conditionals. */ |
3629 | |
3630 | static tree |
3631 | shortcut_cond_expr (tree expr) |
3632 | { |
3633 | tree pred = TREE_OPERAND (expr, 0); |
3634 | tree then_ = TREE_OPERAND (expr, 1); |
3635 | tree else_ = TREE_OPERAND (expr, 2); |
3636 | tree true_label, false_label, end_label, t; |
3637 | tree *true_label_p; |
3638 | tree *false_label_p; |
3639 | bool emit_end, emit_false, jump_over_else; |
3640 | bool then_se = then_ && TREE_SIDE_EFFECTS (then_); |
3641 | bool else_se = else_ && TREE_SIDE_EFFECTS (else_); |
3642 | |
3643 | /* First do simple transformations. */ |
3644 | if (!else_se) |
3645 | { |
3646 | /* If there is no 'else', turn |
3647 | if (a && b) then c |
3648 | into |
3649 | if (a) if (b) then c. */ |
3650 | while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR) |
3651 | { |
3652 | /* Keep the original source location on the first 'if'. */ |
3653 | location_t locus = EXPR_LOC_OR_LOC (expr, input_location); |
3654 | TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1); |
3655 | /* Set the source location of the && on the second 'if'. */ |
3656 | if (rexpr_has_location (pred)) |
3657 | SET_EXPR_LOCATION (expr, rexpr_location (pred)); |
3658 | then_ = shortcut_cond_expr (expr); |
3659 | then_se = then_ && TREE_SIDE_EFFECTS (then_); |
3660 | pred = TREE_OPERAND (pred, 0); |
3661 | expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE); |
3662 | SET_EXPR_LOCATION (expr, locus); |
3663 | } |
3664 | } |
3665 | |
3666 | if (!then_se) |
3667 | { |
3668 | /* If there is no 'then', turn |
3669 | if (a || b); else d |
3670 | into |
3671 | if (a); else if (b); else d. */ |
3672 | while (TREE_CODE (pred) == TRUTH_ORIF_EXPR) |
3673 | { |
3674 | /* Keep the original source location on the first 'if'. */ |
3675 | location_t locus = EXPR_LOC_OR_LOC (expr, input_location); |
3676 | TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1); |
3677 | /* Set the source location of the || on the second 'if'. */ |
3678 | if (rexpr_has_location (pred)) |
3679 | SET_EXPR_LOCATION (expr, rexpr_location (pred)); |
3680 | else_ = shortcut_cond_expr (expr); |
3681 | else_se = else_ && TREE_SIDE_EFFECTS (else_); |
3682 | pred = TREE_OPERAND (pred, 0); |
3683 | expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_); |
3684 | SET_EXPR_LOCATION (expr, locus); |
3685 | } |
3686 | } |
3687 | |
3688 | /* If we're done, great. */ |
3689 | if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR |
3690 | && TREE_CODE (pred) != TRUTH_ORIF_EXPR) |
3691 | return expr; |
3692 | |
3693 | /* Otherwise we need to mess with gotos. Change |
3694 | if (a) c; else d; |
3695 | to |
3696 | if (a); else goto no; |
3697 | c; goto end; |
3698 | no: d; end: |
3699 | and recursively gimplify the condition. */ |
3700 | |
3701 | true_label = false_label = end_label = NULL_TREE; |
3702 | |
3703 | /* If our arms just jump somewhere, hijack those labels so we don't |
3704 | generate jumps to jumps. */ |
3705 | |
3706 | if (tree then_goto = find_goto_label (then_)) |
3707 | { |
3708 | true_label = GOTO_DESTINATION (then_goto); |
3709 | then_ = NULL; |
3710 | then_se = false; |
3711 | } |
3712 | |
3713 | if (tree else_goto = find_goto_label (else_)) |
3714 | { |
3715 | false_label = GOTO_DESTINATION (else_goto); |
3716 | else_ = NULL; |
3717 | else_se = false; |
3718 | } |
3719 | |
3720 | /* If we aren't hijacking a label for the 'then' branch, it falls through. */ |
3721 | if (true_label) |
3722 | true_label_p = &true_label; |
3723 | else |
3724 | true_label_p = NULL; |
3725 | |
3726 | /* The 'else' branch also needs a label if it contains interesting code. */ |
3727 | if (false_label || else_se) |
3728 | false_label_p = &false_label; |
3729 | else |
3730 | false_label_p = NULL; |
3731 | |
3732 | /* If there was nothing else in our arms, just forward the label(s). */ |
3733 | if (!then_se && !else_se) |
3734 | return shortcut_cond_r (pred, true_label_p, false_label_p, |
3735 | EXPR_LOC_OR_LOC (expr, input_location)); |
3736 | |
3737 | /* If our last subexpression already has a terminal label, reuse it. */ |
3738 | if (else_se) |
3739 | t = expr_last (else_); |
3740 | else if (then_se) |
3741 | t = expr_last (then_); |
3742 | else |
3743 | t = NULL; |
3744 | if (t && TREE_CODE (t) == LABEL_EXPR) |
3745 | end_label = LABEL_EXPR_LABEL (t); |
3746 | |
3747 | /* If we don't care about jumping to the 'else' branch, jump to the end |
3748 | if the condition is false. */ |
3749 | if (!false_label_p) |
3750 | false_label_p = &end_label; |
3751 | |
3752 | /* We only want to emit these labels if we aren't hijacking them. */ |
3753 | emit_end = (end_label == NULL_TREE); |
3754 | emit_false = (false_label == NULL_TREE); |
3755 | |
3756 | /* We only emit the jump over the else clause if we have to--if the |
3757 | then clause may fall through. Otherwise we can wind up with a |
3758 | useless jump and a useless label at the end of gimplified code, |
3759 | which will cause us to think that this conditional as a whole |
3760 | falls through even if it doesn't. If we then inline a function |
3761 | which ends with such a condition, that can cause us to issue an |
3762 | inappropriate warning about control reaching the end of a |
3763 | non-void function. */ |
3764 | jump_over_else = block_may_fallthru (then_); |
3765 | |
3766 | pred = shortcut_cond_r (pred, true_label_p, false_label_p, |
3767 | EXPR_LOC_OR_LOC (expr, input_location)); |
3768 | |
3769 | expr = NULL; |
3770 | append_to_statement_list (pred, &expr); |
3771 | |
3772 | append_to_statement_list (then_, &expr); |
3773 | if (else_se) |
3774 | { |
3775 | if (jump_over_else) |
3776 | { |
3777 | tree last = expr_last (expr); |
3778 | t = build_and_jump (&end_label); |
3779 | if (rexpr_has_location (last)) |
3780 | SET_EXPR_LOCATION (t, rexpr_location (last)); |
3781 | append_to_statement_list (t, &expr); |
3782 | } |
3783 | if (emit_false) |
3784 | { |
3785 | t = build1 (LABEL_EXPR, void_type_node, false_label); |
3786 | append_to_statement_list (t, &expr); |
3787 | } |
3788 | append_to_statement_list (else_, &expr); |
3789 | } |
3790 | if (emit_end && end_label) |
3791 | { |
3792 | t = build1 (LABEL_EXPR, void_type_node, end_label); |
3793 | append_to_statement_list (t, &expr); |
3794 | } |
3795 | |
3796 | return expr; |
3797 | } |
3798 | |
3799 | /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */ |
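
/* For instance, a condition written as (a & b) arrives here with
   integer type and is folded into a conversion to boolean_type_node,
   whereas a comparison such as a < b merely has its type retargeted
   in place (see the COMPARISON_CLASS_P case below).  */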
3800 | |
3801 | tree |
3802 | gimple_boolify (tree expr) |
3803 | { |
3804 | tree type = TREE_TYPE (expr); |
3805 | location_t loc = EXPR_LOCATION (expr); |
3806 | |
3807 | if (TREE_CODE (expr) == NE_EXPR |
3808 | && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR |
3809 | && integer_zerop (TREE_OPERAND (expr, 1))) |
3810 | { |
3811 | tree call = TREE_OPERAND (expr, 0); |
3812 | tree fn = get_callee_fndecl (call); |
3813 | |
3814 | /* For __builtin_expect ((long) (x), y) recurse into x as well |
3815 | if x is truth_value_p. */ |
3816 | if (fn |
3817 | && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL |
3818 | && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT |
3819 | && call_expr_nargs (call) == 2) |
3820 | { |
3821 | tree arg = CALL_EXPR_ARG (call, 0); |
3822 | if (arg) |
3823 | { |
3824 | if (TREE_CODE (arg) == NOP_EXPR |
3825 | && TREE_TYPE (arg) == TREE_TYPE (call)) |
3826 | arg = TREE_OPERAND (arg, 0); |
3827 | if (truth_value_p (TREE_CODE (arg))) |
3828 | { |
3829 | arg = gimple_boolify (arg); |
3830 | CALL_EXPR_ARG (call, 0) |
3831 | = fold_convert_loc (loc, TREE_TYPE (call), arg); |
3832 | } |
3833 | } |
3834 | } |
3835 | } |
3836 | |
3837 | switch (TREE_CODE (expr)) |
3838 | { |
3839 | case TRUTH_AND_EXPR: |
3840 | case TRUTH_OR_EXPR: |
3841 | case TRUTH_XOR_EXPR: |
3842 | case TRUTH_ANDIF_EXPR: |
3843 | case TRUTH_ORIF_EXPR: |
3844 | /* Also boolify the arguments of truth exprs. */ |
3845 | TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1)); |
3846 | /* FALLTHRU */ |
3847 | |
3848 | case TRUTH_NOT_EXPR: |
3849 | TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0)); |
3850 | |
3851 | /* These expressions always produce boolean results. */ |
3852 | if (TREE_CODE (type) != BOOLEAN_TYPE) |
3853 | TREE_TYPE (expr) = boolean_type_node; |
3854 | return expr; |
3855 | |
3856 | case ANNOTATE_EXPR: |
3857 | switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1))) |
3858 | { |
3859 | case annot_expr_ivdep_kind: |
3860 | case annot_expr_unroll_kind: |
3861 | case annot_expr_no_vector_kind: |
3862 | case annot_expr_vector_kind: |
3863 | case annot_expr_parallel_kind: |
3864 | TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0)); |
3865 | if (TREE_CODE (type) != BOOLEAN_TYPE) |
3866 | TREE_TYPE (expr) = boolean_type_node; |
3867 | return expr; |
3868 | default: |
3869 | gcc_unreachable (); |
3870 | } |
3871 | |
3872 | default: |
3873 | if (COMPARISON_CLASS_P (expr)) |
3874 | { |
	  /* These expressions always produce boolean results.  */
3876 | if (TREE_CODE (type) != BOOLEAN_TYPE) |
3877 | TREE_TYPE (expr) = boolean_type_node; |
3878 | return expr; |
3879 | } |
3880 | /* Other expressions that get here must have boolean values, but |
3881 | might need to be converted to the appropriate mode. */ |
3882 | if (TREE_CODE (type) == BOOLEAN_TYPE) |
3883 | return expr; |
3884 | return fold_convert_loc (loc, boolean_type_node, expr); |
3885 | } |
3886 | } |
3887 | |
3888 | /* Given a conditional expression *EXPR_P without side effects, gimplify |
3889 | its operands. New statements are inserted to PRE_P. */ |
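
/* For example (a sketch), a value computation such as

     x = p ? a + 1 : b;

   whose arms have no side effects and cannot trap may be kept in this
   form, with the condition and both arms reduced to gimple operands,
   instead of being lowered to branches.  */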
3890 | |
3891 | static enum gimplify_status |
3892 | gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p) |
3893 | { |
3894 | tree expr = *expr_p, cond; |
3895 | enum gimplify_status ret, tret; |
3896 | enum tree_code code; |
3897 | |
3898 | cond = gimple_boolify (COND_EXPR_COND (expr)); |
3899 | |
  /* We need to handle && and || specially, as gimplifying them creates
     pure COND_EXPRs which would otherwise lead to an infinite cycle.  */
3902 | code = TREE_CODE (cond); |
3903 | if (code == TRUTH_ANDIF_EXPR) |
3904 | TREE_SET_CODE (cond, TRUTH_AND_EXPR); |
3905 | else if (code == TRUTH_ORIF_EXPR) |
3906 | TREE_SET_CODE (cond, TRUTH_OR_EXPR); |
3907 | ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue); |
3908 | COND_EXPR_COND (*expr_p) = cond; |
3909 | |
3910 | tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL, |
3911 | is_gimple_val, fb_rvalue); |
3912 | ret = MIN (ret, tret); |
3913 | tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL, |
3914 | is_gimple_val, fb_rvalue); |
3915 | |
3916 | return MIN (ret, tret); |
3917 | } |
3918 | |
3919 | /* Return true if evaluating EXPR could trap. |
3920 | EXPR is GENERIC, while tree_could_trap_p can be called |
3921 | only on GIMPLE. */ |
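
/* For instance, a memory dereference or an integer division whose
   divisor is not known to be nonzero makes the expression potentially
   trapping here.  */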
3922 | |
3923 | static bool |
3924 | generic_expr_could_trap_p (tree expr) |
3925 | { |
3926 | unsigned i, n; |
3927 | |
3928 | if (!expr || is_gimple_val (expr)) |
3929 | return false; |
3930 | |
3931 | if (!EXPR_P (expr) || tree_could_trap_p (expr)) |
3932 | return true; |
3933 | |
3934 | n = TREE_OPERAND_LENGTH (expr); |
3935 | for (i = 0; i < n; i++) |
3936 | if (generic_expr_could_trap_p (TREE_OPERAND (expr, i))) |
3937 | return true; |
3938 | |
3939 | return false; |
3940 | } |
3941 | |
3942 | /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;' |
3943 | into |
3944 | |
3945 | if (p) if (p) |
3946 | t1 = a; a; |
3947 | else or else |
3948 | t1 = b; b; |
3949 | t1; |
3950 | |
3951 | The second form is used when *EXPR_P is of type void. |
3952 | |
3953 | PRE_P points to the list where side effects that must happen before |
3954 | *EXPR_P should be stored. */ |
3955 | |
3956 | static enum gimplify_status |
3957 | gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback) |
3958 | { |
3959 | tree expr = *expr_p; |
3960 | tree type = TREE_TYPE (expr); |
3961 | location_t loc = EXPR_LOCATION (expr); |
3962 | tree tmp, arm1, arm2; |
3963 | enum gimplify_status ret; |
3964 | tree label_true, label_false, label_cont; |
3965 | bool have_then_clause_p, have_else_clause_p; |
3966 | gcond *cond_stmt; |
3967 | enum tree_code pred_code; |
3968 | gimple_seq seq = NULL; |
3969 | |
3970 | /* If this COND_EXPR has a value, copy the values into a temporary within |
3971 | the arms. */ |
3972 | if (!VOID_TYPE_P (type)) |
3973 | { |
3974 | tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2); |
3975 | tree result; |
3976 | |
3977 | /* If either an rvalue is ok or we do not require an lvalue, create the |
3978 | temporary. But we cannot do that if the type is addressable. */ |
3979 | if (((fallback & fb_rvalue) || !(fallback & fb_lvalue)) |
3980 | && !TREE_ADDRESSABLE (type)) |
3981 | { |
3982 | if (gimplify_ctxp->allow_rhs_cond_expr |
3983 | /* If either branch has side effects or could trap, it can't be |
3984 | evaluated unconditionally. */ |
3985 | && !TREE_SIDE_EFFECTS (then_) |
3986 | && !generic_expr_could_trap_p (then_) |
3987 | && !TREE_SIDE_EFFECTS (else_) |
3988 | && !generic_expr_could_trap_p (else_)) |
3989 | return gimplify_pure_cond_expr (expr_p, pre_p); |
3990 | |
	  tmp = create_tmp_var (type, "iftmp");
3992 | result = tmp; |
3993 | } |
3994 | |
3995 | /* Otherwise, only create and copy references to the values. */ |
3996 | else |
3997 | { |
3998 | type = build_pointer_type (type); |
3999 | |
4000 | if (!VOID_TYPE_P (TREE_TYPE (then_))) |
4001 | then_ = build_fold_addr_expr_loc (loc, then_); |
4002 | |
4003 | if (!VOID_TYPE_P (TREE_TYPE (else_))) |
4004 | else_ = build_fold_addr_expr_loc (loc, else_); |
4005 | |
4006 | expr |
4007 | = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_); |
4008 | |
	  tmp = create_tmp_var (type, "iftmp");
4010 | result = build_simple_mem_ref_loc (loc, tmp); |
4011 | } |
4012 | |
      /* Build the new then clause, `tmp = then_;'.  But don't build the
	 assignment if the value is void; in C++ the arm can be a throw.  */
4015 | if (!VOID_TYPE_P (TREE_TYPE (then_))) |
4016 | TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_); |
4017 | |
4018 | /* Similarly, build the new else clause, `tmp = else_;'. */ |
4019 | if (!VOID_TYPE_P (TREE_TYPE (else_))) |
4020 | TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_); |
4021 | |
4022 | TREE_TYPE (expr) = void_type_node; |
4023 | recalculate_side_effects (expr); |
4024 | |
4025 | /* Move the COND_EXPR to the prequeue. */ |
4026 | gimplify_stmt (&expr, pre_p); |
4027 | |
4028 | *expr_p = result; |
4029 | return GS_ALL_DONE; |
4030 | } |
4031 | |
4032 | /* Remove any COMPOUND_EXPR so the following cases will be caught. */ |
4033 | STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0)); |
4034 | if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR) |
4035 | gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true); |
4036 | |
4037 | /* Make sure the condition has BOOLEAN_TYPE. */ |
4038 | TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0)); |
4039 | |
4040 | /* Break apart && and || conditions. */ |
4041 | if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR |
4042 | || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR) |
4043 | { |
4044 | expr = shortcut_cond_expr (expr); |
4045 | |
4046 | if (expr != *expr_p) |
4047 | { |
4048 | *expr_p = expr; |
4049 | |
4050 | /* We can't rely on gimplify_expr to re-gimplify the expanded |
4051 | form properly, as cleanups might cause the target labels to be |
4052 | wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to |
4053 | set up a conditional context. */ |
4054 | gimple_push_condition (); |
4055 | gimplify_stmt (expr_p, &seq); |
4056 | gimple_pop_condition (pre_p); |
4057 | gimple_seq_add_seq (pre_p, seq); |
4058 | |
4059 | return GS_ALL_DONE; |
4060 | } |
4061 | } |
4062 | |
4063 | /* Now do the normal gimplification. */ |
4064 | |
4065 | /* Gimplify condition. */ |
4066 | ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr, |
4067 | fb_rvalue); |
4068 | if (ret == GS_ERROR) |
4069 | return GS_ERROR; |
4070 | gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE); |
4071 | |
4072 | gimple_push_condition (); |
4073 | |
4074 | have_then_clause_p = have_else_clause_p = false; |
4075 | label_true = find_goto_label (TREE_OPERAND (expr, 1)); |
4076 | if (label_true |
4077 | && DECL_CONTEXT (GOTO_DESTINATION (label_true)) == current_function_decl |
4078 | /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR |
4079 | have different locations, otherwise we end up with incorrect |
4080 | location information on the branches. */ |
4081 | && (optimize |
4082 | || !EXPR_HAS_LOCATION (expr) |
4083 | || !rexpr_has_location (label_true) |
4084 | || EXPR_LOCATION (expr) == rexpr_location (label_true))) |
4085 | { |
4086 | have_then_clause_p = true; |
4087 | label_true = GOTO_DESTINATION (label_true); |
4088 | } |
4089 | else |
4090 | label_true = create_artificial_label (UNKNOWN_LOCATION); |
4091 | label_false = find_goto_label (TREE_OPERAND (expr, 2)); |
4092 | if (label_false |
4093 | && DECL_CONTEXT (GOTO_DESTINATION (label_false)) == current_function_decl |
4094 | /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR |
4095 | have different locations, otherwise we end up with incorrect |
4096 | location information on the branches. */ |
4097 | && (optimize |
4098 | || !EXPR_HAS_LOCATION (expr) |
4099 | || !rexpr_has_location (label_false) |
4100 | || EXPR_LOCATION (expr) == rexpr_location (label_false))) |
4101 | { |
4102 | have_else_clause_p = true; |
4103 | label_false = GOTO_DESTINATION (label_false); |
4104 | } |
4105 | else |
4106 | label_false = create_artificial_label (UNKNOWN_LOCATION); |
4107 | |
4108 | gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1, |
4109 | &arm2); |
4110 | cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, |
4111 | label_false); |
4112 | gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr))); |
4113 | gimplify_seq_add_stmt (&seq, cond_stmt); |
4114 | gimple_stmt_iterator gsi = gsi_last (seq); |
4115 | maybe_fold_stmt (&gsi); |
4116 | |
4117 | label_cont = NULL_TREE; |
4118 | if (!have_then_clause_p) |
4119 | { |
4120 | /* For if (...) {} else { code; } put label_true after |
4121 | the else block. */ |
4122 | if (TREE_OPERAND (expr, 1) == NULL_TREE |
4123 | && !have_else_clause_p |
4124 | && TREE_OPERAND (expr, 2) != NULL_TREE) |
4125 | label_cont = label_true; |
4126 | else |
4127 | { |
4128 | gimplify_seq_add_stmt (&seq, gimple_build_label (label_true)); |
4129 | have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq); |
4130 | /* For if (...) { code; } else {} or |
4131 | if (...) { code; } else goto label; or |
4132 | if (...) { code; return; } else { ... } |
4133 | label_cont isn't needed. */ |
4134 | if (!have_else_clause_p |
4135 | && TREE_OPERAND (expr, 2) != NULL_TREE |
4136 | && gimple_seq_may_fallthru (seq)) |
4137 | { |
4138 | gimple *g; |
4139 | label_cont = create_artificial_label (UNKNOWN_LOCATION); |
4140 | |
4141 | g = gimple_build_goto (label_cont); |
4142 | |
4143 | /* GIMPLE_COND's are very low level; they have embedded |
4144 | gotos. This particular embedded goto should not be marked |
4145 | with the location of the original COND_EXPR, as it would |
4146 | correspond to the COND_EXPR's condition, not the ELSE or the |
4147 | THEN arms. To avoid marking it with the wrong location, flag |
4148 | it as "no location". */ |
4149 | gimple_set_do_not_emit_location (g); |
4150 | |
4151 | gimplify_seq_add_stmt (&seq, g); |
4152 | } |
4153 | } |
4154 | } |
4155 | if (!have_else_clause_p) |
4156 | { |
4157 | gimplify_seq_add_stmt (&seq, gimple_build_label (label_false)); |
4158 | have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq); |
4159 | } |
4160 | if (label_cont) |
4161 | gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont)); |
4162 | |
4163 | gimple_pop_condition (pre_p); |
4164 | gimple_seq_add_seq (pre_p, seq); |
4165 | |
4166 | if (ret == GS_ERROR) |
4167 | ; /* Do nothing. */ |
4168 | else if (have_then_clause_p || have_else_clause_p) |
4169 | ret = GS_ALL_DONE; |
4170 | else |
4171 | { |
4172 | /* Both arms are empty; replace the COND_EXPR with its predicate. */ |
4173 | expr = TREE_OPERAND (expr, 0); |
4174 | gimplify_stmt (&expr, pre_p); |
4175 | } |
4176 | |
4177 | *expr_p = NULL; |
4178 | return ret; |
4179 | } |
4180 | |
4181 | /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression, |
4182 | to be marked addressable. |
4183 | |
4184 | We cannot rely on such an expression being directly markable if a temporary |
4185 | has been created by the gimplification. In this case, we create another |
4186 | temporary and initialize it with a copy, which will become a store after we |
4187 | mark it addressable. This can happen if the front-end passed us something |
4188 | that it could not mark addressable yet, like a Fortran pass-by-reference |
   parameter of the form (int) floatvar.  */
4190 | |
4191 | static void |
4192 | prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p) |
4193 | { |
4194 | while (handled_component_p (*expr_p)) |
4195 | expr_p = &TREE_OPERAND (*expr_p, 0); |
4196 | if (is_gimple_reg (*expr_p)) |
4197 | { |
4198 | /* Do not allow an SSA name as the temporary. */ |
4199 | tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false); |
4200 | DECL_GIMPLE_REG_P (var) = 0; |
4201 | *expr_p = var; |
4202 | } |
4203 | } |
4204 | |
4205 | /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with |
4206 | a call to __builtin_memcpy. */ |
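
/* A sketch of the transformation (names are illustrative): a copy of
   a large or variable-sized aggregate such as

     to = from;

   becomes

     __builtin_memcpy (&to, &from, size);

   with both addresses gimplified into the sequence first.  */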
4207 | |
4208 | static enum gimplify_status |
4209 | gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value, |
4210 | gimple_seq *seq_p) |
4211 | { |
4212 | tree t, to, to_ptr, from, from_ptr; |
4213 | gcall *gs; |
4214 | location_t loc = EXPR_LOCATION (*expr_p); |
4215 | |
4216 | to = TREE_OPERAND (*expr_p, 0); |
4217 | from = TREE_OPERAND (*expr_p, 1); |
4218 | |
4219 | /* Mark the RHS addressable. Beware that it may not be possible to do so |
4220 | directly if a temporary has been created by the gimplification. */ |
4221 | prepare_gimple_addressable (&from, seq_p); |
4222 | |
4223 | mark_addressable (from); |
4224 | from_ptr = build_fold_addr_expr_loc (loc, from); |
4225 | gimplify_arg (&from_ptr, seq_p, loc); |
4226 | |
4227 | mark_addressable (to); |
4228 | to_ptr = build_fold_addr_expr_loc (loc, to); |
4229 | gimplify_arg (&to_ptr, seq_p, loc); |
4230 | |
4231 | t = builtin_decl_implicit (BUILT_IN_MEMCPY); |
4232 | |
4233 | gs = gimple_build_call (t, 3, to_ptr, from_ptr, size); |
4234 | |
4235 | if (want_value) |
4236 | { |
4237 | /* tmp = memcpy() */ |
4238 | t = create_tmp_var (TREE_TYPE (to_ptr)); |
4239 | gimple_call_set_lhs (gs, t); |
4240 | gimplify_seq_add_stmt (seq_p, gs); |
4241 | |
4242 | *expr_p = build_simple_mem_ref (t); |
4243 | return GS_ALL_DONE; |
4244 | } |
4245 | |
4246 | gimplify_seq_add_stmt (seq_p, gs); |
4247 | *expr_p = NULL; |
4248 | return GS_ALL_DONE; |
4249 | } |
4250 | |
4251 | /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with |
4252 | a call to __builtin_memset. In this case we know that the RHS is |
4253 | a CONSTRUCTOR with an empty element list. */ |
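
/* A sketch (our example): clearing an aggregate with an empty
   constructor, e.g.

     s = (struct S) {};

   can be emitted as

     __builtin_memset (&s, 0, sizeof (struct S));

   which is the call built below.  */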
4254 | |
4255 | static enum gimplify_status |
4256 | gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value, |
4257 | gimple_seq *seq_p) |
4258 | { |
4259 | tree t, from, to, to_ptr; |
4260 | gcall *gs; |
4261 | location_t loc = EXPR_LOCATION (*expr_p); |
4262 | |
4263 | /* Assert our assumptions, to abort instead of producing wrong code |
4264 | silently if they are not met. Beware that the RHS CONSTRUCTOR might |
4265 | not be immediately exposed. */ |
4266 | from = TREE_OPERAND (*expr_p, 1); |
4267 | if (TREE_CODE (from) == WITH_SIZE_EXPR) |
4268 | from = TREE_OPERAND (from, 0); |
4269 | |
4270 | gcc_assert (TREE_CODE (from) == CONSTRUCTOR |
4271 | && vec_safe_is_empty (CONSTRUCTOR_ELTS (from))); |
4272 | |
4273 | /* Now proceed. */ |
4274 | to = TREE_OPERAND (*expr_p, 0); |
4275 | |
4276 | to_ptr = build_fold_addr_expr_loc (loc, to); |
4277 | gimplify_arg (&to_ptr, seq_p, loc); |
4278 | t = builtin_decl_implicit (BUILT_IN_MEMSET); |
4279 | |
4280 | gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size); |
4281 | |
4282 | if (want_value) |
4283 | { |
4284 | /* tmp = memset() */ |
4285 | t = create_tmp_var (TREE_TYPE (to_ptr)); |
4286 | gimple_call_set_lhs (gs, t); |
4287 | gimplify_seq_add_stmt (seq_p, gs); |
4288 | |
4289 | *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t); |
4290 | return GS_ALL_DONE; |
4291 | } |
4292 | |
4293 | gimplify_seq_add_stmt (seq_p, gs); |
4294 | *expr_p = NULL; |
4295 | return GS_ALL_DONE; |
4296 | } |
4297 | |
4298 | /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree, |
4299 | determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an |
4300 | assignment. Return non-null if we detect a potential overlap. */ |
4301 | |
4302 | struct gimplify_init_ctor_preeval_data |
4303 | { |
4304 | /* The base decl of the lhs object. May be NULL, in which case we |
4305 | have to assume the lhs is indirect. */ |
4306 | tree lhs_base_decl; |
4307 | |
4308 | /* The alias set of the lhs object. */ |
4309 | alias_set_type lhs_alias_set; |
4310 | }; |
4311 | |
4312 | static tree |
4313 | gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata) |
4314 | { |
4315 | struct gimplify_init_ctor_preeval_data *data |
4316 | = (struct gimplify_init_ctor_preeval_data *) xdata; |
4317 | tree t = *tp; |
4318 | |
4319 | /* If we find the base object, obviously we have overlap. */ |
4320 | if (data->lhs_base_decl == t) |
4321 | return t; |
4322 | |
4323 | /* If the constructor component is indirect, determine if we have a |
4324 | potential overlap with the lhs. The only bits of information we |
4325 | have to go on at this point are addressability and alias sets. */ |
4326 | if ((INDIRECT_REF_P ( |
---|