/* Miscellaneous SSA utility functions.
   Copyright (C) 2001-2024 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-walk.h"
#include "tree-ssa-loop-manip.h"
#include "tree-into-ssa.h"
#include "tree-ssa.h"
#include "cfgloop.h"
#include "cfgexpand.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"

/* Pointer map of variable mappings, keyed by edge.  */
static hash_map<edge, auto_vec<edge_var_map> > *edge_var_maps;


/* Add a mapping with PHI RESULT and PHI DEF associated with edge E.  */

void
redirect_edge_var_map_add (edge e, tree result, tree def, location_t locus)
{
  edge_var_map new_node;

  if (edge_var_maps == NULL)
    edge_var_maps = new hash_map<edge, auto_vec<edge_var_map> >;

  auto_vec<edge_var_map> &slot = edge_var_maps->get_or_insert (e);
  new_node.def = def;
  new_node.result = result;
  new_node.locus = locus;

  slot.safe_push (new_node);
}


/* Clear the var mappings in edge E.  */

void
redirect_edge_var_map_clear (edge e)
{
  if (!edge_var_maps)
    return;

  auto_vec<edge_var_map> *head = edge_var_maps->get (e);

  if (head)
    head->release ();
}


/* Duplicate the redirected var mappings of edge OLDE onto edge NEWE.

   This assumes a hash_map can have multiple edges mapping to the same
   var_map (a many-to-one mapping), since we don't remove the previous
   mappings.  */

void
redirect_edge_var_map_dup (edge newe, edge olde)
{
  if (!edge_var_maps)
    return;

  auto_vec<edge_var_map> *new_head = &edge_var_maps->get_or_insert (newe);
  auto_vec<edge_var_map> *old_head = edge_var_maps->get (olde);
  if (!old_head)
    return;

  new_head->safe_splice (*old_head);
}


/* Return the variable mappings for a given edge.  If there is none, return
   NULL.  */

vec<edge_var_map> *
redirect_edge_var_map_vector (edge e)
{
  /* Hey, what kind of idiot would... you'd be surprised.  */
  if (!edge_var_maps)
    return NULL;

  auto_vec<edge_var_map> *slot = edge_var_maps->get (e);
  if (!slot)
    return NULL;

  return slot;
}

/* Clear the edge variable mappings.  */

void
redirect_edge_var_map_empty (void)
{
  if (edge_var_maps)
    edge_var_maps->empty ();
}


/* Remove the corresponding arguments from the PHI nodes in E's
   destination block and redirect it to DEST.  Return the redirected edge.
   The list of removed arguments is stored in a vector accessed
   through edge_var_maps.  */

edge
ssa_redirect_edge (edge e, basic_block dest)
{
  gphi_iterator gsi;
  gphi *phi;

  redirect_edge_var_map_clear (e);

  /* Remove the appropriate PHI arguments in E's destination block.
     If we are redirecting a copied edge, the destination has neither
     PHI argument space reserved nor an interesting argument.  */
  if (! (e->dest->flags & BB_DUPLICATED))
    for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        tree def;
        location_t locus;

        phi = gsi.phi ();
        def = gimple_phi_arg_def (phi, e->dest_idx);
        locus = gimple_phi_arg_location (phi, e->dest_idx);

        if (def == NULL_TREE)
          continue;

        redirect_edge_var_map_add (e, gimple_phi_result (phi), def, locus);
      }

  e = redirect_edge_succ_nodup (e, dest);

  return e;
}


/* Add PHI arguments queued in the PENDING_STMT list on edge E to edge
   E->dest.  */

void
flush_pending_stmts (edge e)
{
  gphi *phi;
  edge_var_map *vm;
  int i;
  gphi_iterator gsi;

  vec<edge_var_map> *v = redirect_edge_var_map_vector (e);
  if (!v)
    return;

  for (gsi = gsi_start_phis (e->dest), i = 0;
       !gsi_end_p (gsi) && v->iterate (i, &vm);
       gsi_next (&gsi), i++)
    {
      tree def;

      phi = gsi.phi ();
      def = redirect_edge_var_map_def (vm);
      add_phi_arg (phi, def, e, redirect_edge_var_map_location (vm));
    }

  redirect_edge_var_map_clear (e);
}
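
/* Example (illustrative sketch, not part of this file): callers
   typically pair ssa_redirect_edge with flush_pending_stmts.  The
   first call queues the PHI arguments of the old destination in the
   edge var map; once PHI nodes exist in the new destination, the
   second call turns the queued entries back into PHI arguments:

     edge e2 = ssa_redirect_edge (e, new_dest);
     ...create or adjust PHI nodes in new_dest...
     flush_pending_stmts (e2);

   NEW_DEST is a hypothetical destination block.  */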

/* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
   GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
   expression with a different value.

   This will update any annotations (say debug bind stmts) referring
   to the original LHS, so that they use the RHS instead.  This is
   done even if NLHS and LHS are the same, for it is understood that
   the RHS will be modified afterwards, and NLHS will not be assigned
   an equivalent value.

   Adjusting any non-annotation uses of the LHS, if needed, is a
   responsibility of the caller.

   The effect of this call should be pretty much the same as that of
   inserting a copy of STMT before STMT, and then removing the
   original stmt, at which time gsi_remove() would have updated the
   annotations, but using this function saves all the inserting,
   copying and removing.  */

void
gimple_replace_ssa_lhs (gimple *stmt, tree nlhs)
{
  if (MAY_HAVE_DEBUG_BIND_STMTS)
    {
      tree lhs = gimple_get_lhs (stmt);

      gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);

      insert_debug_temp_for_var_def (NULL, lhs);
    }

  gimple_set_lhs (stmt, nlhs);
}
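
/* Example (illustrative sketch, not part of this file): a pass about
   to give STMT an unrelated value would first detach the old LHS so
   that debug binds keep tracking it:

     tree new_lhs = make_ssa_name (TREE_TYPE (old_lhs));
     gimple_replace_ssa_lhs (stmt, new_lhs);
     gimple_assign_set_rhs1 (stmt, new_value);
     update_stmt (stmt);

   OLD_LHS and NEW_VALUE are hypothetical placeholders.  */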


/* Given a tree for an expression for which we might want to emit
   locations or values in debug information (generally a variable, but
   we might deal with other kinds of trees in the future), return the
   tree that should be used as the variable of a DEBUG_BIND STMT or
   VAR_LOCATION INSN or NOTE.  Return NULL if VAR is not to be tracked.  */

tree
target_for_debug_bind (tree var)
{
  if (!MAY_HAVE_DEBUG_BIND_STMTS)
    return NULL_TREE;

  if (TREE_CODE (var) == SSA_NAME)
    {
      var = SSA_NAME_VAR (var);
      if (var == NULL_TREE)
        return NULL_TREE;
    }

  if ((!VAR_P (var) || VAR_DECL_IS_VIRTUAL_OPERAND (var))
      && TREE_CODE (var) != PARM_DECL)
    return NULL_TREE;

  if (DECL_HAS_VALUE_EXPR_P (var))
    return target_for_debug_bind (DECL_VALUE_EXPR (var));

  if (DECL_IGNORED_P (var))
    return NULL_TREE;

  /* var-tracking only tracks registers.  */
  if (!is_gimple_reg_type (TREE_TYPE (var)))
    return NULL_TREE;

  return var;
}
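
/* Example (illustrative sketch, not part of this file): guarding the
   emission of a debug bind on the result of this function:

     if (tree tracked = target_for_debug_bind (var))
       {
         gdebug *bind = gimple_build_debug_bind (tracked, value, stmt);
         gsi_insert_after (&gsi, bind, GSI_NEW_STMT);
       }

   VALUE, STMT and GSI are hypothetical placeholders.  */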

/* Called via walk_tree, look for SSA_NAMEs that have already been
   released.  */

tree
find_released_ssa_name (tree *tp, int *walk_subtrees, void *data_)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;

  if (wi && wi->is_lhs)
    return NULL_TREE;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      if (SSA_NAME_IN_FREE_LIST (*tp))
        return *tp;

      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Insert a DEBUG BIND stmt before the DEF of VAR if VAR is referenced
   by other DEBUG stmts, and replace uses of the DEF with the
   newly-created debug temp.  */

void
insert_debug_temp_for_var_def (gimple_stmt_iterator *gsi, tree var)
{
  imm_use_iterator imm_iter;
  use_operand_p use_p;
  gimple *stmt;
  gimple *def_stmt = NULL;
  int usecount = 0;
  tree value = NULL;

  if (!MAY_HAVE_DEBUG_BIND_STMTS)
    return;

  /* If this name has already been registered for replacement, do nothing,
     as anything that uses this name isn't in SSA form.  */
  if (name_registered_for_update_p (var))
    return;

  /* Check whether there are debug stmts that reference this variable and,
     if there are, decide whether we should use a debug temp.  */
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
    {
      stmt = USE_STMT (use_p);

      if (!gimple_debug_bind_p (stmt))
        continue;

      if (usecount++)
        break;

      if (gimple_debug_bind_get_value (stmt) != var)
        {
          /* Count this as an additional use, so as to make sure we
             use a temp unless VAR's definition has a SINGLE_RHS that
             can be shared.  */
          usecount++;
          break;
        }
    }

  if (!usecount)
    return;

  if (gsi)
    def_stmt = gsi_stmt (*gsi);
  else
    def_stmt = SSA_NAME_DEF_STMT (var);

  /* If we didn't get an insertion point, and the stmt has already
     been removed, we won't be able to insert the debug bind stmt, so
     we'll have to drop debug information.  */
  if (gimple_code (def_stmt) == GIMPLE_PHI)
    {
      value = degenerate_phi_result (as_a <gphi *> (def_stmt));
      if (value && walk_tree (&value, find_released_ssa_name, NULL, NULL))
        value = NULL;
      /* error_mark_node is what fixup_noreturn_call changes PHI arguments
         to.  */
      else if (value == error_mark_node)
        value = NULL;
    }
  else if (gimple_clobber_p (def_stmt))
    /* We can end up here when rewriting a decl into SSA and coming
       across a clobber for the original decl.  Turn that into
       # DEBUG decl => NULL  */
    value = NULL;
  else if (is_gimple_assign (def_stmt))
    {
      bool no_value = false;

      if (!dom_info_available_p (CDI_DOMINATORS))
        {
          struct walk_stmt_info wi;

          memset (&wi, 0, sizeof (wi));

          /* When removing blocks without following reverse dominance
             order, we may sometimes encounter SSA_NAMEs that have
             already been released, referenced in other SSA_DEFs that
             we're about to release.  Consider:

             <bb X>:
             v_1 = foo;

             <bb Y>:
             w_2 = v_1 + bar;
             # DEBUG w => w_2

             If we deleted BB X first, propagating the value of w_2
             won't do us any good.  It's too late to recover the
             original definition of v_1: when it was deleted, it was
             only referenced in other DEFs, it couldn't possibly know
             it should have been retained, and propagating every
             single DEF just in case it might have to be propagated
             into a DEBUG STMT would probably be too wasteful.

             When dominator information is not readily available, we
             check for and accept some loss of debug information.  But
             if it is available, there's no excuse for us to remove
             blocks in the wrong order, so we don't even check for
             dead SSA NAMEs.  SSA verification shall catch any
             errors.  */
          if ((!gsi && !gimple_bb (def_stmt))
              || walk_gimple_op (def_stmt, find_released_ssa_name, &wi))
            no_value = true;
        }

      if (!no_value)
        value = gimple_assign_rhs_to_tree (def_stmt);
    }

  if (value)
    {
      /* If there's a single use of VAR, and VAR is the entire debug
         expression (usecount would have been incremented again
         otherwise), then we can propagate VALUE into this single use,
         avoiding the temp.

         We can also avoid using a temp if VALUE can be shared and
         propagated into all uses, without generating expressions that
         wouldn't be valid gimple RHSs.

         Other cases that would require unsharing or non-gimple RHSs
         are deferred to a debug temp, although we could avoid temps
         at the expense of duplication of expressions.  */

      if (usecount == 1
          || gimple_code (def_stmt) == GIMPLE_PHI
          || CONSTANT_CLASS_P (value)
          || is_gimple_reg (value))
        ;
      else
        {
          gdebug *def_temp;
          tree vexpr = build_debug_expr_decl (TREE_TYPE (value));

          def_temp = gimple_build_debug_bind (vexpr,
                                              unshare_expr (value),
                                              def_stmt);

          /* FIXME: Is setting the mode really necessary?  */
          if (DECL_P (value))
            SET_DECL_MODE (vexpr, DECL_MODE (value));
          else
            SET_DECL_MODE (vexpr, TYPE_MODE (TREE_TYPE (value)));

          if (gsi)
            gsi_insert_before (gsi, def_temp, GSI_SAME_STMT);
          else
            {
              gimple_stmt_iterator ngsi = gsi_for_stmt (def_stmt);
              gsi_insert_before (&ngsi, def_temp, GSI_SAME_STMT);
            }

          value = vexpr;
        }
    }

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, var)
    {
      if (!gimple_debug_bind_p (stmt))
        continue;

      if (value)
        {
          FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
            SET_USE (use_p, unshare_expr (value));
          /* If we didn't replace uses with a debug decl, fold the
             resulting expression.  Otherwise we end up with invalid IL.  */
          if (TREE_CODE (value) != DEBUG_EXPR_DECL)
            {
              gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
              fold_stmt_inplace (&gsi);
            }
        }
      else
        gimple_debug_bind_reset_value (stmt);

      update_stmt (stmt);
    }
}


/* Insert a DEBUG BIND stmt before STMT for each DEF referenced by
   other DEBUG stmts, and replace uses of the DEF with the
   newly-created debug temp.  */

void
insert_debug_temps_for_defs (gimple_stmt_iterator *gsi)
{
  gimple *stmt;
  ssa_op_iter op_iter;
  def_operand_p def_p;

  if (!MAY_HAVE_DEBUG_BIND_STMTS)
    return;

  stmt = gsi_stmt (*gsi);

  FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
    {
      tree var = DEF_FROM_PTR (def_p);

      if (TREE_CODE (var) != SSA_NAME)
        continue;

      insert_debug_temp_for_var_def (gsi, var);
    }
}
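
/* Example (illustrative sketch, not part of this file): passes that
   delete a dead statement usually preserve debug information first:

     insert_debug_temps_for_defs (&gsi);
     gsi_remove (&gsi, true);
     release_defs (stmt);

   GSI is an iterator pointing at the statement being removed.  */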

/* Reset all debug stmts that use SSA_NAME(s) defined in STMT.  */

void
reset_debug_uses (gimple *stmt)
{
  ssa_op_iter op_iter;
  def_operand_p def_p;
  imm_use_iterator imm_iter;
  gimple *use_stmt;

  if (!MAY_HAVE_DEBUG_BIND_STMTS)
    return;

  FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
    {
      tree var = DEF_FROM_PTR (def_p);

      if (TREE_CODE (var) != SSA_NAME)
        continue;

      FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, var)
        {
          if (!gimple_debug_bind_p (use_stmt))
            continue;

          gimple_debug_bind_reset_value (use_stmt);
          update_stmt (use_stmt);
        }
    }
}

/* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing
   dominated stmts before their dominators, so that release_ssa_defs
   stands a chance of propagating DEFs into debug bind stmts.  */

void
release_defs_bitset (bitmap toremove)
{
  unsigned j;
  bitmap_iterator bi;

  /* Performing a topological sort is probably overkill; this will
     most likely run in slightly superlinear time, rather than the
     pathological quadratic worst case.
     But iterate from the maximum SSA name version to the minimum one,
     because that best mimics the allocation order during code
     generation.  Use an array for this, which we compact on the fly
     with a NULL marker moving towards the end of the vector.  */
  auto_vec<tree, 16> names;
  names.reserve (bitmap_count_bits (toremove) + 1);
  names.quick_push (NULL_TREE);
  EXECUTE_IF_SET_IN_BITMAP (toremove, 0, j, bi)
    names.quick_push (ssa_name (j));

  bitmap_tree_view (toremove);
  while (!bitmap_empty_p (toremove))
    {
      j = names.length () - 1;
      for (unsigned i = names.length () - 1; names[i];)
        {
          bool remove_now = true;
          tree var = names[i];
          gimple *stmt;
          imm_use_iterator uit;

          FOR_EACH_IMM_USE_STMT (stmt, uit, var)
            {
              ssa_op_iter dit;
              def_operand_p def_p;

              /* We can't propagate PHI nodes into debug stmts.  */
              if (gimple_code (stmt) == GIMPLE_PHI
                  || is_gimple_debug (stmt))
                continue;

              /* If we find another definition to remove that uses
                 the one we're looking at, defer the removal of this
                 one, so that it can be propagated into debug stmts
                 after the other is.  */
              FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, dit, SSA_OP_DEF)
                {
                  tree odef = DEF_FROM_PTR (def_p);

                  if (bitmap_bit_p (toremove, SSA_NAME_VERSION (odef)))
                    {
                      remove_now = false;
                      break;
                    }
                }

              if (!remove_now)
                break;
            }

          if (remove_now)
            {
              gimple *def = SSA_NAME_DEF_STMT (var);
              gimple_stmt_iterator gsi = gsi_for_stmt (def);

              if (gimple_code (def) == GIMPLE_PHI)
                remove_phi_node (&gsi, true);
              else
                {
                  gsi_remove (&gsi, true);
                  release_defs (def);
                }
              bitmap_clear_bit (toremove, SSA_NAME_VERSION (var));
            }
          else
            --i;
          if (--j != i)
            names[i] = names[j];
        }
    }
  bitmap_list_view (toremove);
}
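
/* Example (illustrative sketch, not part of this file): collecting
   dead SSA names and releasing them in a debug-friendly order:

     auto_bitmap toremove;
     bitmap_set_bit (toremove, SSA_NAME_VERSION (name1));
     bitmap_set_bit (toremove, SSA_NAME_VERSION (name2));
     release_defs_bitset (toremove);

   NAME1 and NAME2 are hypothetical SSA names whose definitions
   became dead.  */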

/* Disable warnings about missing quoting in GCC diagnostics for
   the verification errors.  Their format strings don't follow GCC
   diagnostic conventions and the calls are ultimately followed by
   one to internal_error.  */
#if __GNUC__ >= 10
# pragma GCC diagnostic push
# pragma GCC diagnostic ignored "-Wformat-diag"
#endif

/* Verify virtual SSA form.  */

bool
verify_vssa (basic_block bb, tree current_vdef, sbitmap visited)
{
  bool err = false;

  if (!bitmap_set_bit (visited, bb->index))
    return false;

  /* Pick up the single virtual PHI def.  */
  gphi *phi = NULL;
  for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
       gsi_next (&si))
    {
      tree res = gimple_phi_result (si.phi ());
      if (virtual_operand_p (res))
        {
          if (phi)
            {
              error ("multiple virtual PHI nodes in BB %d", bb->index);
              print_gimple_stmt (stderr, phi, 0);
              print_gimple_stmt (stderr, si.phi (), 0);
              err = true;
            }
          else
            phi = si.phi ();
        }
    }
  if (phi)
    {
      current_vdef = gimple_phi_result (phi);
      if (TREE_CODE (current_vdef) != SSA_NAME)
        {
          error ("virtual definition is not an SSA name");
          print_gimple_stmt (stderr, phi, 0);
          err = true;
        }
    }

  /* Verify stmts.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      tree vuse = gimple_vuse (stmt);
      if (vuse)
        {
          if (vuse != current_vdef)
            {
              error ("stmt with wrong VUSE");
              print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
              fprintf (stderr, "expected ");
              print_generic_expr (stderr, current_vdef);
              fprintf (stderr, "\n");
              err = true;
            }
          tree vdef = gimple_vdef (stmt);
          if (vdef)
            {
              current_vdef = vdef;
              if (TREE_CODE (current_vdef) != SSA_NAME)
                {
                  error ("virtual definition is not an SSA name");
                  print_gimple_stmt (stderr, phi, 0);
                  err = true;
                }
            }
        }
    }

  /* Verify destination PHI uses and recurse.  */
  edge_iterator ei;
  edge e;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      gphi *phi = get_virtual_phi (e->dest);
      if (phi
          && PHI_ARG_DEF_FROM_EDGE (phi, e) != current_vdef)
        {
          error ("PHI node with wrong VUSE on edge from BB %d",
                 e->src->index);
          print_gimple_stmt (stderr, phi, 0, TDF_VOPS);
          fprintf (stderr, "expected ");
          print_generic_expr (stderr, current_vdef);
          fprintf (stderr, "\n");
          err = true;
        }

      /* Recurse.  */
      err |= verify_vssa (e->dest, current_vdef, visited);
    }

  return err;
}

/* Return true if SSA_NAME is malformed and mark it visited.

   IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
   operand.  */

static bool
verify_ssa_name (tree ssa_name, bool is_virtual)
{
  if (TREE_CODE (ssa_name) != SSA_NAME)
    {
      error ("expected an SSA_NAME object");
      return true;
    }

  if (SSA_NAME_IN_FREE_LIST (ssa_name))
    {
      error ("found an SSA_NAME that had been released into the free pool");
      return true;
    }

  if (SSA_NAME_VAR (ssa_name) != NULL_TREE
      && TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name)))
    {
      error ("type mismatch between an SSA_NAME and its symbol");
      return true;
    }

  if (is_virtual && !virtual_operand_p (ssa_name))
    {
      error ("found a virtual definition for a GIMPLE register");
      return true;
    }

  if (is_virtual && SSA_NAME_VAR (ssa_name) != gimple_vop (cfun))
    {
      error ("virtual SSA name for non-VOP decl");
      return true;
    }

  if (!is_virtual && virtual_operand_p (ssa_name))
    {
      error ("found a real definition for a non-register");
      return true;
    }

  if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
      && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)))
    {
      error ("found a default name with a non-empty defining statement");
      return true;
    }

  return false;
}


/* Return true if the definition of SSA_NAME at block BB is malformed.

   STMT is the statement where SSA_NAME is created.

   DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
   version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
   it means that the block in that array slot contains the
   definition of SSA_NAME.

   IS_VIRTUAL is true if SSA_NAME is created by a VDEF.  */

static bool
verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
            gimple *stmt, bool is_virtual)
{
  if (verify_ssa_name (ssa_name, is_virtual))
    goto err;

  if (SSA_NAME_VAR (ssa_name)
      && TREE_CODE (SSA_NAME_VAR (ssa_name)) == RESULT_DECL
      && DECL_BY_REFERENCE (SSA_NAME_VAR (ssa_name)))
    {
      error ("RESULT_DECL should be read only when DECL_BY_REFERENCE is set");
      goto err;
    }

  if (definition_block[SSA_NAME_VERSION (ssa_name)])
    {
      error ("SSA_NAME created in two different blocks %i and %i",
             definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index);
      goto err;
    }

  definition_block[SSA_NAME_VERSION (ssa_name)] = bb;

  if (SSA_NAME_DEF_STMT (ssa_name) != stmt)
    {
      error ("SSA_NAME_DEF_STMT is wrong");
      fprintf (stderr, "Expected definition statement:\n");
      print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), 4, TDF_VOPS);
      fprintf (stderr, "\nActual definition statement:\n");
      print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
      goto err;
    }

  return false;

err:
  fprintf (stderr, "while verifying SSA_NAME ");
  print_generic_expr (stderr, ssa_name);
  fprintf (stderr, " in statement\n");
  print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);

  return true;
}


/* Return true if the use of SSA_NAME at statement STMT in block BB is
   malformed.

   DEF_BB is the block where SSA_NAME was found to be created.

   IDOM contains immediate dominator information for the flowgraph.

   CHECK_ABNORMAL is true if the caller wants to check whether this use
   is flowing through an abnormal edge (only used when checking PHI
   arguments).

   If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
   that are defined before STMT in basic block BB.  */

static bool
verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
            gimple *stmt, bool check_abnormal, bitmap names_defined_in_bb)
{
  bool err = false;
  tree ssa_name = USE_FROM_PTR (use_p);

  if (!TREE_VISITED (ssa_name))
    if (verify_imm_links (stderr, ssa_name))
      err = true;

  TREE_VISITED (ssa_name) = 1;

  if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name))
      && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
    ; /* Default definitions have empty statements.  Nothing to do.  */
  else if (!def_bb)
    {
      error ("missing definition");
      err = true;
    }
  else if (bb != def_bb
           && !dominated_by_p (CDI_DOMINATORS, bb, def_bb))
    {
      error ("definition in block %i does not dominate use in block %i",
             def_bb->index, bb->index);
      err = true;
    }
  else if (bb == def_bb
           && names_defined_in_bb != NULL
           && !bitmap_bit_p (names_defined_in_bb,
                             SSA_NAME_VERSION (ssa_name)))
    {
      error ("definition in block %i follows the use", def_bb->index);
      err = true;
    }

  if (check_abnormal
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
    {
      error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
      err = true;
    }

  /* Make sure the use is in an appropriate list by checking that the
     previous element refers to the same SSA name.  */
  if (use_p->prev == NULL)
    {
      error ("no immediate_use list");
      err = true;
    }
  else
    {
      tree listvar;
      if (use_p->prev->use == NULL)
        listvar = use_p->prev->loc.ssa_name;
      else
        listvar = USE_FROM_PTR (use_p->prev);
      if (listvar != ssa_name)
        {
          error ("wrong immediate use list");
          err = true;
        }
    }

  if (err)
    {
      fprintf (stderr, "for SSA_NAME: ");
      print_generic_expr (stderr, ssa_name, TDF_VOPS);
      fprintf (stderr, " in statement:\n");
      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
    }

  return err;
}


/* Return true if any of the arguments for PHI node PHI at block BB is
   malformed.

   DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
   version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
   it means that the block in that array slot contains the
   definition of SSA_NAME.  */

static bool
verify_phi_args (gphi *phi, basic_block bb, basic_block *definition_block)
{
  edge e;
  bool err = false;
  size_t i, phi_num_args = gimple_phi_num_args (phi);

  if (EDGE_COUNT (bb->preds) != phi_num_args)
    {
      error ("incoming edge count does not match number of PHI arguments");
      err = true;
      goto error;
    }

  for (i = 0; i < phi_num_args; i++)
    {
      use_operand_p op_p = gimple_phi_arg_imm_use_ptr (phi, i);
      tree op = USE_FROM_PTR (op_p);

      e = EDGE_PRED (bb, i);

      if (op == NULL_TREE)
        {
          error ("PHI argument is missing for edge %d->%d",
                 e->src->index,
                 e->dest->index);
          err = true;
          goto error;
        }

      if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op))
        {
          error ("PHI argument is not SSA_NAME, or invariant");
          err = true;
        }

      if ((e->flags & EDGE_ABNORMAL) && TREE_CODE (op) != SSA_NAME)
        {
          error ("PHI argument on abnormal edge is not SSA_NAME");
          err = true;
        }

      if (TREE_CODE (op) == SSA_NAME)
        {
          err = verify_ssa_name (op,
                                 virtual_operand_p (gimple_phi_result (phi)));
          err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
                             op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
        }

      if (TREE_CODE (op) == ADDR_EXPR)
        {
          tree base = TREE_OPERAND (op, 0);
          while (handled_component_p (base))
            base = TREE_OPERAND (base, 0);
          if ((VAR_P (base)
               || TREE_CODE (base) == PARM_DECL
               || TREE_CODE (base) == RESULT_DECL)
              && !TREE_ADDRESSABLE (base))
            {
              error ("address taken, but ADDRESSABLE bit not set");
              err = true;
            }
        }

      if (e->dest != bb)
        {
          error ("wrong edge %d->%d for PHI argument",
                 e->src->index, e->dest->index);
          err = true;
        }

      if (err)
        {
          fprintf (stderr, "PHI argument\n");
          print_generic_stmt (stderr, op, TDF_VOPS);
          goto error;
        }
    }

error:
  if (err)
    {
      fprintf (stderr, "for PHI node\n");
      print_gimple_stmt (stderr, phi, 0, TDF_VOPS|TDF_MEMSYMS);
    }


  return err;
}


/* Verify common invariants in the SSA web.
   TODO: verify the variable annotations.  */

DEBUG_FUNCTION void
verify_ssa (bool check_modified_stmt, bool check_ssa_operands)
{
  basic_block bb;
  basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names);
  ssa_op_iter iter;
  tree op;
  enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
  auto_bitmap names_defined_in_bb;

  gcc_assert (!need_ssa_update_p (cfun));

  timevar_push (TV_TREE_SSA_VERIFY);

  {
    /* Keep track of SSA names present in the IL.  */
    size_t i;
    tree name;
    hash_map <void *, tree> ssa_info;

    FOR_EACH_SSA_NAME (i, name, cfun)
      {
        gimple *stmt;
        TREE_VISITED (name) = 0;

        verify_ssa_name (name, virtual_operand_p (name));

        stmt = SSA_NAME_DEF_STMT (name);
        if (!gimple_nop_p (stmt))
          {
            basic_block bb = gimple_bb (stmt);
            if (verify_def (bb, definition_block,
                            name, stmt, virtual_operand_p (name)))
              goto err;
          }

        void *info = NULL;
        if (POINTER_TYPE_P (TREE_TYPE (name)))
          info = SSA_NAME_PTR_INFO (name);
        else if (INTEGRAL_TYPE_P (TREE_TYPE (name)))
          info = SSA_NAME_RANGE_INFO (name);
        if (info)
          {
            bool existed;
            tree &val = ssa_info.get_or_insert (info, &existed);
            if (existed)
              {
                error ("shared SSA name info");
                print_generic_expr (stderr, val);
                fprintf (stderr, " and ");
                print_generic_expr (stderr, name);
                fprintf (stderr, "\n");
                goto err;
              }
            else
              val = name;
          }
      }
  }

  calculate_dominance_info (CDI_DOMINATORS);

  /* Now verify all the uses and make sure they agree with the definitions
     found in the previous pass.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;

      /* Make sure that all edges have a clear 'aux' field.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          if (e->aux)
            {
              error ("AUX pointer initialized for edge %d->%d", e->src->index,
                     e->dest->index);
              goto err;
            }
        }

      /* Verify the arguments for every PHI node in the block.  */
      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          gphi *phi = gsi.phi ();
          if (verify_phi_args (phi, bb, definition_block))
            goto err;

          bitmap_set_bit (names_defined_in_bb,
                          SSA_NAME_VERSION (gimple_phi_result (phi)));
        }

      /* Now verify all the uses and vuses in every statement of the block.  */
      for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          gimple *stmt = gsi_stmt (gsi);
          use_operand_p use_p;

          if (check_modified_stmt && gimple_modified_p (stmt))
            {
              error ("stmt (%p) marked modified after optimization pass: ",
                     (void *)stmt);
              print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
              goto err;
            }

          if (check_ssa_operands && verify_ssa_operands (cfun, stmt))
            {
              print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
              goto err;
            }

          if (gimple_debug_bind_p (stmt)
              && !gimple_debug_bind_has_value_p (stmt))
            continue;

          FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
            {
              op = USE_FROM_PTR (use_p);
              if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)],
                              use_p, stmt, false, names_defined_in_bb))
                goto err;
            }

          FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS)
            {
              if (SSA_NAME_DEF_STMT (op) != stmt)
                {
                  error ("SSA_NAME_DEF_STMT is wrong");
                  fprintf (stderr, "Expected definition statement:\n");
                  print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
                  fprintf (stderr, "\nActual definition statement:\n");
                  print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (op),
                                     4, TDF_VOPS);
                  goto err;
                }
              bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
            }
        }

      bitmap_clear (names_defined_in_bb);
    }

  free (definition_block);

  if (gimple_vop (cfun)
      && ssa_default_def (cfun, gimple_vop (cfun)))
    {
      auto_sbitmap visited (last_basic_block_for_fn (cfun) + 1);
      bitmap_clear (visited);
      if (verify_vssa (ENTRY_BLOCK_PTR_FOR_FN (cfun),
                       ssa_default_def (cfun, gimple_vop (cfun)), visited))
        goto err;
    }

  /* Restore the dominance information to its prior known state, so
     that we do not perturb the compiler's subsequent behavior.  */
  if (orig_dom_state == DOM_NONE)
    free_dominance_info (CDI_DOMINATORS);
  else
    set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);

  timevar_pop (TV_TREE_SSA_VERIFY);
  return;

err:
  internal_error ("verify_ssa failed");
}

#if __GNUC__ >= 10
# pragma GCC diagnostic pop
#endif

/* Initialize global DFA and SSA structures.
   If SIZE is non-zero, allocate the SSA name array with the given size.  */

void
init_tree_ssa (struct function *fn, int size)
{
  fn->gimple_df = ggc_cleared_alloc<gimple_df> ();
  fn->gimple_df->default_defs = hash_table<ssa_name_hasher>::create_ggc (20);
  pt_solution_reset (&fn->gimple_df->escaped);
  pt_solution_reset (&fn->gimple_df->escaped_return);
  init_ssanames (fn, size);
}

/* Deallocate memory associated with SSA data structures for function FN.  */

void
delete_tree_ssa (struct function *fn)
{
  fini_ssanames (fn);

  /* We no longer maintain the SSA operand cache at this point.  */
  if (ssa_operands_active (fn))
    fini_ssa_operands (fn);

  fn->gimple_df->default_defs->empty ();
  fn->gimple_df->default_defs = NULL;
  pt_solution_reset (&fn->gimple_df->escaped);
  pt_solution_reset (&fn->gimple_df->escaped_return);
  if (fn->gimple_df->decls_to_pointers != NULL)
    delete fn->gimple_df->decls_to_pointers;
  fn->gimple_df->decls_to_pointers = NULL;
  fn->gimple_df = NULL;

  /* We no longer need the edge variable maps.  */
  redirect_edge_var_map_empty ();
}

/* Return true if EXPR is a useless type conversion, otherwise return
   false.  */

bool
tree_ssa_useless_type_conversion (tree expr)
{
  tree outer_type, inner_type;

  /* If we have an assignment that merely uses a NOP_EXPR to change
     the top of the RHS to the type of the LHS and the type conversion
     is "safe", then strip away the type conversion so that we can
     enter LHS = RHS into the const_and_copies table.  */
  if (!CONVERT_EXPR_P (expr)
      && TREE_CODE (expr) != VIEW_CONVERT_EXPR
      && TREE_CODE (expr) != NON_LVALUE_EXPR)
    return false;

  outer_type = TREE_TYPE (expr);
  inner_type = TREE_TYPE (TREE_OPERAND (expr, 0));

  if (inner_type == error_mark_node)
    return false;

  return useless_type_conversion_p (outer_type, inner_type);
}

/* Strip conversions from EXP according to
   tree_ssa_useless_type_conversion and return the resulting
   expression.  */

tree
tree_ssa_strip_useless_type_conversions (tree exp)
{
  while (tree_ssa_useless_type_conversion (exp))
    exp = TREE_OPERAND (exp, 0);
  return exp;
}
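
/* Example (illustrative sketch, not part of this file): stripping
   no-op conversions before inspecting the operand underneath them:

     tree op = tree_ssa_strip_useless_type_conversions (rhs);
     if (TREE_CODE (op) == SSA_NAME)
       ...inspect the SSA name directly...

   RHS is a hypothetical expression taken from some statement.  */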

/* Return true if T, an SSA_NAME, has an implicit default-defined value.  */

bool
ssa_defined_default_def_p (tree t)
{
  tree var = SSA_NAME_VAR (t);

  if (!var)
    ;
  /* Parameters get their initial value from the function entry.  */
  else if (TREE_CODE (var) == PARM_DECL)
    return true;
  /* When returning by reference the return address is actually a hidden
     parameter.  */
  else if (TREE_CODE (var) == RESULT_DECL && DECL_BY_REFERENCE (var))
    return true;
  /* Hard register variables get their initial value from the ether.  */
  else if (VAR_P (var) && DECL_HARD_REGISTER (var))
    return true;

  return false;
}


/* Return true if T, an SSA_NAME, has an undefined value.  PARTIAL is what
   should be returned if the value is only partially undefined.  */

bool
ssa_undefined_value_p (tree t, bool partial)
{
  gimple *def_stmt;

  gcc_checking_assert (!virtual_operand_p (t));

  if (ssa_defined_default_def_p (t))
    return false;

  /* The value is undefined iff its definition statement is empty.  */
  def_stmt = SSA_NAME_DEF_STMT (t);
  if (gimple_nop_p (def_stmt))
    return true;

  /* The value is undefined if the definition statement is a call
     to the .DEFERRED_INIT internal function.  */
  if (gimple_call_internal_p (def_stmt, IFN_DEFERRED_INIT))
    return true;

  /* The value is partially undefined if the definition statement is
     a REALPART_EXPR or IMAGPART_EXPR and its operand is defined by
     a call to the .DEFERRED_INIT function.  This is for handling the
     following case:

      1 typedef _Complex float C;
      2 C foo (int cond)
      3 {
      4   C f;
      5   __imag__ f = 0;
      6   if (cond)
      7     {
      8       __real__ f = 1;
      9       return f;
     10     }
     11   return f;
     12 }

     with -ftrivial-auto-var-init, the compiler will insert the following
     artificial initialization:
     f = .DEFERRED_INIT (f, 2);
     _1 = REALPART_EXPR <f>;

     we should treat the definition _1 = REALPART_EXPR <f> as undefined.  */
  if (partial && is_gimple_assign (def_stmt)
      && (gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
          || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR))
    {
      tree real_imag_part = TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0);
      if (TREE_CODE (real_imag_part) == SSA_NAME
          && gimple_call_internal_p (SSA_NAME_DEF_STMT (real_imag_part),
                                     IFN_DEFERRED_INIT))
        return true;
    }

  /* Check if the complex was not only partially defined.  */
  if (partial && is_gimple_assign (def_stmt)
      && gimple_assign_rhs_code (def_stmt) == COMPLEX_EXPR)
    {
      tree rhs1, rhs2;

      rhs1 = gimple_assign_rhs1 (def_stmt);
      rhs2 = gimple_assign_rhs2 (def_stmt);
      return (TREE_CODE (rhs1) == SSA_NAME && ssa_undefined_value_p (rhs1))
             || (TREE_CODE (rhs2) == SSA_NAME && ssa_undefined_value_p (rhs2));
    }
  return false;
}
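
/* Example (illustrative sketch, not part of this file): a warning pass
   might use the partial mode to flag complex values with only one
   initialized component:

     if (TREE_CODE (op) == SSA_NAME
         && ssa_undefined_value_p (op, true))
       ...treat OP as possibly (partially) uninitialized...

   OP is a hypothetical operand taken from some statement.  */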


/* Return TRUE iff there are any non-PHI uses of VAR that dominate the
   end of BB.  If we return TRUE and BB is a loop header, then VAR will
   be assumed to be defined within the loop, even if it is marked as
   maybe-undefined.  */

bool
ssa_name_any_use_dominates_bb_p (tree var, basic_block bb)
{
  imm_use_iterator iter;
  use_operand_p use_p;
  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      if (is_a <gphi *> (USE_STMT (use_p))
          || is_gimple_debug (USE_STMT (use_p)))
        continue;
      basic_block dombb = gimple_bb (USE_STMT (use_p));
      if (dominated_by_p (CDI_DOMINATORS, bb, dombb))
        return true;
    }

  return false;
}

/* Mark as maybe_undef any SSA_NAMEs that are unsuitable as ivopts
   candidates for potentially involving undefined behavior.  */

void
mark_ssa_maybe_undefs (void)
{
  auto_vec<tree> queue;

  /* Scan all SSA_NAMEs, marking the definitely-undefined ones as
     maybe-undefined and queuing them for propagation, while clearing
     the mark on others.  */
  unsigned int i;
  tree var;
  FOR_EACH_SSA_NAME (i, var, cfun)
    {
      if (SSA_NAME_IS_VIRTUAL_OPERAND (var)
          || !ssa_undefined_value_p (var, false))
        ssa_name_set_maybe_undef (var, false);
      else
        {
          ssa_name_set_maybe_undef (var);
          queue.safe_push (var);
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "marking _%i as maybe-undef\n",
                     SSA_NAME_VERSION (var));
        }
    }

  /* Now propagate maybe-undefined from a DEF to any other PHI that
     uses it, as long as there isn't any intervening use of DEF.  */
  while (!queue.is_empty ())
    {
      var = queue.pop ();
      imm_use_iterator iter;
      use_operand_p use_p;
      FOR_EACH_IMM_USE_FAST (use_p, iter, var)
        {
          /* Any uses of VAR that aren't PHI args imply VAR must be
             defined, otherwise undefined behavior would have been
             definitely invoked.  Only PHI args may hold
             maybe-undefined values without invoking undefined
             behavior for that reason alone.  */
          if (!is_a <gphi *> (USE_STMT (use_p)))
            continue;
          gphi *phi = as_a <gphi *> (USE_STMT (use_p));

          tree def = gimple_phi_result (phi);
          if (ssa_name_maybe_undef_p (def))
            continue;

          /* Look for any uses of the maybe-undef SSA_NAME that
             dominate the block reaching the incoming block
             corresponding to the PHI arg in which it is mentioned.
             Such a use means we can assume the SSA_NAME is defined
             on that path, so we only mark a PHI result as maybe-undef
             if no such dominating use is found.  */
          int idx = phi_arg_index_from_use (use_p);
          basic_block bb = gimple_phi_arg_edge (phi, idx)->src;
          if (ssa_name_any_use_dominates_bb_p (var, bb))
            continue;

          ssa_name_set_maybe_undef (def);
          queue.safe_push (def);
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "marking _%i as maybe-undef because of _%i\n",
                     SSA_NAME_VERSION (def), SSA_NAME_VERSION (var));
        }
    }
}
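
/* Example (illustrative sketch, not part of this file): after running
   mark_ssa_maybe_undefs, a pass such as ivopts can cheaply reject
   candidates that may involve undefined behavior:

     mark_ssa_maybe_undefs ();
     ...
     if (ssa_name_maybe_undef_p (cand))
       ...skip CAND as an induction-variable candidate...

   CAND is a hypothetical candidate SSA name.  */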


/* If necessary, rewrite the base of the reference tree *TP from
   a MEM_REF to a plain or converted symbol.  */

static void
maybe_rewrite_mem_ref_base (tree *tp, bitmap suitable_for_renaming)
{
  tree sym;

  while (handled_component_p (*tp))
    tp = &TREE_OPERAND (*tp, 0);
  if (TREE_CODE (*tp) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*tp, 0)) == ADDR_EXPR
      && (sym = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0))
      && DECL_P (sym)
      && !TREE_ADDRESSABLE (sym)
      && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym))
      && is_gimple_reg_type (TREE_TYPE (*tp))
      && ! VOID_TYPE_P (TREE_TYPE (*tp)))
    {
      if (VECTOR_TYPE_P (TREE_TYPE (sym))
          && useless_type_conversion_p (TREE_TYPE (*tp),
                                        TREE_TYPE (TREE_TYPE (sym)))
          && multiple_p (mem_ref_offset (*tp),
                         wi::to_poly_offset
                           (TYPE_SIZE_UNIT (TREE_TYPE (*tp)))))
        {
          *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
                        TYPE_SIZE (TREE_TYPE (*tp)),
                        int_const_binop (MULT_EXPR,
                                         bitsize_int (BITS_PER_UNIT),
                                         TREE_OPERAND (*tp, 1)));
        }
      else if (TREE_CODE (TREE_TYPE (sym)) == COMPLEX_TYPE
               && useless_type_conversion_p (TREE_TYPE (*tp),
                                             TREE_TYPE (TREE_TYPE (sym))))
        {
          *tp = build1 (integer_zerop (TREE_OPERAND (*tp, 1))
                        ? REALPART_EXPR : IMAGPART_EXPR,
                        TREE_TYPE (*tp), sym);
        }
      else if (integer_zerop (TREE_OPERAND (*tp, 1))
               && DECL_SIZE (sym) == TYPE_SIZE (TREE_TYPE (*tp)))
        {
          if (!useless_type_conversion_p (TREE_TYPE (*tp),
                                          TREE_TYPE (sym)))
            *tp = build1 (VIEW_CONVERT_EXPR,
                          TREE_TYPE (*tp), sym);
          else
            *tp = sym;
        }
      else if (DECL_SIZE (sym)
               && TREE_CODE (DECL_SIZE (sym)) == INTEGER_CST
               && (known_subrange_p
                   (mem_ref_offset (*tp),
                    wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (*tp))),
                    0, wi::to_offset (DECL_SIZE_UNIT (sym))))
               && (! INTEGRAL_TYPE_P (TREE_TYPE (*tp))
                   || (wi::to_offset (TYPE_SIZE (TREE_TYPE (*tp)))
                       == TYPE_PRECISION (TREE_TYPE (*tp))))
               && (! INTEGRAL_TYPE_P (TREE_TYPE (sym))
                   || type_has_mode_precision_p (TREE_TYPE (sym)))
               && wi::umod_trunc (wi::to_offset (TYPE_SIZE (TREE_TYPE (*tp))),
                                  BITS_PER_UNIT) == 0)
        {
          *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
                        TYPE_SIZE (TREE_TYPE (*tp)),
                        wide_int_to_tree (bitsizetype,
                                          mem_ref_offset (*tp)
                                          << LOG2_BITS_PER_UNIT));
        }
    }
}

/* For a tree REF return its base if it is the base of a MEM_REF
   that cannot be rewritten into SSA form.  Otherwise return NULL_TREE.  */

static tree
non_rewritable_mem_ref_base (tree ref)
{
  tree base;

  /* A plain decl does not need it set.  */
  if (DECL_P (ref))
    return NULL_TREE;

  switch (TREE_CODE (ref))
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case BIT_FIELD_REF:
      if (DECL_P (TREE_OPERAND (ref, 0)))
        return NULL_TREE;
      break;
    case VIEW_CONVERT_EXPR:
      if (DECL_P (TREE_OPERAND (ref, 0)))
        {
          if (TYPE_SIZE (TREE_TYPE (ref))
              != TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 0))))
            return TREE_OPERAND (ref, 0);
          return NULL_TREE;
        }
      break;
      /* We would need to rewrite ARRAY_REFs or COMPONENT_REFs and even
         more so multiple levels of handled components.  */
    default:;
    }

  base = ref;

  /* But watch out for MEM_REFs we cannot lower to a
     VIEW_CONVERT_EXPR or a BIT_FIELD_REF.  */
  if (TREE_CODE (base) == MEM_REF
      && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
      if (! DECL_P (decl))
        return NULL_TREE;
      if (! is_gimple_reg_type (TREE_TYPE (base))
          || VOID_TYPE_P (TREE_TYPE (base))
          || TREE_THIS_VOLATILE (decl) != TREE_THIS_VOLATILE (base))
        return decl;
      if ((VECTOR_TYPE_P (TREE_TYPE (decl))
           || TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE)
          && useless_type_conversion_p (TREE_TYPE (base),
                                        TREE_TYPE (TREE_TYPE (decl)))
          && known_ge (mem_ref_offset (base), 0)
          && known_gt (wi::to_poly_offset
                         (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
                       mem_ref_offset (base))
          && multiple_p (mem_ref_offset (base),
                         wi::to_poly_offset
                           (TYPE_SIZE_UNIT (TREE_TYPE (base)))))
        return NULL_TREE;
      /* For same sizes and zero offset we can use a VIEW_CONVERT_EXPR.  */
      if (integer_zerop (TREE_OPERAND (base, 1))
          && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (base)))
        return NULL_TREE;
      /* For integral typed extracts we can use a BIT_FIELD_REF.  */
      if (DECL_SIZE (decl)
          && TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
          && (known_subrange_p
              (mem_ref_offset (base),
               wi::to_poly_offset (TYPE_SIZE_UNIT (TREE_TYPE (base))),
               0, wi::to_poly_offset (DECL_SIZE_UNIT (decl))))
          /* ??? We can't handle bitfield precision extracts without
             either using an alternate type for the BIT_FIELD_REF and
             then doing a conversion or possibly adjusting the offset
             according to endianness.  */
          && (! INTEGRAL_TYPE_P (TREE_TYPE (base))
              || (wi::to_offset (TYPE_SIZE (TREE_TYPE (base)))
                  == TYPE_PRECISION (TREE_TYPE (base))))
          /* ??? Likewise for extracts from bitfields, we'd have
             to pun the base object to a size precision mode first.  */
          && (! INTEGRAL_TYPE_P (TREE_TYPE (decl))
              || type_has_mode_precision_p (TREE_TYPE (decl)))
          && wi::umod_trunc (wi::to_offset (TYPE_SIZE (TREE_TYPE (base))),
                             BITS_PER_UNIT) == 0)
        return NULL_TREE;
      return decl;
    }

  /* We cannot rewrite a decl in the base.  */
  base = get_base_address (ref);
  if (DECL_P (base))
    return base;

  /* We cannot rewrite TARGET_MEM_REFs.  */
  else if (TREE_CODE (base) == TARGET_MEM_REF
           && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
      if (! DECL_P (decl))
        return NULL_TREE;
      return decl;
    }

  return NULL_TREE;
}

/* For an lvalue tree LHS return true if it cannot be rewritten into SSA form.
   Otherwise return false.  */

static bool
non_rewritable_lvalue_p (tree lhs)
{
  /* A plain decl is always rewritable.  */
  if (DECL_P (lhs))
    return false;

  /* We can re-write REALPART_EXPR and IMAGPART_EXPR sets in
     a reasonably efficient manner...  */
  if ((TREE_CODE (lhs) == REALPART_EXPR
       || TREE_CODE (lhs) == IMAGPART_EXPR)
      && DECL_P (TREE_OPERAND (lhs, 0)))
    return false;

  /* ??? The following could be relaxed allowing component
     references that do not change the access size.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0);

      /* A decl that is wrapped inside a MEM_REF that covers
         it in full is also rewritable.  */
      if (integer_zerop (TREE_OPERAND (lhs, 1))
          && DECL_P (decl)
          && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (lhs))
          /* If the dynamic type of the decl has larger precision than
             the decl itself we can't use the decl's type for SSA
             rewriting.  */
          && ((! INTEGRAL_TYPE_P (TREE_TYPE (decl))
               || compare_tree_int (DECL_SIZE (decl),
                                    TYPE_PRECISION (TREE_TYPE (decl))) == 0)
              || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
                  && (TYPE_PRECISION (TREE_TYPE (decl))
                      >= TYPE_PRECISION (TREE_TYPE (lhs)))))
          /* Make sure we are not re-writing non-float copying into float
             copying as that can incur normalization.  */
          && (! FLOAT_TYPE_P (TREE_TYPE (decl))
              || types_compatible_p (TREE_TYPE (lhs), TREE_TYPE (decl)))
          && (TREE_THIS_VOLATILE (decl) == TREE_THIS_VOLATILE (lhs)))
        return false;

      /* A vector-insert using a MEM_REF or ARRAY_REF is rewritable
         using a BIT_INSERT_EXPR.  */
      if (DECL_P (decl)
          && VECTOR_TYPE_P (TREE_TYPE (decl))
          && TYPE_MODE (TREE_TYPE (decl)) != BLKmode
          && known_ge (mem_ref_offset (lhs), 0)
          && known_gt (wi::to_poly_offset
                         (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
                       mem_ref_offset (lhs))
          && multiple_p (mem_ref_offset (lhs),
                         wi::to_poly_offset
                           (TYPE_SIZE_UNIT (TREE_TYPE (lhs))))
          && known_ge (wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (decl))),
                       wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (lhs)))))
        {
          poly_uint64 lhs_bits, nelts;
          if (poly_int_tree_p (TYPE_SIZE (TREE_TYPE (lhs)), &lhs_bits)
              && multiple_p (lhs_bits,
                             tree_to_uhwi
                               (TYPE_SIZE (TREE_TYPE (TREE_TYPE (decl)))),
                             &nelts)
              && valid_vector_subparts_p (nelts))
            {
              if (known_eq (nelts, 1u))
                return false;
              /* For sub-vector inserts the insert vector mode has to be
                 supported.  */
              tree vtype = build_vector_type (TREE_TYPE (TREE_TYPE (decl)),
                                              nelts);
              if (TYPE_MODE (vtype) != BLKmode)
                return false;
            }
        }
    }

  /* A vector-insert using a BIT_FIELD_REF is rewritable using
     BIT_INSERT_EXPR.  */
  if (TREE_CODE (lhs) == BIT_FIELD_REF
      && DECL_P (TREE_OPERAND (lhs, 0))
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (lhs, 0)))
      && TYPE_MODE (TREE_TYPE (TREE_OPERAND (lhs, 0))) != BLKmode
      && operand_equal_p (TYPE_SIZE_UNIT (TREE_TYPE (lhs)),
                          TYPE_SIZE_UNIT
                            (TREE_TYPE (TREE_TYPE (TREE_OPERAND (lhs, 0)))),
                          0)
      && (tree_to_uhwi (TREE_OPERAND (lhs, 2))
          % tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs)))) == 0)
    return false;

  return true;
}

/* When possible, clear the TREE_ADDRESSABLE bit, set or clear
   DECL_NOT_GIMPLE_REG_P and mark the variable VAR for conversion into SSA.
   Variables for which stmts need updating are recorded in
   SUITABLE_FOR_RENAMING.  */

static void
maybe_optimize_var (tree var, bitmap addresses_taken, bitmap not_reg_needs,
                    bitmap suitable_for_renaming)
{
  /* Global variables and result decls cannot be changed.  */
  if (is_global_var (var)
      || TREE_CODE (var) == RESULT_DECL
      || bitmap_bit_p (addresses_taken, DECL_UID (var)))
    return;

  bool maybe_reg = false;
  if (TREE_ADDRESSABLE (var))
    {
      TREE_ADDRESSABLE (var) = 0;
      maybe_reg = true;
      if (dump_file)
        {
          fprintf (dump_file, "No longer having address taken: ");
          print_generic_expr (dump_file, var);
          fprintf (dump_file, "\n");
        }
    }

  /* For decls of register type, if there are partial defs we cannot
     express in SSA form, mark them as DECL_NOT_GIMPLE_REG_P so as to
     avoid SSA rewriting.  For the others go ahead and mark them for
     renaming.  */
  if (is_gimple_reg_type (TREE_TYPE (var)))
    {
      if (bitmap_bit_p (not_reg_needs, DECL_UID (var)))
        {
          DECL_NOT_GIMPLE_REG_P (var) = 1;
          if (dump_file)
            {
              fprintf (dump_file, "Has partial defs: ");
              print_generic_expr (dump_file, var);
              fprintf (dump_file, "\n");
            }
        }
      else if (TREE_CODE (TREE_TYPE (var)) == BITINT_TYPE
               && (cfun->curr_properties & PROP_gimple_lbitint) != 0
               && TYPE_PRECISION (TREE_TYPE (var)) > MAX_FIXED_MODE_SIZE)
        {
          /* Don't rewrite large/huge _BitInt vars into SSA form after
             _BitInt lowering.  */
          DECL_NOT_GIMPLE_REG_P (var) = 1;
          if (dump_file)
            {
              fprintf (dump_file, "_BitInt var after its lowering: ");
              print_generic_expr (dump_file, var);
              fprintf (dump_file, "\n");
            }
        }
      else if (DECL_NOT_GIMPLE_REG_P (var))
        {
          maybe_reg = true;
          DECL_NOT_GIMPLE_REG_P (var) = 0;
        }
      if (maybe_reg)
        {
          if (is_gimple_reg (var))
            {
              if (dump_file)
                {
                  fprintf (dump_file, "Now a gimple register: ");
                  print_generic_expr (dump_file, var);
                  fprintf (dump_file, "\n");
                }
              bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
            }
          else
            DECL_NOT_GIMPLE_REG_P (var) = 1;
        }
    }
}

/* Return true when STMT is an ASAN_MARK call whose second argument is
   the address of a local variable.  */

static bool
is_asan_mark_p (gimple *stmt)
{
  if (!gimple_call_internal_p (stmt, IFN_ASAN_MARK))
    return false;

  tree addr = get_base_address (gimple_call_arg (stmt, 1));
  if (TREE_CODE (addr) == ADDR_EXPR
      && VAR_P (TREE_OPERAND (addr, 0)))
    {
      tree var = TREE_OPERAND (addr, 0);
      if (lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
                            DECL_ATTRIBUTES (var)))
        return false;

      unsigned addressable = TREE_ADDRESSABLE (var);
      TREE_ADDRESSABLE (var) = 0;
      bool r = is_gimple_reg (var);
      TREE_ADDRESSABLE (var) = addressable;
      return r;
    }

  return false;
}

/* Compute TREE_ADDRESSABLE and whether we have unhandled partial defs
   for local variables.  */

void
execute_update_addresses_taken (void)
{
  basic_block bb;
  auto_bitmap addresses_taken;
  auto_bitmap not_reg_needs;
  auto_bitmap suitable_for_renaming;
  bool optimistic_not_addressable = false;
  tree var;
  unsigned i;

  timevar_push (TV_ADDRESS_TAKEN);

  /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
     the function body.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          gimple *stmt = gsi_stmt (gsi);
          enum gimple_code code = gimple_code (stmt);
          tree decl;

          if (code == GIMPLE_CALL)
            {
              if (optimize_atomic_compare_exchange_p (stmt))
                {
                  /* For __atomic_compare_exchange_N if the second argument
                     is &var, don't mark var addressable;
                     if it becomes non-addressable, we'll rewrite it into
                     an ATOMIC_COMPARE_EXCHANGE call.  */
                  tree arg = gimple_call_arg (stmt, 1);
                  gimple_call_set_arg (stmt, 1, null_pointer_node);
                  gimple_ior_addresses_taken (addresses_taken, stmt);
                  gimple_call_set_arg (stmt, 1, arg);
                  /* Remember we have to check again below.  */
                  optimistic_not_addressable = true;
                }
              else if (is_asan_mark_p (stmt)
                       || gimple_call_internal_p (stmt, IFN_GOMP_SIMT_ENTER))
                ;
              else
                gimple_ior_addresses_taken (addresses_taken, stmt);
            }
          else
            /* Note all addresses taken by the stmt.  */
            gimple_ior_addresses_taken (addresses_taken, stmt);

          /* If we have a call or an assignment, see if the lhs contains
             a local decl that must not become a gimple register.  */
          if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
            {
              tree lhs = gimple_get_lhs (stmt);
              if (lhs
                  && TREE_CODE (lhs) != SSA_NAME
                  && ((code == GIMPLE_CALL && ! DECL_P (lhs))
                      || non_rewritable_lvalue_p (lhs)))
                {
                  decl = get_base_address (lhs);
                  if (DECL_P (decl))
                    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
                }
            }

          if (gimple_assign_single_p (stmt))
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              if ((decl = non_rewritable_mem_ref_base (rhs)))
                bitmap_set_bit (not_reg_needs, DECL_UID (decl));
            }

          else if (code == GIMPLE_CALL)
            {
              for (i = 0; i < gimple_call_num_args (stmt); ++i)
                {
                  tree arg = gimple_call_arg (stmt, i);
                  if ((decl = non_rewritable_mem_ref_base (arg)))
                    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
                }
            }

          else if (code == GIMPLE_ASM)
            {
              gasm *asm_stmt = as_a <gasm *> (stmt);
              for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
                {
                  tree link = gimple_asm_output_op (asm_stmt, i);
                  tree lhs = TREE_VALUE (link);
                  if (TREE_CODE (lhs) != SSA_NAME)
                    {
                      decl = get_base_address (lhs);
                      if (DECL_P (decl)
                          && (non_rewritable_lvalue_p (lhs)
                              /* We cannot move required conversions from
                                 the lhs to the rhs in asm statements, so
                                 require we do not need any.  */
                              || !useless_type_conversion_p
                                    (TREE_TYPE (lhs), TREE_TYPE (decl))))
                        bitmap_set_bit (not_reg_needs, DECL_UID (decl));
                    }
                }
              for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
                {
                  tree link = gimple_asm_input_op (asm_stmt, i);
                  if ((decl
                       = non_rewritable_mem_ref_base (TREE_VALUE (link))))
                    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
                }
            }
        }

      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          size_t i;
          gphi *phi = gsi.phi ();

          for (i = 0; i < gimple_phi_num_args (phi); i++)
            {
              tree op = PHI_ARG_DEF (phi, i), var;
              if (TREE_CODE (op) == ADDR_EXPR
                  && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL
                  && DECL_P (var))
                bitmap_set_bit (addresses_taken, DECL_UID (var));
            }
        }
    }
1983
1984 /* We cannot iterate over all referenced vars because that can contain
1985 unused vars from BLOCK trees, which causes code generation differences
1986 for -g vs. -g0. */
1987 for (var = DECL_ARGUMENTS (cfun->decl); var; var = DECL_CHAIN (var))
1988 maybe_optimize_var (var, addresses_taken, not_reg_needs,
1989 suitable_for_renaming);
1990
1991 FOR_EACH_VEC_SAFE_ELT (cfun->local_decls, i, var)
1992 maybe_optimize_var (var, addresses_taken, not_reg_needs,
1993 suitable_for_renaming);
1994
1995 /* Operand caches need to be recomputed for operands referencing the updated
1996 variables and operands need to be rewritten to expose bare symbols. */
1997 if (!bitmap_empty_p (map: suitable_for_renaming)
1998 || optimistic_not_addressable)
1999 {
2000 FOR_EACH_BB_FN (bb, cfun)
2001 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (i: gsi);)
2002 {
2003 gimple *stmt = gsi_stmt (i: gsi);
2004
2005 /* Re-write TARGET_MEM_REFs of symbols we want to
2006 rewrite into SSA form. */
2007 if (gimple_assign_single_p (gs: stmt))
2008 {
2009 tree lhs = gimple_assign_lhs (gs: stmt);
2010 tree rhs, *rhsp = gimple_assign_rhs1_ptr (gs: stmt);
2011 tree sym;
2012
2013 /* Rewrite LHS IMAG/REALPART_EXPR similar to
2014 gimplify_modify_expr_complex_part. */
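		/* E.g. a partial store __imag__ c = x_1 becomes,
		   illustratively,
		     r_2 = REALPART_EXPR <c>;
		     c = COMPLEX_EXPR <r_2, x_1>;
		   so that C itself can be rewritten into SSA form.  */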
		if ((TREE_CODE (lhs) == IMAGPART_EXPR
		     || TREE_CODE (lhs) == REALPART_EXPR)
		    && DECL_P (TREE_OPERAND (lhs, 0))
		    && bitmap_bit_p (suitable_for_renaming,
				     DECL_UID (TREE_OPERAND (lhs, 0))))
		  {
		    tree other = make_ssa_name (TREE_TYPE (lhs));
		    tree lrhs = build1 (TREE_CODE (lhs) == IMAGPART_EXPR
					? REALPART_EXPR : IMAGPART_EXPR,
					TREE_TYPE (other),
					TREE_OPERAND (lhs, 0));
		    suppress_warning (lrhs);
		    gimple *load = gimple_build_assign (other, lrhs);
		    location_t loc = gimple_location (stmt);
		    gimple_set_location (load, loc);
		    gimple_set_vuse (load, gimple_vuse (stmt));
		    gsi_insert_before (&gsi, load, GSI_SAME_STMT);
		    gimple_assign_set_lhs (stmt, TREE_OPERAND (lhs, 0));
		    gimple_assign_set_rhs_with_ops
		      (&gsi, COMPLEX_EXPR,
		       TREE_CODE (lhs) == IMAGPART_EXPR
		       ? other : gimple_assign_rhs1 (stmt),
		       TREE_CODE (lhs) == IMAGPART_EXPR
		       ? gimple_assign_rhs1 (stmt) : other, NULL_TREE);
		    stmt = gsi_stmt (gsi);
		    unlink_stmt_vdef (stmt);
		    update_stmt (stmt);
		    continue;
		  }

		/* Rewrite a vector insert via a BIT_FIELD_REF on the LHS
		   into a BIT_INSERT_EXPR.  */
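		/* E.g. BIT_FIELD_REF <v, 32, 64> = x_1 becomes,
		   illustratively, v = BIT_INSERT_EXPR <v, x_1, 64>
		   when the store covers exactly one properly aligned
		   element of the vector V.  */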
		if (TREE_CODE (lhs) == BIT_FIELD_REF
		    && DECL_P (TREE_OPERAND (lhs, 0))
		    && bitmap_bit_p (suitable_for_renaming,
				     DECL_UID (TREE_OPERAND (lhs, 0)))
		    && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (lhs, 0)))
		    && TYPE_MODE (TREE_TYPE (TREE_OPERAND (lhs, 0))) != BLKmode
		    && operand_equal_p (TYPE_SIZE_UNIT (TREE_TYPE (lhs)),
					TYPE_SIZE_UNIT (TREE_TYPE
					  (TREE_TYPE (TREE_OPERAND (lhs, 0)))),
					0)
		    && (tree_to_uhwi (TREE_OPERAND (lhs, 2))
			% tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs))) == 0))
		  {
		    tree var = TREE_OPERAND (lhs, 0);
		    tree val = gimple_assign_rhs1 (stmt);
		    if (! types_compatible_p (TREE_TYPE (TREE_TYPE (var)),
					      TREE_TYPE (val)))
		      {
			tree tem = make_ssa_name (TREE_TYPE (TREE_TYPE (var)));
			gimple *pun
			  = gimple_build_assign (tem,
						 build1 (VIEW_CONVERT_EXPR,
							 TREE_TYPE (tem), val));
			gsi_insert_before (&gsi, pun, GSI_SAME_STMT);
			val = tem;
		      }
		    tree bitpos = TREE_OPERAND (lhs, 2);
		    gimple_assign_set_lhs (stmt, var);
		    gimple_assign_set_rhs_with_ops
		      (&gsi, BIT_INSERT_EXPR, var, val, bitpos);
		    stmt = gsi_stmt (gsi);
		    unlink_stmt_vdef (stmt);
		    update_stmt (stmt);
		    continue;
		  }

		/* Rewrite a vector insert using a MEM_REF on the LHS
		   into a BIT_INSERT_EXPR.  */
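		/* Similarly, e.g. MEM[&v + 8] = x_1 becomes,
		   illustratively, v = BIT_INSERT_EXPR <v, x_1, 64>
		   when the store covers an element-aligned sub-part
		   of the vector V but not all of it.  */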
		if (TREE_CODE (lhs) == MEM_REF
		    && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
		    && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
		    && DECL_P (sym)
		    && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym))
		    && VECTOR_TYPE_P (TREE_TYPE (sym))
		    && TYPE_MODE (TREE_TYPE (sym)) != BLKmode
		    /* If it is a full replacement we can do better below.  */
		    && maybe_ne (wi::to_poly_offset
				   (TYPE_SIZE_UNIT (TREE_TYPE (lhs))),
				 wi::to_poly_offset
				   (TYPE_SIZE_UNIT (TREE_TYPE (sym))))
		    && known_ge (mem_ref_offset (lhs), 0)
		    && known_gt (wi::to_poly_offset
				   (TYPE_SIZE_UNIT (TREE_TYPE (sym))),
				 mem_ref_offset (lhs))
		    && multiple_p (mem_ref_offset (lhs),
				   wi::to_poly_offset
				     (TYPE_SIZE_UNIT (TREE_TYPE (lhs)))))
		  {
		    tree val = gimple_assign_rhs1 (stmt);
		    if (! types_compatible_p (TREE_TYPE (val),
					      TREE_TYPE (TREE_TYPE (sym))))
		      {
			poly_uint64 lhs_bits, nelts;
			tree temtype = TREE_TYPE (TREE_TYPE (sym));
			if (poly_int_tree_p (TYPE_SIZE (TREE_TYPE (lhs)),
					     &lhs_bits)
			    && multiple_p (lhs_bits,
					   tree_to_uhwi
					     (TYPE_SIZE (TREE_TYPE
							   (TREE_TYPE (sym)))),
					   &nelts)
			    && maybe_ne (nelts, 1u)
			    && valid_vector_subparts_p (nelts))
			  temtype = build_vector_type (temtype, nelts);
			tree tem = make_ssa_name (temtype);
			gimple *pun
			  = gimple_build_assign (tem,
						 build1 (VIEW_CONVERT_EXPR,
							 TREE_TYPE (tem), val));
			gsi_insert_before (&gsi, pun, GSI_SAME_STMT);
			val = tem;
		      }
		    tree bitpos
		      = wide_int_to_tree (bitsizetype,
					  mem_ref_offset (lhs) * BITS_PER_UNIT);
		    gimple_assign_set_lhs (stmt, sym);
		    gimple_assign_set_rhs_with_ops
		      (&gsi, BIT_INSERT_EXPR, sym, val, bitpos);
		    stmt = gsi_stmt (gsi);
		    unlink_stmt_vdef (stmt);
		    update_stmt (stmt);
		    continue;
		  }

		/* We shouldn't have any fancy wrapping of
		   component-refs on the LHS, but look through
		   VIEW_CONVERT_EXPRs as that is easy.  */
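		/* I.e. strip VIEW_CONVERT_EXPR <T> (MEM[&sym]) down to
		   MEM[&sym]; a store to SYM at offset zero then gets
		   SYM itself as its LHS, with the RHS wrapped in a
		   VIEW_CONVERT_EXPR below when the types do not
		   agree.  */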
		while (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
		  lhs = TREE_OPERAND (lhs, 0);
		if (TREE_CODE (lhs) == MEM_REF
		    && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
		    && integer_zerop (TREE_OPERAND (lhs, 1))
		    && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
		    && DECL_P (sym)
		    && !TREE_ADDRESSABLE (sym)
		    && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
		  lhs = sym;
		else
		  lhs = gimple_assign_lhs (stmt);

		/* Rewrite the RHS and make sure the resulting assignment
		   is validly typed.  */
		maybe_rewrite_mem_ref_base (rhsp, suitable_for_renaming);
		rhs = gimple_assign_rhs1 (stmt);
		if (gimple_assign_lhs (stmt) != lhs
		    && !useless_type_conversion_p (TREE_TYPE (lhs),
						   TREE_TYPE (rhs)))
		  {
		    if (gimple_clobber_p (stmt))
		      {
			rhs = build_constructor (TREE_TYPE (lhs), NULL);
			TREE_THIS_VOLATILE (rhs) = 1;
		      }
		    else
		      rhs = fold_build1 (VIEW_CONVERT_EXPR,
					 TREE_TYPE (lhs), rhs);
		  }
		if (gimple_assign_lhs (stmt) != lhs)
		  gimple_assign_set_lhs (stmt, lhs);

		if (gimple_assign_rhs1 (stmt) != rhs)
		  {
		    gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
		    gimple_assign_set_rhs_from_tree (&gsi, rhs);
		  }
	      }

	    else if (gimple_code (stmt) == GIMPLE_CALL)
	      {
		unsigned i;
		if (optimize_atomic_compare_exchange_p (stmt))
		  {
		    tree expected = gimple_call_arg (stmt, 1);
		    tree decl = TREE_OPERAND (expected, 0);
		    if (bitmap_bit_p (suitable_for_renaming, DECL_UID (decl)))
		      {
			fold_builtin_atomic_compare_exchange (&gsi);
			continue;
		      }
		    else if (!TREE_ADDRESSABLE (decl))
		      /* If there are partial defs of the decl we may
			 have cleared the addressable bit but set
			 DECL_NOT_GIMPLE_REG_P.  We have to restore
			 TREE_ADDRESSABLE here.  */
		      TREE_ADDRESSABLE (decl) = 1;
		  }
		else if (is_asan_mark_p (stmt))
		  {
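		    /* Once VAR can be rewritten into SSA form, replace
		       ASAN_MARK (POISON, &var, ...) by
		       var = ASAN_POISON () and ASAN_MARK (UNPOISON,
		       &var, ...) by a clobber of VAR.  */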
		    tree var = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
		    if (bitmap_bit_p (suitable_for_renaming, DECL_UID (var)))
		      {
			unlink_stmt_vdef (stmt);
			if (asan_mark_p (stmt, ASAN_MARK_POISON))
			  {
			    gcall *call
			      = gimple_build_call_internal (IFN_ASAN_POISON, 0);
			    gimple_call_set_lhs (call, var);
			    gsi_replace (&gsi, call, true);
			  }
			else
			  {
			    /* In ASAN_MARK (UNPOISON, &b, ...) the variable
			       is uninitialized.  Avoid dependencies on
			       previous out of scope value.  */
			    tree clobber = build_clobber (TREE_TYPE (var));
			    gimple *g = gimple_build_assign (var, clobber);
			    gsi_replace (&gsi, g, true);
			  }
			continue;
		      }
		  }
		else if (gimple_call_internal_p (stmt, IFN_GOMP_SIMT_ENTER))
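		  /* Arguments from the second one onwards are addresses
		     of local variables (or already null); drop those
		     whose variable can now be rewritten into SSA form.  */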
		  for (i = 1; i < gimple_call_num_args (stmt); i++)
		    {
		      tree *argp = gimple_call_arg_ptr (stmt, i);
		      if (*argp == null_pointer_node)
			continue;
		      gcc_assert (TREE_CODE (*argp) == ADDR_EXPR
				  && VAR_P (TREE_OPERAND (*argp, 0)));
		      tree var = TREE_OPERAND (*argp, 0);
		      if (bitmap_bit_p (suitable_for_renaming, DECL_UID (var)))
			*argp = null_pointer_node;
		    }
		for (i = 0; i < gimple_call_num_args (stmt); ++i)
		  {
		    tree *argp = gimple_call_arg_ptr (stmt, i);
		    maybe_rewrite_mem_ref_base (argp, suitable_for_renaming);
		  }
	      }

	    else if (gimple_code (stmt) == GIMPLE_ASM)
	      {
		gasm *asm_stmt = as_a <gasm *> (stmt);
		unsigned i;
		for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
		  {
		    tree link = gimple_asm_output_op (asm_stmt, i);
		    maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
						suitable_for_renaming);
		  }
		for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
		  {
		    tree link = gimple_asm_input_op (asm_stmt, i);
		    maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
						suitable_for_renaming);
		  }
	      }

	    else if (gimple_debug_bind_p (stmt)
		     && gimple_debug_bind_has_value_p (stmt))
	      {
		tree *valuep = gimple_debug_bind_get_value_ptr (stmt);
		tree decl;
		maybe_rewrite_mem_ref_base (valuep, suitable_for_renaming);
		decl = non_rewritable_mem_ref_base (*valuep);
		if (decl
		    && bitmap_bit_p (suitable_for_renaming, DECL_UID (decl)))
		  gimple_debug_bind_reset_value (stmt);
	      }

	    if (gimple_references_memory_p (stmt)
		|| is_gimple_debug (stmt))
	      update_stmt (stmt);

	    gsi_next (&gsi);
	  }

      /* Update SSA form here, we are called as non-pass as well.  */
      if (number_of_loops (cfun) > 1
	  && loops_state_satisfies_p (LOOP_CLOSED_SSA))
	rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
      else
	update_ssa (TODO_update_ssa);
    }

  timevar_pop (TV_ADDRESS_TAKEN);
}

namespace {

const pass_data pass_data_update_address_taken =
{
  GIMPLE_PASS, /* type */
  "addressables", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_ADDRESS_TAKEN, /* tv_id */
  PROP_ssa, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_address_taken, /* todo_flags_finish */
};

class pass_update_address_taken : public gimple_opt_pass
{
public:
  pass_update_address_taken (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_update_address_taken, ctxt)
  {}

  /* opt_pass methods: */
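  /* No gate or execute method: the actual work happens when the pass
     manager processes TODO_update_address_taken from todo_flags_finish,
     which invokes execute_update_addresses_taken above.  */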

}; // class pass_update_address_taken

} // anon namespace

gimple_opt_pass *
make_pass_update_address_taken (gcc::context *ctxt)
{
  return new pass_update_address_taken (ctxt);
}

