1/* Data flow functions for trees.
2 Copyright (C) 2001-2024 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 3, or (at your option)
10any later version.
11
12GCC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "backend.h"
25#include "rtl.h"
26#include "tree.h"
27#include "gimple.h"
28#include "tree-pass.h"
29#include "ssa.h"
30#include "tree-pretty-print.h"
31#include "fold-const.h"
32#include "stor-layout.h"
33#include "langhooks.h"
34#include "gimple-iterator.h"
35#include "gimple-walk.h"
36#include "tree-dfa.h"
37#include "gimple-range.h"
38
39/* Build and maintain data flow information for trees. */
40
41/* Counters used to display DFA and SSA statistics. */
struct dfa_stats_d
{
  long num_defs;		/* Real DEF operands seen.  */
  long num_uses;		/* Real USE operands seen.  */
  long num_phis;		/* PHI nodes in the function.  */
  long num_phi_args;		/* Sum of argument counts over all PHIs.  */
  size_t max_num_phi_args;	/* Largest argument count of a single PHI.  */
  long num_vdefs;		/* Virtual definition operands.  */
  long num_vuses;		/* Virtual use operands.  */
};
52
53
54/* Local functions. */
55static void collect_dfa_stats (struct dfa_stats_d *);
56
57
58/*---------------------------------------------------------------------------
59 Dataflow analysis (DFA) routines
60---------------------------------------------------------------------------*/
61
62/* Renumber the gimple stmt uids in one block. The caller is responsible
63 of calling set_gimple_stmt_max_uid (fun, 0) at some point. */
64
65void
66renumber_gimple_stmt_uids_in_block (struct function *fun, basic_block bb)
67{
68 gimple_stmt_iterator bsi;
69 for (bsi = gsi_start_phis (bb); !gsi_end_p (i: bsi); gsi_next (i: &bsi))
70 {
71 gimple *stmt = gsi_stmt (i: bsi);
72 gimple_set_uid (g: stmt, uid: inc_gimple_stmt_max_uid (fn: fun));
73 }
74 for (bsi = gsi_start_bb (bb); !gsi_end_p (i: bsi); gsi_next (i: &bsi))
75 {
76 gimple *stmt = gsi_stmt (i: bsi);
77 gimple_set_uid (g: stmt, uid: inc_gimple_stmt_max_uid (fn: fun));
78 }
79}
80
81/* Renumber all of the gimple stmt uids. */
82
83void
84renumber_gimple_stmt_uids (struct function *fun)
85{
86 basic_block bb;
87
88 set_gimple_stmt_max_uid (fn: fun, maxid: 0);
89 FOR_ALL_BB_FN (bb, fun)
90 renumber_gimple_stmt_uids_in_block (fun, bb);
91}
92
93/* Like renumber_gimple_stmt_uids, but only do work on the basic blocks
94 in BLOCKS, of which there are N_BLOCKS. Also renumbers PHIs. */
95
96void
97renumber_gimple_stmt_uids_in_blocks (basic_block *blocks, int n_blocks)
98{
99 int i;
100
101 set_gimple_stmt_max_uid (cfun, maxid: 0);
102 for (i = 0; i < n_blocks; i++)
103 renumber_gimple_stmt_uids_in_block (cfun, bb: blocks[i]);
104}
105
106
107
108/*---------------------------------------------------------------------------
109 Debugging functions
110---------------------------------------------------------------------------*/
111
112/* Dump variable VAR and its may-aliases to FILE. */
113
void
dump_variable (FILE *file, tree var)
{
  /* For an SSA name, dump points-to information for pointers and then
     continue with the underlying declaration (which may be NULL for
     anonymous SSA names).  */
  if (TREE_CODE (var) == SSA_NAME)
    {
      if (POINTER_TYPE_P (TREE_TYPE (var)))
	dump_points_to_info_for (file, var);
      var = SSA_NAME_VAR (var);
    }

  if (var == NULL_TREE)
    {
      fprintf (stream: file, format: "<nil>");
      return;
    }

  print_generic_expr (file, var, dump_flags);

  fprintf (stream: file, format: ", UID D.%u", (unsigned) DECL_UID (var));
  /* Only mention the points-to uid when it differs from the DECL_UID.  */
  if (DECL_PT_UID (var) != DECL_UID (var))
    fprintf (stream: file, format: ", PT-UID D.%u", (unsigned) DECL_PT_UID (var));

  fprintf (stream: file, format: ", ");
  print_generic_expr (file, TREE_TYPE (var), dump_flags);

  /* Dump the variable's relevant flags.  */
  if (TREE_ADDRESSABLE (var))
    fprintf (stream: file, format: ", is addressable");

  if (is_global_var (t: var))
    fprintf (stream: file, format: ", is global");

  if (TREE_THIS_VOLATILE (var))
    fprintf (stream: file, format: ", is volatile");

  /* Default definitions only exist when we have gimple SSA data (cfun
     may be NULL when dumping outside a function context).  */
  if (cfun && ssa_default_def (cfun, var))
    {
      fprintf (stream: file, format: ", default def: ");
      print_generic_expr (file, ssa_default_def (cfun, var), dump_flags);
    }

  if (DECL_INITIAL (var))
    {
      fprintf (stream: file, format: ", initial: ");
      print_generic_expr (file, DECL_INITIAL (var), dump_flags);
    }

  fprintf (stream: file, format: "\n");
}
162
163
164/* Dump variable VAR and its may-aliases to stderr. */
165
DEBUG_FUNCTION void
debug_variable (tree var)
{
  /* Convenience wrapper meant to be called from the debugger.  */
  dump_variable (stderr, var);
}
171
172
173/* Dump various DFA statistics to FILE. */
174
void
dump_dfa_stats (FILE *file)
{
  struct dfa_stats_d dfa_stats;

  unsigned long size, total = 0;
  /* Column layouts: header, per-row (count + memory), and total row.  */
  const char * const fmt_str   = "%-30s%-13s%12s\n";
  const char * const fmt_str_1 = "%-30s%13lu" PRsa (11) "\n";
  const char * const fmt_str_3 = "%-43s" PRsa (11) "\n";
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);

  /* Walk the current function gathering the counters.  */
  collect_dfa_stats (&dfa_stats);

  fprintf (stream: file, format: "\nDFA Statistics for %s\n\n", funcname);

  fprintf (stream: file, format: "---------------------------------------------------------\n");
  fprintf (stream: file, format: fmt_str, "", "  Number of  ", "Memory");
  fprintf (stream: file, format: fmt_str, "", "  instances  ", "used ");
  fprintf (stream: file, format: "---------------------------------------------------------\n");

  /* Each row: count of entities and an estimate of the memory they
     occupy; TOTAL accumulates all the estimates.  */
  size = dfa_stats.num_uses * sizeof (tree *);
  total += size;
  fprintf (stream: file, format: fmt_str_1, "USE operands", dfa_stats.num_uses,
	   SIZE_AMOUNT (size));

  size = dfa_stats.num_defs * sizeof (tree *);
  total += size;
  fprintf (stream: file, format: fmt_str_1, "DEF operands", dfa_stats.num_defs,
	   SIZE_AMOUNT (size));

  size = dfa_stats.num_vuses * sizeof (tree *);
  total += size;
  fprintf (stream: file, format: fmt_str_1, "VUSE operands", dfa_stats.num_vuses,
	   SIZE_AMOUNT (size));

  size = dfa_stats.num_vdefs * sizeof (tree *);
  total += size;
  fprintf (stream: file, format: fmt_str_1, "VDEF operands", dfa_stats.num_vdefs,
	   SIZE_AMOUNT (size));

  size = dfa_stats.num_phis * sizeof (struct gphi);
  total += size;
  fprintf (stream: file, format: fmt_str_1, "PHI nodes", dfa_stats.num_phis,
	   SIZE_AMOUNT (size));

  size = dfa_stats.num_phi_args * sizeof (struct phi_arg_d);
  total += size;
  fprintf (stream: file, format: fmt_str_1, "PHI arguments", dfa_stats.num_phi_args,
	   SIZE_AMOUNT (size));

  fprintf (stream: file, format: "---------------------------------------------------------\n");
  fprintf (stream: file, format: fmt_str_3, "Total memory used by DFA/SSA data",
	   SIZE_AMOUNT (total));
  fprintf (stream: file, format: "---------------------------------------------------------\n");
  fprintf (stream: file, format: "\n");

  /* Guard against division by zero for functions without PHIs.  */
  if (dfa_stats.num_phis)
    fprintf (stream: file, format: "Average number of arguments per PHI node: %.1f (max: "
	     HOST_SIZE_T_PRINT_DEC ")\n",
	     (float) dfa_stats.num_phi_args / (float) dfa_stats.num_phis,
	     (fmt_size_t) dfa_stats.max_num_phi_args);

  fprintf (stream: file, format: "\n");
}
240
241
242/* Dump DFA statistics on stderr. */
243
DEBUG_FUNCTION void
debug_dfa_stats (void)
{
  /* Convenience wrapper meant to be called from the debugger.  */
  dump_dfa_stats (stderr);
}
249
250
251/* Collect DFA statistics and store them in the structure pointed to by
252 DFA_STATS_P. */
253
254static void
255collect_dfa_stats (struct dfa_stats_d *dfa_stats_p ATTRIBUTE_UNUSED)
256{
257 basic_block bb;
258
259 gcc_assert (dfa_stats_p);
260
261 memset (s: (void *)dfa_stats_p, c: 0, n: sizeof (struct dfa_stats_d));
262
263 /* Walk all the statements in the function counting references. */
264 FOR_EACH_BB_FN (bb, cfun)
265 {
266 for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (i: si);
267 gsi_next (i: &si))
268 {
269 gphi *phi = si.phi ();
270 dfa_stats_p->num_phis++;
271 dfa_stats_p->num_phi_args += gimple_phi_num_args (gs: phi);
272 if (gimple_phi_num_args (gs: phi) > dfa_stats_p->max_num_phi_args)
273 dfa_stats_p->max_num_phi_args = gimple_phi_num_args (gs: phi);
274 }
275
276 for (gimple_stmt_iterator si = gsi_start_bb (bb); !gsi_end_p (i: si);
277 gsi_next (i: &si))
278 {
279 gimple *stmt = gsi_stmt (i: si);
280 dfa_stats_p->num_defs += NUM_SSA_OPERANDS (stmt, SSA_OP_DEF);
281 dfa_stats_p->num_uses += NUM_SSA_OPERANDS (stmt, SSA_OP_USE);
282 dfa_stats_p->num_vdefs += gimple_vdef (g: stmt) ? 1 : 0;
283 dfa_stats_p->num_vuses += gimple_vuse (g: stmt) ? 1 : 0;
284 }
285 }
286}
287
288
289/*---------------------------------------------------------------------------
290 Miscellaneous helpers
291---------------------------------------------------------------------------*/
292
293/* Lookup VAR UID in the default_defs hashtable and return the associated
294 variable. */
295
tree
ssa_default_def (struct function *fn, tree var)
{
  struct tree_decl_minimal ind;
  struct tree_ssa_name in;
  gcc_assert (VAR_P (var)
	      || TREE_CODE (var) == PARM_DECL
	      || TREE_CODE (var) == RESULT_DECL);

  /* Always NULL_TREE for rtl function dumps.  */
  if (!fn->gimple_df)
    return NULL_TREE;

  /* Build a stack-allocated scratch SSA_NAME whose SSA_NAME_VAR carries
     VAR's DECL_UID.  The default-def hash table hashes and compares on
     that uid alone, so this fake node is a valid lookup key.  */
  in.var = (tree)&ind;
  ind.uid = DECL_UID (var);
  return DEFAULT_DEFS (fn)->find_with_hash (comparable: (tree)&in, DECL_UID (var));
}
313
314/* Insert the pair VAR's UID, DEF into the default_defs hashtable
315 of function FN. */
316
void
set_ssa_default_def (struct function *fn, tree var, tree def)
{
  struct tree_decl_minimal ind;
  struct tree_ssa_name in;

  gcc_assert (VAR_P (var)
	      || TREE_CODE (var) == PARM_DECL
	      || TREE_CODE (var) == RESULT_DECL);
  /* Build a scratch key node hashed on VAR's DECL_UID, mirroring the
     lookup scheme in ssa_default_def.  */
  in.var = (tree)&ind;
  ind.uid = DECL_UID (var);
  /* A NULL DEF removes any existing default definition for VAR.  */
  if (!def)
    {
      tree *loc = DEFAULT_DEFS (fn)->find_slot_with_hash (comparable: (tree)&in,
							  DECL_UID (var),
							  insert: NO_INSERT);
      if (loc)
	{
	  /* Unmark the old name before dropping the table entry.  */
	  SSA_NAME_IS_DEFAULT_DEF (*(tree *)loc) = false;
	  DEFAULT_DEFS (fn)->clear_slot (slot: loc);
	}
      return;
    }
  gcc_assert (TREE_CODE (def) == SSA_NAME && SSA_NAME_VAR (def) == var);
  tree *loc = DEFAULT_DEFS (fn)->find_slot_with_hash (comparable: (tree)&in,
						      DECL_UID (var), insert: INSERT);

  /* Default definition might be changed by tail call optimization.  */
  if (*loc)
    SSA_NAME_IS_DEFAULT_DEF (*loc) = false;

  /* Mark DEF as the default definition for VAR.  */
  *loc = def;
  SSA_NAME_IS_DEFAULT_DEF (def) = true;
}
352
353/* Retrieve or create a default definition for VAR. */
354
355tree
356get_or_create_ssa_default_def (struct function *fn, tree var)
357{
358 tree ddef = ssa_default_def (fn, var);
359 if (ddef == NULL_TREE)
360 {
361 ddef = make_ssa_name_fn (fn, var, gimple_build_nop ());
362 set_ssa_default_def (fn, var, def: ddef);
363 }
364 return ddef;
365}
366
367
368/* If EXP is a handled component reference for a structure, return the
369 base variable. The access range is delimited by bit positions *POFFSET and
370 *POFFSET + *PMAX_SIZE. The access size is *PSIZE bits. If either
371 *PSIZE or *PMAX_SIZE is -1, they could not be determined. If *PSIZE
372 and *PMAX_SIZE are equal, the access is non-variable. If *PREVERSE is
373 true, the storage order of the reference is reversed. */
374
tree
get_ref_base_and_extent (tree exp, poly_int64 *poffset,
			 poly_int64 *psize,
			 poly_int64 *pmax_size,
			 bool *preverse)
{
  poly_offset_int bitsize = -1;
  poly_offset_int maxsize;
  tree size_tree = NULL_TREE;
  poly_offset_int bit_offset = 0;
  /* True while an inner variable-index array ref may still let the
     access extend up to the end of the enclosing object.  */
  bool seen_variable_array_ref = false;

  /* First get the final access size and the storage order from just the
     outermost expression.  */
  if (TREE_CODE (exp) == COMPONENT_REF)
    size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    size_tree = TREE_OPERAND (exp, 1);
  else if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    {
      size_tree = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
    }
  else if (!VOID_TYPE_P (TREE_TYPE (exp)))
    {
      machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
	bitsize = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE
      && poly_int_tree_p (t: size_tree))
    bitsize = wi::to_poly_offset (t: size_tree);

  *preverse = reverse_storage_order_for_component_p (t: exp);

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  maxsize = bitsize;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  /* Operand 2 is the position of the field in bits.  */
	  bit_offset += wi::to_poly_offset (TREE_OPERAND (exp, 2));
	  break;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);

	    if (this_offset && poly_int_tree_p (t: this_offset))
	      {
		/* Field position = byte offset scaled to bits plus the
		   bit-level remainder stored on the FIELD_DECL.  */
		poly_offset_int woffset = (wi::to_poly_offset (t: this_offset)
					   << LOG2_BITS_PER_UNIT);
		woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
		bit_offset += woffset;

		/* If we had seen a variable array ref already and we just
		   referenced the last field of a struct or a union member
		   then we have to adjust maxsize by the padding at the end
		   of our field.  */
		if (seen_variable_array_ref)
		  {
		    tree stype = TREE_TYPE (TREE_OPERAND (exp, 0));
		    tree next = DECL_CHAIN (field);
		    while (next && TREE_CODE (next) != FIELD_DECL)
		      next = DECL_CHAIN (next);
		    if (!next
			|| TREE_CODE (stype) != RECORD_TYPE)
		      {
			tree fsize = DECL_SIZE (field);
			tree ssize = TYPE_SIZE (stype);
			if (fsize == NULL
			    || !poly_int_tree_p (t: fsize)
			    || ssize == NULL
			    || !poly_int_tree_p (t: ssize))
			  maxsize = -1;
			else if (known_size_p (a: maxsize))
			  {
			    /* Grow maxsize by the tail padding: the gap
			       between the end of FIELD and the end of the
			       containing type.  */
			    poly_offset_int tem
			      = (wi::to_poly_offset (t: ssize)
				 - wi::to_poly_offset (t: fsize));
			    tem -= woffset;
			    maxsize += tem;
			  }
		      }
		    /* An component ref with an adjacent field up in the
		       structure hierarchy constrains the size of any variable
		       array ref lower in the access hierarchy.  */
		    else
		      seen_variable_array_ref = false;
		  }
	      }
	    else
	      {
		tree csize = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)));
		/* We need to adjust maxsize to the whole structure bitsize.
		   But we can subtract any constant offset seen so far,
		   because that would get us out of the structure otherwise.  */
		if (known_size_p (a: maxsize)
		    && csize
		    && poly_int_tree_p (t: csize))
		  maxsize = wi::to_poly_offset (t: csize) - bit_offset;
		else
		  maxsize = -1;
	      }
	  }
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound, unit_size;

	    /* If the resulting bit-offset is constant, track it.  */
	    if (poly_int_tree_p (t: index)
		&& (low_bound = array_ref_low_bound (exp),
		    poly_int_tree_p (t: low_bound))
		&& (unit_size = array_ref_element_size (exp),
		    TREE_CODE (unit_size) == INTEGER_CST))
	      {
		/* (index - low_bound) * unit_size, in bits, sign-extended
		   to sizetype precision.  */
		poly_offset_int woffset
		  = wi::sext (a: wi::to_poly_offset (t: index)
			      - wi::to_poly_offset (t: low_bound),
			      TYPE_PRECISION (sizetype));
		woffset *= wi::to_offset (t: unit_size);
		woffset <<= LOG2_BITS_PER_UNIT;
		bit_offset += woffset;

		/* An array ref with a constant index up in the structure
		   hierarchy will constrain the size of any variable array ref
		   lower in the access hierarchy.  */
		seen_variable_array_ref = false;
	      }
	    else
	      {
		tree asize = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)));
		/* We need to adjust maxsize to the whole array bitsize.
		   But we can subtract any constant offset seen so far,
		   because that would get us outside of the array otherwise.  */
		if (known_size_p (a: maxsize)
		    && asize
		    && poly_int_tree_p (t: asize))
		  maxsize = wi::to_poly_offset (t: asize) - bit_offset;
		else
		  maxsize = -1;

		/* Remember that we have seen an array ref with a variable
		   index.  */
		seen_variable_array_ref = true;

		value_range vr;
		range_query *query;
		query = get_range_query (cfun);

		/* If ranger can bound the index, use that to tighten the
		   offset and maximum size computed above.  */
		if (TREE_CODE (index) == SSA_NAME
		    && (low_bound = array_ref_low_bound (exp),
			poly_int_tree_p (t: low_bound))
		    && (unit_size = array_ref_element_size (exp),
			TREE_CODE (unit_size) == INTEGER_CST)
		    && query->range_of_expr (r&: vr, expr: index)
		    && !vr.varying_p ()
		    && !vr.undefined_p ())
		  {
		    wide_int min = vr.lower_bound ();
		    wide_int max = vr.upper_bound ();
		    poly_offset_int lbound = wi::to_poly_offset (t: low_bound);
		    /* Try to constrain maxsize with range information.  */
		    offset_int omax
		      = offset_int::from (x: max, TYPE_SIGN (TREE_TYPE (index)));
		    if (wi::get_precision (x: max) <= ADDR_MAX_BITSIZE
			&& known_lt (lbound, omax))
		      {
			poly_offset_int rmaxsize;
			rmaxsize = (omax - lbound + 1)
			    * wi::to_offset (t: unit_size) << LOG2_BITS_PER_UNIT;
			if (!known_size_p (a: maxsize)
			    || known_lt (rmaxsize, maxsize))
			  {
			    /* If we know an upper bound below the declared
			       one this is no longer variable.  */
			    if (known_size_p (a: maxsize))
			      seen_variable_array_ref = false;
			    maxsize = rmaxsize;
			  }
		      }
		    /* Try to adjust bit_offset with range information.  */
		    offset_int omin
		      = offset_int::from (x: min, TYPE_SIGN (TREE_TYPE (index)));
		    if (wi::get_precision (x: min) <= ADDR_MAX_BITSIZE
			&& known_le (lbound, omin))
		      {
			poly_offset_int woffset
			  = wi::sext (a: omin - lbound,
				      TYPE_PRECISION (sizetype));
			woffset *= wi::to_offset (t: unit_size);
			woffset <<= LOG2_BITS_PER_UNIT;
			bit_offset += woffset;
			if (known_size_p (a: maxsize))
			  maxsize -= woffset;
		      }
		  }
	      }
	  }
	  break;

	case REALPART_EXPR:
	  /* The real part is at offset zero within the complex value.  */
	  break;

	case IMAGPART_EXPR:
	  /* The imaginary part follows the real part, i.e. lies bitsize
	     bits into the complex value.  */
	  bit_offset += bitsize;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case TARGET_MEM_REF:
	  /* Via the variable index or index2 we can reach the
	     whole object.  Still hand back the decl here.  */
	  if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
	      && (TMR_INDEX (exp) || TMR_INDEX2 (exp)))
	    {
	      exp = TREE_OPERAND (TMR_BASE (exp), 0);
	      bit_offset = 0;
	      maxsize = -1;
	      goto done;
	    }
	  /* Fallthru.  */
	case MEM_REF:
	  /* We need to deal with variable arrays ending structures such as
	     struct { int length; int a[1]; } x;           x.a[d]
	     struct { struct { int a; int b; } a[1]; } x;  x.a[d].a
	     struct { struct { int a[1]; } a[1]; } x;      x.a[0][d], x.a[d][0]
	     struct { int len; union { int a[1]; struct X x; } u; } x; x.u.a[d]
	     where we do not know maxsize for variable index accesses to
	     the array.  The simplest way to conservatively deal with this
	     is to punt in the case that offset + maxsize reaches the
	     base type boundary.  This needs to include possible trailing
	     padding that is there for alignment purposes.  */
	  if (seen_variable_array_ref
	      && known_size_p (a: maxsize)
	      && (TYPE_SIZE (TREE_TYPE (exp)) == NULL_TREE
		  || !poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp)))
		  || (maybe_eq
		      (a: bit_offset + maxsize,
		       b: wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp)))))))
	    maxsize = -1;

	  /* Hand back the decl for MEM[&decl, off].  */
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
	    {
	      if (integer_zerop (TREE_OPERAND (exp, 1)))
		exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	      else
		{
		  /* Fold the byte offset of the MEM_REF into bit_offset,
		     but only if the combined value still fits a shwi.  */
		  poly_offset_int off = mem_ref_offset (exp);
		  off <<= LOG2_BITS_PER_UNIT;
		  off += bit_offset;
		  poly_int64 off_hwi;
		  if (off.to_shwi (r: &off_hwi))
		    {
		      bit_offset = off_hwi;
		      exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
		    }
		}
	    }
	  goto done;

	default:
	  goto done;
	}

      exp = TREE_OPERAND (exp, 0);
    }

 done:
  /* A size that does not fit a shwi or is negative means "unknown".  */
  if (!bitsize.to_shwi (r: psize) || maybe_lt (a: *psize, b: 0))
    {
      *poffset = 0;
      *psize = -1;
      *pmax_size = -1;

      return exp;
    }

  /* ??? Due to negative offsets in ARRAY_REF we can end up with
     negative bit_offset here.  We might want to store a zero offset
     in this case.  */
  if (!bit_offset.to_shwi (r: poffset))
    {
      *poffset = 0;
      *pmax_size = -1;

      return exp;
    }

  /* In case of a decl or constant base object we can do better.  */

  if (DECL_P (exp))
    {
      if (VAR_P (exp)
	  && ((flag_unconstrained_commons && DECL_COMMON (exp))
	      || (DECL_EXTERNAL (exp) && seen_variable_array_ref)))
	{
	  tree sz_tree = TYPE_SIZE (TREE_TYPE (exp));
	  /* If size is unknown, or we have read to the end, assume there
	     may be more to the structure than we are told.  */
	  if (TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
	      || (seen_variable_array_ref
		  && (sz_tree == NULL_TREE
		      || !poly_int_tree_p (t: sz_tree)
		      || maybe_eq (a: bit_offset + maxsize,
				   b: wi::to_poly_offset (t: sz_tree)))))
	    maxsize = -1;
	}
      /* If maxsize is unknown adjust it according to the size of the
	 base decl.  */
      else if (!known_size_p (a: maxsize)
	       && DECL_SIZE (exp)
	       && poly_int_tree_p (DECL_SIZE (exp)))
	maxsize = wi::to_poly_offset (DECL_SIZE (exp)) - bit_offset;
    }
  else if (CONSTANT_CLASS_P (exp))
    {
      /* If maxsize is unknown adjust it according to the size of the
	 base type constant.  */
      if (!known_size_p (a: maxsize)
	  && TYPE_SIZE (TREE_TYPE (exp))
	  && poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp))))
	maxsize = (wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp)))
		   - bit_offset);
    }

  if (!maxsize.to_shwi (r: pmax_size)
      || maybe_lt (a: *pmax_size, b: 0)
      || !endpoint_representable_p (pos: *poffset, size: *pmax_size))
    *pmax_size = -1;

  /* Punt if *POFFSET + *PSIZE overflows in HOST_WIDE_INT, the callers don't
     check for such overflows individually and assume it works.  */
  if (!endpoint_representable_p (pos: *poffset, size: *psize))
    {
      *poffset = 0;
      *psize = -1;
      *pmax_size = -1;

      return exp;
    }

  return exp;
}
733
734/* Like get_ref_base_and_extent, but for cases in which we only care
735 about constant-width accesses at constant offsets. Return null
736 if the access is anything else. */
737
738tree
739get_ref_base_and_extent_hwi (tree exp, HOST_WIDE_INT *poffset,
740 HOST_WIDE_INT *psize, bool *preverse)
741{
742 poly_int64 offset, size, max_size;
743 HOST_WIDE_INT const_offset, const_size;
744 bool reverse;
745 tree decl = get_ref_base_and_extent (exp, poffset: &offset, psize: &size, pmax_size: &max_size,
746 preverse: &reverse);
747 if (!offset.is_constant (const_value: &const_offset)
748 || !size.is_constant (const_value: &const_size)
749 || const_offset < 0
750 || !known_size_p (a: max_size)
751 || maybe_ne (a: max_size, b: const_size))
752 return NULL_TREE;
753
754 *poffset = const_offset;
755 *psize = const_size;
756 *preverse = reverse;
757 return decl;
758}
759
760/* Returns the base object and a constant BITS_PER_UNIT offset in *POFFSET that
761 denotes the starting address of the memory access EXP.
762 Returns NULL_TREE if the offset is not constant or any component
763 is not BITS_PER_UNIT-aligned.
764 VALUEIZE if non-NULL is used to valueize SSA names. It should return
765 its argument or a constant if the argument is known to be constant. */
766
tree
get_addr_base_and_unit_offset_1 (tree exp, poly_int64 *poffset,
				 tree (*valueize) (tree))
{
  poly_int64 byte_offset = 0;

  /* Compute cumulative byte-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  {
	    /* Only accept bit-field positions that are a whole number of
	       bytes; anything else cannot be expressed as a byte offset.  */
	    poly_int64 this_byte_offset;
	    poly_uint64 this_bit_offset;
	    if (!poly_int_tree_p (TREE_OPERAND (exp, 2), value: &this_bit_offset)
		|| !multiple_p (a: this_bit_offset, BITS_PER_UNIT,
				multiple: &this_byte_offset))
	      return NULL_TREE;
	    byte_offset += this_byte_offset;
	  }
	  break;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);
	    poly_int64 hthis_offset;

	    /* Punt on fields at non-constant or non-byte-aligned
	       positions.  */
	    if (!this_offset
		|| !poly_int_tree_p (t: this_offset, value: &hthis_offset)
		|| (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
		    % BITS_PER_UNIT))
	      return NULL_TREE;

	    hthis_offset += (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
			     / BITS_PER_UNIT);
	    byte_offset += hthis_offset;
	  }
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound, unit_size;

	    /* Let the caller-supplied hook fold SSA names in the index
	       and low bound to constants where possible.  */
	    if (valueize
		&& TREE_CODE (index) == SSA_NAME)
	      index = (*valueize) (index);
	    if (!poly_int_tree_p (t: index))
	      return NULL_TREE;
	    low_bound = array_ref_low_bound (exp);
	    if (valueize
		&& TREE_CODE (low_bound) == SSA_NAME)
	      low_bound = (*valueize) (low_bound);
	    if (!poly_int_tree_p (t: low_bound))
	      return NULL_TREE;
	    unit_size = array_ref_element_size (exp);
	    if (TREE_CODE (unit_size) != INTEGER_CST)
	      return NULL_TREE;

	    /* If the resulting bit-offset is constant, track it.  */
	    poly_offset_int woffset
	      = wi::sext (a: wi::to_poly_offset (t: index)
			  - wi::to_poly_offset (t: low_bound),
			  TYPE_PRECISION (sizetype));
	    woffset *= wi::to_offset (t: unit_size);
	    byte_offset += woffset.force_shwi ();
	  }
	  break;

	case REALPART_EXPR:
	  /* The real part is at offset zero.  */
	  break;

	case IMAGPART_EXPR:
	  /* The imaginary part follows the real part of the same size.  */
	  byte_offset += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (exp)));
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case MEM_REF:
	  {
	    tree base = TREE_OPERAND (exp, 0);
	    if (valueize
		&& TREE_CODE (base) == SSA_NAME)
	      base = (*valueize) (base);

	    /* Hand back the decl for MEM[&decl, off].  */
	    if (TREE_CODE (base) == ADDR_EXPR)
	      {
		if (!integer_zerop (TREE_OPERAND (exp, 1)))
		  {
		    poly_offset_int off = mem_ref_offset (exp);
		    byte_offset += off.force_shwi ();
		  }
		exp = TREE_OPERAND (base, 0);
	      }
	    goto done;
	  }

	case TARGET_MEM_REF:
	  {
	    tree base = TREE_OPERAND (exp, 0);
	    if (valueize
		&& TREE_CODE (base) == SSA_NAME)
	      base = (*valueize) (base);

	    /* Hand back the decl for MEM[&decl, off].  */
	    if (TREE_CODE (base) == ADDR_EXPR)
	      {
		/* Variable index parts make the offset non-constant.  */
		if (TMR_INDEX (exp) || TMR_INDEX2 (exp))
		  return NULL_TREE;
		if (!integer_zerop (TMR_OFFSET (exp)))
		  {
		    poly_offset_int off = mem_ref_offset (exp);
		    byte_offset += off.force_shwi ();
		  }
		exp = TREE_OPERAND (base, 0);
	      }
	    goto done;
	  }

	default:
	  goto done;
	}

      exp = TREE_OPERAND (exp, 0);
    }
done:

  *poffset = byte_offset;
  return exp;
}
903
904/* Returns the base object and a constant BITS_PER_UNIT offset in *POFFSET that
905 denotes the starting address of the memory access EXP.
906 Returns NULL_TREE if the offset is not constant or any component
907 is not BITS_PER_UNIT-aligned. */
908
909tree
910get_addr_base_and_unit_offset (tree exp, poly_int64 *poffset)
911{
912 return get_addr_base_and_unit_offset_1 (exp, poffset, NULL);
913}
914
915/* Returns true if STMT references an SSA_NAME that has
916 SSA_NAME_OCCURS_IN_ABNORMAL_PHI set, otherwise false. */
917
918bool
919stmt_references_abnormal_ssa_name (gimple *stmt)
920{
921 ssa_op_iter oi;
922 use_operand_p use_p;
923
924 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
925 {
926 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (USE_FROM_PTR (use_p)))
927 return true;
928 }
929
930 return false;
931}
932
933/* If STMT takes any abnormal PHI values as input, replace them with
934 local copies. */
935
936void
937replace_abnormal_ssa_names (gimple *stmt)
938{
939 ssa_op_iter oi;
940 use_operand_p use_p;
941
942 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
943 {
944 tree op = USE_FROM_PTR (use_p);
945 if (TREE_CODE (op) == SSA_NAME && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op))
946 {
947 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
948 tree new_name = make_ssa_name (TREE_TYPE (op));
949 gassign *assign = gimple_build_assign (new_name, op);
950 gsi_insert_before (&gsi, assign, GSI_SAME_STMT);
951 SET_USE (use_p, new_name);
952 }
953 }
954}
955
956/* Pair of tree and a sorting index, for dump_enumerated_decls. */
struct GTY(()) numbered_tree
{
  tree t;	/* The declaration.  */
  int num;	/* Sequence number: order of first discovery during the
		   statement walk; used as a tie-breaker when sorting.  */
};
962
963
964/* Compare two declarations references by their DECL_UID / sequence number.
965 Called via qsort. */
966
967static int
968compare_decls_by_uid (const void *pa, const void *pb)
969{
970 const numbered_tree *nt_a = ((const numbered_tree *)pa);
971 const numbered_tree *nt_b = ((const numbered_tree *)pb);
972
973 if (DECL_UID (nt_a->t) != DECL_UID (nt_b->t))
974 return DECL_UID (nt_a->t) - DECL_UID (nt_b->t);
975 return nt_a->num - nt_b->num;
976}
977
978/* Called via walk_gimple_stmt / walk_gimple_op by dump_enumerated_decls. */
979static tree
980dump_enumerated_decls_push (tree *tp, int *walk_subtrees, void *data)
981{
982 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
983 vec<numbered_tree> *list = (vec<numbered_tree> *) wi->info;
984 numbered_tree nt;
985
986 if (!DECL_P (*tp))
987 return NULL_TREE;
988 nt.t = *tp;
989 nt.num = list->length ();
990 list->safe_push (obj: nt);
991 *walk_subtrees = 0;
992 return NULL_TREE;
993}
994
995/* Find all the declarations used by the current function, sort them by uid,
996 and emit the sorted list. Each declaration is tagged with a sequence
997 number indicating when it was found during statement / tree walking,
998 so that TDF_NOUID comparisons of anonymous declarations are still
999 meaningful. Where a declaration was encountered more than once, we
1000 emit only the sequence number of the first encounter.
1001 FILE is the dump file where to output the list and FLAGS is as in
1002 print_generic_expr. */
void
dump_enumerated_decls (FILE *file, dump_flags_t flags)
{
  /* Nothing to do without a CFG to walk.  */
  if (!cfun->cfg)
    return;

  basic_block bb;
  struct walk_stmt_info wi;
  auto_vec<numbered_tree, 40> decl_list;

  /* Collect every decl referenced by a non-debug statement, tagged with
     its discovery order.  */
  memset (s: &wi, c: '\0', n: sizeof (wi));
  wi.info = (void *) &decl_list;
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (bb); !gsi_end_p (i: gsi); gsi_next (i: &gsi))
	if (!is_gimple_debug (gs: gsi_stmt (i: gsi)))
	  walk_gimple_stmt (&gsi, NULL, dump_enumerated_decls_push, &wi);
    }
  /* Sort by DECL_UID with discovery order as tie-breaker, then print
     each decl once (duplicates are adjacent after sorting).  */
  decl_list.qsort (compare_decls_by_uid);
  if (decl_list.length ())
    {
      unsigned ix;
      numbered_tree *ntp;
      tree last = NULL_TREE;

      fprintf (stream: file, format: "Declarations used by %s, sorted by DECL_UID:\n",
	       current_function_name ());
      FOR_EACH_VEC_ELT (decl_list, ix, ntp)
	{
	  if (ntp->t == last)
	    continue;
	  fprintf (stream: file, format: "%d: ", ntp->num);
	  print_generic_decl (file, ntp->t, flags);
	  fprintf (stream: file, format: "\n");
	  last = ntp->t;
	}
    }
}
1043

source code of gcc/tree-dfa.cc