/* Dead code elimination pass for the GNU compiler.
   Copyright (C) 2002-2024 Free Software Foundation, Inc.
   Contributed by Ben Elliston <bje@redhat.com>
   and Andrew MacLeod <amacleod@redhat.com>
   Adapted to use control dependence by Steven Bosscher, SUSE Labs.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
22
/* Dead code elimination.

   References:

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.

   Dead-code elimination is the removal of statements which have no
   impact on the program's output.  "Dead statements" have no impact
   on the program's output, while "necessary statements" may have an
   impact on the output.

   The algorithm consists of three phases:
   1. Marking as necessary all statements known to be necessary,
      e.g. most function calls, writing a value to memory, etc;
   2. Propagating necessary statements, e.g., the statements
      giving values to operands in necessary statements; and
   3. Removing dead statements.  */
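
/* As a small, purely illustrative example of the three phases, in

     x = a + b;     <-- dead if x is never used again
     *p = a;        <-- necessary, it may write to visible memory

   phase 1 marks the store as necessary, phase 2 marks the statement
   defining 'a' because the store uses it, and phase 3 removes the
   unused addition.  */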
44
45#include "config.h"
46#include "system.h"
47#include "coretypes.h"
48#include "backend.h"
49#include "rtl.h"
50#include "tree.h"
51#include "gimple.h"
52#include "cfghooks.h"
53#include "tree-pass.h"
54#include "ssa.h"
55#include "gimple-pretty-print.h"
56#include "fold-const.h"
57#include "calls.h"
58#include "cfganal.h"
59#include "tree-eh.h"
60#include "gimplify.h"
61#include "gimple-iterator.h"
62#include "tree-cfg.h"
63#include "tree-ssa-loop-niter.h"
64#include "tree-into-ssa.h"
65#include "tree-dfa.h"
66#include "cfgloop.h"
67#include "tree-scalar-evolution.h"
68#include "tree-ssa-propagate.h"
69#include "gimple-fold.h"
70#include "tree-ssa.h"
71
72static struct stmt_stats
73{
74 int total;
75 int total_phis;
76 int removed;
77 int removed_phis;
78} stats;
79
80#define STMT_NECESSARY GF_PLF_1
81
82static vec<gimple *> worklist;
83
84/* Vector indicating an SSA name has already been processed and marked
85 as necessary. */
86static sbitmap processed;
87
88/* Vector indicating that the last statement of a basic block has already
89 been marked as necessary. */
90static sbitmap last_stmt_necessary;
91
92/* Vector indicating that BB contains statements that are live. */
93static sbitmap bb_contains_live_stmts;
94
95/* Before we can determine whether a control branch is dead, we need to
96 compute which blocks are control dependent on which edges.
97
98 We expect each block to be control dependent on very few edges so we
99 use a bitmap for each block recording its edges. An array holds the
100 bitmap. The Ith bit in the bitmap is set if that block is dependent
101 on the Ith edge. */
102static control_dependences *cd;
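
/* Purely as an illustration, for

     if (p)        <-- block 2, with a true edge and a false edge
       x = 1;      <-- block 3

   block 3 is control dependent on the true edge leaving block 2, so the
   bit for that edge's index (as assigned by the control_dependences
   instance) is set in block 3's bitmap.  */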
103
104/* Vector indicating that a basic block has already had all the edges
105 processed that it is control dependent on. */
106static sbitmap visited_control_parents;
107
108/* TRUE if this pass alters the CFG (by removing control statements).
109 FALSE otherwise.
110
111 If this pass alters the CFG, then it will arrange for the dominators
112 to be recomputed. */
113static bool cfg_altered;
114
/* When non-NULL, holds a map from basic block index into the postorder.  */
116static int *bb_postorder;
117
118
119/* True if we should treat any stmt with a vdef as necessary. */
120
121static inline bool
122keep_all_vdefs_p ()
123{
124 return optimize_debug;
125}
126
127/* If STMT is not already marked necessary, mark it, and add it to the
128 worklist if ADD_TO_WORKLIST is true. */
129
static inline void
mark_stmt_necessary (gimple *stmt, bool add_to_worklist)
{
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marking useful stmt: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (add_to_worklist)
    worklist.safe_push (stmt);
  if (add_to_worklist && bb_contains_live_stmts && !is_gimple_debug (stmt))
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
}
151
152
153/* Mark the statement defining operand OP as necessary. */
154
155static inline void
156mark_operand_necessary (tree op)
157{
158 gimple *stmt;
159 int ver;
160
161 gcc_assert (op);
162
163 ver = SSA_NAME_VERSION (op);
164 if (bitmap_bit_p (map: processed, bitno: ver))
165 {
166 stmt = SSA_NAME_DEF_STMT (op);
167 gcc_assert (gimple_nop_p (stmt)
168 || gimple_plf (stmt, STMT_NECESSARY));
169 return;
170 }
171 bitmap_set_bit (map: processed, bitno: ver);
172
173 stmt = SSA_NAME_DEF_STMT (op);
174 gcc_assert (stmt);
175
176 if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (g: stmt))
177 return;
178
179 if (dump_file && (dump_flags & TDF_DETAILS))
180 {
181 fprintf (stream: dump_file, format: "marking necessary through ");
182 print_generic_expr (dump_file, op);
183 fprintf (stream: dump_file, format: " stmt ");
184 print_gimple_stmt (dump_file, stmt, 0);
185 }
186
187 gimple_set_plf (stmt, STMT_NECESSARY, val_p: true);
188 if (bb_contains_live_stmts)
189 bitmap_set_bit (map: bb_contains_live_stmts, bitno: gimple_bb (g: stmt)->index);
190 worklist.safe_push (obj: stmt);
191}
192
193
194/* Mark STMT as necessary if it obviously is. Add it to the worklist if
195 it can make other statements necessary.
196
197 If AGGRESSIVE is false, control statements are conservatively marked as
198 necessary. */
199
200static void
201mark_stmt_if_obviously_necessary (gimple *stmt, bool aggressive)
202{
203 /* Statements that are implicitly live. Most function calls, asm
204 and return statements are required. Labels and GIMPLE_BIND nodes
205 are kept because they are control flow, and we have no way of
206 knowing whether they can be removed. DCE can eliminate all the
207 other statements in a block, and CFG can then remove the block
208 and labels. */
209 switch (gimple_code (g: stmt))
210 {
211 case GIMPLE_PREDICT:
212 case GIMPLE_LABEL:
213 mark_stmt_necessary (stmt, add_to_worklist: false);
214 return;
215
216 case GIMPLE_ASM:
217 case GIMPLE_RESX:
218 case GIMPLE_RETURN:
219 mark_stmt_necessary (stmt, add_to_worklist: true);
220 return;
221
222 case GIMPLE_CALL:
223 {
224 /* Never elide a noreturn call we pruned control-flow for. */
225 if ((gimple_call_flags (stmt) & ECF_NORETURN)
226 && gimple_call_ctrl_altering_p (gs: stmt))
227 {
228 mark_stmt_necessary (stmt, add_to_worklist: true);
229 return;
230 }
231
232 tree callee = gimple_call_fndecl (gs: stmt);
233 if (callee != NULL_TREE
234 && fndecl_built_in_p (node: callee, klass: BUILT_IN_NORMAL))
235 switch (DECL_FUNCTION_CODE (decl: callee))
236 {
237 case BUILT_IN_MALLOC:
238 case BUILT_IN_ALIGNED_ALLOC:
239 case BUILT_IN_CALLOC:
240 CASE_BUILT_IN_ALLOCA:
241 case BUILT_IN_STRDUP:
242 case BUILT_IN_STRNDUP:
243 case BUILT_IN_GOMP_ALLOC:
244 return;
245
246 default:;
247 }
248
249 if (callee != NULL_TREE
250 && flag_allocation_dce
251 && DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee))
252 return;
253
      /* IFN_GOACC_LOOP calls are necessary in that they are used to
	 represent parameters (i.e. step, bound) of a lowered OpenACC
	 partitioned loop.  But such a partitioned loop might not survive
	 aggressive loop removal because it has a loop exit and is assumed
	 to be finite.  Therefore, we need to explicitly mark these calls.
	 (An example is libgomp.oacc-c-c++-common/pr84955.c.)  */
260 if (gimple_call_internal_p (gs: stmt, fn: IFN_GOACC_LOOP))
261 {
262 mark_stmt_necessary (stmt, add_to_worklist: true);
263 return;
264 }
265 break;
266 }
267
268 case GIMPLE_DEBUG:
    /* Debug temps without a value are not useful.  ??? If we could
       easily locate the debug temp bind stmt for a use thereof,
       we could refrain from marking all debug temps here, and
       mark them only if they're used.  */
273 if (gimple_debug_nonbind_marker_p (s: stmt)
274 || !gimple_debug_bind_p (s: stmt)
275 || gimple_debug_bind_has_value_p (dbg: stmt)
276 || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
277 mark_stmt_necessary (stmt, add_to_worklist: false);
278 return;
279
280 case GIMPLE_GOTO:
281 gcc_assert (!simple_goto_p (stmt));
282 mark_stmt_necessary (stmt, add_to_worklist: true);
283 return;
284
285 case GIMPLE_COND:
286 gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2);
287 /* Fall through. */
288
289 case GIMPLE_SWITCH:
290 if (! aggressive)
291 mark_stmt_necessary (stmt, add_to_worklist: true);
292 break;
293
294 case GIMPLE_ASSIGN:
295 /* Mark indirect CLOBBERs to be lazily removed if their SSA operands
296 do not prevail. That also makes control flow leading to them
297 not necessary in aggressive mode. */
298 if (gimple_clobber_p (s: stmt) && !zero_ssa_operands (stmt, SSA_OP_USE))
299 return;
300 break;
301
302 default:
303 break;
304 }
305
306 /* If the statement has volatile operands, it needs to be preserved.
307 Same for statements that can alter control flow in unpredictable
308 ways. */
309 if (gimple_has_side_effects (stmt) || is_ctrl_altering_stmt (stmt))
310 {
311 mark_stmt_necessary (stmt, add_to_worklist: true);
312 return;
313 }
314
315 /* If a statement could throw, it can be deemed necessary unless we
316 are allowed to remove dead EH. Test this after checking for
317 new/delete operators since we always elide their EH. */
318 if (!cfun->can_delete_dead_exceptions
319 && stmt_could_throw_p (cfun, stmt))
320 {
321 mark_stmt_necessary (stmt, add_to_worklist: true);
322 return;
323 }
324
325 if ((gimple_vdef (g: stmt) && keep_all_vdefs_p ())
326 || stmt_may_clobber_global_p (stmt, false))
327 {
328 mark_stmt_necessary (stmt, add_to_worklist: true);
329 return;
330 }
331
332 return;
333}
334
335
336/* Mark the last statement of BB as necessary. */
337
338static bool
339mark_last_stmt_necessary (basic_block bb)
340{
341 if (!bitmap_set_bit (map: last_stmt_necessary, bitno: bb->index))
342 return true;
343
344 bitmap_set_bit (map: bb_contains_live_stmts, bitno: bb->index);
345
346 /* We actually mark the statement only if it is a control statement. */
347 gimple *stmt = *gsi_last_bb (bb);
348 if (stmt && is_ctrl_stmt (stmt))
349 {
350 mark_stmt_necessary (stmt, add_to_worklist: true);
351 return true;
352 }
353 return false;
354}
355
356
357/* Mark control dependent edges of BB as necessary. We have to do this only
358 once for each basic block so we set the appropriate bit after we're done.
359
360 When IGNORE_SELF is true, ignore BB in the list of control dependences. */
361
362static void
363mark_control_dependent_edges_necessary (basic_block bb, bool ignore_self)
364{
365 bitmap_iterator bi;
366 unsigned edge_number;
367 bool skipped = false;
368
369 gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));
370
371 if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
372 return;
373
374 EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index),
375 0, edge_number, bi)
376 {
377 basic_block cd_bb = cd->get_edge_src (edge_number);
378
379 if (ignore_self && cd_bb == bb)
380 {
381 skipped = true;
382 continue;
383 }
384
385 if (!mark_last_stmt_necessary (bb: cd_bb))
386 mark_control_dependent_edges_necessary (bb: cd_bb, ignore_self: false);
387 }
388
389 if (!skipped)
390 bitmap_set_bit (map: visited_control_parents, bitno: bb->index);
391}
392
393
/* Find obviously necessary statements.  These are things like most function
   calls, and stores to file level variables.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary; otherwise they are left for control dependence analysis to
   handle.  */
400
401static void
402find_obviously_necessary_stmts (bool aggressive)
403{
404 basic_block bb;
405 gimple_stmt_iterator gsi;
406 edge e;
407 gimple *phi, *stmt;
408 int flags;
409
410 FOR_EACH_BB_FN (bb, cfun)
411 {
412 /* PHI nodes are never inherently necessary. */
413 for (gsi = gsi_start_phis (bb); !gsi_end_p (i: gsi); gsi_next (i: &gsi))
414 {
415 phi = gsi_stmt (i: gsi);
416 gimple_set_plf (stmt: phi, STMT_NECESSARY, val_p: false);
417 }
418
419 /* Check all statements in the block. */
420 for (gsi = gsi_start_bb (bb); !gsi_end_p (i: gsi); gsi_next (i: &gsi))
421 {
422 stmt = gsi_stmt (i: gsi);
423 gimple_set_plf (stmt, STMT_NECESSARY, val_p: false);
424 mark_stmt_if_obviously_necessary (stmt, aggressive);
425 }
426 }
427
428 /* Pure and const functions are finite and thus have no infinite loops in
429 them. */
430 flags = flags_from_decl_or_type (current_function_decl);
431 if ((flags & (ECF_CONST|ECF_PURE)) && !(flags & ECF_LOOPING_CONST_OR_PURE))
432 return;
433
  /* Prevent empty, possibly infinite loops from being removed.  This is
     needed to make the logic in remove_dead_stmt work to identify the
     correct edge to keep when removing a controlling condition.  */
437 if (aggressive)
438 {
439 if (mark_irreducible_loops ())
440 FOR_EACH_BB_FN (bb, cfun)
441 {
442 edge_iterator ei;
443 FOR_EACH_EDGE (e, ei, bb->succs)
444 if ((e->flags & EDGE_DFS_BACK)
445 && (e->flags & EDGE_IRREDUCIBLE_LOOP))
446 {
447 if (dump_file)
448 fprintf (stream: dump_file, format: "Marking back edge of irreducible "
449 "loop %i->%i\n", e->src->index, e->dest->index);
450 mark_control_dependent_edges_necessary (bb: e->dest, ignore_self: false);
451 }
452 }
453
454 for (auto loop : loops_list (cfun, 0))
455 /* For loops without an exit do not mark any condition. */
456 if (loop->exits->next->e && !finite_loop_p (loop))
457 {
458 if (dump_file)
459 fprintf (stream: dump_file, format: "cannot prove finiteness of loop %i\n",
460 loop->num);
461 mark_control_dependent_edges_necessary (bb: loop->latch, ignore_self: false);
462 }
463 }
464}
465
466
467/* Return true if REF is based on an aliased base, otherwise false. */
468
469static bool
470ref_may_be_aliased (tree ref)
471{
472 if (TREE_CODE (ref) == WITH_SIZE_EXPR)
473 ref = TREE_OPERAND (ref, 0);
474 while (handled_component_p (t: ref))
475 ref = TREE_OPERAND (ref, 0);
476 if ((TREE_CODE (ref) == MEM_REF || TREE_CODE (ref) == TARGET_MEM_REF)
477 && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
478 ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
479 return !(DECL_P (ref)
480 && !may_be_aliased (var: ref));
481}
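
/* For example (illustrative only), given

     struct S { int i; } s;   <-- local whose address is never taken
     int *p = ...;

   a reference like s.i is based on the non-aliased decl 's', so
   ref_may_be_aliased returns false for it, while *p is a MEM_REF
   through a pointer and the function returns true.  */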
482
483static bitmap visited = NULL;
484static unsigned int longest_chain = 0;
485static unsigned int total_chain = 0;
486static unsigned int nr_walks = 0;
487static bool chain_ovfl = false;
488
489/* Worker for the walker that marks reaching definitions of REF,
490 which is based on a non-aliased decl, necessary. It returns
491 true whenever the defining statement of the current VDEF is
492 a kill for REF, as no dominating may-defs are necessary for REF
493 anymore. DATA points to the basic-block that contains the
494 stmt that refers to REF. */
495
496static bool
497mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
498{
499 gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
500
501 /* All stmts we visit are necessary. */
502 if (! gimple_clobber_p (s: def_stmt))
503 mark_operand_necessary (op: vdef);
504
505 /* If the stmt lhs kills ref, then we can stop walking. */
506 if (gimple_has_lhs (stmt: def_stmt)
507 && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME
508 /* The assignment is not necessarily carried out if it can throw
509 and we can catch it in the current function where we could inspect
510 the previous value.
511 ??? We only need to care about the RHS throwing. For aggregate
512 assignments or similar calls and non-call exceptions the LHS
513 might throw as well. */
514 && !stmt_can_throw_internal (cfun, def_stmt))
515 {
516 tree base, lhs = gimple_get_lhs (def_stmt);
517 poly_int64 size, offset, max_size;
518 bool reverse;
519 ao_ref_base (ref);
520 base
521 = get_ref_base_and_extent (lhs, &offset, &size, &max_size, &reverse);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
	 so base == ref->base does not always hold.  */
524 if (base == ref->base)
525 {
526 /* For a must-alias check we need to be able to constrain
527 the accesses properly. */
528 if (known_eq (size, max_size)
529 && known_subrange_p (pos1: ref->offset, size1: ref->max_size, pos2: offset, size2: size))
530 return true;
531 /* Or they need to be exactly the same. */
532 else if (ref->ref
533 /* Make sure there is no induction variable involved
534 in the references (gcc.c-torture/execute/pr42142.c).
535 The simplest way is to check if the kill dominates
536 the use. */
537 /* But when both are in the same block we cannot
538 easily tell whether we came from a backedge
539 unless we decide to compute stmt UIDs
540 (see PR58246). */
541 && (basic_block) data != gimple_bb (g: def_stmt)
542 && dominated_by_p (CDI_DOMINATORS, (basic_block) data,
543 gimple_bb (g: def_stmt))
544 && operand_equal_p (ref->ref, lhs, flags: 0))
545 return true;
546 }
547 }
548
549 /* Otherwise keep walking. */
550 return false;
551}
552
553static void
554mark_aliased_reaching_defs_necessary (gimple *stmt, tree ref)
555{
556 /* Should have been caught before calling this function. */
557 gcc_checking_assert (!keep_all_vdefs_p ());
558
559 unsigned int chain;
560 ao_ref refd;
561 gcc_assert (!chain_ovfl);
562 ao_ref_init (&refd, ref);
563 chain = walk_aliased_vdefs (&refd, gimple_vuse (g: stmt),
564 mark_aliased_reaching_defs_necessary_1,
565 gimple_bb (g: stmt), NULL);
566 if (chain > longest_chain)
567 longest_chain = chain;
568 total_chain += chain;
569 nr_walks++;
570}
571
572/* Worker for the walker that marks reaching definitions of REF, which
573 is not based on a non-aliased decl. For simplicity we need to end
574 up marking all may-defs necessary that are not based on a non-aliased
575 decl. The only job of this walker is to skip may-defs based on
576 a non-aliased decl. */
577
578static bool
579mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
580 tree vdef, void *data ATTRIBUTE_UNUSED)
581{
582 gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
583
584 /* We have to skip already visited (and thus necessary) statements
585 to make the chaining work after we dropped back to simple mode. */
586 if (chain_ovfl
587 && bitmap_bit_p (map: processed, SSA_NAME_VERSION (vdef)))
588 {
589 gcc_assert (gimple_nop_p (def_stmt)
590 || gimple_plf (def_stmt, STMT_NECESSARY));
591 return false;
592 }
593
594 /* We want to skip stores to non-aliased variables. */
595 if (!chain_ovfl
596 && gimple_assign_single_p (gs: def_stmt))
597 {
598 tree lhs = gimple_assign_lhs (gs: def_stmt);
599 if (!ref_may_be_aliased (ref: lhs))
600 return false;
601 }
602
  /* We want to skip statements that do not constitute stores but have
     a virtual definition.  */
605 if (gcall *call = dyn_cast <gcall *> (p: def_stmt))
606 {
607 tree callee = gimple_call_fndecl (gs: call);
608 if (callee != NULL_TREE
609 && fndecl_built_in_p (node: callee, klass: BUILT_IN_NORMAL))
610 switch (DECL_FUNCTION_CODE (decl: callee))
611 {
612 case BUILT_IN_MALLOC:
613 case BUILT_IN_ALIGNED_ALLOC:
614 case BUILT_IN_CALLOC:
615 CASE_BUILT_IN_ALLOCA:
616 case BUILT_IN_FREE:
617 case BUILT_IN_GOMP_ALLOC:
618 case BUILT_IN_GOMP_FREE:
619 return false;
620
621 default:;
622 }
623
624 if (callee != NULL_TREE
625 && (DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee)
626 || DECL_IS_OPERATOR_DELETE_P (callee))
627 && gimple_call_from_new_or_delete (s: call))
628 return false;
629 }
630
631 if (! gimple_clobber_p (s: def_stmt))
632 mark_operand_necessary (op: vdef);
633
634 return false;
635}
636
637static void
638mark_all_reaching_defs_necessary (gimple *stmt)
639{
640 /* Should have been caught before calling this function. */
641 gcc_checking_assert (!keep_all_vdefs_p ());
642 walk_aliased_vdefs (NULL, gimple_vuse (g: stmt),
643 mark_all_reaching_defs_necessary_1, NULL, &visited);
644}
645
/* Return true for PHI nodes with one or identical arguments; such
   PHIs can be removed.  */
648static bool
649degenerate_phi_p (gimple *phi)
650{
651 unsigned int i;
652 tree op = gimple_phi_arg_def (gs: phi, index: 0);
653 for (i = 1; i < gimple_phi_num_args (gs: phi); i++)
654 if (gimple_phi_arg_def (gs: phi, index: i) != op)
655 return false;
656 return true;
657}
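
/* E.g. x_4 = PHI <a_2, a_2, a_2> is degenerate in this sense, while
   x_4 = PHI <a_2, b_3> is not (SSA names purely illustrative).  */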
658
/* Return true if NEW_CALL and DELETE_CALL are a valid pair of new
   and delete operators.  */
661
662static bool
663valid_new_delete_pair_p (gimple *new_call, gimple *delete_call)
664{
665 tree new_asm = DECL_ASSEMBLER_NAME (gimple_call_fndecl (new_call));
666 tree delete_asm = DECL_ASSEMBLER_NAME (gimple_call_fndecl (delete_call));
667 return valid_new_delete_pair_p (new_asm, delete_asm);
668}
669
/* Propagate necessity using the operands of necessary statements.
   Process the uses on each statement in the worklist, and add all
   feeding statements which contribute to the calculation of this
   value to the worklist.

   In conservative mode (AGGRESSIVE is false), control dependences are
   not used; control statements were already marked as necessary.  */
676
677static void
678propagate_necessity (bool aggressive)
679{
680 gimple *stmt;
681
682 if (dump_file && (dump_flags & TDF_DETAILS))
683 fprintf (stream: dump_file, format: "\nProcessing worklist:\n");
684
685 while (worklist.length () > 0)
686 {
687 /* Take STMT from worklist. */
688 stmt = worklist.pop ();
689
690 if (dump_file && (dump_flags & TDF_DETAILS))
691 {
692 fprintf (stream: dump_file, format: "processing: ");
693 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
694 fprintf (stream: dump_file, format: "\n");
695 }
696
697 if (aggressive)
698 {
699 /* Mark the last statement of the basic blocks on which the block
700 containing STMT is control dependent, but only if we haven't
701 already done so. */
702 basic_block bb = gimple_bb (g: stmt);
703 if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
704 && !bitmap_bit_p (map: visited_control_parents, bitno: bb->index))
705 mark_control_dependent_edges_necessary (bb, ignore_self: false);
706 }
707
708 if (gimple_code (g: stmt) == GIMPLE_PHI
709 /* We do not process virtual PHI nodes nor do we track their
710 necessity. */
711 && !virtual_operand_p (op: gimple_phi_result (gs: stmt)))
712 {
713 /* PHI nodes are somewhat special in that each PHI alternative has
714 data and control dependencies. All the statements feeding the
715 PHI node's arguments are always necessary. In aggressive mode,
716 we also consider the control dependent edges leading to the
717 predecessor block associated with each PHI alternative as
718 necessary. */
719 gphi *phi = as_a <gphi *> (p: stmt);
720 size_t k;
721
722 for (k = 0; k < gimple_phi_num_args (gs: stmt); k++)
723 {
724 tree arg = PHI_ARG_DEF (stmt, k);
725 if (TREE_CODE (arg) == SSA_NAME)
726 mark_operand_necessary (op: arg);
727 }
728
	  /* For PHI operands it matters from where the control flow
	     arrives at the BB.  Consider the following example:

	     a=exp1;
	     b=exp2;
	     if (test)
		;
	     else
		;
	     c=PHI(a,b)

	     We need to mark the control dependences of the empty basic
	     blocks, since they contain computations of PHI operands.

	     Doing so is too restrictive in the case the predecessor
	     block is in a loop.  Consider:

	     if (b)
	       {
		 int i;
		 for (i = 0; i<1000; ++i)
		   ;
		 j = 0;
	       }
	     return j;

	     There is a PHI for j in the BB containing the return statement.
	     In this case the control dependences of the predecessor block
	     (the one within the empty loop) also contain the block
	     determining the number of iterations of the loop, which would
	     prevent removal of the empty loop in this case.

	     This scenario can be avoided by splitting critical edges.
	     To save the critical edge splitting pass we identify how the
	     control dependence would look if the edge were split.

	     Consider the modified CFG created from the current CFG by
	     splitting edge B->C.  In the postdominance tree of the modified
	     CFG, C' is always a child of C.  There are two cases for what
	     the children of C' can look like:

	     1) C' is a leaf

		In this case the only basic block C' is control dependent
		on is B.

	     2) C' has a single child that is B

		In this case the control dependence of C' is the same as
		the control dependence of B in the original CFG except for
		block B itself (since C' postdominates B in the modified
		CFG).

	     Now how do we decide which case happens?  There are two basic
	     options:

	     a) C postdominates B.  Then C immediately postdominates B and
		case 2 happens iff there is no other way from B to C except
		the edge B->C.

		There is another way from B to C iff there is a successor
		of B that is not postdominated by B.  Testing this condition
		is somewhat expensive, because we need to iterate over all
		successors of B.  We are safe to assume that this does not
		happen: we will mark B as needed when processing the other
		path from B to C that is control dependent on B, and marking
		the control dependencies of B itself is harmless because
		they will be processed anyway after processing the control
		statement in B.

	     b) C does not postdominate B.  Case 1 always happens since
		there is a path from C to the exit that does not go through
		B and thus also not through C'.  */
797
798 if (aggressive && !degenerate_phi_p (phi: stmt))
799 {
800 for (k = 0; k < gimple_phi_num_args (gs: stmt); k++)
801 {
802 basic_block arg_bb = gimple_phi_arg_edge (phi, i: k)->src;
803
804 if (gimple_bb (g: stmt)
805 != get_immediate_dominator (CDI_POST_DOMINATORS, arg_bb))
806 {
807 if (!mark_last_stmt_necessary (bb: arg_bb))
808 mark_control_dependent_edges_necessary (bb: arg_bb, ignore_self: false);
809 }
810 else if (arg_bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
811 && !bitmap_bit_p (map: visited_control_parents,
812 bitno: arg_bb->index))
813 mark_control_dependent_edges_necessary (bb: arg_bb, ignore_self: true);
814 }
815 }
816 }
817 else
818 {
819 /* Propagate through the operands. Examine all the USE, VUSE and
820 VDEF operands in this statement. Mark all the statements
821 which feed this statement's uses as necessary. */
822 ssa_op_iter iter;
823 tree use;
824
825 /* If this is a call to free which is directly fed by an
826 allocation function do not mark that necessary through
827 processing the argument. */
828 bool is_delete_operator
829 = (is_gimple_call (gs: stmt)
830 && gimple_call_from_new_or_delete (s: as_a <gcall *> (p: stmt))
831 && gimple_call_operator_delete_p (as_a <gcall *> (p: stmt)));
832 if (is_delete_operator
833 || gimple_call_builtin_p (stmt, BUILT_IN_FREE)
834 || gimple_call_builtin_p (stmt, BUILT_IN_GOMP_FREE))
835 {
836 tree ptr = gimple_call_arg (gs: stmt, index: 0);
837 gcall *def_stmt;
838 tree def_callee;
839 /* If the pointer we free is defined by an allocation
840 function do not add the call to the worklist. */
841 if (TREE_CODE (ptr) == SSA_NAME
842 && (def_stmt = dyn_cast <gcall *> (SSA_NAME_DEF_STMT (ptr)))
843 && (def_callee = gimple_call_fndecl (gs: def_stmt))
844 && ((DECL_BUILT_IN_CLASS (def_callee) == BUILT_IN_NORMAL
845 && (DECL_FUNCTION_CODE (decl: def_callee) == BUILT_IN_ALIGNED_ALLOC
846 || DECL_FUNCTION_CODE (decl: def_callee) == BUILT_IN_MALLOC
847 || DECL_FUNCTION_CODE (decl: def_callee) == BUILT_IN_CALLOC
848 || DECL_FUNCTION_CODE (decl: def_callee) == BUILT_IN_GOMP_ALLOC))
849 || (DECL_IS_REPLACEABLE_OPERATOR_NEW_P (def_callee)
850 && gimple_call_from_new_or_delete (s: def_stmt))))
851 {
852 if (is_delete_operator
853 && !valid_new_delete_pair_p (new_call: def_stmt, delete_call: stmt))
854 mark_operand_necessary (op: gimple_call_arg (gs: stmt, index: 0));
855
856 /* Delete operators can have alignment and (or) size
857 as next arguments. When being a SSA_NAME, they
858 must be marked as necessary. Similarly GOMP_free. */
859 if (gimple_call_num_args (gs: stmt) >= 2)
860 for (unsigned i = 1; i < gimple_call_num_args (gs: stmt);
861 i++)
862 {
863 tree arg = gimple_call_arg (gs: stmt, index: i);
864 if (TREE_CODE (arg) == SSA_NAME)
865 mark_operand_necessary (op: arg);
866 }
867
868 continue;
869 }
870 }
871
872 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
873 mark_operand_necessary (op: use);
874
875 use = gimple_vuse (g: stmt);
876 if (!use)
877 continue;
878
	  /* No need to search for vdefs if we intrinsically keep them all.  */
880 if (keep_all_vdefs_p ())
881 continue;
882
883 /* If we dropped to simple mode make all immediately
884 reachable definitions necessary. */
885 if (chain_ovfl)
886 {
887 mark_all_reaching_defs_necessary (stmt);
888 continue;
889 }
890
	  /* For statements that may load from memory (have a VUSE) we
	     have to mark all reaching (may-)definitions as necessary.
	     We partition this task into two cases:
	      1) explicit loads based on decls that are not aliased
	      2) implicit loads (like calls) and explicit loads not
		 based on decls that are not aliased (like indirect
		 references or loads from globals)
	     For 1) we mark all reaching may-defs as necessary, stopping
	     at dominating kills.  For 2) we want to mark all dominating
	     references necessary, except the non-aliased ones which we
	     handle in 1).  By keeping a global visited bitmap for the
	     references we walk for 2) we avoid quadratic behavior for
	     those.  */
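
	  /* Illustrative example: a load such as tmp_1 = s.f, with 's' a
	     non-aliased local, falls under 1) and the walk can stop at a
	     dominating store to s.f; a load such as tmp_2 = *p_3, or the
	     implicit loads of a call, falls under 2).  */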
903
904 if (gcall *call = dyn_cast <gcall *> (p: stmt))
905 {
906 tree callee = gimple_call_fndecl (gs: call);
907 unsigned i;
908
909 /* Calls to functions that are merely acting as barriers
910 or that only store to memory do not make any previous
911 stores necessary. */
912 if (callee != NULL_TREE
913 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
914 && (DECL_FUNCTION_CODE (decl: callee) == BUILT_IN_MEMSET
915 || DECL_FUNCTION_CODE (decl: callee) == BUILT_IN_MEMSET_CHK
916 || DECL_FUNCTION_CODE (decl: callee) == BUILT_IN_MALLOC
917 || DECL_FUNCTION_CODE (decl: callee) == BUILT_IN_ALIGNED_ALLOC
918 || DECL_FUNCTION_CODE (decl: callee) == BUILT_IN_CALLOC
919 || DECL_FUNCTION_CODE (decl: callee) == BUILT_IN_FREE
920 || DECL_FUNCTION_CODE (decl: callee) == BUILT_IN_VA_END
921 || ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (callee))
922 || DECL_FUNCTION_CODE (decl: callee) == BUILT_IN_STACK_SAVE
923 || DECL_FUNCTION_CODE (decl: callee) == BUILT_IN_STACK_RESTORE
924 || DECL_FUNCTION_CODE (decl: callee) == BUILT_IN_ASSUME_ALIGNED))
925 continue;
926
927 if (callee != NULL_TREE
928 && (DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee)
929 || DECL_IS_OPERATOR_DELETE_P (callee))
930 && gimple_call_from_new_or_delete (s: call))
931 continue;
932
933 /* Calls implicitly load from memory, their arguments
934 in addition may explicitly perform memory loads. */
935 mark_all_reaching_defs_necessary (stmt: call);
936 for (i = 0; i < gimple_call_num_args (gs: call); ++i)
937 {
938 tree arg = gimple_call_arg (gs: call, index: i);
939 if (TREE_CODE (arg) == SSA_NAME
940 || is_gimple_min_invariant (arg))
941 continue;
942 if (TREE_CODE (arg) == WITH_SIZE_EXPR)
943 arg = TREE_OPERAND (arg, 0);
944 if (!ref_may_be_aliased (ref: arg))
945 mark_aliased_reaching_defs_necessary (stmt: call, ref: arg);
946 }
947 }
948 else if (gimple_assign_single_p (gs: stmt))
949 {
950 tree rhs;
951 /* If this is a load mark things necessary. */
952 rhs = gimple_assign_rhs1 (gs: stmt);
953 if (TREE_CODE (rhs) != SSA_NAME
954 && !is_gimple_min_invariant (rhs)
955 && TREE_CODE (rhs) != CONSTRUCTOR)
956 {
957 if (!ref_may_be_aliased (ref: rhs))
958 mark_aliased_reaching_defs_necessary (stmt, ref: rhs);
959 else
960 mark_all_reaching_defs_necessary (stmt);
961 }
962 }
963 else if (greturn *return_stmt = dyn_cast <greturn *> (p: stmt))
964 {
965 tree rhs = gimple_return_retval (gs: return_stmt);
966 /* A return statement may perform a load. */
967 if (rhs
968 && TREE_CODE (rhs) != SSA_NAME
969 && !is_gimple_min_invariant (rhs)
970 && TREE_CODE (rhs) != CONSTRUCTOR)
971 {
972 if (!ref_may_be_aliased (ref: rhs))
973 mark_aliased_reaching_defs_necessary (stmt, ref: rhs);
974 else
975 mark_all_reaching_defs_necessary (stmt);
976 }
977 }
978 else if (gasm *asm_stmt = dyn_cast <gasm *> (p: stmt))
979 {
980 unsigned i;
981 mark_all_reaching_defs_necessary (stmt);
982 /* Inputs may perform loads. */
983 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
984 {
985 tree op = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
986 if (TREE_CODE (op) != SSA_NAME
987 && !is_gimple_min_invariant (op)
988 && TREE_CODE (op) != CONSTRUCTOR
989 && !ref_may_be_aliased (ref: op))
990 mark_aliased_reaching_defs_necessary (stmt, ref: op);
991 }
992 }
993 else if (gimple_code (g: stmt) == GIMPLE_TRANSACTION)
994 {
995 /* The beginning of a transaction is a memory barrier. */
996 /* ??? If we were really cool, we'd only be a barrier
997 for the memories touched within the transaction. */
998 mark_all_reaching_defs_necessary (stmt);
999 }
1000 else
1001 gcc_unreachable ();
1002
	  /* If we over-used our alias oracle budget, drop to simple
	     mode.  The cost metric allows quadratic behavior
	     (number of uses times number of may-defs queries) up to
	     a constant maximal number of queries and after that falls
	     back to super-linear complexity.  */
1008 if (/* Constant but quadratic for small functions. */
1009 total_chain > 128 * 128
1010 /* Linear in the number of may-defs. */
1011 && total_chain > 32 * longest_chain
1012 /* Linear in the number of uses. */
1013 && total_chain > nr_walks * 32)
1014 {
1015 chain_ovfl = true;
1016 if (visited)
1017 bitmap_clear (visited);
1018 }
1019 }
1020 }
1021}
1022
1023/* Remove dead PHI nodes from block BB. */
1024
1025static bool
1026remove_dead_phis (basic_block bb)
1027{
1028 bool something_changed = false;
1029 gphi *phi;
1030 gphi_iterator gsi;
1031
1032 for (gsi = gsi_start_phis (bb); !gsi_end_p (i: gsi);)
1033 {
1034 stats.total_phis++;
1035 phi = gsi.phi ();
1036
1037 /* We do not track necessity of virtual PHI nodes. Instead do
1038 very simple dead PHI removal here. */
1039 if (virtual_operand_p (op: gimple_phi_result (gs: phi)))
1040 {
1041 /* Virtual PHI nodes with one or identical arguments
1042 can be removed. */
1043 if (!loops_state_satisfies_p (flags: LOOP_CLOSED_SSA)
1044 && degenerate_phi_p (phi))
1045 {
1046 tree vdef = gimple_phi_result (gs: phi);
1047 tree vuse = gimple_phi_arg_def (gs: phi, index: 0);
1048
1049 use_operand_p use_p;
1050 imm_use_iterator iter;
1051 gimple *use_stmt;
1052 FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
1053 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1054 SET_USE (use_p, vuse);
1055 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)
1056 && TREE_CODE (vuse) == SSA_NAME)
1057 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
1058 }
1059 else
1060 gimple_set_plf (stmt: phi, STMT_NECESSARY, val_p: true);
1061 }
1062
1063 if (!gimple_plf (stmt: phi, STMT_NECESSARY))
1064 {
1065 something_changed = true;
1066 if (dump_file && (dump_flags & TDF_DETAILS))
1067 {
1068 fprintf (stream: dump_file, format: "Deleting : ");
1069 print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
1070 fprintf (stream: dump_file, format: "\n");
1071 }
1072
1073 remove_phi_node (&gsi, true);
1074 stats.removed_phis++;
1075 continue;
1076 }
1077
1078 gsi_next (i: &gsi);
1079 }
1080 return something_changed;
1081}
1082
1083
1084/* Remove dead statement pointed to by iterator I. Receives the basic block BB
1085 containing I so that we don't have to look it up. */
1086
1087static void
1088remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb,
1089 vec<edge> &to_remove_edges)
1090{
1091 gimple *stmt = gsi_stmt (i: *i);
1092
1093 if (dump_file && (dump_flags & TDF_DETAILS))
1094 {
1095 fprintf (stream: dump_file, format: "Deleting : ");
1096 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1097 fprintf (stream: dump_file, format: "\n");
1098 }
1099
1100 stats.removed++;
1101
  /* If we have determined that a conditional branch statement contributes
     nothing to the program, then we not only remove it, but we also need to
     update the CFG.  We can choose any of the edges out of BB as long as we
     are sure not to close an infinite loop.  This is done by always choosing
     the edge closer to the exit in inverted_rev_post_order_compute order.  */
1107 if (is_ctrl_stmt (stmt))
1108 {
1109 edge_iterator ei;
1110 edge e = NULL, e2;
1111
1112 /* See if there is only one non-abnormal edge. */
1113 if (single_succ_p (bb))
1114 e = single_succ_edge (bb);
      /* Otherwise choose one that is closer to a bb with a live statement
	 in it.  To be able to choose one, we compute an inverted post order
	 starting from all BBs with live statements.  */
1118 if (!e)
1119 {
1120 if (!bb_postorder)
1121 {
1122 int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
1123 int n = inverted_rev_post_order_compute (cfun, rpo,
1124 start_points: &bb_contains_live_stmts);
1125 bb_postorder = XNEWVEC (int, last_basic_block_for_fn (cfun));
1126 for (int i = 0; i < n; ++i)
1127 bb_postorder[rpo[i]] = i;
1128 free (ptr: rpo);
1129 }
1130 FOR_EACH_EDGE (e2, ei, bb->succs)
1131 if (!e || e2->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
1132 || bb_postorder [e->dest->index]
1133 >= bb_postorder [e2->dest->index])
1134 e = e2;
1135 }
1136 gcc_assert (e);
1137 e->probability = profile_probability::always ();
1138
1139 /* The edge is no longer associated with a conditional, so it does
1140 not have TRUE/FALSE flags.
1141 We are also safe to drop EH/ABNORMAL flags and turn them into
1142 normal control flow, because we know that all the destinations (including
1143 those odd edges) are equivalent for program execution. */
1144 e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE | EDGE_EH | EDGE_ABNORMAL);
1145
1146 /* The lone outgoing edge from BB will be a fallthru edge. */
1147 e->flags |= EDGE_FALLTHRU;
1148
1149 /* Remove the remaining outgoing edges. */
1150 FOR_EACH_EDGE (e2, ei, bb->succs)
1151 if (e != e2)
1152 {
1153 /* If we made a BB unconditionally exit a loop or removed
1154 an entry into an irreducible region, then this transform
1155 alters the set of BBs in the loop. Schedule a fixup. */
1156 if (loop_exit_edge_p (bb->loop_father, e)
1157 || (e2->dest->flags & BB_IRREDUCIBLE_LOOP))
1158 loops_state_set (flags: LOOPS_NEED_FIXUP);
1159 to_remove_edges.safe_push (obj: e2);
1160 }
1161 }
1162
1163 /* If this is a store into a variable that is being optimized away,
1164 add a debug bind stmt if possible. */
1165 if (MAY_HAVE_DEBUG_BIND_STMTS
1166 && gimple_assign_single_p (gs: stmt)
1167 && is_gimple_val (gimple_assign_rhs1 (gs: stmt)))
1168 {
1169 tree lhs = gimple_assign_lhs (gs: stmt);
1170 if ((VAR_P (lhs) || TREE_CODE (lhs) == PARM_DECL)
1171 && !DECL_IGNORED_P (lhs)
1172 && is_gimple_reg_type (TREE_TYPE (lhs))
1173 && !is_global_var (t: lhs)
1174 && !DECL_HAS_VALUE_EXPR_P (lhs))
1175 {
1176 tree rhs = gimple_assign_rhs1 (gs: stmt);
1177 gdebug *note
1178 = gimple_build_debug_bind (lhs, unshare_expr (rhs), stmt);
1179 gsi_insert_after (i, note, GSI_SAME_STMT);
1180 }
1181 }
1182
1183 unlink_stmt_vdef (stmt);
1184 gsi_remove (i, true);
1185 release_defs (stmt);
1186}
1187
1188/* Helper for maybe_optimize_arith_overflow. Find in *TP if there are any
1189 uses of data (SSA_NAME) other than REALPART_EXPR referencing it. */
1190
1191static tree
1192find_non_realpart_uses (tree *tp, int *walk_subtrees, void *data)
1193{
1194 if (TYPE_P (*tp) || TREE_CODE (*tp) == REALPART_EXPR)
1195 *walk_subtrees = 0;
1196 if (*tp == (tree) data)
1197 return *tp;
1198 return NULL_TREE;
1199}
1200
1201/* If the IMAGPART_EXPR of the {ADD,SUB,MUL}_OVERFLOW result is never used,
1202 but REALPART_EXPR is, optimize the {ADD,SUB,MUL}_OVERFLOW internal calls
1203 into plain unsigned {PLUS,MINUS,MULT}_EXPR, and if needed reset debug
1204 uses. */
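
/* A sketch of the transform (illustrative GIMPLE), assuming only the
   real part of the result is used:

     _5 = .ADD_OVERFLOW (a_1, b_2);
     r_6 = REALPART_EXPR <_5>;

   becomes, modulo the exact types,

     _5 = COMPLEX_EXPR <(int) ((unsigned) a_1 + (unsigned) b_2), 0>;
     r_6 = REALPART_EXPR <_5>;  */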
1205
1206static void
1207maybe_optimize_arith_overflow (gimple_stmt_iterator *gsi,
1208 enum tree_code subcode)
1209{
1210 gimple *stmt = gsi_stmt (i: *gsi);
1211 tree lhs = gimple_call_lhs (gs: stmt);
1212
1213 if (lhs == NULL || TREE_CODE (lhs) != SSA_NAME)
1214 return;
1215
1216 imm_use_iterator imm_iter;
1217 use_operand_p use_p;
1218 bool has_debug_uses = false;
1219 bool has_realpart_uses = false;
1220 bool has_other_uses = false;
1221 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, lhs)
1222 {
1223 gimple *use_stmt = USE_STMT (use_p);
1224 if (is_gimple_debug (gs: use_stmt))
1225 has_debug_uses = true;
1226 else if (is_gimple_assign (gs: use_stmt)
1227 && gimple_assign_rhs_code (gs: use_stmt) == REALPART_EXPR
1228 && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == lhs)
1229 has_realpart_uses = true;
1230 else
1231 {
1232 has_other_uses = true;
1233 break;
1234 }
1235 }
1236
1237 if (!has_realpart_uses || has_other_uses)
1238 return;
1239
1240 tree arg0 = gimple_call_arg (gs: stmt, index: 0);
1241 tree arg1 = gimple_call_arg (gs: stmt, index: 1);
1242 location_t loc = gimple_location (g: stmt);
1243 tree type = TREE_TYPE (TREE_TYPE (lhs));
1244 tree utype = unsigned_type_for (type);
1245 tree result = fold_build2_loc (loc, subcode, utype,
1246 fold_convert_loc (loc, utype, arg0),
1247 fold_convert_loc (loc, utype, arg1));
1248 result = fold_convert_loc (loc, type, result);
1249
1250 if (has_debug_uses)
1251 {
1252 gimple *use_stmt;
1253 FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, lhs)
1254 {
1255 if (!gimple_debug_bind_p (s: use_stmt))
1256 continue;
1257 tree v = gimple_debug_bind_get_value (dbg: use_stmt);
1258 if (walk_tree (&v, find_non_realpart_uses, lhs, NULL))
1259 {
1260 gimple_debug_bind_reset_value (dbg: use_stmt);
1261 update_stmt (s: use_stmt);
1262 }
1263 }
1264 }
1265
1266 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
1267 result = drop_tree_overflow (result);
1268 tree overflow = build_zero_cst (type);
1269 tree ctype = build_complex_type (type);
1270 if (TREE_CODE (result) == INTEGER_CST)
1271 result = build_complex (ctype, result, overflow);
1272 else
1273 result = build2_loc (loc: gimple_location (g: stmt), code: COMPLEX_EXPR,
1274 type: ctype, arg0: result, arg1: overflow);
1275
1276 if (dump_file && (dump_flags & TDF_DETAILS))
1277 {
1278 fprintf (stream: dump_file, format: "Transforming call: ");
1279 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1280 fprintf (stream: dump_file, format: "because the overflow result is never used into: ");
1281 print_generic_stmt (dump_file, result, TDF_SLIM);
1282 fprintf (stream: dump_file, format: "\n");
1283 }
1284
1285 gimplify_and_update_call_from_tree (gsi, result);
1286}
1287
1288/* Returns whether the control parents of BB are preserved. */
1289
1290static bool
1291control_parents_preserved_p (basic_block bb)
1292{
1293 /* If we marked the control parents from BB they are preserved. */
1294 if (bitmap_bit_p (map: visited_control_parents, bitno: bb->index))
1295 return true;
1296
1297 /* But they can also end up being marked from elsewhere. */
1298 bitmap_iterator bi;
1299 unsigned edge_number;
1300 EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index),
1301 0, edge_number, bi)
1302 {
1303 basic_block cd_bb = cd->get_edge_src (edge_number);
1304 if (cd_bb != bb
1305 && !bitmap_bit_p (map: last_stmt_necessary, bitno: cd_bb->index))
1306 return false;
1307 }
1308 /* And cache the result. */
1309 bitmap_set_bit (map: visited_control_parents, bitno: bb->index);
1310 return true;
1311}
1312
1313/* Eliminate unnecessary statements. Any instruction not marked as necessary
1314 contributes nothing to the program, and can be deleted. */
1315
1316static bool
1317eliminate_unnecessary_stmts (bool aggressive)
1318{
1319 bool something_changed = false;
1320 basic_block bb;
1321 gimple_stmt_iterator gsi, psi;
1322 gimple *stmt;
1323 tree call;
1324 auto_vec<edge> to_remove_edges;
1325
1326 if (dump_file && (dump_flags & TDF_DETAILS))
1327 fprintf (stream: dump_file, format: "\nEliminating unnecessary statements:\n");
1328
1329 bool had_setjmp = cfun->calls_setjmp;
1330 clear_special_calls ();
1331
1332 /* Walking basic blocks and statements in reverse order avoids
1333 releasing SSA names before any other DEFs that refer to them are
1334 released. This helps avoid loss of debug information, as we get
1335 a chance to propagate all RHSs of removed SSAs into debug uses,
1336 rather than only the latest ones. E.g., consider:
1337
1338 x_3 = y_1 + z_2;
1339 a_5 = x_3 - b_4;
1340 # DEBUG a => a_5
1341
1342 If we were to release x_3 before a_5, when we reached a_5 and
1343 tried to substitute it into the debug stmt, we'd see x_3 there,
1344 but x_3's DEF, type, etc would have already been disconnected.
1345 By going backwards, the debug stmt first changes to:
1346
1347 # DEBUG a => x_3 - b_4
1348
1349 and then to:
1350
1351 # DEBUG a => y_1 + z_2 - b_4
1352
1353 as desired. */
1354 gcc_assert (dom_info_available_p (CDI_DOMINATORS));
1355 auto_vec<basic_block> h;
1356 h = get_all_dominated_blocks (CDI_DOMINATORS,
1357 single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
1358
1359 while (h.length ())
1360 {
1361 bb = h.pop ();
1362
1363 /* Remove dead statements. */
1364 auto_bitmap debug_seen;
1365 for (gsi = gsi_last_bb (bb); !gsi_end_p (i: gsi); gsi = psi)
1366 {
1367 stmt = gsi_stmt (i: gsi);
1368
1369 psi = gsi;
1370 gsi_prev (i: &psi);
1371
1372 stats.total++;
1373
1374 /* We can mark a call to free as not necessary if the
1375 defining statement of its argument is not necessary
1376 (and thus is getting removed). */
1377 if (gimple_plf (stmt, STMT_NECESSARY)
1378 && (gimple_call_builtin_p (stmt, BUILT_IN_FREE)
1379 || (is_gimple_call (gs: stmt)
1380 && gimple_call_from_new_or_delete (s: as_a <gcall *> (p: stmt))
1381 && gimple_call_operator_delete_p (as_a <gcall *> (p: stmt)))))
1382 {
1383 tree ptr = gimple_call_arg (gs: stmt, index: 0);
1384 if (TREE_CODE (ptr) == SSA_NAME)
1385 {
1386 gimple *def_stmt = SSA_NAME_DEF_STMT (ptr);
1387 if (!gimple_nop_p (g: def_stmt)
1388 && !gimple_plf (stmt: def_stmt, STMT_NECESSARY))
1389 gimple_set_plf (stmt, STMT_NECESSARY, val_p: false);
1390 }
1391 }
1392
1393 /* If GSI is not necessary then remove it. */
1394 if (!gimple_plf (stmt, STMT_NECESSARY))
1395 {
	      /* Keep those clobbers that we are able to keep live.  */
1397 if (gimple_clobber_p (s: stmt))
1398 {
1399 ssa_op_iter iter;
1400 use_operand_p use_p;
1401 bool dead = false;
1402 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
1403 {
1404 tree name = USE_FROM_PTR (use_p);
1405 if (!SSA_NAME_IS_DEFAULT_DEF (name)
1406 && !bitmap_bit_p (map: processed, SSA_NAME_VERSION (name)))
1407 {
1408 dead = true;
1409 break;
1410 }
1411 }
1412 if (!dead
1413 /* When doing CD-DCE we have to ensure all controls
1414 of the stmt are still live. */
1415 && (!aggressive || control_parents_preserved_p (bb)))
1416 {
1417 bitmap_clear (debug_seen);
1418 continue;
1419 }
1420 }
1421 if (!is_gimple_debug (gs: stmt))
1422 something_changed = true;
1423 remove_dead_stmt (i: &gsi, bb, to_remove_edges);
1424 continue;
1425 }
1426 else if (is_gimple_call (gs: stmt))
1427 {
1428 tree name = gimple_call_lhs (gs: stmt);
1429
1430 notice_special_calls (as_a <gcall *> (p: stmt));
1431
1432 /* When LHS of var = call (); is dead, simplify it into
1433 call (); saving one operand. */
1434 if (name
1435 && TREE_CODE (name) == SSA_NAME
1436 && !bitmap_bit_p (map: processed, SSA_NAME_VERSION (name))
1437 /* Avoid doing so for allocation calls which we
1438 did not mark as necessary, it will confuse the
1439 special logic we apply to malloc/free pair removal. */
1440 && (!(call = gimple_call_fndecl (gs: stmt))
1441 || ((DECL_BUILT_IN_CLASS (call) != BUILT_IN_NORMAL
1442 || (DECL_FUNCTION_CODE (decl: call) != BUILT_IN_ALIGNED_ALLOC
1443 && DECL_FUNCTION_CODE (decl: call) != BUILT_IN_MALLOC
1444 && DECL_FUNCTION_CODE (decl: call) != BUILT_IN_CALLOC
1445 && !ALLOCA_FUNCTION_CODE_P
1446 (DECL_FUNCTION_CODE (call))))
1447 && !DECL_IS_REPLACEABLE_OPERATOR_NEW_P (call))))
1448 {
1449 something_changed = true;
1450 if (dump_file && (dump_flags & TDF_DETAILS))
1451 {
1452 fprintf (stream: dump_file, format: "Deleting LHS of call: ");
1453 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1454 fprintf (stream: dump_file, format: "\n");
1455 }
1456
1457 gimple_call_set_lhs (gs: stmt, NULL_TREE);
1458 maybe_clean_or_replace_eh_stmt (stmt, stmt);
1459 update_stmt (s: stmt);
1460 release_ssa_name (name);
1461
1462 /* GOMP_SIMD_LANE (unless three argument) or ASAN_POISON
1463 without lhs is not needed. */
1464 if (gimple_call_internal_p (gs: stmt))
1465 switch (gimple_call_internal_fn (gs: stmt))
1466 {
1467 case IFN_GOMP_SIMD_LANE:
1468 if (gimple_call_num_args (gs: stmt) >= 3
1469 && !integer_nonzerop (gimple_call_arg (gs: stmt, index: 2)))
1470 break;
1471 /* FALLTHRU */
1472 case IFN_ASAN_POISON:
1473 remove_dead_stmt (i: &gsi, bb, to_remove_edges);
1474 break;
1475 default:
1476 break;
1477 }
1478 }
1479 else if (gimple_call_internal_p (gs: stmt))
1480 switch (gimple_call_internal_fn (gs: stmt))
1481 {
1482 case IFN_ADD_OVERFLOW:
1483 maybe_optimize_arith_overflow (gsi: &gsi, subcode: PLUS_EXPR);
1484 break;
1485 case IFN_SUB_OVERFLOW:
1486 maybe_optimize_arith_overflow (gsi: &gsi, subcode: MINUS_EXPR);
1487 break;
1488 case IFN_MUL_OVERFLOW:
1489 maybe_optimize_arith_overflow (gsi: &gsi, subcode: MULT_EXPR);
1490 break;
1491 case IFN_UADDC:
1492 if (integer_zerop (gimple_call_arg (gs: stmt, index: 2)))
1493 maybe_optimize_arith_overflow (gsi: &gsi, subcode: PLUS_EXPR);
1494 break;
1495 case IFN_USUBC:
1496 if (integer_zerop (gimple_call_arg (gs: stmt, index: 2)))
1497 maybe_optimize_arith_overflow (gsi: &gsi, subcode: MINUS_EXPR);
1498 break;
1499 default:
1500 break;
1501 }
1502 }
1503 else if (gimple_debug_bind_p (s: stmt))
1504 {
1505 /* We are only keeping the last debug-bind of a
1506 non-DEBUG_EXPR_DECL variable in a series of
1507 debug-bind stmts. */
1508 tree var = gimple_debug_bind_get_var (dbg: stmt);
1509 if (TREE_CODE (var) != DEBUG_EXPR_DECL
1510 && !bitmap_set_bit (debug_seen, DECL_UID (var)))
1511 remove_dead_stmt (i: &gsi, bb, to_remove_edges);
1512 continue;
1513 }
1514 bitmap_clear (debug_seen);
1515 }
1516
1517 /* Remove dead PHI nodes. */
1518 something_changed |= remove_dead_phis (bb);
1519 }
1520
1521 /* First remove queued edges. */
1522 if (!to_remove_edges.is_empty ())
1523 {
1524 /* Remove edges. We've delayed this to not get bogus debug stmts
1525 during PHI node removal. */
1526 for (unsigned i = 0; i < to_remove_edges.length (); ++i)
1527 remove_edge (to_remove_edges[i]);
1528 cfg_altered = true;
1529 }
1530 /* When we cleared calls_setjmp we can purge all abnormal edges. Do so.
1531 ??? We'd like to assert that setjmp calls do not pop out of nothing
1532 but we currently lack a per-stmt way of noting whether a call was
1533 recognized as returns-twice (or rather receives-control). */
1534 if (!cfun->calls_setjmp && had_setjmp)
1535 {
1536 /* Make sure we only remove the edges, not dominated blocks. Using
1537 gimple_purge_dead_abnormal_call_edges would do that and we
1538 cannot free dominators yet. */
1539 FOR_EACH_BB_FN (bb, cfun)
1540 if (gcall *stmt = safe_dyn_cast <gcall *> (p: *gsi_last_bb (bb)))
1541 if (!stmt_can_make_abnormal_goto (stmt))
1542 {
1543 edge_iterator ei;
1544 edge e;
1545 for (ei = ei_start (bb->succs); (e = ei_safe_edge (i: ei)); )
1546 {
1547 if (e->flags & EDGE_ABNORMAL)
1548 {
1549 if (e->flags & EDGE_FALLTHRU)
1550 e->flags &= ~EDGE_ABNORMAL;
1551 else
1552 remove_edge (e);
1553 cfg_altered = true;
1554 }
1555 else
1556 ei_next (i: &ei);
1557 }
1558 }
1559 }
1560
1561 /* Now remove the unreachable blocks. */
1562 if (cfg_altered)
1563 {
1564 basic_block prev_bb;
1565
1566 find_unreachable_blocks ();
1567
1568 /* Delete all unreachable basic blocks in reverse dominator order. */
1569 for (bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
1570 bb != ENTRY_BLOCK_PTR_FOR_FN (cfun); bb = prev_bb)
1571 {
1572 prev_bb = bb->prev_bb;
1573
1574 if ((bb_contains_live_stmts
1575 && !bitmap_bit_p (map: bb_contains_live_stmts, bitno: bb->index))
1576 || !(bb->flags & BB_REACHABLE))
1577 {
1578 /* Since we don't track liveness of virtual PHI nodes, it is
1579 possible that we rendered some PHI nodes unreachable while
1580 they are still in use. Mark them for renaming. */
1581 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (i: gsi);
1582 gsi_next (i: &gsi))
1583 if (virtual_operand_p (op: gimple_phi_result (gs: gsi.phi ())))
1584 {
1585 bool found = false;
1586 imm_use_iterator iter;
1587
1588 FOR_EACH_IMM_USE_STMT (stmt, iter,
1589 gimple_phi_result (gsi.phi ()))
1590 {
1591 if (!(gimple_bb (g: stmt)->flags & BB_REACHABLE))
1592 continue;
1593 if (gimple_code (g: stmt) == GIMPLE_PHI
1594 || gimple_plf (stmt, STMT_NECESSARY))
1595 {
1596 found = true;
1597 break;
1598 }
1599 }
1600 if (found)
1601 mark_virtual_phi_result_for_renaming (gsi.phi ());
1602 }
1603
1604 if (!(bb->flags & BB_REACHABLE))
1605 {
1606 /* Speed up the removal of blocks that don't
1607 dominate others. Walking backwards, this should
1608 be the common case. ??? Do we need to recompute
1609 dominators because of cfg_altered? */
1610 if (!first_dom_son (CDI_DOMINATORS, bb))
1611 delete_basic_block (bb);
1612 else
1613 {
1614 h = get_all_dominated_blocks (CDI_DOMINATORS, bb);
1615
1616 while (h.length ())
1617 {
1618 bb = h.pop ();
1619 prev_bb = bb->prev_bb;
1620 /* Rearrangements to the CFG may have failed
1621 to update the dominators tree, so that
1622 formerly-dominated blocks are now
1623 otherwise reachable. */
1624 if (!!(bb->flags & BB_REACHABLE))
1625 continue;
1626 delete_basic_block (bb);
1627 }
1628
1629 h.release ();
1630 }
1631 }
1632 }
1633 }
1634 }
1635
1636 if (bb_postorder)
1637 free (ptr: bb_postorder);
1638 bb_postorder = NULL;
1639
1640 return something_changed;
1641}
1642
1643
1644/* Print out removed statement statistics. */
1645
1646static void
1647print_stats (void)
1648{
1649 float percg;
1650
1651 percg = ((float) stats.removed / (float) stats.total) * 100;
1652 fprintf (stream: dump_file, format: "Removed %d of %d statements (%d%%)\n",
1653 stats.removed, stats.total, (int) percg);
1654
1655 if (stats.total_phis == 0)
1656 percg = 0;
1657 else
1658 percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100;
1659
1660 fprintf (stream: dump_file, format: "Removed %d of %d PHI nodes (%d%%)\n",
1661 stats.removed_phis, stats.total_phis, (int) percg);
1662}
1663
1664/* Initialization for this pass. Set up the used data structures. */
1665
1666static void
1667tree_dce_init (bool aggressive)
1668{
1669 memset (s: (void *) &stats, c: 0, n: sizeof (stats));
1670
1671 if (aggressive)
1672 {
1673 last_stmt_necessary = sbitmap_alloc (last_basic_block_for_fn (cfun));
1674 bitmap_clear (last_stmt_necessary);
1675 bb_contains_live_stmts = sbitmap_alloc (last_basic_block_for_fn (cfun));
1676 bitmap_clear (bb_contains_live_stmts);
1677 }
1678
1679 processed = sbitmap_alloc (num_ssa_names + 1);
1680 bitmap_clear (processed);
1681
1682 worklist.create (nelems: 64);
1683 cfg_altered = false;
1684}
1685
1686/* Cleanup after this pass. */
1687
1688static void
1689tree_dce_done (bool aggressive)
1690{
1691 if (aggressive)
1692 {
1693 delete cd;
1694 sbitmap_free (map: visited_control_parents);
1695 sbitmap_free (map: last_stmt_necessary);
1696 sbitmap_free (map: bb_contains_live_stmts);
1697 bb_contains_live_stmts = NULL;
1698 }
1699
1700 sbitmap_free (map: processed);
1701
1702 worklist.release ();
1703}
1704
1705/* Sort PHI argument values for make_forwarders_with_degenerate_phis. */
1706
1707static int
1708sort_phi_args (const void *a_, const void *b_)
1709{
1710 auto *a = (const std::pair<edge, hashval_t> *) a_;
1711 auto *b = (const std::pair<edge, hashval_t> *) b_;
1712 hashval_t ha = a->second;
1713 hashval_t hb = b->second;
1714 if (ha < hb)
1715 return -1;
1716 else if (ha > hb)
1717 return 1;
1718 else if (a->first->dest_idx < b->first->dest_idx)
1719 return -1;
1720 else if (a->first->dest_idx > b->first->dest_idx)
1721 return 1;
1722 else
1723 return 0;
1724}
1725
/* Look for non-virtual PHIs and make a forwarder block when all PHIs
   have the same argument on a set of edges.  This is so we do not
   consider control dependences of the individual edges for the same
   values but only for the common set.  */
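
/* For instance (illustrative), if a block has incoming edges e1, e2 and
   e3 and every PHI in it has equal arguments on e1 and e2, a forwarder
   block is created and e1 and e2 are redirected to it, so later
   control-dependence reasoning sees a single edge for that value.  */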
1730
1731static unsigned
1732make_forwarders_with_degenerate_phis (function *fn)
1733{
1734 unsigned todo = 0;
1735
1736 basic_block bb;
1737 FOR_EACH_BB_FN (bb, fn)
1738 {
1739 /* Only PHIs with three or more arguments have opportunities. */
1740 if (EDGE_COUNT (bb->preds) < 3)
1741 continue;
      /* Do not touch loop headers or blocks with abnormal predecessors.
	 ??? This is to avoid creating valid loops here, see PR103458.
	 We might want to improve things to either explicitly add those
	 loops or at least consider blocks with no backedges.  */
1746 if (bb->loop_father->header == bb
1747 || bb_has_abnormal_pred (bb))
1748 continue;
1749
1750 /* Take one PHI node as a template to look for identical
1751 arguments.  Build a vector of candidates forming sets
1752 of argument edges with equal values.  Note that optimality
1753 depends on the particular choice of the template PHI:
1754 equal arguments are unordered, so other PHIs with more
1755 than one set of equal arguments within this argument
1756 range remain unsorted.  We would have to break ties by
1757 looking at other PHI nodes. */
1758 gphi_iterator gsi = gsi_start_nonvirtual_phis (bb);
1759 if (gsi_end_p (gsi))
1760 continue;
1761 gphi *phi = gsi.phi ();
1762 auto_vec<std::pair<edge, hashval_t>, 8> args;
1763 bool need_resort = false;
1764 for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
1765 {
1766 edge e = gimple_phi_arg_edge (phi, i);
1767 /* Skip abnormal edges since we cannot redirect them. */
1768 if (e->flags & EDGE_ABNORMAL)
1769 continue;
1770 /* Skip loop exit edges when we are in loop-closed SSA form
1771 since the forwarder we'd create does not have a PHI node. */
1772 if (loops_state_satisfies_p (LOOP_CLOSED_SSA)
1773 && loop_exit_edge_p (e->src->loop_father, e))
1774 continue;
1775
1776 tree arg = gimple_phi_arg_def (phi, i);
1777 if (!CONSTANT_CLASS_P (arg) && TREE_CODE (arg) != SSA_NAME)
1778 need_resort = true;
1779 args.safe_push (std::make_pair (e, iterative_hash_expr (arg, 0)));
1780 }
1781 if (args.length () < 2)
1782 continue;
1783 args.qsort (sort_phi_args);
1784 /* The above sorting can differ between -g and -g0, as e.g. decls
1785 can have different uids (-g could have bigger gaps between them).
1786 So only use it to determine which args are equal, then change the
1787 second member from the hash value to the smallest dest_idx of the
1788 edges which have an equal argument and sort again.  If all the PHI
1789 arguments are constants or SSA_NAMEs, there is no need for the
1790 second sort; the hash values are stable in that case. */
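 /* For instance (a purely illustrative example), with the arguments
    {&a, x_1, &a, x_1} on edges with dest_idx 0, 1, 2 and 3, the first
    sort groups the two &a entries and the two x_1 entries by hash.
    The loop below then rewrites each entry's second member to the
    smallest dest_idx within its group (0 for the group containing
    edge 0, 1 for the group containing edge 1), and because &a is
    neither a constant nor an SSA_NAME, the need_resort re-sort orders
    the groups by those indices so the result does not depend on
    DECL_UIDs.  */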
1791 hashval_t hash = args[0].second;
1792 args[0].second = args[0].first->dest_idx;
1793 bool any_equal = false;
1794 for (unsigned i = 1; i < args.length (); ++i)
1795 if (hash == args[i].second
1796 && operand_equal_p (PHI_ARG_DEF_FROM_EDGE (phi, args[i - 1].first),
1797 PHI_ARG_DEF_FROM_EDGE (phi, args[i].first)))
1798 {
1799 args[i].second = args[i - 1].second;
1800 any_equal = true;
1801 }
1802 else
1803 {
1804 hash = args[i].second;
1805 args[i].second = args[i].first->dest_idx;
1806 }
1807 if (!any_equal)
1808 continue;
1809 if (need_resort)
1810 args.qsort (sort_phi_args);
1811
1812 /* From the candidates vector now verify true candidates for
1813 forwarders and create them. */
1814 gphi *vphi = get_virtual_phi (bb);
1815 unsigned start = 0;
1816 while (start < args.length () - 1)
1817 {
1818 unsigned i;
1819 for (i = start + 1; i < args.length (); ++i)
1820 if (args[start].second != args[i].second)
1821 break;
1822 /* args[start]..args[i-1] are equal. */
1823 if (start != i - 1)
1824 {
1825 /* Check all PHI nodes for argument equality. */
1826 bool equal = true;
1827 gphi_iterator gsi2 = gsi;
1828 gsi_next (&gsi2);
1829 for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
1830 {
1831 gphi *phi2 = gsi2.phi ();
1832 if (virtual_operand_p (gimple_phi_result (phi2)))
1833 continue;
1834 tree start_arg
1835 = PHI_ARG_DEF_FROM_EDGE (phi2, args[start].first);
1836 for (unsigned j = start + 1; j < i; ++j)
1837 {
1838 if (!operand_equal_p (start_arg,
1839 PHI_ARG_DEF_FROM_EDGE
1840 (phi2, args[j].first)))
1841 {
1842 /* Another PHI might have a shorter set of
1843 equivalent args. Go for that. */
1844 i = j;
1845 if (j == start + 1)
1846 equal = false;
1847 break;
1848 }
1849 }
1850 if (!equal)
1851 break;
1852 }
1853 if (equal)
1854 {
1855 /* If we are asked to forward all edges the block
1856 has all degenerate PHIs. Do nothing in that case. */
1857 if (start == 0
1858 && i == args.length ()
1859 && args.length () == gimple_phi_num_args (phi))
1860 break;
1861 /* Instead of using make_forwarder_block we are
1862 rolling our own variant, knowing that the forwarder
1863 does not need PHI nodes apart from possibly
1864 a virtual one. */
1865 auto_vec<tree, 8> vphi_args;
1866 if (vphi)
1867 {
1868 vphi_args.reserve_exact (i - start);
1869 for (unsigned j = start; j < i; ++j)
1870 vphi_args.quick_push
1871 (PHI_ARG_DEF_FROM_EDGE (vphi, args[j].first));
1872 }
1873 free_dominance_info (fn, CDI_DOMINATORS);
1874 basic_block forwarder = split_edge (args[start].first);
1875 profile_count count = profile_count::zero ();
1876 for (unsigned j = start + 1; j < i; ++j)
1877 {
1878 edge e = args[j].first;
1879 redirect_edge_and_branch_force (e, forwarder);
1880 redirect_edge_var_map_clear (e);
1881 count += e->count ();
1882 }
1883 forwarder->count = count;
1884 if (vphi)
1885 {
1886 tree def = copy_ssa_name (vphi_args[0]);
1887 gphi *vphi_copy = create_phi_node (def, forwarder);
1888 for (unsigned j = start; j < i; ++j)
1889 add_phi_arg (vphi_copy, vphi_args[j - start],
1890 args[j].first, UNKNOWN_LOCATION);
1891 SET_PHI_ARG_DEF
1892 (vphi, single_succ_edge (forwarder)->dest_idx, def);
1893 }
1894 todo |= TODO_cleanup_cfg;
1895 }
1896 }
1897 /* Continue searching for more opportunities. */
1898 start = i;
1899 }
1900 }
1901 return todo;
1902}
1903
1904/* Main routine to eliminate dead code.
1905
1906 AGGRESSIVE controls the aggressiveness of the algorithm.
1907 In conservative mode, we ignore control dependence and simply declare
1908 all but the most trivially dead branches necessary. This mode is fast.
1909 In aggressive mode, control dependences are taken into account, which
1910 results in more dead code elimination, but at the cost of some time.
1911
1912 FIXME: Aggressive mode before PRE doesn't work currently because
1913 the dominance info is not invalidated after DCE1. This is
1914 not an issue right now because we only run aggressive DCE
1915 as the last tree SSA pass, but keep this in mind when you
1916 start experimenting with pass ordering. */
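/* A minimal sketch (not a GCC testcase) of the difference between the
   two modes:

     int f (int x)
     {
       int tmp = x * 2;    // removed in both modes: the result is unused
       if (x > 100)
         tmp = x;          // assignment to a dead variable
       return x;
     }

   Conservative DCE deletes the dead assignments but treats the branch as
   necessary, leaving any remaining CFG simplification to later cleanups.
   Aggressive DCE additionally notices that no necessary statement is
   control dependent on the condition and removes the branch itself.  */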
1917
1918static unsigned int
1919perform_tree_ssa_dce (bool aggressive)
1920{
1921 bool something_changed = false;
1922 unsigned todo = 0;
1923
1924 /* Preheaders are needed for SCEV to work.
1925 Simple latches and recorded exits improve the chances that a loop
1926 will be proved finite in testcases such as loop-15.c and loop-24.c. */
1927 bool in_loop_pipeline = scev_initialized_p ();
1928 if (aggressive && ! in_loop_pipeline)
1929 {
1930 loop_optimizer_init (LOOPS_NORMAL
1931 | LOOPS_HAVE_RECORDED_EXITS);
1932 scev_initialize ();
1933 }
1934
1935 if (aggressive)
1936 todo |= make_forwarders_with_degenerate_phis (cfun);
1937
1938 calculate_dominance_info (CDI_DOMINATORS);
1939
1940 tree_dce_init (aggressive);
1941
1942 if (aggressive)
1943 {
1944 /* Compute control dependence. */
1945 calculate_dominance_info (CDI_POST_DOMINATORS);
1946 cd = new control_dependences ();
1947
1948 visited_control_parents =
1949 sbitmap_alloc (last_basic_block_for_fn (cfun));
1950 bitmap_clear (visited_control_parents);
1951
1952 mark_dfs_back_edges ();
1953 }
1954
1955 find_obviously_necessary_stmts (aggressive);
1956
1957 if (aggressive && ! in_loop_pipeline)
1958 {
1959 scev_finalize ();
1960 loop_optimizer_finalize ();
1961 }
1962
1963 longest_chain = 0;
1964 total_chain = 0;
1965 nr_walks = 0;
1966 chain_ovfl = false;
1967 visited = BITMAP_ALLOC (NULL);
1968 propagate_necessity (aggressive);
1969 BITMAP_FREE (visited);
1970
1971 something_changed |= eliminate_unnecessary_stmts (aggressive);
1972 something_changed |= cfg_altered;
1973
1974 /* We do not update postdominators, so free them unconditionally. */
1975 free_dominance_info (CDI_POST_DOMINATORS);
1976
1977 /* If we removed paths in the CFG, then we need to update
1978 dominators as well. I haven't investigated the possibility
1979 of incrementally updating dominators. */
1980 if (cfg_altered)
1981 free_dominance_info (CDI_DOMINATORS);
1982
1983 statistics_counter_event (cfun, "Statements deleted", stats.removed);
1984 statistics_counter_event (cfun, "PHI nodes deleted", stats.removed_phis);
1985
1986 /* Debugging dumps. */
1987 if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
1988 print_stats ();
1989
1990 tree_dce_done (aggressive);
1991
1992 if (something_changed)
1993 {
1994 free_numbers_of_iterations_estimates (cfun);
1995 if (in_loop_pipeline)
1996 scev_reset ();
1997 todo |= TODO_update_ssa | TODO_cleanup_cfg;
1998 }
1999 return todo;
2000}
2001
2002/* Pass entry points. */
2003static unsigned int
2004tree_ssa_dce (void)
2005{
2006 return perform_tree_ssa_dce (/*aggressive=*/false);
2007}
2008
2009static unsigned int
2010tree_ssa_cd_dce (void)
2011{
2012 return perform_tree_ssa_dce (/*aggressive=*/optimize >= 2);
2013}
2014
2015namespace {
2016
2017const pass_data pass_data_dce =
2018{
2019 GIMPLE_PASS, /* type */
2020 "dce", /* name */
2021 OPTGROUP_NONE, /* optinfo_flags */
2022 TV_TREE_DCE, /* tv_id */
2023 ( PROP_cfg | PROP_ssa ), /* properties_required */
2024 0, /* properties_provided */
2025 0, /* properties_destroyed */
2026 0, /* todo_flags_start */
2027 0, /* todo_flags_finish */
2028};
2029
2030class pass_dce : public gimple_opt_pass
2031{
2032public:
2033 pass_dce (gcc::context *ctxt)
2034 : gimple_opt_pass (pass_data_dce, ctxt), update_address_taken_p (false)
2035 {}
2036
2037 /* opt_pass methods: */
2038 opt_pass * clone () final override { return new pass_dce (m_ctxt); }
2039 void set_pass_param (unsigned n, bool param) final override
2040 {
2041 gcc_assert (n == 0);
2042 update_address_taken_p = param;
2043 }
2044 bool gate (function *) final override { return flag_tree_dce != 0; }
2045 unsigned int execute (function *) final override
2046 {
2047 return (tree_ssa_dce ()
2048 | (update_address_taken_p ? TODO_update_address_taken : 0));
2049 }
2050
2051private:
2052 bool update_address_taken_p;
2053}; // class pass_dce
2054
2055} // anon namespace
2056
2057gimple_opt_pass *
2058make_pass_dce (gcc::context *ctxt)
2059{
2060 return new pass_dce (ctxt);
2061}
2062
2063namespace {
2064
2065const pass_data pass_data_cd_dce =
2066{
2067 GIMPLE_PASS, /* type */
2068 "cddce", /* name */
2069 OPTGROUP_NONE, /* optinfo_flags */
2070 TV_TREE_CD_DCE, /* tv_id */
2071 ( PROP_cfg | PROP_ssa ), /* properties_required */
2072 0, /* properties_provided */
2073 0, /* properties_destroyed */
2074 0, /* todo_flags_start */
2075 0, /* todo_flags_finish */
2076};
2077
2078class pass_cd_dce : public gimple_opt_pass
2079{
2080public:
2081 pass_cd_dce (gcc::context *ctxt)
2082 : gimple_opt_pass (pass_data_cd_dce, ctxt), update_address_taken_p (false)
2083 {}
2084
2085 /* opt_pass methods: */
2086 opt_pass * clone () final override { return new pass_cd_dce (m_ctxt); }
2087 void set_pass_param (unsigned n, bool param) final override
2088 {
2089 gcc_assert (n == 0);
2090 update_address_taken_p = param;
2091 }
2092 bool gate (function *) final override { return flag_tree_dce != 0; }
2093 unsigned int execute (function *) final override
2094 {
2095 return (tree_ssa_cd_dce ()
2096 | (update_address_taken_p ? TODO_update_address_taken : 0));
2097 }
2098
2099private:
2100 bool update_address_taken_p;
2101}; // class pass_cd_dce
2102
2103} // anon namespace
2104
2105gimple_opt_pass *
2106make_pass_cd_dce (gcc::context *ctxt)
2107{
2108 return new pass_cd_dce (ctxt);
2109}
2110
2111
2112/* A cheap DCE interface.  WORKLIST is a list of possibly dead stmts and
2113 is consumed by this function.  The function has linear complexity in
2114 the number of dead stmts, with a constant factor proportional to the
2115 average number of SSA use operands per statement. */
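/* A hypothetical caller (a sketch; none of the names below come from
   this file) seeds the worklist with the SSA_NAME_VERSIONs of
   definitions it believes have become dead:

     auto_bitmap wl;
     bitmap_set_bit (wl, SSA_NAME_VERSION (lhs));
     ...
     simple_dce_from_worklist (wl, need_eh_cleanup);

   NEED_EH_CLEANUP may be NULL when the caller is not interested in
   which blocks require EH cleanup after removal.  */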
2116
2117void
2118simple_dce_from_worklist (bitmap worklist, bitmap need_eh_cleanup)
2119{
2120 int phiremoved = 0;
2121 int stmtremoved = 0;
2122 while (! bitmap_empty_p (worklist))
2123 {
2124 /* Pop item. */
2125 unsigned i = bitmap_clear_first_set_bit (worklist);
2126
2127 tree def = ssa_name (i);
2128 /* Removed by somebody else or still in use.
2129 Note that a PHI node using its own result does not count as still in use. */
2130 if (!def)
2131 continue;
2132 if (!has_zero_uses (def))
2133 {
2134 gimple *def_stmt = SSA_NAME_DEF_STMT (def);
2135
2136 if (gimple_code (def_stmt) != GIMPLE_PHI)
2137 continue;
2138
2139 gimple *use_stmt;
2140 imm_use_iterator use_iter;
2141 bool canremove = true;
2142
2143 FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, def)
2144 {
2145 /* Ignore debug statements. */
2146 if (is_gimple_debug (use_stmt))
2147 continue;
2148 if (use_stmt != def_stmt)
2149 {
2150 canremove = false;
2151 break;
2152 }
2153 }
2154 if (!canremove)
2155 continue;
2156 }
2157
2158 gimple *t = SSA_NAME_DEF_STMT (def);
2159 if (gimple_has_side_effects (t))
2160 continue;
2161
2162 /* The defining statement needs to define only this name.
2163 ASM is the only statement that can define more than one
2164 name. */
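      /* For example (illustrative only), an asm such as
           asm ("..." : "=r" (a_1), "=r" (b_2));
         defines both a_1 and b_2 and must not be removed just because
         one of its results is dead.  */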
2165 if (is_a <gasm *> (t)
2166 && !single_ssa_def_operand (t, SSA_OP_ALL_DEFS))
2167 continue;
2168
2169 /* Don't remove statements that are needed for non-call
2170 eh to work. */
2171 if (stmt_unremovable_because_of_non_call_eh_p (cfun, t))
2172 continue;
2173
2174 /* Tell the caller that we removed a statement that might
2175 throw so it can clean up the CFG for that block. */
2176 if (need_eh_cleanup && stmt_could_throw_p (cfun, t))
2177 bitmap_set_bit (need_eh_cleanup, gimple_bb (t)->index);
2178
2179 /* Add uses to the worklist. */
2180 ssa_op_iter iter;
2181 use_operand_p use_p;
2182 FOR_EACH_PHI_OR_STMT_USE (use_p, t, iter, SSA_OP_USE)
2183 {
2184 tree use = USE_FROM_PTR (use_p);
2185 if (TREE_CODE (use) == SSA_NAME
2186 && ! SSA_NAME_IS_DEFAULT_DEF (use))
2187 bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
2188 }
2189
2190 /* Remove stmt. */
2191 if (dump_file && (dump_flags & TDF_DETAILS))
2192 {
2193 fprintf (dump_file, "Removing dead stmt:");
2194 print_gimple_stmt (dump_file, t, 0);
2195 }
2196 gimple_stmt_iterator gsi = gsi_for_stmt (t);
2197 if (gimple_code (t) == GIMPLE_PHI)
2198 {
2199 remove_phi_node (&gsi, true);
2200 phiremoved++;
2201 }
2202 else
2203 {
2204 unlink_stmt_vdef (t);
2205 gsi_remove (&gsi, true);
2206 release_defs (t);
2207 stmtremoved++;
2208 }
2209 }
2210 statistics_counter_event (cfun, "PHIs removed",
2211 phiremoved);
2212 statistics_counter_event (cfun, "Statements removed",
2213 stmtremoved);
2214}
2215
