/* Control flow functions for trees.
   Copyright (C) 2001-2017 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "tree-ssa-live.h"
#include "omp-general.h"
#include "omp-expand.h"
#include "tree-cfgcleanup.h"
#include "gimplify.h"
#include "attribs.h"
#include "selftest.h"
#include "opts.h"
#include "asan.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static hash_map<edge, tree> *edge_to_cases;
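
/* For instance (a sketch), in

     switch (x) { case 1: case 2: foo (); break; default: break; }

   cases 1 and 2 share a destination block, so the single edge to that
   block maps to a chain of two CASE_LABEL_EXPRs linked through their
   CASE_CHAIN fields.  */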

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Data to pass to replace_block_vars_by_duplicates_1.  */
struct replace_decls_d
{
  hash_map<tree, tree> *vars_map;
  tree to_context;
};

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
                            const locus_discrim_map *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return LOCATION_LINE (item->locus);
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
                             const locus_discrim_map *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}

static hash_table<locus_discrim_hasher> *discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple *, gimple *);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple *first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple *);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
static tree find_case_label_for_value (gswitch *, tree);
static void lower_phi_internal_fn ();

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
                         initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
                         initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
                             Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
                           n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  lower_phi_internal_fn ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}

/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the annotations
   come immediately before the condition in BB, if any.  */

static void
replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple *stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
        break;
      if (!gimple_call_internal_p (stmt)
          || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
        break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
        {
        case annot_expr_ivdep_kind:
          loop->safelen = INT_MAX;
          break;
        case annot_expr_unroll_kind:
          loop->unroll
            = (unsigned short) tree_to_shwi (gimple_call_arg (stmt, 2));
          cfun->has_unroll = true;
          break;
        case annot_expr_no_vector_kind:
          loop->dont_vectorize = true;
          break;
        case annot_expr_vector_kind:
          loop->force_vectorize = true;
          cfun->has_force_vectorize_loops = true;
          break;
        case annot_expr_parallel_kind:
          loop->can_be_parallel = true;
          loop->safelen = INT_MAX;
          break;
        default:
          gcc_unreachable ();
        }

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}
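
/* A sketch of the input consumed above: for

     #pragma GCC ivdep
     while (cond) { ... }

   the gimplifier leaves an internal call along the lines of

     _2 = .ANNOTATE (_1, annot_expr_ivdep_kind, ...);

   immediately before the loop's GIMPLE_COND; the function above sets
   loop->safelen = INT_MAX and replaces the call by a plain copy of its
   first argument.  (The exact dump syntax here is illustrative only.)  */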

/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
        replace_loop_annotate_in_block (loop->latch, loop);
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
        {
          stmt = gsi_stmt (gsi);
          if (gimple_code (stmt) != GIMPLE_CALL)
            continue;
          if (!gimple_call_internal_p (stmt)
              || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
            continue;

          switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
            {
            case annot_expr_ivdep_kind:
            case annot_expr_unroll_kind:
            case annot_expr_no_vector_kind:
            case annot_expr_vector_kind:
            case annot_expr_parallel_kind:
              break;
            default:
              gcc_unreachable ();
            }

          warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
          stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                      gimple_call_arg (stmt, 0));
          gsi_replace (&gsi, stmt, true);
        }
    }
}

/* Lower calls to the internal __PHI function from the GIMPLE FE.  */

static void
lower_phi_internal_fn ()
{
  basic_block bb, pred = NULL;
  gimple_stmt_iterator gsi;
  tree lhs;
  gphi *phi_node;
  gimple *stmt;

  /* After edge creation, handle __PHI function from GIMPLE FE.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi);)
        {
          stmt = gsi_stmt (gsi);
          if (! gimple_call_internal_p (stmt, IFN_PHI))
            break;

          lhs = gimple_call_lhs (stmt);
          phi_node = create_phi_node (lhs, bb);

          /* Add arguments to the PHI node.  */
          for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
            {
              tree arg = gimple_call_arg (stmt, i);
              if (TREE_CODE (arg) == LABEL_DECL)
                pred = label_to_block (arg);
              else
                {
                  edge e = find_edge (pred, bb);
                  add_phi_arg (phi_node, arg, e, UNKNOWN_LOCATION);
                }
            }

          gsi_remove (&gsi, true);
        }
    }
}
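
/* A sketch of what the function above lowers: in GIMPLE FE input a PHI
   is spelled as an internal call at the start of a block, e.g.

     a_1 = __PHI (__BB2: a_2, __BB3: a_3);

   where each LABEL_DECL argument names a predecessor block and the
   argument that follows it is the value flowing in from that block;
   the call is replaced by a real PHI node on the already-created
   edges.  */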

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}


/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple *t)
{
  return (gimple_code (t) == GIMPLE_GOTO
          && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}
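
/* E.g. (a sketch) the GNU C labels-as-values extension

     void *dest = &&lab;
     goto *dest;
     lab: ;

   yields a GIMPLE_GOTO whose destination is not a LABEL_DECL, which is
   exactly what the predicate above detects.  */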

/* Returns true if the sequence of statements STMTS only contains
   a call to __builtin_unreachable ().  */

bool
gimple_seq_unreachable_p (gimple_seq stmts)
{
  if (stmts == NULL
      /* Return false if -fsanitize=unreachable, we don't want to
         optimize away those calls, but rather turn them into
         __ubsan_handle_builtin_unreachable () or __builtin_trap ()
         later.  */
      || sanitize_flags_p (SANITIZE_UNREACHABLE))
    return false;

  gimple_stmt_iterator gsi = gsi_last (stmts);

  if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
    return false;

  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL
          && !is_gimple_debug (stmt)
          && !gimple_clobber_p (stmt))
        return false;
    }
  return true;
}
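
/* For instance (a sketch), a sequence consisting of

     lab:
       x = {CLOBBER};
       __builtin_unreachable ();

   satisfies the predicate, since labels, debug stmts and clobbers are
   the only statements tolerated before the trailing call.  */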

/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple *last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
        other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
        return gimple_seq_unreachable_p (bb_seq (other_bb));
    }
  return false;
}


/* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh.  We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple *stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
         Return true so we split the basic block containing them.
         Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
          && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* A BUILT_IN_RETURN call is the same as a return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
      /* IFN_UNIQUE should be the last insn, to make checking for it
         as cheap as possible.  */
      || (gimple_call_internal_p (stmt)
          && gimple_call_internal_unique_p (stmt)))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}
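
/* For example (a sketch), a call to a noreturn function such as

     abort ();

   is marked control-altering here, so make_blocks_1 ends its basic
   block right after the call and make_edges_bb adds no fallthru edge
   out of that block.  */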


/* Insert SEQ after BB and build a flowgraph.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple *stmt = NULL;
  gimple *prev_stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      /* PREV_STMT should only be set to a debug stmt if the debug
         stmt is before nondebug stmts.  Once stmt reaches a nondebug
         nonlabel, prev_stmt will be set to it, so that
         stmt_starts_bb_p will know to start a new block if a label is
         found.  However, if stmt was a label after debug stmts only,
         keep the label in prev_stmt even if we find further debug
         stmts, for there may be other labels after them, and they
         should land in the same block.  */
      if (!prev_stmt || !stmt || !is_gimple_debug (stmt))
        prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
        gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
         in a previous pass that we need to create a new block for STMT, do
         so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
        {
          if (!first_stmt_of_seq)
            gsi_split_seq_before (&i, &seq);
          bb = create_basic_block (seq, bb);
          start_new_block = false;
          prev_stmt = NULL;
        }

      /* Now add STMT to BB and create the subgraphs for special statement
         codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
         next iteration.  */
      if (stmt_ends_bb_p (stmt))
        {
          /* If the stmt can make abnormal goto use a new temporary
             for the assignment to the LHS.  This makes sure the old value
             of the LHS is available on the abnormal edge.  Otherwise
             we will end up with overlapping life-ranges for abnormal
             SSA names.  */
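          /* For instance (a sketch), for
               res = setjmp (buf);
             we emit
               tmp = setjmp (buf);
               res = tmp;
             so the returns-twice call no longer sets RES directly and
             RES keeps its old value along the abnormal edge.  */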
          if (gimple_has_lhs (stmt)
              && stmt_can_make_abnormal_goto (stmt)
              && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
            {
              tree lhs = gimple_get_lhs (stmt);
              tree tmp = create_tmp_var (TREE_TYPE (lhs));
              gimple *s = gimple_build_assign (lhs, tmp);
              gimple_set_location (s, gimple_location (stmt));
              gimple_set_block (s, gimple_block (stmt));
              gimple_set_lhs (stmt, tmp);
              if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
                  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
                DECL_GIMPLE_REG_P (tmp) = 1;
              gsi_insert_after (&i, s, GSI_SAME_STMT);
            }
          start_new_block = true;
        }

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}

/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}

/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
        (last_basic_block_for_fn (cfun)
         + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}


/*---------------------------------------------------------------------------
                                Edge creation
---------------------------------------------------------------------------*/

/* If basic block BB has an abnormal edge to a basic block
   containing IFN_ABNORMAL_DISPATCHER internal call, return
   the dispatcher's basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
        gimple_stmt_iterator gsi
          = gsi_start_nondebug_after_labels_bb (e->dest);
        gimple *g = gsi_stmt (gsi);
        if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
          return e->dest;
      }
  return NULL;
}

/* Helper function for make_edges.  Create a basic block with
   an ABNORMAL_DISPATCHER internal call in it if needed, and
   create abnormal edges from BBS to it and from it to FOR_BB
   if COMPUTED_GOTO is false, otherwise factor the computed gotos.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
                       basic_block for_bb, int *bb_to_omp_idx,
                       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
        inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
         abnormal edges to this dispatcher.  If there are none, return
         early.  */
      if (bb_to_omp_idx == NULL)
        {
          if (bbs->is_empty ())
            return;
        }
      else
        {
          FOR_EACH_VEC_ELT (*bbs, idx, bb)
            if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
              break;
          if (bb == NULL)
            return;
        }

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
        {
          /* Factor computed gotos into a common computed goto site.  Also
             record the location of that site so that we can un-factor the
             gotos after we have converted back to normal form.  */
          gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

          /* Create the destination of the factored goto.  Each original
             computed goto will put its desired destination into this
             variable and jump to the label we create immediately below.  */
          tree var = create_tmp_var (ptr_type_node, "gotovar");

          /* Build a label for the new block which will contain the
             factored computed goto.  */
          tree factored_label_decl
            = create_artificial_label (UNKNOWN_LOCATION);
          gimple *factored_computed_goto_label
            = gimple_build_label (factored_label_decl);
          gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

          /* Build our new computed goto.  */
          gimple *factored_computed_goto = gimple_build_goto (var);
          gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

          FOR_EACH_VEC_ELT (*bbs, idx, bb)
            {
              if (bb_to_omp_idx
                  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
                continue;

              gsi = gsi_last_bb (bb);
              gimple *last = gsi_stmt (gsi);

              gcc_assert (computed_goto_p (last));

              /* Copy the original computed goto's destination into VAR.  */
              gimple *assignment
                = gimple_build_assign (var, gimple_goto_dest (last));
              gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

              edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
              e->goto_locus = gimple_location (last);
              gsi_remove (&gsi, true);
            }
        }
      else
        {
          tree arg = inner ? boolean_true_node : boolean_false_node;
          gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
                                                  1, arg);
          gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
          gsi_insert_after (&gsi, g, GSI_NEW_STMT);

          /* Create predecessor edges of the dispatcher.  */
          FOR_EACH_VEC_ELT (*bbs, idx, bb)
            {
              if (bb_to_omp_idx
                  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
                continue;
              make_edge (bb, *dispatcher, EDGE_ABNORMAL);
            }
        }
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}
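
/* A sketch of the factoring done above for computed gotos: blocks
   ending in

     goto *p_1;    and    goto *q_2;

   are rewritten to store their destination and fall through to one
   shared dispatcher block

     gotovar = p_1;        gotovar = q_2;
   <dispatcher>:
     goto *gotovar;

   so only the dispatcher needs an edge to every potential target
   label, instead of each computed goto carrying all of those edges.  */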

/* Creates outgoing edges for BB.  Returns 1 when it ends with a
   computed goto, returns 2 when it ends with a statement that
   might return to this function via a nonlocal goto, otherwise
   returns 0.  Updates *PCUR_REGION with the OMP region this BB is in.  */

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple *last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
        ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
        edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
        e->goto_locus = gimple_location (last);
        fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
         make edges from this call site to all the nonlocal goto
         handlers.  */
      if (stmt_can_make_abnormal_goto (last))
        ret = 2;

      /* If this statement has reachable exception handlers, then
         create abnormal edges to them.  */
      make_eh_edges (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
        {
          make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
          fallthru = false;
        }
      /* Some calls are known not to return.  */
      else
        fallthru = !gimple_call_noreturn_p (last);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
         control-altering.  */
      if (is_ctrl_altering_stmt (last))
        make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = omp_make_gimple_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
        gtransaction *txn = as_a <gtransaction *> (last);
        tree label1 = gimple_transaction_label_norm (txn);
        tree label2 = gimple_transaction_label_uninst (txn);

        if (label1)
          make_edge (bb, label_to_block (label1), EDGE_FALLTHRU);
        if (label2)
          make_edge (bb, label_to_block (label2),
                     EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));

        tree label3 = gimple_transaction_label_over (txn);
        if (gimple_transaction_subcode (txn)
            & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
          make_edge (bb, label_to_block (label3), EDGE_TM_ABORT);

        fallthru = false;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
             BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
             EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (bb_to_omp_idx)
        bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
        ab_edge_goto.safe_push (bb);
      else if (mer == 2)
        ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx == NULL)
        bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
        dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
        {
          for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
            {
              glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
              tree target;

              if (!label_stmt)
                {
                  if (is_gimple_debug (gsi_stmt (gsi)))
                    continue;
                  break;
                }

              target = gimple_label_label (label_stmt);

              /* Make an edge to every label block that has been marked as a
                 potential target for a computed goto or a non-local goto.  */
              if (FORCED_LABEL (target))
                handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                       &ab_edge_goto, true);
              if (DECL_NONLOCAL (target))
                {
                  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                         &ab_edge_call, false);
                  break;
                }
            }

          if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
            gsi_next_nondebug (&gsi);
          if (!gsi_end_p (gsi))
            {
              /* Make an edge to every setjmp-like call.  */
              gimple *call_stmt = gsi_stmt (gsi);
              if (is_gimple_call (call_stmt)
                  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
                      || gimple_call_builtin_p (call_stmt,
                                                BUILT_IN_SETJMP_RECEIVER)))
                handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                       &ab_edge_call, false);
            }
        }

      if (bb_to_omp_idx)
        XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);

  omp_free_regions ();
}

/* Add SEQ after GSI.  Start new bb after GSI, and create further bbs as
   needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as described
   in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      profile_count cnt = profile_count::zero ();
      bool all = true;

      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);

      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          if (e->count ().initialized_p ())
            cnt += e->count ();
          else
            all = false;
        }
      tree_guess_outgoing_edge_probabilities (bb);
      if (all || profile_status_for_fn (cfun) == PROFILE_READ)
        bb->count = cnt;

      bb = bb->next_bb;
    }
  return true;
}

/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
          && to.file != NULL
          && filename_cmp (from.file, to.file) == 0);
}

/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple *last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
        continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          gimple *first = first_non_label_stmt (e->dest);
          gimple *last = last_stmt (e->dest);
          if ((first && same_line_p (locus, gimple_location (first)))
              || (last && same_line_p (locus, gimple_location (last))))
            {
              if (e->dest->discriminator != 0 && bb->discriminator == 0)
                bb->discriminator = next_discriminator_for_locus (locus);
              else
                e->dest->discriminator = next_discriminator_for_locus (locus);
            }
        }
    }
}
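
/* For example (a sketch), with everything on one source line

     if (p) x = 1; else x = 2;

   the condition block and both arm blocks share that line's locus;
   giving them distinct discriminators lets a sample-based profiler
   attribute counts to the then and else parts separately even though
   they map to the same line number.  */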
1188
1189/* Create the edges for a GIMPLE_COND starting at block BB. */
1190
1191static void
1192make_cond_expr_edges (basic_block bb)
1193{
1194 gcond *entry = as_a <gcond *> (last_stmt (bb));
1195 gimple *then_stmt, *else_stmt;
1196 basic_block then_bb, else_bb;
1197 tree then_label, else_label;
1198 edge e;
1199
1200 gcc_assert (entry);
1201 gcc_assert (gimple_code (entry) == GIMPLE_COND);
1202
1203 /* Entry basic blocks for each component. */
1204 then_label = gimple_cond_true_label (entry);
1205 else_label = gimple_cond_false_label (entry);
1206 then_bb = label_to_block (then_label);
1207 else_bb = label_to_block (else_label);
1208 then_stmt = first_stmt (then_bb);
1209 else_stmt = first_stmt (else_bb);
1210
1211 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1212 e->goto_locus = gimple_location (then_stmt);
1213 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1214 if (e)
1215 e->goto_locus = gimple_location (else_stmt);
1216
1217 /* We do not need the labels anymore. */
1218 gimple_cond_set_true_label (entry, NULL_TREE);
1219 gimple_cond_set_false_label (entry, NULL_TREE);
1220}
1221
1222
1223/* Called for each element in the hash table (P) as we delete the
1224 edge to cases hash table.
1225
1226 Clear all the CASE_CHAINs to prevent problems with copying of
1227 SWITCH_EXPRs and structure sharing rules, then free the hash table
1228 element. */
1229
1230bool
1231edge_to_cases_cleanup (edge const &, tree const &value, void *)
1232{
1233 tree t, next;
1234
1235 for (t = value; t; t = next)
1236 {
1237 next = CASE_CHAIN (t);
1238 CASE_CHAIN (t) = NULL;
1239 }
1240
1241 return true;
1242}
1243
1244/* Start recording information mapping edges to case labels. */
1245
1246void
1247start_recording_case_labels (void)
1248{
1249 gcc_assert (edge_to_cases == NULL);
1250 edge_to_cases = new hash_map<edge, tree>;
1251 touched_switch_bbs = BITMAP_ALLOC (NULL);
1252}
1253
1254/* Return nonzero if we are recording information for case labels. */
1255
1256static bool
1257recording_case_labels_p (void)
1258{
1259 return (edge_to_cases != NULL);
1260}
1261
1262/* Stop recording information mapping edges to case labels and
1263 remove any information we have recorded. */
1264void
1265end_recording_case_labels (void)
1266{
1267 bitmap_iterator bi;
1268 unsigned i;
1269 edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
1270 delete edge_to_cases;
1271 edge_to_cases = NULL;
1272 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
1273 {
1274 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
1275 if (bb)
1276 {
1277 gimple *stmt = last_stmt (bb);
1278 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1279 group_case_labels_stmt (as_a <gswitch *> (stmt));
1280 }
1281 }
1282 BITMAP_FREE (touched_switch_bbs);
1283}
1284
1285/* If we are inside a {start,end}_recording_cases block, then return
1286 a chain of CASE_LABEL_EXPRs from T which reference E.
1287
1288 Otherwise return NULL. */
1289
1290static tree
1291get_cases_for_edge (edge e, gswitch *t)
1292{
1293 tree *slot;
1294 size_t i, n;
1295
1296 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
1297 chains available. Return NULL so the caller can detect this case. */
1298 if (!recording_case_labels_p ())
1299 return NULL;
1300
1301 slot = edge_to_cases->get (e);
1302 if (slot)
1303 return *slot;
1304
1305 /* If we did not find E in the hash table, then this must be the first
1306 time we have been queried for information about E & T. Add all the
1307 elements from T to the hash table then perform the query again. */
1308
1309 n = gimple_switch_num_labels (t);
1310 for (i = 0; i < n; i++)
1311 {
1312 tree elt = gimple_switch_label (t, i);
1313 tree lab = CASE_LABEL (elt);
1314 basic_block label_bb = label_to_block (lab);
1315 edge this_edge = find_edge (e->src, label_bb);
1316
1317 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
1318 a new chain. */
1319 tree &s = edge_to_cases->get_or_insert (this_edge);
1320 CASE_CHAIN (elt) = s;
1321 s = elt;
1322 }
1323
1324 return *edge_to_cases->get (e);
1325}
1326
1327/* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1328
1329static void
1330make_gimple_switch_edges (gswitch *entry, basic_block bb)
1331{
1332 size_t i, n;
1333
1334 n = gimple_switch_num_labels (entry);
1335
1336 for (i = 0; i < n; ++i)
1337 {
1338 tree lab = CASE_LABEL (gimple_switch_label (entry, i));
1339 basic_block label_bb = label_to_block (lab);
1340 make_edge (bb, label_bb, 0);
1341 }
1342}
1343
1344
1345/* Return the basic block holding label DEST. */
1346
1347basic_block
1348label_to_block_fn (struct function *ifun, tree dest)
1349{
1350 int uid = LABEL_DECL_UID (dest);
1351
1352 /* We would die hard when faced by an undefined label. Emit a label to
1353 the very first basic block. This will hopefully make even the dataflow
1354 and undefined variable warnings quite right. */
1355 if (seen_error () && uid < 0)
1356 {
1357 gimple_stmt_iterator gsi =
1358 gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
1359 gimple *stmt;
1360
1361 stmt = gimple_build_label (dest);
1362 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
1363 uid = LABEL_DECL_UID (dest);
1364 }
1365 if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
1366 return NULL;
1367 return (*ifun->cfg->x_label_to_block_map)[uid];
1368}
1369
1370/* Create edges for a goto statement at block BB. Returns true
1371 if abnormal edges should be created. */
1372
1373static bool
1374make_goto_expr_edges (basic_block bb)
1375{
1376 gimple_stmt_iterator last = gsi_last_bb (bb);
1377 gimple *goto_t = gsi_stmt (last);
1378
1379 /* A simple GOTO creates normal edges. */
1380 if (simple_goto_p (goto_t))
1381 {
1382 tree dest = gimple_goto_dest (goto_t);
1383 basic_block label_bb = label_to_block (dest);
1384 edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
1385 e->goto_locus = gimple_location (goto_t);
1386 gsi_remove (&last, true);
1387 return false;
1388 }
1389
1390 /* A computed GOTO creates abnormal edges. */
1391 return true;
1392}
1393
1394/* Create edges for an asm statement with labels at block BB. */
1395
1396static void
1397make_gimple_asm_edges (basic_block bb)
1398{
1399 gasm *stmt = as_a <gasm *> (last_stmt (bb));
1400 int i, n = gimple_asm_nlabels (stmt);
1401
1402 for (i = 0; i < n; ++i)
1403 {
1404 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1405 basic_block label_bb = label_to_block (label);
1406 make_edge (bb, label_bb, 0);
1407 }
1408}
1409
1410/*---------------------------------------------------------------------------
1411 Flowgraph analysis
1412---------------------------------------------------------------------------*/
1413
1414/* Cleanup useless labels in basic blocks. This is something we wish
1415 to do early because it allows us to group case labels before creating
1416 the edges for the CFG, and it speeds up block statement iterators in
1417 all passes later on.
1418 We rerun this pass after CFG is created, to get rid of the labels that
1419 are no longer referenced. After then we do not run it any more, since
1420 (almost) no new labels should be created. */
1421
1422/* A map from basic block index to the leading label of that block. */
1423static struct label_record
1424{
1425 /* The label. */
1426 tree label;
1427
1428 /* True if the label is referenced from somewhere. */
1429 bool used;
1430} *label_for_bb;
1431
1432/* Given LABEL return the first label in the same basic block. */
1433
1434static tree
1435main_block_label (tree label)
1436{
1437 basic_block bb = label_to_block (label);
1438 tree main_label = label_for_bb[bb->index].label;
1439
1440 /* label_to_block possibly inserted undefined label into the chain. */
1441 if (!main_label)
1442 {
1443 label_for_bb[bb->index].label = label;
1444 main_label = label;
1445 }
1446
1447 label_for_bb[bb->index].used = true;
1448 return main_label;
1449}
1450
1451/* Clean up redundant labels within the exception tree. */
1452
1453static void
1454cleanup_dead_labels_eh (void)
1455{
1456 eh_landing_pad lp;
1457 eh_region r;
1458 tree lab;
1459 int i;
1460
1461 if (cfun->eh == NULL)
1462 return;
1463
1464 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
1465 if (lp && lp->post_landing_pad)
1466 {
1467 lab = main_block_label (lp->post_landing_pad);
1468 if (lab != lp->post_landing_pad)
1469 {
1470 EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
1471 EH_LANDING_PAD_NR (lab) = lp->index;
1472 }
1473 }
1474
1475 FOR_ALL_EH_REGION (r)
1476 switch (r->type)
1477 {
1478 case ERT_CLEANUP:
1479 case ERT_MUST_NOT_THROW:
1480 break;
1481
1482 case ERT_TRY:
1483 {
1484 eh_catch c;
1485 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
1486 {
1487 lab = c->label;
1488 if (lab)
1489 c->label = main_block_label (lab);
1490 }
1491 }
1492 break;
1493
1494 case ERT_ALLOWED_EXCEPTIONS:
1495 lab = r->u.allowed.label;
1496 if (lab)
1497 r->u.allowed.label = main_block_label (lab);
1498 break;
1499 }
1500}
1501
1502
1503/* Cleanup redundant labels. This is a three-step process:
1504 1) Find the leading label for each block.
1505 2) Redirect all references to labels to the leading labels.
1506 3) Cleanup all useless labels. */
1507
1508void
1509cleanup_dead_labels (void)
1510{
1511 basic_block bb;
1512 label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));
1513
1514 /* Find a suitable label for each block. We use the first user-defined
1515 label if there is one, or otherwise just the first label we see. */
1516 FOR_EACH_BB_FN (bb, cfun)
1517 {
1518 gimple_stmt_iterator i;
1519
1520 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1521 {
1522 if (is_gimple_debug (gsi_stmt (i)))
1523 continue;
1524
1525 tree label;
1526 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
1527
1528 if (!label_stmt)
1529 break;
1530
1531 label = gimple_label_label (label_stmt);
1532
1533 /* If we have not yet seen a label for the current block,
1534 remember this one and see if there are more labels. */
1535 if (!label_for_bb[bb->index].label)
1536 {
1537 label_for_bb[bb->index].label = label;
1538 continue;
1539 }
1540
1541 /* If we did see a label for the current block already, but it
1542 is an artificially created label, replace it if the current
1543 label is a user defined label. */
1544 if (!DECL_ARTIFICIAL (label)
1545 && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
1546 {
1547 label_for_bb[bb->index].label = label;
1548 break;
1549 }
1550 }
1551 }
1552
1553 /* Now redirect all jumps/branches to the selected label.
1554 First do so for each block ending in a control statement. */
1555 FOR_EACH_BB_FN (bb, cfun)
1556 {
1557 gimple *stmt = last_stmt (bb);
1558 tree label, new_label;
1559
1560 if (!stmt)
1561 continue;
1562
1563 switch (gimple_code (stmt))
1564 {
1565 case GIMPLE_COND:
1566 {
1567 gcond *cond_stmt = as_a <gcond *> (stmt);
1568 label = gimple_cond_true_label (cond_stmt);
1569 if (label)
1570 {
1571 new_label = main_block_label (label);
1572 if (new_label != label)
1573 gimple_cond_set_true_label (cond_stmt, new_label);
1574 }
1575
1576 label = gimple_cond_false_label (cond_stmt);
1577 if (label)
1578 {
1579 new_label = main_block_label (label);
1580 if (new_label != label)
1581 gimple_cond_set_false_label (cond_stmt, new_label);
1582 }
1583 }
1584 break;
1585
1586 case GIMPLE_SWITCH:
1587 {
1588 gswitch *switch_stmt = as_a <gswitch *> (stmt);
1589 size_t i, n = gimple_switch_num_labels (switch_stmt);
1590
1591 /* Replace all destination labels. */
1592 for (i = 0; i < n; ++i)
1593 {
1594 tree case_label = gimple_switch_label (switch_stmt, i);
1595 label = CASE_LABEL (case_label);
1596 new_label = main_block_label (label);
1597 if (new_label != label)
1598 CASE_LABEL (case_label) = new_label;
1599 }
1600 break;
1601 }
1602
1603 case GIMPLE_ASM:
1604 {
1605 gasm *asm_stmt = as_a <gasm *> (stmt);
1606 int i, n = gimple_asm_nlabels (asm_stmt);
1607
1608 for (i = 0; i < n; ++i)
1609 {
1610 tree cons = gimple_asm_label_op (asm_stmt, i);
1611 tree label = main_block_label (TREE_VALUE (cons));
1612 TREE_VALUE (cons) = label;
1613 }
1614 break;
1615 }
1616
1617 /* We have to handle gotos until they're removed, and we don't
1618 remove them until after we've created the CFG edges. */
1619 case GIMPLE_GOTO:
1620 if (!computed_goto_p (stmt))
1621 {
1622 ggoto *goto_stmt = as_a <ggoto *> (stmt);
1623 label = gimple_goto_dest (goto_stmt);
1624 new_label = main_block_label (label);
1625 if (new_label != label)
1626 gimple_goto_set_dest (goto_stmt, new_label);
1627 }
1628 break;
1629
1630 case GIMPLE_TRANSACTION:
1631 {
1632 gtransaction *txn = as_a <gtransaction *> (stmt);
1633
1634 label = gimple_transaction_label_norm (txn);
1635 if (label)
1636 {
1637 new_label = main_block_label (label);
1638 if (new_label != label)
1639 gimple_transaction_set_label_norm (txn, new_label);
1640 }
1641
1642 label = gimple_transaction_label_uninst (txn);
1643 if (label)
1644 {
1645 new_label = main_block_label (label);
1646 if (new_label != label)
1647 gimple_transaction_set_label_uninst (txn, new_label);
1648 }
1649
1650 label = gimple_transaction_label_over (txn);
1651 if (label)
1652 {
1653 new_label = main_block_label (label);
1654 if (new_label != label)
1655 gimple_transaction_set_label_over (txn, new_label);
1656 }
1657 }
1658 break;
1659
1660 default:
1661 break;
1662 }
1663 }
1664
1665 /* Do the same for the exception region tree labels. */
1666 cleanup_dead_labels_eh ();
1667
1668 /* Finally, purge dead labels. All user-defined labels and labels that
1669 can be the target of non-local gotos and labels which have their
1670 address taken are preserved. */
1671 FOR_EACH_BB_FN (bb, cfun)
1672 {
1673 gimple_stmt_iterator i;
1674 tree label_for_this_bb = label_for_bb[bb->index].label;
1675
1676 if (!label_for_this_bb)
1677 continue;
1678
1679 /* If the main label of the block is unused, we may still remove it. */
1680 if (!label_for_bb[bb->index].used)
1681 label_for_this_bb = NULL;
1682
1683 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
1684 {
1685 if (is_gimple_debug (gsi_stmt (i)))
1686 {
1687 gsi_next (&i);
1688 continue;
1689 }
1690
1691 tree label;
1692 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
1693
1694 if (!label_stmt)
1695 break;
1696
1697 label = gimple_label_label (label_stmt);
1698
1699 if (label == label_for_this_bb
1700 || !DECL_ARTIFICIAL (label)
1701 || DECL_NONLOCAL (label)
1702 || FORCED_LABEL (label))
1703 gsi_next (&i);
1704 else
1705 gsi_remove (&i, true);
1706 }
1707 }
1708
1709 free (label_for_bb);
1710}
1711
1712/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1713 the ones jumping to the same label.
1714 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1715
1716bool
1717group_case_labels_stmt (gswitch *stmt)
1718{
1719 int old_size = gimple_switch_num_labels (stmt);
1720 int i, next_index, new_size;
1721 basic_block default_bb = NULL;
1722
1723 default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));
1724
1725 /* Look for possible opportunities to merge cases. */
1726 new_size = i = 1;
1727 while (i < old_size)
1728 {
1729 tree base_case, base_high;
1730 basic_block base_bb;
1731
1732 base_case = gimple_switch_label (stmt, i);
1733
1734 gcc_assert (base_case);
1735 base_bb = label_to_block (CASE_LABEL (base_case));
1736
1737 /* Discard cases that have the same destination as the default case or
1738 whose destiniation blocks have already been removed as unreachable. */
1739 if (base_bb == NULL || base_bb == default_bb)
1740 {
1741 i++;
1742 continue;
1743 }
1744
1745 base_high = CASE_HIGH (base_case)
1746 ? CASE_HIGH (base_case)
1747 : CASE_LOW (base_case);
1748 next_index = i + 1;
1749
1750 /* Try to merge case labels. Break out when we reach the end
1751 of the label vector or when we cannot merge the next case
1752 label with the current one. */
1753 while (next_index < old_size)
1754 {
1755 tree merge_case = gimple_switch_label (stmt, next_index);
1756 basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
1757 wide_int bhp1 = wi::to_wide (base_high) + 1;
1758
1759 /* Merge the cases if they jump to the same place,
1760 and their ranges are consecutive. */
1761 if (merge_bb == base_bb
1762 && wi::to_wide (CASE_LOW (merge_case)) == bhp1)
1763 {
1764 base_high = CASE_HIGH (merge_case) ?
1765 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1766 CASE_HIGH (base_case) = base_high;
1767 next_index++;
1768 }
1769 else
1770 break;
1771 }
1772
1773 /* Discard cases that have an unreachable destination block. */
1774 if (EDGE_COUNT (base_bb->succs) == 0
1775 && gimple_seq_unreachable_p (bb_seq (base_bb))
1776 /* Don't optimize this if __builtin_unreachable () is the
1777 implicitly added one by the C++ FE too early, before
1778 -Wreturn-type can be diagnosed. We'll optimize it later
1779 during switchconv pass or any other cfg cleanup. */
1780 && (gimple_in_ssa_p (cfun)
1781 || (LOCATION_LOCUS (gimple_location (last_stmt (base_bb)))
1782 != BUILTINS_LOCATION)))
1783 {
1784 edge base_edge = find_edge (gimple_bb (stmt), base_bb);
1785 if (base_edge != NULL)
1786 remove_edge_and_dominated_blocks (base_edge);
1787 i = next_index;
1788 continue;
1789 }
1790
1791 if (new_size < i)
1792 gimple_switch_set_label (stmt, new_size,
1793 gimple_switch_label (stmt, i));
1794 i = next_index;
1795 new_size++;
1796 }
1797
1798 gcc_assert (new_size <= old_size);
1799
1800 if (new_size < old_size)
1801 gimple_switch_set_num_labels (stmt, new_size);
1802
1803 return new_size < old_size;
1804}
1805
1806/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1807 and scan the sorted vector of cases. Combine the ones jumping to the
1808 same label. */
1809
1810bool
1811group_case_labels (void)
1812{
1813 basic_block bb;
1814 bool changed = false;
1815
1816 FOR_EACH_BB_FN (bb, cfun)
1817 {
1818 gimple *stmt = last_stmt (bb);
1819 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1820 changed |= group_case_labels_stmt (as_a <gswitch *> (stmt));
1821 }
1822
1823 return changed;
1824}
1825
1826/* Checks whether we can merge block B into block A. */
1827
1828static bool
1829gimple_can_merge_blocks_p (basic_block a, basic_block b)
1830{
1831 gimple *stmt;
1832
1833 if (!single_succ_p (a))
1834 return false;
1835
1836 if (single_succ_edge (a)->flags & EDGE_COMPLEX)
1837 return false;
1838
1839 if (single_succ (a) != b)
1840 return false;
1841
1842 if (!single_pred_p (b))
1843 return false;
1844
1845 if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
1846 || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
1847 return false;
1848
1849 /* If A ends by a statement causing exceptions or something similar, we
1850 cannot merge the blocks. */
1851 stmt = last_stmt (a);
1852 if (stmt && stmt_ends_bb_p (stmt))
1853 return false;
1854
1855 /* Do not allow a block with only a non-local label to be merged. */
1856 if (stmt)
1857 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1858 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
1859 return false;
1860
1861 /* Examine the labels at the beginning of B. */
1862 for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
1863 gsi_next (&gsi))
1864 {
1865 tree lab;
1866 if (is_gimple_debug (gsi_stmt (gsi)))
1867 continue;
1868 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1869 if (!label_stmt)
1870 break;
1871 lab = gimple_label_label (label_stmt);
1872
1873 /* Do not remove user forced labels or for -O0 any user labels. */
1874 if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
1875 return false;
1876 }
1877
1878 /* Protect simple loop latches. We only want to avoid merging
1879 the latch with the loop header or with a block in another
1880 loop in this case. */
1881 if (current_loops
1882 && b->loop_father->latch == b
1883 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
1884 && (b->loop_father->header == a
1885 || b->loop_father != a->loop_father))
1886 return false;
1887
1888 /* It must be possible to eliminate all phi nodes in B. If ssa form
1889 is not up-to-date and a name-mapping is registered, we cannot eliminate
1890 any phis. Symbols marked for renaming are never a problem though. */
1891 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
1892 gsi_next (&gsi))
1893 {
1894 gphi *phi = gsi.phi ();
1895 /* Technically only new names matter. */
1896 if (name_registered_for_update_p (PHI_RESULT (phi)))
1897 return false;
1898 }
1899
1900 /* When not optimizing, don't merge if we'd lose goto_locus. */
1901 if (!optimize
1902 && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
1903 {
1904 location_t goto_locus = single_succ_edge (a)->goto_locus;
1905 gimple_stmt_iterator prev, next;
1906 prev = gsi_last_nondebug_bb (a);
1907 next = gsi_after_labels (b);
1908 if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
1909 gsi_next_nondebug (&next);
1910 if ((gsi_end_p (prev)
1911 || gimple_location (gsi_stmt (prev)) != goto_locus)
1912 && (gsi_end_p (next)
1913 || gimple_location (gsi_stmt (next)) != goto_locus))
1914 return false;
1915 }
1916
1917 return true;
1918}
1919
1920/* Replaces all uses of NAME by VAL. */
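
/* As a hypothetical example, replace_uses_by (x_1, 0) turns

     y_2 = x_1 + 1;    into    y_2 = 0 + 1;

   and then folds the updated statement, yielding  y_2 = 1.  Uses that
   appear as PHI arguments on abnormal edges get the extra bookkeeping
   below.  */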
1921
1922void
1923replace_uses_by (tree name, tree val)
1924{
1925 imm_use_iterator imm_iter;
1926 use_operand_p use;
1927 gimple *stmt;
1928 edge e;
1929
1930 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1931 {
1932 /* Mark the block if we change the last stmt in it. */
1933 if (cfgcleanup_altered_bbs
1934 && stmt_ends_bb_p (stmt))
1935 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
1936
1937 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1938 {
1939 replace_exp (use, val);
1940
1941 if (gimple_code (stmt) == GIMPLE_PHI)
1942 {
1943 e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
1944 PHI_ARG_INDEX_FROM_USE (use));
1945 if (e->flags & EDGE_ABNORMAL
1946 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
1947 {
	      /* This can only occur for virtual operands, since
		 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
		 would prevent replacement.  */
1951 gcc_checking_assert (virtual_operand_p (name));
1952 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1953 }
1954 }
1955 }
1956
1957 if (gimple_code (stmt) != GIMPLE_PHI)
1958 {
1959 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1960 gimple *orig_stmt = stmt;
1961 size_t i;
1962
	  /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
	     only change something from non-invariant to invariant,
	     and only when propagating constants.  */
1967 if (is_gimple_min_invariant (val))
1968 for (i = 0; i < gimple_num_ops (stmt); i++)
1969 {
1970 tree op = gimple_op (stmt, i);
1971 /* Operands may be empty here. For example, the labels
1972 of a GIMPLE_COND are nulled out following the creation
1973 of the corresponding CFG edges. */
1974 if (op && TREE_CODE (op) == ADDR_EXPR)
1975 recompute_tree_invariant_for_addr_expr (op);
1976 }
1977
1978 if (fold_stmt (&gsi))
1979 stmt = gsi_stmt (gsi);
1980
1981 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
1982 gimple_purge_dead_eh_edges (gimple_bb (stmt));
1983
1984 update_stmt (stmt);
1985 }
1986 }
1987
1988 gcc_checking_assert (has_zero_uses (name));
1989
1990 /* Also update the trees stored in loop structures. */
1991 if (current_loops)
1992 {
1993 struct loop *loop;
1994
1995 FOR_EACH_LOOP (loop, 0)
1996 {
1997 substitute_in_loop_info (loop, name, val);
1998 }
1999 }
2000}
2001
2002/* Merge block B into block A. */
2003
2004static void
2005gimple_merge_blocks (basic_block a, basic_block b)
2006{
2007 gimple_stmt_iterator last, gsi;
2008 gphi_iterator psi;
2009
2010 if (dump_file)
2011 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
2012
2013 /* Remove all single-valued PHI nodes from block B of the form
2014 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
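  /* E.g. (hypothetical): a degenerate PHI in B such as  v_3 = PHI <v_7(A)>
     is removed and v_7 propagated to all uses of v_3; when the copy may
     not be propagated, an explicit assignment  v_3 = v_7  is emitted at
     the end of A instead.  */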
2015 gsi = gsi_last_bb (a);
2016 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
2017 {
2018 gimple *phi = gsi_stmt (psi);
2019 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
2020 gimple *copy;
2021 bool may_replace_uses = (virtual_operand_p (def)
2022 || may_propagate_copy (def, use));
2023
      /* In case we maintain loop-closed SSA form, do not propagate
	 arguments of loop exit phi nodes.  */
2026 if (current_loops
2027 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
2028 && !virtual_operand_p (def)
2029 && TREE_CODE (use) == SSA_NAME
2030 && a->loop_father != b->loop_father)
2031 may_replace_uses = false;
2032
2033 if (!may_replace_uses)
2034 {
2035 gcc_assert (!virtual_operand_p (def));
2036
	  /* Note that just emitting the copies is fine -- there is no problem
	     with the ordering of phi nodes.  This is because A is the single
	     predecessor of B, therefore results of the phi nodes cannot
	     appear as arguments of other phi nodes.  */
2041 copy = gimple_build_assign (def, use);
2042 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
2043 remove_phi_node (&psi, false);
2044 }
2045 else
2046 {
2047 /* If we deal with a PHI for virtual operands, we can simply
2048 propagate these without fussing with folding or updating
2049 the stmt. */
2050 if (virtual_operand_p (def))
2051 {
2052 imm_use_iterator iter;
2053 use_operand_p use_p;
2054 gimple *stmt;
2055
2056 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
2057 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2058 SET_USE (use_p, use);
2059
2060 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2061 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
2062 }
2063 else
2064 replace_uses_by (def, use);
2065
2066 remove_phi_node (&psi, true);
2067 }
2068 }
2069
2070 /* Ensure that B follows A. */
2071 move_block_after (b, a);
2072
2073 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
2074 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
2075
2076 /* Remove labels from B and set gimple_bb to A for other statements. */
2077 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
2078 {
2079 gimple *stmt = gsi_stmt (gsi);
2080 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2081 {
2082 tree label = gimple_label_label (label_stmt);
2083 int lp_nr;
2084
2085 gsi_remove (&gsi, false);
2086
	  /* Now that we can thread computed gotos, we might have
	     a situation where we have a forced label in block B.
	     However, the label at the start of block B might still be
	     used in other ways (think about the runtime checking for
	     Fortran assigned gotos), so we cannot just delete the
	     label.  Instead we move the label to the start of block A.  */
2093 if (FORCED_LABEL (label))
2094 {
2095 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2096 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2097 }
	  /* Other user labels are kept around in the form of a debug stmt.  */
2099 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_BIND_STMTS)
2100 {
2101 gimple *dbg = gimple_build_debug_bind (label,
2102 integer_zero_node,
2103 stmt);
2104 gimple_debug_bind_reset_value (dbg);
2105 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
2106 }
2107
2108 lp_nr = EH_LANDING_PAD_NR (label);
2109 if (lp_nr)
2110 {
2111 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2112 lp->post_landing_pad = NULL;
2113 }
2114 }
2115 else
2116 {
2117 gimple_set_bb (stmt, a);
2118 gsi_next (&gsi);
2119 }
2120 }
2121
2122 /* When merging two BBs, if their counts are different, the larger count
2123 is selected as the new bb count. This is to handle inconsistent
2124 profiles. */
2125 if (a->loop_father == b->loop_father)
2126 {
2127 a->count = a->count.merge (b->count);
2128 }
2129
2130 /* Merge the sequences. */
2131 last = gsi_last_bb (a);
2132 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2133 set_bb_seq (b, NULL);
2134
2135 if (cfgcleanup_altered_bbs)
2136 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
2137}
2138
2139
/* Of the two successors of BB, return the one that is not reachable
   through a complex edge, if there is one.  Otherwise, return BB.  We use
   this in optimizations that use post-dominators for their heuristics,
   to catch the cases in C++ where function calls are involved.  */
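
/* E.g. for a hypothetical BB with successor edges { e0 complex,
   e1 normal }, this returns e1->dest; if BB does not have exactly two
   successors, or neither edge is complex, BB itself is returned.  */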
2144
2145basic_block
2146single_noncomplex_succ (basic_block bb)
2147{
2148 edge e0, e1;
2149 if (EDGE_COUNT (bb->succs) != 2)
2150 return bb;
2151
2152 e0 = EDGE_SUCC (bb, 0);
2153 e1 = EDGE_SUCC (bb, 1);
2154 if (e0->flags & EDGE_COMPLEX)
2155 return e1->dest;
2156 if (e1->flags & EDGE_COMPLEX)
2157 return e0->dest;
2158
2159 return bb;
2160}
2161
/* CALL is a GIMPLE call statement.  Set the cfun->calls_* flags that
   it implies.  */
2163
2164void
2165notice_special_calls (gcall *call)
2166{
2167 int flags = gimple_call_flags (call);
2168
2169 if (flags & ECF_MAY_BE_ALLOCA)
2170 cfun->calls_alloca = true;
2171 if (flags & ECF_RETURNS_TWICE)
2172 cfun->calls_setjmp = true;
2173}
2174
2175
2176/* Clear flags set by notice_special_calls. Used by dead code removal
2177 to update the flags. */
2178
2179void
2180clear_special_calls (void)
2181{
2182 cfun->calls_alloca = false;
2183 cfun->calls_setjmp = false;
2184}
2185
2186/* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2187
2188static void
2189remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2190{
2191 /* Since this block is no longer reachable, we can just delete all
2192 of its PHI nodes. */
2193 remove_phi_nodes (bb);
2194
2195 /* Remove edges to BB's successors. */
2196 while (EDGE_COUNT (bb->succs) > 0)
2197 remove_edge (EDGE_SUCC (bb, 0));
2198}
2199
2200
2201/* Remove statements of basic block BB. */
2202
2203static void
2204remove_bb (basic_block bb)
2205{
2206 gimple_stmt_iterator i;
2207
2208 if (dump_file)
2209 {
2210 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2211 if (dump_flags & TDF_DETAILS)
2212 {
2213 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2214 fprintf (dump_file, "\n");
2215 }
2216 }
2217
2218 if (current_loops)
2219 {
2220 struct loop *loop = bb->loop_father;
2221
2222 /* If a loop gets removed, clean up the information associated
2223 with it. */
2224 if (loop->latch == bb
2225 || loop->header == bb)
2226 free_numbers_of_iterations_estimates (loop);
2227 }
2228
2229 /* Remove all the instructions in the block. */
2230 if (bb_seq (bb) != NULL)
2231 {
2232 /* Walk backwards so as to get a chance to substitute all
2233 released DEFs into debug stmts. See
2234 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2235 details. */
2236 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2237 {
2238 gimple *stmt = gsi_stmt (i);
2239 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2240 if (label_stmt
2241 && (FORCED_LABEL (gimple_label_label (label_stmt))
2242 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2243 {
2244 basic_block new_bb;
2245 gimple_stmt_iterator new_gsi;
2246
2247 /* A non-reachable non-local label may still be referenced.
2248 But it no longer needs to carry the extra semantics of
2249 non-locality. */
2250 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2251 {
2252 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2253 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2254 }
2255
2256 new_bb = bb->prev_bb;
2257 new_gsi = gsi_start_bb (new_bb);
2258 gsi_remove (&i, false);
2259 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2260 }
2261 else
2262 {
2263 /* Release SSA definitions. */
2264 release_defs (stmt);
2265 gsi_remove (&i, true);
2266 }
2267
2268 if (gsi_end_p (i))
2269 i = gsi_last_bb (bb);
2270 else
2271 gsi_prev (&i);
2272 }
2273 }
2274
2275 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2276 bb->il.gimple.seq = NULL;
2277 bb->il.gimple.phi_nodes = NULL;
2278}
2279
2280
/* Given a basic block BB ending with a control statement (a GIMPLE_COND,
   GIMPLE_SWITCH or computed GOTO), and a predicate value VAL, return the
   edge that will be taken out of the block.  If VAL does not match a
   unique edge, NULL is returned.  */
2284
2285edge
2286find_taken_edge (basic_block bb, tree val)
2287{
2288 gimple *stmt;
2289
2290 stmt = last_stmt (bb);
2291
2292 gcc_assert (is_ctrl_stmt (stmt));
2293
2294 if (gimple_code (stmt) == GIMPLE_COND)
2295 return find_taken_edge_cond_expr (bb, val);
2296
2297 if (gimple_code (stmt) == GIMPLE_SWITCH)
2298 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);
2299
2300 if (computed_goto_p (stmt))
2301 {
      /* Only optimize if the argument is a label; if the argument is
	 not a label then we cannot construct a proper CFG.

	 It may be the case that we only need to allow the LABEL_REF to
	 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
	 appear inside a LABEL_EXPR just to be safe.  */
2308 if (val
2309 && (TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2310 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2311 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2312 return NULL;
2313 }
2314
2315 gcc_unreachable ();
2316}
2317
/* Given a constant value VAL and the basic block BB ending in a computed
   GOTO statement, determine which of the outgoing edges will be taken
   out of the block.  Return NULL if any edge may be taken.  */
2321
2322static edge
2323find_taken_edge_computed_goto (basic_block bb, tree val)
2324{
2325 basic_block dest;
2326 edge e = NULL;
2327
2328 dest = label_to_block (val);
2329 if (dest)
2330 {
2331 e = find_edge (bb, dest);
2332 gcc_assert (e != NULL);
2333 }
2334
2335 return e;
2336}
2337
2338/* Given a constant value VAL and the entry block BB to a COND_EXPR
2339 statement, determine which of the two edges will be taken out of the
2340 block. Return NULL if either edge may be taken. */
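
/* E.g. (hypothetical): a predicate value VAL of integer_zero_node
   selects the false edge, any other INTEGER_CST selects the true edge,
   and a symbolic or missing VAL yields NULL.  */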
2341
2342static edge
2343find_taken_edge_cond_expr (basic_block bb, tree val)
2344{
2345 edge true_edge, false_edge;
2346
2347 if (val == NULL
2348 || TREE_CODE (val) != INTEGER_CST)
2349 return NULL;
2350
2351 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2352
2353 return (integer_zerop (val) ? false_edge : true_edge);
2354}
2355
2356/* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2357 statement, determine which edge will be taken out of the block. Return
2358 NULL if any edge may be taken. */
2359
2360static edge
2361find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
2362 tree val)
2363{
2364 basic_block dest_bb;
2365 edge e;
2366 tree taken_case;
2367
2368 if (gimple_switch_num_labels (switch_stmt) == 1)
2369 taken_case = gimple_switch_default_label (switch_stmt);
2370 else if (! val || TREE_CODE (val) != INTEGER_CST)
2371 return NULL;
2372 else
2373 taken_case = find_case_label_for_value (switch_stmt, val);
2374 dest_bb = label_to_block (CASE_LABEL (taken_case));
2375
2376 e = find_edge (bb, dest_bb);
2377 gcc_assert (e);
2378 return e;
2379}
2380
2381
2382/* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2383 We can make optimal use here of the fact that the case labels are
2384 sorted: We can do a binary search for a case matching VAL. */
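
/* Worked example (hypothetical case vector): with sorted labels
   { default, 1, 5 ... 8, 12 } and VAL == 6, the first probe lands on
   the range label 5 ... 8 and returns it at once, since
   CASE_LOW <= VAL <= CASE_HIGH.  With VAL == 4 the search narrows to
   the label 1 and finally falls back to the default case.  */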
2385
2386static tree
2387find_case_label_for_value (gswitch *switch_stmt, tree val)
2388{
2389 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2390 tree default_case = gimple_switch_default_label (switch_stmt);
2391
2392 for (low = 0, high = n; high - low > 1; )
2393 {
2394 size_t i = (high + low) / 2;
2395 tree t = gimple_switch_label (switch_stmt, i);
2396 int cmp;
2397
2398 /* Cache the result of comparing CASE_LOW and val. */
2399 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2400
2401 if (cmp > 0)
2402 high = i;
2403 else
2404 low = i;
2405
2406 if (CASE_HIGH (t) == NULL)
2407 {
	  /* A single-valued case label.  */
2409 if (cmp == 0)
2410 return t;
2411 }
2412 else
2413 {
2414 /* A case range. We can only handle integer ranges. */
2415 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2416 return t;
2417 }
2418 }
2419
2420 return default_case;
2421}
2422
2423
2424/* Dump a basic block on stderr. */
2425
2426void
2427gimple_debug_bb (basic_block bb)
2428{
2429 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2430}
2431
2432
2433/* Dump basic block with index N on stderr. */
2434
2435basic_block
2436gimple_debug_bb_n (int n)
2437{
2438 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2439 return BASIC_BLOCK_FOR_FN (cfun, n);
2440}
2441
2442
2443/* Dump the CFG on stderr.
2444
   FLAGS are the same as those used by the tree dumping functions
   (see TDF_* in dumpfile.h).  */
2447
2448void
2449gimple_debug_cfg (dump_flags_t flags)
2450{
2451 gimple_dump_cfg (stderr, flags);
2452}
2453
2454
2455/* Dump the program showing basic block boundaries on the given FILE.
2456
   FLAGS are the same as those used by the tree dumping functions
   (see TDF_* in dumpfile.h).  */
2459
2460void
2461gimple_dump_cfg (FILE *file, dump_flags_t flags)
2462{
2463 if (flags & TDF_DETAILS)
2464 {
2465 dump_function_header (file, current_function_decl, flags);
2466 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2467 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2468 last_basic_block_for_fn (cfun));
2469
2470 brief_dump_cfg (file, flags);
2471 fprintf (file, "\n");
2472 }
2473
2474 if (flags & TDF_STATS)
2475 dump_cfg_stats (file);
2476
2477 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2478}
2479
2480
2481/* Dump CFG statistics on FILE. */
2482
2483void
2484dump_cfg_stats (FILE *file)
2485{
2486 static long max_num_merged_labels = 0;
2487 unsigned long size, total = 0;
2488 long num_edges;
2489 basic_block bb;
2490 const char * const fmt_str = "%-30s%-13s%12s\n";
2491 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2492 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2493 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2494 const char *funcname = current_function_name ();
2495
2496 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2497
2498 fprintf (file, "---------------------------------------------------------\n");
2499 fprintf (file, fmt_str, "", " Number of ", "Memory");
2500 fprintf (file, fmt_str, "", " instances ", "used ");
2501 fprintf (file, "---------------------------------------------------------\n");
2502
2503 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2504 total += size;
2505 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2506 SCALE (size), LABEL (size));
2507
2508 num_edges = 0;
2509 FOR_EACH_BB_FN (bb, cfun)
2510 num_edges += EDGE_COUNT (bb->succs);
2511 size = num_edges * sizeof (struct edge_def);
2512 total += size;
2513 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2514
2515 fprintf (file, "---------------------------------------------------------\n");
2516 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2517 LABEL (total));
2518 fprintf (file, "---------------------------------------------------------\n");
2519 fprintf (file, "\n");
2520
2521 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2522 max_num_merged_labels = cfg_stats.num_merged_labels;
2523
2524 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2525 cfg_stats.num_merged_labels, max_num_merged_labels);
2526
2527 fprintf (file, "\n");
2528}
2529
2530
2531/* Dump CFG statistics on stderr. Keep extern so that it's always
2532 linked in the final executable. */
2533
2534DEBUG_FUNCTION void
2535debug_cfg_stats (void)
2536{
2537 dump_cfg_stats (stderr);
2538}
2539
2540/*---------------------------------------------------------------------------
2541 Miscellaneous helpers
2542---------------------------------------------------------------------------*/
2543
2544/* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2545 flow. Transfers of control flow associated with EH are excluded. */
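
/* For instance, in a hypothetical function that contains a non-local
   label or calls setjmp, a side-effecting non-leaf call may return
   abnormally (e.g. via longjmp), so it needs an abnormal outgoing
   edge.  */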
2546
2547static bool
2548call_can_make_abnormal_goto (gimple *t)
2549{
  /* If the function has no non-local labels and does not call setjmp,
     then a call cannot make an abnormal transfer of control.  */
2552 if (!cfun->has_nonlocal_label
2553 && !cfun->calls_setjmp)
2554 return false;
2555
2556 /* Likewise if the call has no side effects. */
2557 if (!gimple_has_side_effects (t))
2558 return false;
2559
2560 /* Likewise if the called function is leaf. */
2561 if (gimple_call_flags (t) & ECF_LEAF)
2562 return false;
2563
2564 return true;
2565}
2566
2567
2568/* Return true if T can make an abnormal transfer of control flow.
2569 Transfers of control flow associated with EH are excluded. */
2570
2571bool
2572stmt_can_make_abnormal_goto (gimple *t)
2573{
2574 if (computed_goto_p (t))
2575 return true;
2576 if (is_gimple_call (t))
2577 return call_can_make_abnormal_goto (t);
2578 return false;
2579}
2580
2581
2582/* Return true if T represents a stmt that always transfers control. */
2583
2584bool
2585is_ctrl_stmt (gimple *t)
2586{
2587 switch (gimple_code (t))
2588 {
2589 case GIMPLE_COND:
2590 case GIMPLE_SWITCH:
2591 case GIMPLE_GOTO:
2592 case GIMPLE_RETURN:
2593 case GIMPLE_RESX:
2594 return true;
2595 default:
2596 return false;
2597 }
2598}
2599
2600
2601/* Return true if T is a statement that may alter the flow of control
2602 (e.g., a call to a non-returning function). */
2603
2604bool
2605is_ctrl_altering_stmt (gimple *t)
2606{
2607 gcc_assert (t);
2608
2609 switch (gimple_code (t))
2610 {
2611 case GIMPLE_CALL:
      /* The per-stmt call flag indicates whether the call could alter
	 control flow.  */
2614 if (gimple_call_ctrl_altering_p (t))
2615 return true;
2616 break;
2617
2618 case GIMPLE_EH_DISPATCH:
2619 /* EH_DISPATCH branches to the individual catch handlers at
2620 this level of a try or allowed-exceptions region. It can
	 fall through to the next statement as well.  */
2622 return true;
2623
2624 case GIMPLE_ASM:
2625 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2626 return true;
2627 break;
2628
2629 CASE_GIMPLE_OMP:
2630 /* OpenMP directives alter control flow. */
2631 return true;
2632
2633 case GIMPLE_TRANSACTION:
2634 /* A transaction start alters control flow. */
2635 return true;
2636
2637 default:
2638 break;
2639 }
2640
2641 /* If a statement can throw, it alters control flow. */
2642 return stmt_can_throw_internal (t);
2643}
2644
2645
2646/* Return true if T is a simple local goto. */
2647
2648bool
2649simple_goto_p (gimple *t)
2650{
2651 return (gimple_code (t) == GIMPLE_GOTO
2652 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2653}
2654
2655
2656/* Return true if STMT should start a new basic block. PREV_STMT is
2657 the statement preceding STMT. It is used when STMT is a label or a
2658 case label. Labels should only start a new basic block if their
   previous statement wasn't a label.  Otherwise, a sequence of labels
2660 would generate unnecessary basic blocks that only contain a single
2661 label. */
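
/* E.g. in the hypothetical label sequence

     L1:
     L2:
     x_1 = ...;

   only L1 opens a new basic block; L2 is merged into it and counted
   in cfg_stats.num_merged_labels.  */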
2662
2663static inline bool
2664stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2665{
2666 if (stmt == NULL)
2667 return false;
2668
2669 /* PREV_STMT is only set to a debug stmt if the debug stmt is before
2670 any nondebug stmts in the block. We don't want to start another
2671 block in this case: the debug stmt will already have started the
2672 one STMT would start if we weren't outputting debug stmts. */
2673 if (prev_stmt && is_gimple_debug (prev_stmt))
2674 return false;
2675
2676 /* Labels start a new basic block only if the preceding statement
2677 wasn't a label of the same type. This prevents the creation of
2678 consecutive blocks that have nothing but a single label. */
2679 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2680 {
2681 /* Nonlocal and computed GOTO targets always start a new block. */
2682 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2683 || FORCED_LABEL (gimple_label_label (label_stmt)))
2684 return true;
2685
2686 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2687 {
2688 if (DECL_NONLOCAL (gimple_label_label (
2689 as_a <glabel *> (prev_stmt))))
2690 return true;
2691
2692 cfg_stats.num_merged_labels++;
2693 return false;
2694 }
2695 else
2696 return true;
2697 }
2698 else if (gimple_code (stmt) == GIMPLE_CALL)
2699 {
2700 if (gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
	/* setjmp acts similarly to a nonlocal GOTO target and thus should
	   start a new block.  */
2703 return true;
2704 if (gimple_call_internal_p (stmt, IFN_PHI)
2705 && prev_stmt
2706 && gimple_code (prev_stmt) != GIMPLE_LABEL
2707 && (gimple_code (prev_stmt) != GIMPLE_CALL
2708 || ! gimple_call_internal_p (prev_stmt, IFN_PHI)))
	/* PHI nodes start a new block unless preceded by a label
	   or another PHI.  */
2711 return true;
2712 }
2713
2714 return false;
2715}
2716
2717
2718/* Return true if T should end a basic block. */
2719
2720bool
2721stmt_ends_bb_p (gimple *t)
2722{
2723 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2724}
2725
2726/* Remove block annotations and other data structures. */
2727
2728void
2729delete_tree_cfg_annotations (struct function *fn)
2730{
2731 vec_free (label_to_block_map_for_fn (fn));
2732}
2733
2734/* Return the virtual phi in BB. */
2735
2736gphi *
2737get_virtual_phi (basic_block bb)
2738{
2739 for (gphi_iterator gsi = gsi_start_phis (bb);
2740 !gsi_end_p (gsi);
2741 gsi_next (&gsi))
2742 {
2743 gphi *phi = gsi.phi ();
2744
2745 if (virtual_operand_p (PHI_RESULT (phi)))
2746 return phi;
2747 }
2748
2749 return NULL;
2750}
2751
2752/* Return the first statement in basic block BB. */
2753
2754gimple *
2755first_stmt (basic_block bb)
2756{
2757 gimple_stmt_iterator i = gsi_start_bb (bb);
2758 gimple *stmt = NULL;
2759
2760 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2761 {
2762 gsi_next (&i);
2763 stmt = NULL;
2764 }
2765 return stmt;
2766}
2767
2768/* Return the first non-label statement in basic block BB. */
2769
2770static gimple *
2771first_non_label_stmt (basic_block bb)
2772{
2773 gimple_stmt_iterator i = gsi_start_bb (bb);
2774 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2775 gsi_next (&i);
2776 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2777}
2778
2779/* Return the last statement in basic block BB. */
2780
2781gimple *
2782last_stmt (basic_block bb)
2783{
2784 gimple_stmt_iterator i = gsi_last_bb (bb);
2785 gimple *stmt = NULL;
2786
2787 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2788 {
2789 gsi_prev (&i);
2790 stmt = NULL;
2791 }
2792 return stmt;
2793}
2794
2795/* Return the last statement of an otherwise empty block. Return NULL
2796 if the block is totally empty, or if it contains more than one
2797 statement. */
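
/* For instance (hypothetical block contents):

     L1:
     return x_1;

   yields the GIMPLE_RETURN, while an empty block, or one with two
   executable statements, yields NULL.  */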
2798
2799gimple *
2800last_and_only_stmt (basic_block bb)
2801{
2802 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2803 gimple *last, *prev;
2804
2805 if (gsi_end_p (i))
2806 return NULL;
2807
2808 last = gsi_stmt (i);
2809 gsi_prev_nondebug (&i);
2810 if (gsi_end_p (i))
2811 return last;
2812
2813 /* Empty statements should no longer appear in the instruction stream.
2814 Everything that might have appeared before should be deleted by
2815 remove_useless_stmts, and the optimizers should just gsi_remove
2816 instead of smashing with build_empty_stmt.
2817
2818 Thus the only thing that should appear here in a block containing
2819 one executable statement is a label. */
2820 prev = gsi_stmt (i);
2821 if (gimple_code (prev) == GIMPLE_LABEL)
2822 return last;
2823 else
2824 return NULL;
2825}
2826
2827/* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2828
2829static void
2830reinstall_phi_args (edge new_edge, edge old_edge)
2831{
2832 edge_var_map *vm;
2833 int i;
2834 gphi_iterator phis;
2835
2836 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2837 if (!v)
2838 return;
2839
2840 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2841 v->iterate (i, &vm) && !gsi_end_p (phis);
2842 i++, gsi_next (&phis))
2843 {
2844 gphi *phi = phis.phi ();
2845 tree result = redirect_edge_var_map_result (vm);
2846 tree arg = redirect_edge_var_map_def (vm);
2847
2848 gcc_assert (result == gimple_phi_result (phi));
2849
2850 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2851 }
2852
2853 redirect_edge_var_map_clear (old_edge);
2854}
2855
2856/* Returns the basic block after which the new basic block created
2857 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2858 near its "logical" location. This is of most help to humans looking
2859 at debugging dumps. */
2860
2861basic_block
2862split_edge_bb_loc (edge edge_in)
2863{
2864 basic_block dest = edge_in->dest;
2865 basic_block dest_prev = dest->prev_bb;
2866
2867 if (dest_prev)
2868 {
2869 edge e = find_edge (dest_prev, dest);
2870 if (e && !(e->flags & EDGE_COMPLEX))
2871 return edge_in->src;
2872 }
2873 return dest_prev;
2874}
2875
2876/* Split a (typically critical) edge EDGE_IN. Return the new block.
2877 Abort on abnormal edges. */
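
/* Sketch of the transformation on a hypothetical edge A->B:

       A --> B    becomes    A --> NEW --> B

   NEW is an empty block inheriting the edge's count; the original edge
   is redirected to NEW, and the PHI arguments queued on the old edge
   are reinstalled on the new fallthru edge NEW->B.  */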
2878
2879static basic_block
2880gimple_split_edge (edge edge_in)
2881{
2882 basic_block new_bb, after_bb, dest;
2883 edge new_edge, e;
2884
2885 /* Abnormal edges cannot be split. */
2886 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2887
2888 dest = edge_in->dest;
2889
2890 after_bb = split_edge_bb_loc (edge_in);
2891
2892 new_bb = create_empty_bb (after_bb);
2893 new_bb->count = edge_in->count ();
2894
2895 e = redirect_edge_and_branch (edge_in, new_bb);
2896 gcc_assert (e == edge_in);
2897
2898 new_edge = make_single_succ_edge (new_bb, dest, EDGE_FALLTHRU);
2899 reinstall_phi_args (new_edge, e);
2900
2901 return new_bb;
2902}
2903
2904
2905/* Verify properties of the address expression T with base object BASE. */
2906
2907static tree
2908verify_address (tree t, tree base)
2909{
2910 bool old_constant;
2911 bool old_side_effects;
2912 bool new_constant;
2913 bool new_side_effects;
2914
2915 old_constant = TREE_CONSTANT (t);
2916 old_side_effects = TREE_SIDE_EFFECTS (t);
2917
2918 recompute_tree_invariant_for_addr_expr (t);
2919 new_side_effects = TREE_SIDE_EFFECTS (t);
2920 new_constant = TREE_CONSTANT (t);
2921
2922 if (old_constant != new_constant)
2923 {
2924 error ("constant not recomputed when ADDR_EXPR changed");
2925 return t;
2926 }
2927 if (old_side_effects != new_side_effects)
2928 {
2929 error ("side effects not recomputed when ADDR_EXPR changed");
2930 return t;
2931 }
2932
2933 if (!(VAR_P (base)
2934 || TREE_CODE (base) == PARM_DECL
2935 || TREE_CODE (base) == RESULT_DECL))
2936 return NULL_TREE;
2937
2938 if (DECL_GIMPLE_REG_P (base))
2939 {
2940 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2941 return base;
2942 }
2943
2944 return NULL_TREE;
2945}
2946
/* Callback for walk_tree: check that all elements with address taken
   are properly noticed as such.  DATA is currently unused.  */
2950
2951static tree
2952verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2953{
2954 tree t = *tp, x;
2955
2956 if (TYPE_P (t))
2957 *walk_subtrees = 0;
2958
2959 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2960#define CHECK_OP(N, MSG) \
2961 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2962 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2963
2964 switch (TREE_CODE (t))
2965 {
2966 case SSA_NAME:
2967 if (SSA_NAME_IN_FREE_LIST (t))
2968 {
2969 error ("SSA name in freelist but still referenced");
2970 return *tp;
2971 }
2972 break;
2973
2974 case PARM_DECL:
2975 case VAR_DECL:
2976 case RESULT_DECL:
2977 {
2978 tree context = decl_function_context (t);
2979 if (context != cfun->decl
2980 && !SCOPE_FILE_SCOPE_P (context)
2981 && !TREE_STATIC (t)
2982 && !DECL_EXTERNAL (t))
2983 {
2984 error ("Local declaration from a different function");
2985 return t;
2986 }
2987 }
2988 break;
2989
2990 case INDIRECT_REF:
2991 error ("INDIRECT_REF in gimple IL");
2992 return t;
2993
2994 case MEM_REF:
2995 x = TREE_OPERAND (t, 0);
2996 if (!POINTER_TYPE_P (TREE_TYPE (x))
2997 || !is_gimple_mem_ref_addr (x))
2998 {
2999 error ("invalid first operand of MEM_REF");
3000 return x;
3001 }
3002 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
3003 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
3004 {
3005 error ("invalid offset operand of MEM_REF");
3006 return TREE_OPERAND (t, 1);
3007 }
3008 if (TREE_CODE (x) == ADDR_EXPR)
3009 {
3010 tree va = verify_address (x, TREE_OPERAND (x, 0));
3011 if (va)
3012 return va;
3013 x = TREE_OPERAND (x, 0);
3014 }
3015 walk_tree (&x, verify_expr, data, NULL);
3016 *walk_subtrees = 0;
3017 break;
3018
3019 case ASSERT_EXPR:
3020 x = fold (ASSERT_EXPR_COND (t));
3021 if (x == boolean_false_node)
3022 {
3023 error ("ASSERT_EXPR with an always-false condition");
3024 return *tp;
3025 }
3026 break;
3027
3028 case MODIFY_EXPR:
3029 error ("MODIFY_EXPR not expected while having tuples");
3030 return *tp;
3031
3032 case ADDR_EXPR:
3033 {
3034 tree tem;
3035
3036 gcc_assert (is_gimple_address (t));
3037
3038 /* Skip any references (they will be checked when we recurse down the
3039 tree) and ensure that any variable used as a prefix is marked
3040 addressable. */
3041 for (x = TREE_OPERAND (t, 0);
3042 handled_component_p (x);
3043 x = TREE_OPERAND (x, 0))
3044 ;
3045
3046 if ((tem = verify_address (t, x)))
3047 return tem;
3048
3049 if (!(VAR_P (x)
3050 || TREE_CODE (x) == PARM_DECL
3051 || TREE_CODE (x) == RESULT_DECL))
3052 return NULL;
3053
3054 if (!TREE_ADDRESSABLE (x))
3055 {
3056 error ("address taken, but ADDRESSABLE bit not set");
3057 return x;
3058 }
3059
3060 break;
3061 }
3062
3063 case COND_EXPR:
3064 x = COND_EXPR_COND (t);
3065 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
3066 {
3067 error ("non-integral used in condition");
3068 return x;
3069 }
3070 if (!is_gimple_condexpr (x))
3071 {
3072 error ("invalid conditional operand");
3073 return x;
3074 }
3075 break;
3076
3077 case NON_LVALUE_EXPR:
3078 case TRUTH_NOT_EXPR:
3079 gcc_unreachable ();
3080
3081 CASE_CONVERT:
3082 case FIX_TRUNC_EXPR:
3083 case FLOAT_EXPR:
3084 case NEGATE_EXPR:
3085 case ABS_EXPR:
3086 case BIT_NOT_EXPR:
3087 CHECK_OP (0, "invalid operand to unary operator");
3088 break;
3089
3090 case REALPART_EXPR:
3091 case IMAGPART_EXPR:
3092 case BIT_FIELD_REF:
3093 if (!is_gimple_reg_type (TREE_TYPE (t)))
3094 {
3095 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
3096 return t;
3097 }
3098
3099 if (TREE_CODE (t) == BIT_FIELD_REF)
3100 {
3101 tree t0 = TREE_OPERAND (t, 0);
3102 tree t1 = TREE_OPERAND (t, 1);
3103 tree t2 = TREE_OPERAND (t, 2);
3104 if (!tree_fits_uhwi_p (t1)
3105 || !tree_fits_uhwi_p (t2)
3106 || !types_compatible_p (bitsizetype, TREE_TYPE (t1))
3107 || !types_compatible_p (bitsizetype, TREE_TYPE (t2)))
3108 {
3109 error ("invalid position or size operand to BIT_FIELD_REF");
3110 return t;
3111 }
3112 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
3113 && (TYPE_PRECISION (TREE_TYPE (t))
3114 != tree_to_uhwi (t1)))
3115 {
3116 error ("integral result type precision does not match "
3117 "field size of BIT_FIELD_REF");
3118 return t;
3119 }
3120 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
3121 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
3122 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (t)))
3123 != tree_to_uhwi (t1)))
3124 {
3125 error ("mode size of non-integral result does not "
3126 "match field size of BIT_FIELD_REF");
3127 return t;
3128 }
3129 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
3130 && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
3131 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
3132 {
3133 error ("position plus size exceeds size of referenced object in "
3134 "BIT_FIELD_REF");
3135 return t;
3136 }
3137 }
3138 t = TREE_OPERAND (t, 0);
3139
3140 /* Fall-through. */
3141 case COMPONENT_REF:
3142 case ARRAY_REF:
3143 case ARRAY_RANGE_REF:
3144 case VIEW_CONVERT_EXPR:
3145 /* We have a nest of references. Verify that each of the operands
3146 that determine where to reference is either a constant or a variable,
3147 verify that the base is valid, and then show we've already checked
3148 the subtrees. */
3149 while (handled_component_p (t))
3150 {
3151 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3152 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3153 else if (TREE_CODE (t) == ARRAY_REF
3154 || TREE_CODE (t) == ARRAY_RANGE_REF)
3155 {
3156 CHECK_OP (1, "invalid array index");
3157 if (TREE_OPERAND (t, 2))
3158 CHECK_OP (2, "invalid array lower bound");
3159 if (TREE_OPERAND (t, 3))
3160 CHECK_OP (3, "invalid array stride");
3161 }
3162 else if (TREE_CODE (t) == BIT_FIELD_REF
3163 || TREE_CODE (t) == REALPART_EXPR
3164 || TREE_CODE (t) == IMAGPART_EXPR)
3165 {
3166 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
3167 "REALPART_EXPR");
3168 return t;
3169 }
3170
3171 t = TREE_OPERAND (t, 0);
3172 }
3173
3174 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
3175 {
3176 error ("invalid reference prefix");
3177 return t;
3178 }
3179 walk_tree (&t, verify_expr, data, NULL);
3180 *walk_subtrees = 0;
3181 break;
3182 case PLUS_EXPR:
3183 case MINUS_EXPR:
    /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer
       arithmetic should be done using POINTER_PLUS_EXPR.  */
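    /* E.g. the hypothetical C expression  p + 1  for  int *p  must be
       expressed in GIMPLE as a POINTER_PLUS_EXPR with a byte offset
       (p_2 = p_1 + 4, assuming a 4-byte int), never as a PLUS_EXPR of
       pointer type.  */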
3186 if (POINTER_TYPE_P (TREE_TYPE (t)))
3187 {
3188 error ("invalid operand to plus/minus, type is a pointer");
3189 return t;
3190 }
3191 CHECK_OP (0, "invalid operand to binary operator");
3192 CHECK_OP (1, "invalid operand to binary operator");
3193 break;
3194
3195 case POINTER_DIFF_EXPR:
3196 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0)))
3197 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
3198 {
3199 error ("invalid operand to pointer diff, operand is not a pointer");
3200 return t;
3201 }
3202 if (TREE_CODE (TREE_TYPE (t)) != INTEGER_TYPE
3203 || TYPE_UNSIGNED (TREE_TYPE (t))
3204 || (TYPE_PRECISION (TREE_TYPE (t))
3205 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))))
3206 {
3207 error ("invalid type for pointer diff");
3208 return t;
3209 }
3210 CHECK_OP (0, "invalid operand to pointer diff");
3211 CHECK_OP (1, "invalid operand to pointer diff");
3212 break;
3213
3214 case POINTER_PLUS_EXPR:
3215 /* Check to make sure the first operand is a pointer or reference type. */
3216 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3217 {
3218 error ("invalid operand to pointer plus, first operand is not a pointer");
3219 return t;
3220 }
3221 /* Check to make sure the second operand is a ptrofftype. */
3222 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
3223 {
3224 error ("invalid operand to pointer plus, second operand is not an "
3225 "integer type of appropriate width");
3226 return t;
3227 }
3228 /* FALLTHROUGH */
3229 case LT_EXPR:
3230 case LE_EXPR:
3231 case GT_EXPR:
3232 case GE_EXPR:
3233 case EQ_EXPR:
3234 case NE_EXPR:
3235 case UNORDERED_EXPR:
3236 case ORDERED_EXPR:
3237 case UNLT_EXPR:
3238 case UNLE_EXPR:
3239 case UNGT_EXPR:
3240 case UNGE_EXPR:
3241 case UNEQ_EXPR:
3242 case LTGT_EXPR:
3243 case MULT_EXPR:
3244 case TRUNC_DIV_EXPR:
3245 case CEIL_DIV_EXPR:
3246 case FLOOR_DIV_EXPR:
3247 case ROUND_DIV_EXPR:
3248 case TRUNC_MOD_EXPR:
3249 case CEIL_MOD_EXPR:
3250 case FLOOR_MOD_EXPR:
3251 case ROUND_MOD_EXPR:
3252 case RDIV_EXPR:
3253 case EXACT_DIV_EXPR:
3254 case MIN_EXPR:
3255 case MAX_EXPR:
3256 case LSHIFT_EXPR:
3257 case RSHIFT_EXPR:
3258 case LROTATE_EXPR:
3259 case RROTATE_EXPR:
3260 case BIT_IOR_EXPR:
3261 case BIT_XOR_EXPR:
3262 case BIT_AND_EXPR:
3263 CHECK_OP (0, "invalid operand to binary operator");
3264 CHECK_OP (1, "invalid operand to binary operator");
3265 break;
3266
3267 case CONSTRUCTOR:
3268 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3269 *walk_subtrees = 0;
3270 break;
3271
3272 case CASE_LABEL_EXPR:
3273 if (CASE_CHAIN (t))
3274 {
3275 error ("invalid CASE_CHAIN");
3276 return t;
3277 }
3278 break;
3279
3280 default:
3281 break;
3282 }
3283 return NULL;
3284
3285#undef CHECK_OP
3286}
3287
3288
3289/* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3290 Returns true if there is an error, otherwise false. */
3291
3292static bool
3293verify_types_in_gimple_min_lval (tree expr)
3294{
3295 tree op;
3296
3297 if (is_gimple_id (expr))
3298 return false;
3299
3300 if (TREE_CODE (expr) != TARGET_MEM_REF
3301 && TREE_CODE (expr) != MEM_REF)
3302 {
3303 error ("invalid expression for min lvalue");
3304 return true;
3305 }
3306
3307 /* TARGET_MEM_REFs are strange beasts. */
3308 if (TREE_CODE (expr) == TARGET_MEM_REF)
3309 return false;
3310
3311 op = TREE_OPERAND (expr, 0);
3312 if (!is_gimple_val (op))
3313 {
3314 error ("invalid operand in indirect reference");
3315 debug_generic_stmt (op);
3316 return true;
3317 }
3318 /* Memory references now generally can involve a value conversion. */
3319
3320 return false;
3321}
3322
3323/* Verify if EXPR is a valid GIMPLE reference expression. If
3324 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3325 if there is an error, otherwise false. */
3326
3327static bool
3328verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3329{
3330 while (handled_component_p (expr))
3331 {
3332 tree op = TREE_OPERAND (expr, 0);
3333
3334 if (TREE_CODE (expr) == ARRAY_REF
3335 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3336 {
3337 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3338 || (TREE_OPERAND (expr, 2)
3339 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3340 || (TREE_OPERAND (expr, 3)
3341 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3342 {
3343 error ("invalid operands to array reference");
3344 debug_generic_stmt (expr);
3345 return true;
3346 }
3347 }
3348
3349 /* Verify if the reference array element types are compatible. */
3350 if (TREE_CODE (expr) == ARRAY_REF
3351 && !useless_type_conversion_p (TREE_TYPE (expr),
3352 TREE_TYPE (TREE_TYPE (op))))
3353 {
3354 error ("type mismatch in array reference");
3355 debug_generic_stmt (TREE_TYPE (expr));
3356 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3357 return true;
3358 }
3359 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3360 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3361 TREE_TYPE (TREE_TYPE (op))))
3362 {
3363 error ("type mismatch in array range reference");
3364 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3365 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3366 return true;
3367 }
3368
3369 if ((TREE_CODE (expr) == REALPART_EXPR
3370 || TREE_CODE (expr) == IMAGPART_EXPR)
3371 && !useless_type_conversion_p (TREE_TYPE (expr),
3372 TREE_TYPE (TREE_TYPE (op))))
3373 {
3374 error ("type mismatch in real/imagpart reference");
3375 debug_generic_stmt (TREE_TYPE (expr));
3376 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3377 return true;
3378 }
3379
3380 if (TREE_CODE (expr) == COMPONENT_REF
3381 && !useless_type_conversion_p (TREE_TYPE (expr),
3382 TREE_TYPE (TREE_OPERAND (expr, 1))))
3383 {
3384 error ("type mismatch in component reference");
3385 debug_generic_stmt (TREE_TYPE (expr));
3386 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3387 return true;
3388 }
3389
3390 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3391 {
3392 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3393 that their operand is not an SSA name or an invariant when
3394 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3395 bug). Otherwise there is nothing to verify, gross mismatches at
3396 most invoke undefined behavior. */
3397 if (require_lvalue
3398 && (TREE_CODE (op) == SSA_NAME
3399 || is_gimple_min_invariant (op)))
3400 {
3401 error ("conversion of an SSA_NAME on the left hand side");
3402 debug_generic_stmt (expr);
3403 return true;
3404 }
3405 else if (TREE_CODE (op) == SSA_NAME
3406 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3407 {
3408 error ("conversion of register to a different size");
3409 debug_generic_stmt (expr);
3410 return true;
3411 }
3412 else if (!handled_component_p (op))
3413 return false;
3414 }
3415
3416 expr = op;
3417 }
3418
3419 if (TREE_CODE (expr) == MEM_REF)
3420 {
3421 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3422 {
3423 error ("invalid address operand in MEM_REF");
3424 debug_generic_stmt (expr);
3425 return true;
3426 }
3427 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3428 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3429 {
3430 error ("invalid offset operand in MEM_REF");
3431 debug_generic_stmt (expr);
3432 return true;
3433 }
3434 }
3435 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3436 {
3437 if (!TMR_BASE (expr)
3438 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3439 {
3440 error ("invalid address operand in TARGET_MEM_REF");
3441 return true;
3442 }
3443 if (!TMR_OFFSET (expr)
3444 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3445 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3446 {
3447 error ("invalid offset operand in TARGET_MEM_REF");
3448 debug_generic_stmt (expr);
3449 return true;
3450 }
3451 }
3452
3453 return ((require_lvalue || !is_gimple_min_invariant (expr))
3454 && verify_types_in_gimple_min_lval (expr));
3455}
3456
/* Return true if some pointer type in the TYPE_POINTER_TO (SRC_OBJ)
   list of pointer-to types is trivially convertible to DEST.  */
3459
3460static bool
3461one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3462{
3463 tree src;
3464
3465 if (!TYPE_POINTER_TO (src_obj))
3466 return true;
3467
3468 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3469 if (useless_type_conversion_p (dest, src))
3470 return true;
3471
3472 return false;
3473}
3474
3475/* Return true if TYPE1 is a fixed-point type and if conversions to and
3476 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3477
3478static bool
3479valid_fixed_convert_types_p (tree type1, tree type2)
3480{
3481 return (FIXED_POINT_TYPE_P (type1)
3482 && (INTEGRAL_TYPE_P (type2)
3483 || SCALAR_FLOAT_TYPE_P (type2)
3484 || FIXED_POINT_TYPE_P (type2)));
3485}
3486
3487/* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3488 is a problem, otherwise false. */
3489
3490static bool
3491verify_gimple_call (gcall *stmt)
3492{
3493 tree fn = gimple_call_fn (stmt);
3494 tree fntype, fndecl;
3495 unsigned i;
3496
3497 if (gimple_call_internal_p (stmt))
3498 {
3499 if (fn)
3500 {
3501 error ("gimple call has two targets");
3502 debug_generic_stmt (fn);
3503 return true;
3504 }
      /* FIXME: the GIMPLE FE may pass labels as arguments to the
	 internal function PHI, so skip further checking for now.  */
3506 else if (gimple_call_internal_fn (stmt) == IFN_PHI)
3507 {
3508 return false;
3509 }
3510 }
3511 else
3512 {
3513 if (!fn)
3514 {
3515 error ("gimple call has no target");
3516 return true;
3517 }
3518 }
3519
3520 if (fn && !is_gimple_call_addr (fn))
3521 {
3522 error ("invalid function in gimple call");
3523 debug_generic_stmt (fn);
3524 return true;
3525 }
3526
3527 if (fn
3528 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3529 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3530 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3531 {
3532 error ("non-function in gimple call");
3533 return true;
3534 }
3535
3536 fndecl = gimple_call_fndecl (stmt);
3537 if (fndecl
3538 && TREE_CODE (fndecl) == FUNCTION_DECL
3539 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3540 && !DECL_PURE_P (fndecl)
3541 && !TREE_READONLY (fndecl))
3542 {
3543 error ("invalid pure const state for function");
3544 return true;
3545 }
3546
3547 tree lhs = gimple_call_lhs (stmt);
3548 if (lhs
3549 && (!is_gimple_lvalue (lhs)
3550 || verify_types_in_gimple_reference (lhs, true)))
3551 {
3552 error ("invalid LHS in gimple call");
3553 return true;
3554 }
3555
3556 if (gimple_call_ctrl_altering_p (stmt)
3557 && gimple_call_noreturn_p (stmt)
3558 && should_remove_lhs_p (lhs))
3559 {
3560 error ("LHS in noreturn call");
3561 return true;
3562 }
3563
3564 fntype = gimple_call_fntype (stmt);
3565 if (fntype
3566 && lhs
3567 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3568 /* ??? At least C++ misses conversions at assignments from
3569 void * call results.
3570 For now simply allow arbitrary pointer type conversions. */
3571 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3572 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3573 {
3574 error ("invalid conversion in gimple call");
3575 debug_generic_stmt (TREE_TYPE (lhs));
3576 debug_generic_stmt (TREE_TYPE (fntype));
3577 return true;
3578 }
3579
3580 if (gimple_call_chain (stmt)
3581 && !is_gimple_val (gimple_call_chain (stmt)))
3582 {
3583 error ("invalid static chain in gimple call");
3584 debug_generic_stmt (gimple_call_chain (stmt));
3585 return true;
3586 }
3587
3588 /* If there is a static chain argument, the call should either be
3589 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3590 if (gimple_call_chain (stmt)
3591 && fndecl
3592 && !DECL_STATIC_CHAIN (fndecl))
3593 {
3594 error ("static chain with function that doesn%'t use one");
3595 return true;
3596 }
3597
3598 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3599 {
3600 switch (DECL_FUNCTION_CODE (fndecl))
3601 {
3602 case BUILT_IN_UNREACHABLE:
3603 case BUILT_IN_TRAP:
3604 if (gimple_call_num_args (stmt) > 0)
3605 {
	    /* A built-in unreachable or trap call with arguments might
	       not be caught by the undefined behavior sanitizer.  Front
	       ends check that users do not call them that way, but we
	       also produce calls to __builtin_unreachable internally,
	       for example when IPA figures out that a call cannot happen
	       in a valid program.  In such cases we must make sure the
	       arguments have been stripped off.  */
3612 error ("__builtin_unreachable or __builtin_trap call with "
3613 "arguments");
3614 return true;
3615 }
3616 break;
3617 default:
3618 break;
3619 }
3620 }
3621
3622 /* ??? The C frontend passes unpromoted arguments in case it
3623 didn't see a function declaration before the call. So for now
3624 leave the call arguments mostly unverified. Once we gimplify
3625 unit-at-a-time we have a chance to fix this. */
3626
3627 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3628 {
3629 tree arg = gimple_call_arg (stmt, i);
3630 if ((is_gimple_reg_type (TREE_TYPE (arg))
3631 && !is_gimple_val (arg))
3632 || (!is_gimple_reg_type (TREE_TYPE (arg))
3633 && !is_gimple_lvalue (arg)))
3634 {
3635 error ("invalid argument to gimple call");
3636 debug_generic_expr (arg);
3637 return true;
3638 }
3639 }
3640
3641 return false;
3642}
3643
/* Verify a gimple comparison with result type TYPE, operands OP0 and
   OP1, and comparison code CODE.  */
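
/* For instance (hypothetical statements), both  _1 = a_2 < b_3  with a
   BOOLEAN_TYPE result and a vector comparison producing a boolean
   vector with as many elements as its operands are accepted; any other
   result type is diagnosed below.  */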
3646
3647static bool
3648verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3649{
3650 tree op0_type = TREE_TYPE (op0);
3651 tree op1_type = TREE_TYPE (op1);
3652
3653 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3654 {
3655 error ("invalid operands in gimple comparison");
3656 return true;
3657 }
3658
  /* For comparisons we do not record the type the operation is
     effectively carried out in.  Instead we require that either the
     first operand is trivially convertible into the second, or the
     other way around.  Because we special-case pointers to void we
     allow comparisons of pointers with the same mode as well.  */
3665 if (!useless_type_conversion_p (op0_type, op1_type)
3666 && !useless_type_conversion_p (op1_type, op0_type)
3667 && (!POINTER_TYPE_P (op0_type)
3668 || !POINTER_TYPE_P (op1_type)
3669 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3670 {
3671 error ("mismatching comparison operand types");
3672 debug_generic_expr (op0_type);
3673 debug_generic_expr (op1_type);
3674 return true;
3675 }
3676
3677 /* The resulting type of a comparison may be an effective boolean type. */
3678 if (INTEGRAL_TYPE_P (type)
3679 && (TREE_CODE (type) == BOOLEAN_TYPE
3680 || TYPE_PRECISION (type) == 1))
3681 {
3682 if ((TREE_CODE (op0_type) == VECTOR_TYPE
3683 || TREE_CODE (op1_type) == VECTOR_TYPE)
3684 && code != EQ_EXPR && code != NE_EXPR
3685 && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3686 && !VECTOR_INTEGER_TYPE_P (op0_type))
3687 {
3688 error ("unsupported operation or type for vector comparison"
3689 " returning a boolean");
3690 debug_generic_expr (op0_type);
3691 debug_generic_expr (op1_type);
3692 return true;
3693 }
3694 }
3695 /* Or a boolean vector type with the same element count
3696 as the comparison operand types. */
3697 else if (TREE_CODE (type) == VECTOR_TYPE
3698 && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3699 {
3700 if (TREE_CODE (op0_type) != VECTOR_TYPE
3701 || TREE_CODE (op1_type) != VECTOR_TYPE)
3702 {
3703 error ("non-vector operands in vector comparison");
3704 debug_generic_expr (op0_type);
3705 debug_generic_expr (op1_type);
3706 return true;
3707 }
3708
3709 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type))
3710 {
3711 error ("invalid vector comparison resulting type");
3712 debug_generic_expr (type);
3713 return true;
3714 }
3715 }
3716 else
3717 {
3718 error ("bogus comparison result type");
3719 debug_generic_expr (type);
3720 return true;
3721 }
3722
3723 return false;
3724}
3725
/* Verify a gimple assignment statement STMT with a unary rhs.
   Returns true if anything is wrong.  */
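
/* E.g. (hypothetical)  _2 = (long) i_1  is a valid nop conversion
   between two integral types, while a nop conversion between an
   integral and a non-integral type is diagnosed below.  */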
3728
3729static bool
3730verify_gimple_assign_unary (gassign *stmt)
3731{
3732 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3733 tree lhs = gimple_assign_lhs (stmt);
3734 tree lhs_type = TREE_TYPE (lhs);
3735 tree rhs1 = gimple_assign_rhs1 (stmt);
3736 tree rhs1_type = TREE_TYPE (rhs1);
3737
3738 if (!is_gimple_reg (lhs))
3739 {
3740 error ("non-register as LHS of unary operation");
3741 return true;
3742 }
3743
3744 if (!is_gimple_val (rhs1))
3745 {
3746 error ("invalid operand in unary operation");
3747 return true;
3748 }
3749
3750 /* First handle conversions. */
3751 switch (rhs_code)
3752 {
3753 CASE_CONVERT:
3754 {
3755 /* Allow conversions from pointer type to integral type only if
3756 there is no sign or zero extension involved.
	 For targets where the precision of ptrofftype doesn't match that
3758 of pointers we need to allow arbitrary conversions to ptrofftype. */
3759 if ((POINTER_TYPE_P (lhs_type)
3760 && INTEGRAL_TYPE_P (rhs1_type))
3761 || (POINTER_TYPE_P (rhs1_type)
3762 && INTEGRAL_TYPE_P (lhs_type)
3763 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3764 || ptrofftype_p (sizetype))))
3765 return false;
3766
3767 /* Allow conversion from integral to offset type and vice versa. */
3768 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3769 && INTEGRAL_TYPE_P (rhs1_type))
3770 || (INTEGRAL_TYPE_P (lhs_type)
3771 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3772 return false;
3773
3774 /* Otherwise assert we are converting between types of the
3775 same kind. */
3776 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3777 {
3778 error ("invalid types in nop conversion");
3779 debug_generic_expr (lhs_type);
3780 debug_generic_expr (rhs1_type);
3781 return true;
3782 }
3783
3784 return false;
3785 }
3786
3787 case ADDR_SPACE_CONVERT_EXPR:
3788 {
3789 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3790 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3791 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3792 {
3793 error ("invalid types in address space conversion");
3794 debug_generic_expr (lhs_type);
3795 debug_generic_expr (rhs1_type);
3796 return true;
3797 }
3798
3799 return false;
3800 }
3801
3802 case FIXED_CONVERT_EXPR:
3803 {
3804 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3805 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3806 {
3807 error ("invalid types in fixed-point conversion");
3808 debug_generic_expr (lhs_type);
3809 debug_generic_expr (rhs1_type);
3810 return true;
3811 }
3812
3813 return false;
3814 }
3815
3816 case FLOAT_EXPR:
3817 {
3818 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3819 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3820 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3821 {
3822 error ("invalid types in conversion to floating point");
3823 debug_generic_expr (lhs_type);
3824 debug_generic_expr (rhs1_type);
3825 return true;
3826 }
3827
3828 return false;
3829 }
3830
3831 case FIX_TRUNC_EXPR:
3832 {
        if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
            && (!VECTOR_INTEGER_TYPE_P (lhs_type)
                || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
          {
            error ("invalid types in conversion to integer");
            debug_generic_expr (lhs_type);
            debug_generic_expr (rhs1_type);
            return true;
          }

        return false;
      }

    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
      /* FIXME.  */
      return false;

    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case PAREN_EXPR:
    case CONJ_EXPR:
      break;

    default:
      gcc_unreachable ();
    }

  /* For the remaining codes assert there is no conversion involved.  */
  if (!useless_type_conversion_p (lhs_type, rhs1_type))
    {
      error ("non-trivial conversion in unary operation");
      debug_generic_expr (lhs_type);
      debug_generic_expr (rhs1_type);
      return true;
    }

  return false;
}

/* Verify a gimple assignment statement STMT with a binary rhs.
   Returns true if anything is wrong.  */

static bool
verify_gimple_assign_binary (gassign *stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  tree rhs2_type = TREE_TYPE (rhs2);

  if (!is_gimple_reg (lhs))
    {
      error ("non-register as LHS of binary operation");
      return true;
    }

  if (!is_gimple_val (rhs1)
      || !is_gimple_val (rhs2))
    {
      error ("invalid operands in binary operation");
      return true;
    }

  /* First handle operations that involve different types.  */
  switch (rhs_code)
    {
    case COMPLEX_EXPR:
      {
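        /* A COMPLEX_EXPR builds a complex value from separate real and
           imaginary parts, which must both be scalar integral or
           floating-point values.  */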
        if (TREE_CODE (lhs_type) != COMPLEX_TYPE
            || !(INTEGRAL_TYPE_P (rhs1_type)
                 || SCALAR_FLOAT_TYPE_P (rhs1_type))
            || !(INTEGRAL_TYPE_P (rhs2_type)
                 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
          {
            error ("type mismatch in complex expression");
            debug_generic_expr (lhs_type);
            debug_generic_expr (rhs1_type);
            debug_generic_expr (rhs2_type);
            return true;
          }

        return false;
      }

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      {
        /* Shifts and rotates are ok on integral types, fixed-point
           types and integer vector types.  */
        if ((!INTEGRAL_TYPE_P (rhs1_type)
             && !FIXED_POINT_TYPE_P (rhs1_type)
             && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
                  && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
            || (!INTEGRAL_TYPE_P (rhs2_type)
                /* Vector shifts of vectors are also ok.  */
                && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
                     && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
                     && TREE_CODE (rhs2_type) == VECTOR_TYPE
                     && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
            || !useless_type_conversion_p (lhs_type, rhs1_type))
          {
            error ("type mismatch in shift expression");
            debug_generic_expr (lhs_type);
            debug_generic_expr (rhs1_type);
            debug_generic_expr (rhs2_type);
            return true;
          }

        return false;
      }

    case WIDEN_LSHIFT_EXPR:
      {
        if (!INTEGRAL_TYPE_P (lhs_type)
            || !INTEGRAL_TYPE_P (rhs1_type)
            || TREE_CODE (rhs2) != INTEGER_CST
            || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
          {
            error ("type mismatch in widening shift expression");
            debug_generic_expr (lhs_type);
            debug_generic_expr (rhs1_type);
            debug_generic_expr (rhs2_type);
            return true;
          }

        return false;
      }

    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      {
        if (TREE_CODE (rhs1_type) != VECTOR_TYPE
            || TREE_CODE (lhs_type) != VECTOR_TYPE
            || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
            || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
            || TREE_CODE (rhs2) != INTEGER_CST
            || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
                > TYPE_PRECISION (TREE_TYPE (lhs_type))))
          {
            error ("type mismatch in widening vector shift expression");
            debug_generic_expr (lhs_type);
            debug_generic_expr (rhs1_type);
            debug_generic_expr (rhs2_type);
            return true;
          }

        return false;
      }

    case PLUS_EXPR:
    case MINUS_EXPR:
      {
        tree lhs_etype = lhs_type;
        tree rhs1_etype = rhs1_type;
        tree rhs2_etype = rhs2_type;
        if (TREE_CODE (lhs_type) == VECTOR_TYPE)
          {
            if (TREE_CODE (rhs1_type) != VECTOR_TYPE
                || TREE_CODE (rhs2_type) != VECTOR_TYPE)
              {
                error ("invalid non-vector operands to vector valued plus/minus");
                return true;
              }
            lhs_etype = TREE_TYPE (lhs_type);
            rhs1_etype = TREE_TYPE (rhs1_type);
            rhs2_etype = TREE_TYPE (rhs2_type);
          }
        if (POINTER_TYPE_P (lhs_etype)
            || POINTER_TYPE_P (rhs1_etype)
            || POINTER_TYPE_P (rhs2_etype))
          {
            error ("invalid (pointer) operands to plus/minus");
            return true;
          }

        /* Continue with generic binary expression handling.  */
        break;
      }

    case POINTER_PLUS_EXPR:
      {
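        /* The first operand is the pointer and the second the byte
           offset; the offset must have the target's pointer-offset type
           and the result keeps the type of the pointer operand.  */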
        if (!POINTER_TYPE_P (rhs1_type)
            || !useless_type_conversion_p (lhs_type, rhs1_type)
            || !ptrofftype_p (rhs2_type))
          {
            error ("type mismatch in pointer plus expression");
            debug_generic_stmt (lhs_type);
            debug_generic_stmt (rhs1_type);
            debug_generic_stmt (rhs2_type);
            return true;
          }

        return false;
      }

    case POINTER_DIFF_EXPR:
      {
        if (!POINTER_TYPE_P (rhs1_type)
            || !POINTER_TYPE_P (rhs2_type)
            /* Because we special-case pointers to void, we allow the
               difference of arbitrary pointers with the same mode.  */
            || TYPE_MODE (rhs1_type) != TYPE_MODE (rhs2_type)
            || TREE_CODE (lhs_type) != INTEGER_TYPE
            || TYPE_UNSIGNED (lhs_type)
            || TYPE_PRECISION (lhs_type) != TYPE_PRECISION (rhs1_type))
          {
            error ("type mismatch in pointer diff expression");
            debug_generic_stmt (lhs_type);
            debug_generic_stmt (rhs1_type);
            debug_generic_stmt (rhs2_type);
            return true;
          }

        return false;
      }

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
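      /* Truth expressions never survive gimplification: TRUTH_ANDIF and
         TRUTH_ORIF are lowered into control flow, and the remaining ones
         are rewritten as bitwise operations on boolean-typed operands.  */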
      gcc_unreachable ();

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      /* Comparisons are also binary, but the result type is not
         connected to the operand types.  */
      return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);

    case WIDEN_MULT_EXPR:
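      /* The operands must be equally wide and the result an integer
         type at least twice as wide; note that no diagnostic is emitted
         for this code, only the boolean result is returned.  */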
      if (TREE_CODE (lhs_type) != INTEGER_TYPE)
        return true;
      return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
              || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));

    case WIDEN_SUM_EXPR:
      {
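        /* The elements of rhs1 are widened and summed into the
           accumulator rhs2, which must match the result type and have
           elements at least twice as wide as those of rhs1.  */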
        if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
              || TREE_CODE (lhs_type) != VECTOR_TYPE)
             && ((!INTEGRAL_TYPE_P (rhs1_type)
                  && !SCALAR_FLOAT_TYPE_P (rhs1_type))
                 || (!INTEGRAL_TYPE_P (lhs_type)
                     && !SCALAR_FLOAT_TYPE_P (lhs_type))))
            || !useless_type_conversion_p (lhs_type, rhs2_type)
            || (GET_MODE_SIZE (element_mode (rhs2_type))
                < 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
          {
            error ("type mismatch in widening sum reduction");
            debug_generic_expr (lhs_type);
            debug_generic_expr (rhs1_type);
            debug_generic_expr (rhs2_type);
            return true;
          }
        return false;
      }

    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
        if (TREE_CODE (rhs1_type) != VECTOR_TYPE
            || TREE_CODE (lhs_type) != VECTOR_TYPE
            || !types_compatible_p (rhs1_type, rhs2_type)
            || (GET_MODE_SIZE (element_mode (lhs_type))
                != 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
          {
            error ("type mismatch in vector widening multiplication");
            debug_generic_expr (lhs_type);
            debug_generic_expr (rhs1_type);
            debug_generic_expr (rhs2_type);
            return true;
          }
        return false;
      }

    case VEC_PACK_TRUNC_EXPR:
      /* ??? We currently use VEC_PACK_TRUNC_EXPR to simply concatenate
         vector boolean types.  */
      if (VECTOR_BOOLEAN_TYPE_P (lhs_type)
          && VECTOR_BOOLEAN_TYPE_P (rhs1_type)
          && types_compatible_p (rhs1_type, rhs2_type)
          && (TYPE_VECTOR_SUBPARTS (lhs_type)
              == 2 * TYPE_VECTOR_SUBPARTS (rhs1_type)))
        return false;

      /* Fallthru.  */
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
        if (TREE_CODE (rhs1_type) != VECTOR_TYPE
            || TREE_CODE (lhs_type) != VECTOR_TYPE
            || !((rhs_code == VEC_PACK_FIX_TRUNC_EXPR
                  && SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
                  && INTEGRAL_TYPE_P (TREE_TYPE (lhs_type)))
                 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
                     == INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))))
            || !types_compatible_p (rhs1_type, rhs2_type)
            || (GET_MODE_SIZE (element_mode (rhs1_type))
                != 2 * GET_MODE_SIZE (element_mode (lhs_type))))
          {
            error ("type mismatch in vector pack expression");
            debug_generic_expr (lhs_type);
            debug_generic_expr (rhs1_type);
            debug_generic_expr (rhs2_type);
            return true;
          }

        return false;
      }

    case MULT_EXPR:
    case MULT_HIGHPART_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
      /* Continue with generic binary expression handling.  */
      break;

    default:
      gcc_unreachable ();
    }

  if (!useless_type_conversion_p (lhs_type, rhs1_type)
      || !useless_type_conversion_p (lhs_type, rhs2_type))
    {
      error ("type mismatch in binary expression");
      debug_generic_stmt (lhs_type);
      debug_generic_stmt (rhs1_type);
      debug_generic_stmt (rhs2_type);
      return true;
    }

  return false;
}

/* Verify a gimple assignment statement STMT with a ternary rhs.
   Returns true if anything is wrong.  */

static bool
verify_gimple_assign_ternary (gassign *stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  tree rhs2_type = TREE_TYPE (rhs2);
  tree rhs3 = gimple_assign_rhs3 (stmt);
  tree rhs3_type = TREE_TYPE (rhs3);

  if (!is_gimple_reg (lhs))
    {
      error ("non-register as LHS of ternary operation");
      return true;
    }

  if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
       ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
      || !is_gimple_val (rhs2)
      || !is_gimple_val (rhs3))
    {
      error ("invalid operands in ternary operation");
      return true;
    }

  /* First handle operations that involve different types.  */
  switch (rhs_code)
    {
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
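      /* rhs1 and rhs2 are multiplied in a widened type and the product
         is added to (or subtracted from) rhs3, which must already have
         the wide result type.  */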
      if ((!INTEGRAL_TYPE_P (rhs1_type)
           && !FIXED_POINT_TYPE_P (rhs1_type))
          || !useless_type_conversion_p (rhs1_type, rhs2_type)
          || !useless_type_conversion_p (lhs_type, rhs3_type)
          || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
          || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
        {
          error ("type mismatch in widening multiply-accumulate expression");
          debug_generic_expr (lhs_type);
          debug_generic_expr (rhs1_type);
          debug_generic_expr (rhs2_type);
          debug_generic_expr (rhs3_type);
          return true;
        }
      break;

    case FMA_EXPR:
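      /* A fused multiply-add computes rhs1 * rhs2 + rhs3 with a single
         rounding step; all three operands must share the result type.  */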
      if (!useless_type_conversion_p (lhs_type, rhs1_type)
          || !useless_type_conversion_p (lhs_type, rhs2_type)
          || !useless_type_conversion_p (lhs_type, rhs3_type))
        {
          error ("type mismatch in fused multiply-add expression");
          debug_generic_expr (lhs_type);
          debug_generic_expr (rhs1_type);
          debug_generic_expr (rhs2_type);
          debug_generic_expr (rhs3_type);
          return true;
        }
      break;

    case VEC_COND_EXPR:
      if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
          || TYPE_VECTOR_SUBPARTS (rhs1_type)
             != TYPE_VECTOR_SUBPARTS (lhs_type))
        {
          error ("the first argument of a VEC_COND_EXPR must be of a "
                 "boolean vector type of the same number of elements "
                 "as the result");
          debug_generic_expr (lhs_type);
          debug_generic_expr (rhs1_type);
          return true;
        }
      /* Fallthrough.  */
    case COND_EXPR:
      if (!useless_type_conversion_p (lhs_type, rhs2_type)
          || !useless_type_conversion_p (lhs_type, rhs3_type))
        {
          error ("type mismatch in conditional expression");
          debug_generic_expr (lhs_type);
          debug_generic_expr (rhs2_type);
          debug_generic_expr (rhs3_type);
          return true;
        }
      break;

    case VEC_PERM_EXPR:
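      /* VEC_PERM_EXPR <v0, v1, sel> selects elements from the
         concatenation of V0 and V1 according to the index vector SEL.  */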
      if (!useless_type_conversion_p (lhs_type, rhs1_type)
          || !useless_type_conversion_p (lhs_type, rhs2_type))
        {
          error ("type mismatch in vector permute expression");
          debug_generic_expr (lhs_type);
          debug_generic_expr (rhs1_type);
          debug_generic_expr (rhs2_type);
          debug_generic_expr (rhs3_type);
          return true;
        }

      if (TREE_CODE (rhs1_type) != VECTOR_TYPE
          || TREE_CODE (rhs2_type) != VECTOR_TYPE
          || TREE_CODE (rhs3_type) != VECTOR_TYPE)
        {
          error ("vector types expected in vector permute expression");
          debug_generic_expr (lhs_type);
          debug_generic_expr (rhs1_type);
          debug_generic_expr (rhs2_type);
          debug_generic_expr (rhs3_type);
          return true;
        }

      if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
          || TYPE_VECTOR_SUBPARTS (rhs2_type)
             != TYPE_VECTOR_SUBPARTS (rhs3_type)
          || TYPE_VECTOR_SUBPARTS (rhs3_type)
             != TYPE_VECTOR_SUBPARTS (lhs_type))
        {
          error ("vectors with different element counts found "
                 "in vector permute expression");
          debug_generic_expr (lhs_type);
          debug_generic_expr (rhs1_type);
          debug_generic_expr (rhs2_type);
          debug_generic_expr (rhs3_type);
          return true;
        }

      if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
          || GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (rhs3_type)))
             != GET_MODE_BITSIZE (SCALAR_TYPE_MODE (TREE_TYPE (rhs1_type))))
        {
          error ("invalid mask type in vector permute expression");
          debug_generic_expr (lhs_type);
          debug_generic_expr (rhs1_type);
          debug_generic_expr (rhs2_type);
          debug_generic_expr (rhs3_type);
          return true;
        }

      return false;

    case SAD_EXPR:
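      /* SAD_EXPR <v1, v2, acc> sums the absolute differences of the
         elements of V1 and V2 into the accumulator ACC, whose elements
         must be at least twice as wide as the inputs.  */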
      if (!useless_type_conversion_p (rhs1_type, rhs2_type)
          || !useless_type_conversion_p (lhs_type, rhs3_type)
          || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
             > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
        {
          error ("type mismatch in sad expression");
          debug_generic_expr (lhs_type);
          debug_generic_expr (rhs1_type);
          debug_generic_expr (rhs2_type);
          debug_generic_expr (rhs3_type);
          return true;
        }

      if (TREE_CODE (rhs1_type) != VECTOR_TYPE
          || TREE_CODE (rhs2_type) != VECTOR_TYPE
          || TREE_CODE (rhs3_type) != VECTOR_TYPE)
        {
          error ("vector types expected in sad expression");
          debug_generic_expr (lhs_type);
          debug_generic_expr (rhs1_type);
          debug_generic_expr (rhs2_type);
          debug_generic_expr (rhs3_type);
          return true;
        }

      return false;

    case BIT_INSERT_EXPR:
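      /* BIT_INSERT_EXPR <base, val, pos> yields BASE with the bits of
         VAL replacing the bits at position POS; the result has the type
         of BASE.  */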
      if (! useless_type_conversion_p (lhs_type, rhs1_type))
        {
          error ("type mismatch in BIT_INSERT_EXPR");
          debug_generic_expr (lhs_type);
          debug_generic_expr (rhs1_type);
          return true;
        }
      if (! ((INTEGRAL_TYPE_P (rhs1_type)
              && INTEGRAL_TYPE_P (rhs2_type))
             || (VECTOR_TYPE_P (rhs1_type)
                 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))))
        {
          error ("invalid type combination in BIT_INSERT_EXPR");
          debug_generic_expr (rhs1_type);
          debug_generic_expr (rhs2_type);
          return true;
        }
      if (! tree_fits_uhwi_p (rhs3)
          || ! types_compatible_p (bitsizetype, TREE_TYPE (rhs3))
          || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
        {
          error ("invalid position or size in BIT_INSERT_EXPR");
          return true;
        }
      if (INTEGRAL_TYPE_P (rhs1_type))
        {
          unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
          if (bitpos >= TYPE_PRECISION (rhs1_type)
              || (bitpos + TYPE_PRECISION (rhs2_type)
                  > TYPE_PRECISION (rhs1_type)))
            {
              error ("insertion out of range in BIT_INSERT_EXPR");
              return true;
            }
        }
      else if (VECTOR_TYPE_P (rhs1_type))
        {
          unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
          unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
          if (bitpos % bitsize != 0)
            {
              error ("vector insertion not at element boundary");
              return true;
            }
        }
      return false;

    case DOT_PROD_EXPR:
      {
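        /* DOT_PROD_EXPR <v1, v2, acc> sums the pairwise products of the
           elements of V1 and V2 into the accumulator ACC, whose elements
           must be at least twice as wide as the inputs.  */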
        if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
              || TREE_CODE (lhs_type) != VECTOR_TYPE)
             && ((!INTEGRAL_TYPE_P (rhs1_type)
                  && !SCALAR_FLOAT_TYPE_P (rhs1_type))
                 || (!INTEGRAL_TYPE_P (lhs_type)
                     && !SCALAR_FLOAT_TYPE_P (lhs_type))))
            || !types_compatible_p (rhs1_type, rhs2_type)
            || !useless_type_conversion_p (lhs_type, rhs3_type)
            || (GET_MODE_SIZE (element_mode (rhs3_type))
                < 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
          {
            error ("type mismatch in dot product reduction");
            debug_generic_expr (lhs_type);
            debug_generic_expr (rhs1_type);
            debug_generic_expr (rhs2_type);
            return true;
          }
        return false;
      }

    case REALIGN_LOAD_EXPR:
      /* FIXME.  */
      return false;

    default:
      gcc_unreachable ();
    }
  return false;
}

/* Verify a gimple assignment statement STMT with a single rhs.
   Returns true if anything is wrong.  */

static bool
verify_gimple_assign_single (gassign *stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);
  bool res = false;

  if (!useless_type_conversion_p (lhs_type, rhs1_type))
    {
      error ("non-trivial conversion at assignment");
      debug_generic_expr (lhs_type);
      debug_generic_expr (rhs1_type);
      return true;
    }

  if (gimple_clobber_p (stmt)
      && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
    {
      error ("non-decl/MEM_REF LHS in clobber statement");
      debug_generic_expr (lhs);
      return true;
    }

  if (handled_component_p (lhs)
      || TREE_CODE (lhs) == MEM_REF
      || TREE_CODE (lhs) == TARGET_MEM_REF)
    res |= verify_types_in_gimple_reference (lhs, true);

  /* Special codes we cannot handle via their class.  */
  switch (rhs_code)
    {
    case ADDR_EXPR:
      {
        tree op = TREE_OPERAND (rhs1, 0);
        if (!is_gimple_addressable (op))
          {
            error ("invalid operand in unary expression");
            return true;
          }

        /* Technically there is no longer a need for matching types, but
           gimple hygiene asks for this check.  In LTO we can end up
           combining incompatible units and thus end up with addresses
           of globals that change their type to a common one.  */
        if (!in_lto_p
            && !types_compatible_p (TREE_TYPE (op),
                                    TREE_TYPE (TREE_TYPE (rhs1)))
            && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
                                                          TREE_TYPE (op)))
          {
            error ("type mismatch in address expression");
            debug_generic_stmt (TREE_TYPE (rhs1));
            debug_generic_stmt (TREE_TYPE (op));
            return true;
          }

        return verify_types_in_gimple_reference (op, true);
      }

    /* tcc_reference */
    case INDIRECT_REF:
      error ("INDIRECT_REF in gimple IL");
      return true;

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case VIEW_CONVERT_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case TARGET_MEM_REF:
    case MEM_REF:
      if (!is_gimple_reg (lhs)
          && is_gimple_reg_type (TREE_TYPE (lhs)))
        {
          error ("invalid rhs for gimple memory store");
          debug_generic_stmt (lhs);
          debug_generic_stmt (rhs1);
          return true;
        }
      return res || verify_types_in_gimple_reference (rhs1, false);

    /* tcc_constant */
    case SSA_NAME:
    case INTEGER_CST:
    case REAL_CST:
    case FIXED_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case STRING_CST:
      return res;

    /* tcc_declaration */
    case CONST_DECL:
      return res;
    case VAR_DECL:
    case PARM_DECL:
      if (!is_gimple_reg (lhs)
          && !is_gimple_reg (rhs1)
          && is_gimple_reg_type (TREE_TYPE (lhs)))
        {
          error ("invalid rhs for gimple memory store");
          debug_generic_stmt (lhs);
          debug_generic_stmt (rhs1);
          return true;
        }
      return res;

    case CONSTRUCTOR:
      if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
        {
          unsigned int i;
          tree elt_i, elt_v, elt_t = NULL_TREE;

          if (CONSTRUCTOR_NELTS (rhs1) == 0)
            return res;
          /* For vector CONSTRUCTORs we require that either it is an
             empty CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector
             elements (in which case the element count must be correct
             to cover the whole outer vector and the index must be NULL
             on all elements), or it is a CONSTRUCTOR of scalar elements,
             where as an exception we allow a smaller number of elements
             (assuming zero filling) and consecutive indexes as compared
             to NULL indexes (such CONSTRUCTORs can appear in the IL
             from FEs).  */
          FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
            {
              if (elt_t == NULL_TREE)
                {
                  elt_t = TREE_TYPE (elt_v);
                  if (TREE_CODE (elt_t) == VECTOR_TYPE)
                    {
                      tree elt_t = TREE_TYPE (elt_v);
                      if (!useless_type_conversion_p