/* Callgraph based analysis of static variables.
   Copyright (C) 2004-2017 Free Software Foundation, Inc.
   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file marks functions as being either const (TREE_READONLY) or
   pure (DECL_PURE_P).  It can also set a variant of these that
   are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).

   This must be run after inlining decisions have been made since
   otherwise, the local sets will not contain information that is
   consistent with the post-inlined state.  The global sets are not
   prone to this problem since they are by definition transitive.  */
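
/* For illustration only (not part of the pass itself), the three lattice
   values computed below roughly correspond to:

     int sq (int x) { return x * x; }    - const: depends only on arguments
     int g;
     int rd (void) { return g; }         - pure: also reads global memory
     void wr (void) { g = 1; }           - neither: writes memory

   The looping variants matter because pure/const calls may be CSEd, but a
   call that might not terminate cannot be removed by DCE, since deleting
   it would be an observable behavioral change.  */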

/* The code in this module is called by the ipa pass manager.  It
   should be one of the later passes since its information is used by
   the rest of the compilation.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "calls.h"
#include "cfganal.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-niter.h"
#include "langhooks.h"
#include "ipa-utils.h"
#include "gimple-pretty-print.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
#include "intl.h"
#include "opts.h"
#include "ssa.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "ipa-fnsummary.h"

/* Lattice values for const and pure functions.  Everything starts out
   being const, then may drop to pure and then neither depending on
   what is found.  */
enum pure_const_state_e
{
  IPA_CONST,
  IPA_PURE,
  IPA_NEITHER
};

static const char *pure_const_names[3] = {"const", "pure", "neither"};

enum malloc_state_e
{
  STATE_MALLOC_TOP,
  STATE_MALLOC,
  STATE_MALLOC_BOTTOM
};

static const char *malloc_state_names[] = {"malloc_top", "malloc", "malloc_bottom"};

/* Holder for the const_state.  There is one of these per function
   decl.  */
struct funct_state_d
{
  /* See above.  */
  enum pure_const_state_e pure_const_state;
  /* What the user set here; we can always be sure about this.  */
  enum pure_const_state_e state_previously_known;
  bool looping_previously_known;

  /* True if the function might loop forever.  There are a
     lot of ways that this could be determined.  We are pretty
     conservative here.  While it is possible to cse pure and const
     calls, it is not legal to have dce get rid of the call if there
     is a possibility that the call could loop forever since this is
     a behavioral change.  */
  bool looping;

  bool can_throw;

  /* True if the function can call free, munmap or otherwise make
     previously non-trapping memory accesses trapping.  */
  bool can_free;

  enum malloc_state_e malloc_state;
};

/* State used when we know nothing about a function.  */
static struct funct_state_d varying_state
   = { IPA_NEITHER, IPA_NEITHER, true, true, true, true, STATE_MALLOC_BOTTOM };


typedef struct funct_state_d * funct_state;

/* The storage of the funct_state is abstracted because there is the
   possibility that it may be desirable to move this to the cgraph
   local info.  */

/* Array, indexed by cgraph node uid, of function states.  */

static vec<funct_state> funct_state_vec;

static bool gate_pure_const (void);

namespace {

const pass_data pass_data_ipa_pure_const =
{
  IPA_PASS, /* type */
  "pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_pure_const : public ipa_opt_pass_d
{
public:
  pass_ipa_pure_const(gcc::context *ctxt);

  /* opt_pass methods: */
  bool gate (function *) { return gate_pure_const (); }
  unsigned int execute (function *fun);

  void register_hooks (void);

private:
  bool init_p;

  /* Holders of ipa cgraph hooks: */
  struct cgraph_node_hook_list *function_insertion_hook_holder;
  struct cgraph_2node_hook_list *node_duplication_hook_holder;
  struct cgraph_node_hook_list *node_removal_hook_holder;

}; // class pass_ipa_pure_const

} // anon namespace

/* Try to guess whether the function body will always be visible to the
   compiler when compiling the call and whether the compiler will be
   able to propagate the information by itself.  */

static bool
function_always_visible_to_compiler_p (tree decl)
{
  return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl)
	  || DECL_COMDAT (decl));
}

/* Emit suggestion about attribute ATTRIB_NAME for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  The diagnostic is
   controlled by OPTION.  WARNED_ABOUT is a hash_set<tree> unique for
   OPTION; this function may initialize it, and it is always returned
   by the function.  */

static hash_set<tree> *
suggest_attribute (int option, tree decl, bool known_finite,
		   hash_set<tree> *warned_about,
		   const char * attrib_name)
{
  if (!option_enabled (option, &global_options))
    return warned_about;
  if (TREE_THIS_VOLATILE (decl)
      || (known_finite && function_always_visible_to_compiler_p (decl)))
    return warned_about;

  if (!warned_about)
    warned_about = new hash_set<tree>;
  if (warned_about->contains (decl))
    return warned_about;
  warned_about->add (decl);
  warning_at (DECL_SOURCE_LOCATION (decl),
	      option,
	      known_finite
	      ? G_("function might be candidate for attribute %qs")
	      : G_("function might be candidate for attribute %qs"
		   " if it is known to return normally"), attrib_name);
  return warned_about;
}
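
/* As a hypothetical example of the machinery above: compiling

     int g;
     int rd (void) { return g; }

   with -O2 -Wsuggest-attribute=pure may emit

     warning: function might be candidate for attribute 'pure'

   after which the user can annotate the prototype:

     int rd (void) __attribute__ ((pure));  */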

/* Emit suggestion about __attribute__((pure)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */

static void
warn_function_pure (tree decl, bool known_finite)
{
  static hash_set<tree> *warned_about;

  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
			 known_finite, warned_about, "pure");
}

/* Emit suggestion about __attribute__((const)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */

static void
warn_function_const (tree decl, bool known_finite)
{
  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
			 known_finite, warned_about, "const");
}

/* Emit suggestion about __attribute__((malloc)) for DECL.  */

static void
warn_function_malloc (tree decl)
{
  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_malloc, decl,
			 false, warned_about, "malloc");
}

/* Emit suggestion about __attribute__((noreturn)) for DECL.  */

static void
warn_function_noreturn (tree decl)
{
  tree original_decl = decl;

  cgraph_node *node = cgraph_node::get (decl);
  if (node->instrumentation_clone)
    decl = node->instrumented_version->decl;

  static hash_set<tree> *warned_about;
  if (!lang_hooks.missing_noreturn_ok_p (decl)
      && targetm.warn_func_return (decl))
    warned_about
      = suggest_attribute (OPT_Wsuggest_attribute_noreturn, original_decl,
			   true, warned_about, "noreturn");
}

void
warn_function_cold (tree decl)
{
  tree original_decl = decl;

  cgraph_node *node = cgraph_node::get (decl);
  if (node->instrumentation_clone)
    decl = node->instrumented_version->decl;

  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_cold, original_decl,
			 true, warned_about, "cold");
}

/* Return true if we have a function state for NODE.  */

static inline bool
has_function_state (struct cgraph_node *node)
{
  if (!funct_state_vec.exists ()
      || funct_state_vec.length () <= (unsigned int)node->uid)
    return false;
  return funct_state_vec[node->uid] != NULL;
}

/* Return the function state from NODE.  */

static inline funct_state
get_function_state (struct cgraph_node *node)
{
  if (!funct_state_vec.exists ()
      || funct_state_vec.length () <= (unsigned int)node->uid
      || !funct_state_vec[node->uid])
    /* We might want to put correct previously_known state into varying.  */
    return &varying_state;
  return funct_state_vec[node->uid];
}

/* Set the function state S for NODE.  */

static inline void
set_function_state (struct cgraph_node *node, funct_state s)
{
  if (!funct_state_vec.exists ()
      || funct_state_vec.length () <= (unsigned int)node->uid)
    funct_state_vec.safe_grow_cleared (node->uid + 1);

  /* If funct_state_vec already contains a funct_state, we have to release
     it before it's going to be overwritten.  */
  if (funct_state_vec[node->uid] != NULL
      && funct_state_vec[node->uid] != &varying_state)
    free (funct_state_vec[node->uid]);

  funct_state_vec[node->uid] = s;
}

/* Check to see if the use (or definition when CHECKING_WRITE is true)
   of variable T is legal in a function that is either pure or const.  */

static inline void
check_decl (funct_state local,
	    tree t, bool checking_write, bool ipa)
{
  /* Do not want to do anything with volatile except mark any
     function that uses one to be not const or pure.  */
  if (TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Volatile operand is not const/pure\n");
      return;
    }

  /* Do not care about a local automatic that is not static.  */
  if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
    return;

  /* If the variable has the "used" attribute, treat it as if it had
     been touched by the devil.  */
  if (DECL_PRESERVE_P (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Used static/global variable is not const/pure\n");
      return;
    }

  /* In IPA mode we are not interested in checking actual loads and stores;
     they will be processed at propagation time using ipa_ref.  */
  if (ipa)
    return;

  /* Since we have dealt with the locals and params cases above, if we
     are CHECKING_WRITE, this cannot be a pure or constant
     function.  */
  if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    static/global memory write is not const/pure\n");
      return;
    }

  if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
    {
      /* Readonly reads are safe.  */
      if (TREE_READONLY (t) && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t)))
	return; /* Read of a constant, do not change the function state.  */
      else
	{
	  if (dump_file)
	    fprintf (dump_file, "    global memory read is not const\n");
	  /* Just a regular read.  */
	  if (local->pure_const_state == IPA_CONST)
	    local->pure_const_state = IPA_PURE;
	}
    }
  else
    {
      /* Compilation level statics can be read if they are readonly
	 variables.  */
      if (TREE_READONLY (t))
	return;

      if (dump_file)
	fprintf (dump_file, "    static memory read is not const\n");
      /* Just a regular read.  */
      if (local->pure_const_state == IPA_CONST)
	local->pure_const_state = IPA_PURE;
    }
}
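
/* Illustrative examples of the distinctions check_decl makes (not part of
   the pass):

     static const int k = 42;
     static int s;
     int f (void) { return k; }    - read of readonly static: stays const
     int g (void) { return s; }    - read of a static: drops to pure
     void h (void) { s = 1; }      - write of a static: neither  */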


/* Check to see if the use (or definition when CHECKING_WRITE is true)
   of memory operand T is legal in a function that is either pure or
   const.  */

static inline void
check_op (funct_state local, tree t, bool checking_write)
{
  t = get_base_address (t);
  if (t && TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Volatile indirect ref is not const/pure\n");
      return;
    }
  else if (t
	   && (INDIRECT_REF_P (t) || TREE_CODE (t) == MEM_REF)
	   && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
	   && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
    {
      if (dump_file)
	fprintf (dump_file, "    Indirect ref to local memory is OK\n");
      return;
    }
  else if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Indirect ref write is not const/pure\n");
      return;
    }
  else
    {
      if (dump_file)
	fprintf (dump_file, "    Indirect ref read is not const\n");
      if (local->pure_const_state == IPA_CONST)
	local->pure_const_state = IPA_PURE;
    }
}
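
/* Illustrative examples for check_op (not part of the pass): dereferences
   of pointers that cannot alias global memory are harmless, others are not:

     int f (void) { int a = 1; int *p = &a; return *p; } - local: still const
     int g (int *p) { return *p; }  - may read global memory: at best pure
     void h (int *p) { *p = 0; }    - indirect write: neither  */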

/* Compute state based on ECF FLAGS and store to STATE and LOOPING.  */

static void
state_from_flags (enum pure_const_state_e *state, bool *looping,
		  int flags, bool cannot_lead_to_return)
{
  *looping = false;
  if (flags & ECF_LOOPING_CONST_OR_PURE)
    {
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " looping\n");
    }
  if (flags & ECF_CONST)
    {
      *state = IPA_CONST;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " const\n");
    }
  else if (flags & ECF_PURE)
    {
      *state = IPA_PURE;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " pure\n");
    }
  else if (cannot_lead_to_return)
    {
      *state = IPA_PURE;
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " ignoring side effects->pure looping\n");
    }
  else
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " neither\n");
      *state = IPA_NEITHER;
      *looping = true;
    }
}
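
/* A sketch of the mapping state_from_flags implements (illustrative
   declarations only):

     extern int c (int) __attribute__ ((const));  - IPA_CONST, not looping
     extern int p (int) __attribute__ ((pure));   - IPA_PURE, not looping
     extern void d (void) __attribute__ ((noreturn, nothrow));
				- cannot lead to return: IPA_PURE, looping
     extern int u (int);        - no information: IPA_NEITHER, looping  */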

/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
   the better of the two variants into STATE and LOOPING.
   Be sure to merge looping correctly.  IPA_NEITHER functions
   have looping 0 even if they don't have to return.  */

static inline void
better_state (enum pure_const_state_e *state, bool *looping,
	      enum pure_const_state_e state2, bool looping2)
{
  if (state2 < *state)
    {
      if (*state == IPA_NEITHER)
	*looping = looping2;
      else
	*looping = MIN (*looping, looping2);
      *state = state2;
    }
  else if (state2 != IPA_NEITHER)
    *looping = MIN (*looping, looping2);
}

/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
   the worse of the two variants into STATE and LOOPING.
   FROM is the caller and TO the callee, used to check whether the
   callee binds to its current definition.  */

static inline void
worse_state (enum pure_const_state_e *state, bool *looping,
	     enum pure_const_state_e state2, bool looping2,
	     struct symtab_node *from,
	     struct symtab_node *to)
{
  /* Consider function:

     bool a(int *p)
     {
       return *p==*p;
     }

     During early optimization we will turn this into:

     bool a(int *p)
     {
       return true;
     }

     Now this function will be detected as CONST; however, when interposed,
     it may end up being just pure.  We must always assume the worst
     scenario here.  */
  if (*state == IPA_CONST && state2 == IPA_CONST
      && to && !TREE_READONLY (to->decl) && !to->binds_to_current_def_p (from))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Dropping state to PURE because call to %s may not "
		 "bind to current def.\n", to->name ());
      state2 = IPA_PURE;
    }
  *state = MAX (*state, state2);
  *looping = MAX (*looping, looping2);
}

/* Recognize special cases of builtins that are by themselves not pure or
   const but a function using them is.  */
static bool
special_builtin_state (enum pure_const_state_e *state, bool *looping,
		       tree callee)
{
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_RETURN:
      case BUILT_IN_UNREACHABLE:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_UNWIND_RESUME:
      case BUILT_IN_CXA_END_CLEANUP:
      case BUILT_IN_EH_COPY_VALUES:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_APPLY:
      case BUILT_IN_APPLY_ARGS:
      case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
      case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
	*looping = false;
	*state = IPA_CONST;
	return true;
      case BUILT_IN_PREFETCH:
	*looping = true;
	*state = IPA_CONST;
	return true;
      default:
	break;
      }
  return false;
}
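
/* Illustrative example for special_builtin_state (not part of the pass):
   alloca clobbers memory and so is not const by itself, yet a function
   whose uses of the allocated block never escape can still be detected
   as const:

     int f (int n)
     {
       int *tmp = __builtin_alloca (n * sizeof (int));
       tmp[0] = n;        - store to non-escaping local memory
       return tmp[0];
     }  */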

/* Check the parameters of function call CALL to see if there are any
   references in the parameters that are not allowed for pure or const
   functions.  Also check to see if this is either an indirect call, a
   call outside the compilation unit, or has special attributes that may
   also affect the purity.  CALL is the call statement for the entire
   call expression.  */

static void
check_call (funct_state local, gcall *call, bool ipa)
{
  int flags = gimple_call_flags (call);
  tree callee_t = gimple_call_fndecl (call);
  bool possibly_throws = stmt_could_throw_p (call);
  bool possibly_throws_externally = (possibly_throws
				     && stmt_can_throw_external (call));

  if (possibly_throws)
    {
      unsigned int i;
      for (i = 0; i < gimple_num_ops (call); i++)
	if (gimple_op (call, i)
	    && tree_could_throw_p (gimple_op (call, i)))
	  {
	    if (possibly_throws && cfun->can_throw_non_call_exceptions)
	      {
		if (dump_file)
		  fprintf (dump_file, "    operand can throw; looping\n");
		local->looping = true;
	      }
	    if (possibly_throws_externally)
	      {
		if (dump_file)
		  fprintf (dump_file, "    operand can throw externally\n");
		local->can_throw = true;
	      }
	  }
    }

  /* The const and pure flags are set by a variety of places in the
     compiler (including here).  If someone has already set the flags
     for the callee, (such as for some of the builtins) we will use
     them, otherwise we will compute our own information.

     Const and pure functions have less clobber effects than other
     functions so we process these first.  Otherwise if it is a call
     outside the compilation unit or an indirect call we punt.  This
     leaves local calls which will be processed by following the call
     graph.  */
  if (callee_t)
    {
      enum pure_const_state_e call_state;
      bool call_looping;

      if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
	  && !nonfreeing_call_p (call))
	local->can_free = true;

      if (special_builtin_state (&call_state, &call_looping, callee_t))
	{
	  worse_state (&local->pure_const_state, &local->looping,
		       call_state, call_looping,
		       NULL, NULL);
	  return;
	}
      /* When bad things happen to bad functions, they cannot be const
	 or pure.  */
      if (setjmp_call_p (callee_t))
	{
	  if (dump_file)
	    fprintf (dump_file, "    setjmp is not const/pure\n");
	  local->looping = true;
	  local->pure_const_state = IPA_NEITHER;
	}

      if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (callee_t))
	  {
	  case BUILT_IN_LONGJMP:
	  case BUILT_IN_NONLOCAL_GOTO:
	    if (dump_file)
	      fprintf (dump_file, "    longjmp and nonlocal goto is not const/pure\n");
	    local->pure_const_state = IPA_NEITHER;
	    local->looping = true;
	    break;
	  default:
	    break;
	  }
    }
  else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
    local->can_free = true;

  /* When not in IPA mode, we can still handle self recursion.  */
  if (!ipa && callee_t
      && recursive_call_p (current_function_decl, callee_t))
    {
      if (dump_file)
	fprintf (dump_file, "    Recursive call can loop.\n");
      local->looping = true;
    }
  /* Either the callee is unknown or we are doing local analysis.
     Look to see if there are any bits available for the callee (such as by
     declaration or because it is a builtin) and process solely on the basis
     of those bits.  Handle internal calls always, those calls don't have
     corresponding cgraph edges and thus aren't processed during
     the propagation.  */
  else if (!ipa || gimple_call_internal_p (call))
    {
      enum pure_const_state_e call_state;
      bool call_looping;
      if (possibly_throws && cfun->can_throw_non_call_exceptions)
	{
	  if (dump_file)
	    fprintf (dump_file, "    can throw; looping\n");
	  local->looping = true;
	}
      if (possibly_throws_externally)
	{
	  if (dump_file)
	    {
	      fprintf (dump_file, "    can throw externally to lp %i\n",
		       lookup_stmt_eh_lp (call));
	      if (callee_t)
		fprintf (dump_file, "     callee:%s\n",
			 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
	    }
	  local->can_throw = true;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "    checking flags for call:");
      state_from_flags (&call_state, &call_looping, flags,
			((flags & (ECF_NORETURN | ECF_NOTHROW))
			 == (ECF_NORETURN | ECF_NOTHROW))
			|| (!flag_exceptions && (flags & ECF_NORETURN)));
      worse_state (&local->pure_const_state, &local->looping,
		   call_state, call_looping, NULL, NULL);
    }
  /* Direct function calls are handled by IPA propagation.  */
}

/* Wrapper around check_decl for loads in local mode.  */

static bool
check_load (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, false, false);
  else
    check_op ((funct_state)data, op, false);
  return false;
}

/* Wrapper around check_decl for stores in local mode.  */

static bool
check_store (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, true, false);
  else
    check_op ((funct_state)data, op, true);
  return false;
}

/* Wrapper around check_decl for loads in ipa mode.  */

static bool
check_ipa_load (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, false, true);
  else
    check_op ((funct_state)data, op, false);
  return false;
}

/* Wrapper around check_decl for stores in ipa mode.  */

static bool
check_ipa_store (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, true, true);
  else
    check_op ((funct_state)data, op, true);
  return false;
}

/* Look at the statement pointed to by GSIP and figure out what interesting
   side effects it has.  */
static void
check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
{
  gimple *stmt = gsi_stmt (*gsip);

  if (is_gimple_debug (stmt))
    return;

  /* Do consider clobbers as side effects before IPA, so that we rather
     inline C++ destructors and keep clobber semantics than eliminate them.

     TODO: We may get smarter during early optimizations on these and let
     functions containing only clobbers be optimized more.  This is a common
     case of C++ destructors.  */

  if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
    return;

  if (dump_file)
    {
      fprintf (dump_file, "  scanning: ");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  if (gimple_has_volatile_ops (stmt)
      && !gimple_clobber_p (stmt))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Volatile stmt is not const/pure\n");
    }

  /* Look for loads and stores.  */
  walk_stmt_load_store_ops (stmt, local,
			    ipa ? check_ipa_load : check_load,
			    ipa ? check_ipa_store : check_store);

  if (gimple_code (stmt) != GIMPLE_CALL
      && stmt_could_throw_p (stmt))
    {
      if (cfun->can_throw_non_call_exceptions)
	{
	  if (dump_file)
	    fprintf (dump_file, "    can throw; looping\n");
	  local->looping = true;
	}
      if (stmt_can_throw_external (stmt))
	{
	  if (dump_file)
	    fprintf (dump_file, "    can throw externally\n");
	  local->can_throw = true;
	}
      else
	if (dump_file)
	  fprintf (dump_file, "    can throw\n");
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      check_call (local, as_a <gcall *> (stmt), ipa);
      break;
    case GIMPLE_LABEL:
      if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
	/* Target of long jump.  */
	{
	  if (dump_file)
	    fprintf (dump_file, "    nonlocal label is not const/pure\n");
	  local->pure_const_state = IPA_NEITHER;
	}
      break;
    case GIMPLE_ASM:
      if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
	{
	  if (dump_file)
	    fprintf (dump_file, "    memory asm clobber is not const/pure\n");
	  /* Abandon all hope, ye who enter here.  */
	  local->pure_const_state = IPA_NEITHER;
	  local->can_free = true;
	}
      if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
	{
	  if (dump_file)
	    fprintf (dump_file, "    volatile is not const/pure\n");
	  /* Abandon all hope, ye who enter here.  */
	  local->pure_const_state = IPA_NEITHER;
	  local->looping = true;
	  local->can_free = true;
	}
      return;
    default:
      break;
    }
}

/* Check that RETVAL is used only in STMT and in comparisons against 0.
   RETVAL is the return value of the function and STMT is the return
   statement.  */

static bool
check_retval_uses (tree retval, gimple *stmt)
{
  imm_use_iterator use_iter;
  gimple *use_stmt;

  FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, retval)
    if (gcond *cond = dyn_cast<gcond *> (use_stmt))
      {
	tree op2 = gimple_cond_rhs (cond);
	if (!integer_zerop (op2))
	  RETURN_FROM_IMM_USE_STMT (use_iter, false);
      }
    else if (gassign *ga = dyn_cast<gassign *> (use_stmt))
      {
	enum tree_code code = gimple_assign_rhs_code (ga);
	if (TREE_CODE_CLASS (code) != tcc_comparison)
	  RETURN_FROM_IMM_USE_STMT (use_iter, false);
	if (!integer_zerop (gimple_assign_rhs2 (ga)))
	  RETURN_FROM_IMM_USE_STMT (use_iter, false);
      }
    else if (is_gimple_debug (use_stmt))
      ;
    else if (use_stmt != stmt)
      RETURN_FROM_IMM_USE_STMT (use_iter, false);

  return true;
}
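
/* Illustrative example of a function the analysis below can accept as a
   malloc candidate: the returned pointer comes from a call on every path
   and its only other uses are comparisons against 0:

     extern void *my_alloc (unsigned long) __attribute__ ((malloc));

     void *alloc_or_trap (unsigned long n)
     {
       void *p = my_alloc (n);
       if (p == 0)           - allowed use: comparison against 0
	 __builtin_trap ();
       return p;             - allowed use: the return statement itself
     }  */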

/* malloc_candidate_p () checks if FUN can possibly be annotated with the
   malloc attribute.  Currently this function does a very conservative
   analysis.  FUN is considered to be a candidate if
   1) It returns a value of pointer type.
   2) SSA_NAME_DEF_STMT (return_value) is either a function call or
      a phi, and each element of the phi is either NULL or
      SSA_NAME_DEF_STMT (element) is a function call.
   3) The return value has immediate uses only within comparisons
      (gcond or gassign) and the return stmt (and likewise a phi arg has
      immediate use only within a comparison or the phi stmt).  */

static bool
malloc_candidate_p (function *fun, bool ipa)
{
  basic_block exit_block = EXIT_BLOCK_PTR_FOR_FN (fun);
  edge e;
  edge_iterator ei;
  cgraph_node *node = cgraph_node::get_create (fun->decl);

#define DUMP_AND_RETURN(reason)  \
{  \
  if (dump_file && (dump_flags & TDF_DETAILS))  \
    fprintf (dump_file, "%s", (reason));  \
  return false;  \
}

  if (EDGE_COUNT (exit_block->preds) == 0)
    return false;

  FOR_EACH_EDGE (e, ei, exit_block->preds)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (e->src);
      greturn *ret_stmt = dyn_cast<greturn *> (gsi_stmt (gsi));

      if (!ret_stmt)
	return false;

      tree retval = gimple_return_retval (ret_stmt);
      if (!retval)
	DUMP_AND_RETURN("No return value.")

      if (TREE_CODE (retval) != SSA_NAME
	  || TREE_CODE (TREE_TYPE (retval)) != POINTER_TYPE)
	DUMP_AND_RETURN("Return value is not SSA_NAME or not a pointer type.")

      if (!check_retval_uses (retval, ret_stmt))
	DUMP_AND_RETURN("Return value has uses outside return stmt"
			" and comparisons against 0.")

      gimple *def = SSA_NAME_DEF_STMT (retval);
      if (gcall *call_stmt = dyn_cast<gcall *> (def))
	{
	  tree callee_decl = gimple_call_fndecl (call_stmt);
	  if (!callee_decl)
	    return false;

	  if (!ipa && !DECL_IS_MALLOC (callee_decl))
	    DUMP_AND_RETURN("callee_decl does not have malloc attribute for"
			    " non-ipa mode.")

	  cgraph_edge *cs = node->get_edge (call_stmt);
	  if (cs)
	    {
	      ipa_call_summary *es = ipa_call_summaries->get (cs);
	      gcc_assert (es);
	      es->is_return_callee_uncaptured = true;
	    }
	}

      else if (gphi *phi = dyn_cast<gphi *> (def))
	for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
	  {
	    tree arg = gimple_phi_arg_def (phi, i);
	    if (TREE_CODE (arg) != SSA_NAME)
	      DUMP_AND_RETURN("phi arg is not SSA_NAME.")
	    if (!(arg == null_pointer_node || check_retval_uses (arg, phi)))
	      DUMP_AND_RETURN("phi arg has uses outside phi"
			      " and comparisons against 0.")

	    gimple *arg_def = SSA_NAME_DEF_STMT (arg);
	    gcall *call_stmt = dyn_cast<gcall *> (arg_def);
	    if (!call_stmt)
	      return false;
	    tree callee_decl = gimple_call_fndecl (call_stmt);
	    if (!callee_decl)
	      return false;
	    if (!ipa && !DECL_IS_MALLOC (callee_decl))
	      DUMP_AND_RETURN("callee_decl does not have malloc attribute for"
			      " non-ipa mode.")

	    cgraph_edge *cs = node->get_edge (call_stmt);
	    if (cs)
	      {
		ipa_call_summary *es = ipa_call_summaries->get (cs);
		gcc_assert (es);
		es->is_return_callee_uncaptured = true;
	      }
	  }

      else
	DUMP_AND_RETURN("def_stmt of return value is not a call or phi-stmt.")
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nFound %s to be candidate for malloc attribute\n",
	     IDENTIFIER_POINTER (DECL_NAME (fun->decl)));
  return true;

#undef DUMP_AND_RETURN
}


/* This is the main routine for finding the reference patterns for
   global variables within a function FN.  */

static funct_state
analyze_function (struct cgraph_node *fn, bool ipa)
{
  tree decl = fn->decl;
  funct_state l;
  basic_block this_block;

  l = XCNEW (struct funct_state_d);
  l->pure_const_state = IPA_CONST;
  l->state_previously_known = IPA_NEITHER;
  l->looping_previously_known = true;
  l->looping = false;
  l->can_throw = false;
  l->can_free = false;
  state_from_flags (&l->state_previously_known, &l->looping_previously_known,
		    flags_from_decl_or_type (fn->decl),
		    fn->cannot_return_p ());

  if (fn->thunk.thunk_p || fn->alias)
    {
      /* Thunk gets propagated through, so nothing interesting happens.  */
      gcc_assert (ipa);
      if (fn->thunk.thunk_p && fn->thunk.virtual_offset_p)
	l->pure_const_state = IPA_NEITHER;
      return l;
    }

  if (dump_file)
    {
      fprintf (dump_file, "\n\n local analysis of %s\n ",
	       fn->name ());
    }

  push_cfun (DECL_STRUCT_FUNCTION (decl));

  FOR_EACH_BB_FN (this_block, cfun)
    {
      gimple_stmt_iterator gsi;
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      for (gsi = gsi_start_bb (this_block);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  check_stmt (&gsi, l, ipa);
	  if (l->pure_const_state == IPA_NEITHER
	      && l->looping
	      && l->can_throw
	      && l->can_free)
	    goto end;
	}
    }

end:
  if (l->pure_const_state != IPA_NEITHER)
    {
      /* Const functions cannot have back edges (an
	 indication of a possible infinite loop side
	 effect).  */
      if (mark_dfs_back_edges ())
	{
	  /* Preheaders are needed for SCEV to work.
	     Simple latches and recorded exits improve chances that loops
	     will be proved finite in testcases such as in loop-15.c
	     and loop-24.c.  */
	  loop_optimizer_init (LOOPS_HAVE_PREHEADERS
			       | LOOPS_HAVE_SIMPLE_LATCHES
			       | LOOPS_HAVE_RECORDED_EXITS);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    flow_loops_dump (dump_file, NULL, 0);
	  if (mark_irreducible_loops ())
	    {
	      if (dump_file)
		fprintf (dump_file, "    has irreducible loops\n");
	      l->looping = true;
	    }
	  else
	    {
	      struct loop *loop;
	      scev_initialize ();
	      FOR_EACH_LOOP (loop, 0)
		if (!finite_loop_p (loop))
		  {
		    if (dump_file)
		      fprintf (dump_file, "    can not prove finiteness of "
			       "loop %i\n", loop->num);
		    l->looping = true;
		    break;
		  }
	      scev_finalize ();
	    }
	  loop_optimizer_finalize ();
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "    checking previously known:");

  better_state (&l->pure_const_state, &l->looping,
		l->state_previously_known,
		l->looping_previously_known);
  if (TREE_NOTHROW (decl))
    l->can_throw = false;

  l->malloc_state = STATE_MALLOC_BOTTOM;
  if (DECL_IS_MALLOC (decl))
    l->malloc_state = STATE_MALLOC;
  else if (ipa && malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), true))
    l->malloc_state = STATE_MALLOC_TOP;
  else if (malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), false))
    l->malloc_state = STATE_MALLOC;

  pop_cfun ();
  if (dump_file)
    {
      if (l->looping)
	fprintf (dump_file, "Function is locally looping.\n");
      if (l->can_throw)
	fprintf (dump_file, "Function is locally throwing.\n");
      if (l->pure_const_state == IPA_CONST)
	fprintf (dump_file, "Function is locally const.\n");
      if (l->pure_const_state == IPA_PURE)
	fprintf (dump_file, "Function is locally pure.\n");
      if (l->can_free)
	fprintf (dump_file, "Function can locally free.\n");
      if (l->malloc_state == STATE_MALLOC)
	fprintf (dump_file, "Function is locally malloc.\n");
    }
  return l;
}

/* Called when a new function is inserted into the callgraph late.  */
static void
add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  /* There are some shared nodes, in particular the initializers on
     static declarations.  We do not need to scan them more than once
     since all we would be interested in are the address-of
     operations.  */
  if (opt_for_fn (node->decl, flag_ipa_pure_const))
    set_function_state (node, analyze_function (node, true));
}

/* Called when a new clone is inserted into the callgraph late.  */

static void
duplicate_node_data (struct cgraph_node *src, struct cgraph_node *dst,
		     void *data ATTRIBUTE_UNUSED)
{
  if (has_function_state (src))
    {
      funct_state l = XNEW (struct funct_state_d);
      gcc_assert (!has_function_state (dst));
      memcpy (l, get_function_state (src), sizeof (*l));
      set_function_state (dst, l);
    }
}

/* Called when a node is removed from the callgraph.  */

static void
remove_node_data (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (has_function_state (node))
    set_function_state (node, NULL);
}


void
pass_ipa_pure_const::
register_hooks (void)
{
  if (init_p)
    return;

  init_p = true;

  node_removal_hook_holder =
      symtab->add_cgraph_removal_hook (&remove_node_data, NULL);
  node_duplication_hook_holder =
      symtab->add_cgraph_duplication_hook (&duplicate_node_data, NULL);
  function_insertion_hook_holder =
      symtab->add_cgraph_insertion_hook (&add_new_function, NULL);
}


/* Analyze each function in the cgraph to see if it is locally PURE or
   CONST.  */

static void
pure_const_generate_summary (void)
{
  struct cgraph_node *node;

  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  pass->register_hooks ();

  /* Process all of the functions.

     We process AVAIL_INTERPOSABLE functions.  We cannot use the results
     by default, but the info can be used at LTO with -fwhole-program or
     when a function is cloned and the clone is AVAILABLE.  */

  FOR_EACH_DEFINED_FUNCTION (node)
    if (opt_for_fn (node->decl, flag_ipa_pure_const))
      set_function_state (node, analyze_function (node, true));
}


/* Serialize the ipa info for lto.  */

static void
pure_const_write_summary (void)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_ipa_pure_const);
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->definition && has_function_state (node))
	count++;
    }

  streamer_write_uhwi_stream (ob->main_stream, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->definition && has_function_state (node))
	{
	  struct bitpack_d bp;
	  funct_state fs;
	  int node_ref;
	  lto_symtab_encoder_t encoder;

	  fs = get_function_state (node);

	  encoder = ob->decl_state->symtab_node_encoder;
	  node_ref = lto_symtab_encoder_encode (encoder, node);
	  streamer_write_uhwi_stream (ob->main_stream, node_ref);

	  /* Note that flags will need to be read in the opposite
	     order as we are pushing the bitflags into FLAGS.  */
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, fs->pure_const_state, 2);
	  bp_pack_value (&bp, fs->state_previously_known, 2);
	  bp_pack_value (&bp, fs->looping_previously_known, 1);
	  bp_pack_value (&bp, fs->looping, 1);
	  bp_pack_value (&bp, fs->can_throw, 1);
	  bp_pack_value (&bp, fs->can_free, 1);
	  bp_pack_value (&bp, fs->malloc_state, 2);
	  streamer_write_bitpack (&bp);
	}
    }

  lto_destroy_simple_output_block (ob);
}


/* Deserialize the ipa info for lto.  */

static void
pure_const_read_summary (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  pass->register_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      struct lto_input_block *ib
	= lto_create_simple_input_block (file_data,
					 LTO_section_ipa_pure_const,
					 &data, &len);
      if (ib)
	{
	  unsigned int i;
	  unsigned int count = streamer_read_uhwi (ib);

	  for (i = 0; i < count; i++)
	    {
	      unsigned int index;
	      struct cgraph_node *node;
	      struct bitpack_d bp;
	      funct_state fs;
	      lto_symtab_encoder_t encoder;

	      fs = XCNEW (struct funct_state_d);
	      index = streamer_read_uhwi (ib);
	      encoder = file_data->symtab_node_encoder;
	      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
									index));
	      set_function_state (node, fs);

	      /* Note that the flags must be read in the opposite
		 order in which they were written (the bitflags were
		 pushed into FLAGS).  */
	      bp = streamer_read_bitpack (ib);
	      fs->pure_const_state
		= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
	      fs->state_previously_known
		= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
	      fs->looping_previously_known = bp_unpack_value (&bp, 1);
	      fs->looping = bp_unpack_value (&bp, 1);
	      fs->can_throw = bp_unpack_value (&bp, 1);
	      fs->can_free = bp_unpack_value (&bp, 1);
	      fs->malloc_state
		= (enum malloc_state_e) bp_unpack_value (&bp, 2);

	      if (dump_file)
		{
		  int flags = flags_from_decl_or_type (node->decl);
		  fprintf (dump_file, "Read info for %s ", node->dump_name ());
		  if (flags & ECF_CONST)
		    fprintf (dump_file, " const");
		  if (flags & ECF_PURE)
		    fprintf (dump_file, " pure");
		  if (flags & ECF_NOTHROW)
		    fprintf (dump_file, " nothrow");
		  fprintf (dump_file, "\n  pure const state: %s\n",
			   pure_const_names[fs->pure_const_state]);
		  fprintf (dump_file, "  previously known state: %s\n",
			   pure_const_names[fs->state_previously_known]);
		  if (fs->looping)
		    fprintf (dump_file, "  function is locally looping\n");
		  if (fs->looping_previously_known)
		    fprintf (dump_file, "  function is previously known looping\n");
		  if (fs->can_throw)
		    fprintf (dump_file, "  function is locally throwing\n");
		  if (fs->can_free)
		    fprintf (dump_file, "  function can locally free\n");
		  fprintf (dump_file, "\n malloc state: %s\n",
			   malloc_state_names[fs->malloc_state]);
		}
	    }

	  lto_destroy_simple_input_block (file_data,
					  LTO_section_ipa_pure_const,
					  ib, data, len);
	}
    }
}

/* We only propagate across edges that can throw externally and whose
   callee is not interposable.  */

static bool
ignore_edge_for_nothrow (struct cgraph_edge *e)
{
  if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
    return true;

  enum availability avail;
  cgraph_node *n = e->callee->function_or_virtual_thunk_symbol (&avail,
								e->caller);
  if (avail <= AVAIL_INTERPOSABLE || TREE_NOTHROW (n->decl))
    return true;
  return opt_for_fn (e->callee->decl, flag_non_call_exceptions)
	 && !e->callee->binds_to_current_def_p (e->caller);
}

/* Return true if NODE is a self-recursive function.
   Indirectly recursive functions appear as non-trivial strongly
   connected components, so we need to care about self recursion
   only.  */

static bool
self_recursive_p (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    if (e->callee->function_symbol () == node)
      return true;
  return false;
}

/* Return true if N is a cdtor that is not const or pure.  In this case we
   may need to remove the unreachable function if it is marked const/pure.  */

static bool
cdtor_p (cgraph_node *n, void *)
{
  if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
    return ((!TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl))
	    || DECL_LOOPING_CONST_OR_PURE_P (n->decl));
  return false;
}

/* We only propagate across edges with a non-interposable callee.  */

static bool
ignore_edge_for_pure_const (struct cgraph_edge *e)
{
  enum availability avail;
  e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
  return (avail <= AVAIL_INTERPOSABLE);
}


/* Produce transitive closure over the callgraph and compute pure/const
   attributes.  */

static bool
propagate_pure_const (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;
  bool remove_p = false;
  bool has_cdtor;

  order_pos = ipa_reduced_postorder (order, true, false,
				     ignore_edge_for_pure_const);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      enum pure_const_state_e pure_const_state = IPA_CONST;
      bool looping = false;
      int count = 0;
      node = order[i];

      if (node->alias)
	continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Starting cycle\n");

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && pure_const_state != IPA_NEITHER)
	{
	  struct cgraph_edge *e;
	  struct cgraph_edge *ie;
	  int i;
	  struct ipa_ref *ref = NULL;

	  funct_state w_l = get_function_state (w);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "  Visiting %s state:%s looping %i\n",
		     w->dump_name (),
		     pure_const_names[w_l->pure_const_state],
		     w_l->looping);

	  /* First merge in function body properties.
	     We are safe to pass NULL as FROM and TO because we will take care
	     of possible interposition when walking callees.  */
	  worse_state (&pure_const_state, &looping,
		       w_l->pure_const_state, w_l->looping,
		       NULL, NULL);
	  if (pure_const_state == IPA_NEITHER)
	    break;

	  count++;

	  /* We consider recursive cycles as possibly infinite.
	     This might be relaxed since infinite recursion leads to stack
	     overflow.  */
	  if (count > 1)
	    looping = true;

	  /* Now walk the edges and merge in callee properties.  */
	  for (e = w->callees; e && pure_const_state != IPA_NEITHER;
	       e = e->next_callee)
	    {
	      enum availability avail;
	      struct cgraph_node *y = e->callee->
				function_or_virtual_thunk_symbol (&avail,
								  e->caller);
	      enum pure_const_state_e edge_state = IPA_CONST;
	      bool edge_looping = false;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "    Call to %s",
			   e->callee->dump_name ());
		}
	      if (avail > AVAIL_INTERPOSABLE)
		{
		  funct_state y_l = get_function_state (y);
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file,
			       " state:%s looping:%i\n",
			       pure_const_names[y_l->pure_const_state],
			       y_l->looping);
		    }
		  if (y_l->pure_const_state > IPA_PURE
		      && e->cannot_lead_to_return_p ())
		    {
		      if (dump_file && (dump_flags & TDF_DETAILS))
			fprintf (dump_file,
				 "        Ignoring side effects"
				 " -> pure, looping\n");
		      edge_state = IPA_PURE;
		      edge_looping = true;
		    }
		  else
		    {
		      edge_state = y_l->pure_const_state;
		      edge_looping = y_l->looping;
		    }
		}
	      else if (special_builtin_state (&edge_state, &edge_looping,
					      y->decl))
		;
	      else
		state_from_flags (&edge_state, &edge_looping,
				  flags_from_decl_or_type (y->decl),
				  e->cannot_lead_to_return_p ());

	      /* Merge the results with what we already know.  */
	      better_state (&edge_state, &edge_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   edge_state, edge_looping, e->caller, e->callee);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }

	  /* Now process the indirect calls.  */
	  for (ie = w->indirect_calls;
	       ie && pure_const_state != IPA_NEITHER; ie = ie->next_callee)
	    {
	      enum pure_const_state_e edge_state = IPA_CONST;
	      bool edge_looping = false;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "    Indirect call");
	      state_from_flags (&edge_state, &edge_looping,
				ie->indirect_info->ecf_flags,
				ie->cannot_lead_to_return_p ());
	      /* Merge the results with what we already know.  */
	      better_state (&edge_state, &edge_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   edge_state, edge_looping, NULL, NULL);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }

	  /* And finally all loads and stores.  */
	  for (i = 0; w->iterate_reference (i, ref)
	       && pure_const_state != IPA_NEITHER; i++)
	    {
	      enum pure_const_state_e ref_state = IPA_CONST;
	      bool ref_looping = false;
	      switch (ref->use)
		{
		case IPA_REF_LOAD:
		  /* Readonly reads are safe.  */
		  if (TREE_READONLY (ref->referred->decl))
		    break;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "    nonreadonly global var read\n");
		  ref_state = IPA_PURE;
		  break;
		case IPA_REF_STORE:
		  if (ref->cannot_lead_to_return ())
		    break;
		  ref_state = IPA_NEITHER;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "    global var write\n");
		  break;
		case IPA_REF_ADDR:
		case IPA_REF_CHKP:
		  break;
		default:
		  gcc_unreachable ();
		}
	      better_state (&ref_state, &ref_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   ref_state, ref_looping, NULL, NULL);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Result %s looping %i\n",
		 pure_const_names [pure_const_state],
		 looping);

      /* Find the worst state of can_free for any node in the cycle.  */
      bool can_free = false;
      w = node;
      while (w && !can_free)
	{
	  struct cgraph_edge *e;
	  funct_state w_l = get_function_state (w);

	  if (w_l->can_free
	      || w->get_availability () == AVAIL_INTERPOSABLE
	      || w->indirect_calls)
	    can_free = true;

	  for (e = w->callees; e && !can_free; e = e->next_callee)
	    {
	      enum availability avail;
	      struct cgraph_node *y = e->callee->
				function_or_virtual_thunk_symbol (&avail,
								  e->caller);

	      if (avail > AVAIL_INTERPOSABLE)
		can_free = get_function_state (y)->can_free;
	      else
		can_free = true;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}

      /* Copy back the region's pure_const_state which is shared by
	 all nodes in the region.  */
      w = node;
      while (w)
	{
	  funct_state w_l = get_function_state (w);
	  enum pure_const_state_e this_state = pure_const_state;
	  bool this_looping = looping;

	  w_l->can_free = can_free;
	  w->nonfreeing_fn = !can_free;
	  if (!can_free && dump_file)
	    fprintf (dump_file, "Function found not to call free: %s\n",
		     w->name ());

	  if (w_l->state_previously_known != IPA_NEITHER
	      && this_state > w_l->state_previously_known)
	    {
	      this_state = w_l->state_previously_known;
	      if (this_state == IPA_NEITHER)
		this_looping = w_l->looping_previously_known;
	    }
	  if (!this_looping && self_recursive_p (w))
	    this_looping = true;
	  if (!w_l->looping_previously_known)
	    this_looping = false;

	  /* All nodes within a cycle share the same info.  */
	  w_l->pure_const_state = this_state;
	  w_l->looping = this_looping;

	  /* Inline clones share declaration with their offline copies;
	     do not modify their declarations since the offline copy may
	     be different.  */
	  if (!w->global.inlined_to)
	    switch (this_state)
	      {
	      case IPA_CONST:
		if (!TREE_READONLY (w->decl))
		  {
		    warn_function_const (w->decl, !this_looping);
		    if (dump_file)
		      fprintf (dump_file, "Function found to be %sconst: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		  }
		/* Turning constructor or destructor to non-looping const/pure
		   enables us to possibly remove the function completely.  */
		if (this_looping)
		  has_cdtor = false;
		else
		  has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
							      NULL, true);
		if (w->set_const_flag (true, this_looping))
		  {
		    if (dump_file)
		      fprintf (dump_file,
			       "Declaration updated to be %sconst: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		    remove_p |= has_cdtor;
		  }
		break;

	      case IPA_PURE:
		if (!DECL_PURE_P (w->decl))
		  {
		    warn_function_pure (w->decl, !this_looping);
		    if (dump_file)
		      fprintf (dump_file, "Function found to be %spure: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		  }
		if (this_looping)
		  has_cdtor = false;
		else
		  has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
							      NULL, true);
		if (w->set_pure_flag (true, this_looping))
		  {
		    if (dump_file)
		      fprintf (dump_file,
			       "Declaration updated to be %spure: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		    remove_p |= has_cdtor;
		  }
		break;

	      default:
		break;
	      }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
    }

  ipa_free_postorder_info ();
  free (order);
  return remove_p;
}

/* Produce transitive closure over the callgraph and compute nothrow
   attributes.  */

static void
propagate_nothrow (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;

  order_pos = ipa_reduced_postorder (order, true, false,
				     ignore_edge_for_nothrow);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      bool can_throw = false;
      node = order[i];

      if (node->alias)
	continue;

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && !can_throw)
	{
	  struct cgraph_edge *e, *ie;

	  if (!TREE_NOTHROW (w->decl))
	    {
	      funct_state w_l = get_function_state (w);

	      if (w_l->can_throw
		  || w->get_availability () == AVAIL_INTERPOSABLE)
		can_throw = true;

	      for (e = w->callees; e && !can_throw; e = e->next_callee)
		{
		  enum availability avail;

		  if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
		    continue;

		  struct cgraph_node *y = e->callee->
				   function_or_virtual_thunk_symbol (&avail,
								     e->caller);

		  /* We can use info about the callee only if we know it
		     cannot be interposed.
		     When the callee is compiled with non-call exceptions we
		     also must check that the declaration is bound to the
		     current body, as another semantically equivalent body
		     may still throw.  */
		  if (avail <= AVAIL_INTERPOSABLE
		      || (!TREE_NOTHROW (y->decl)
			  && (get_function_state (y)->can_throw
			      || (opt_for_fn (y->decl, flag_non_call_exceptions)
				  && !e->callee->binds_to_current_def_p (w)))))
		    can_throw = true;
		}
	      for (ie = w->indirect_calls; ie && !can_throw;
		   ie = ie->next_callee)
		if (ie->can_throw_external
		    && !(ie->indirect_info->ecf_flags & ECF_NOTHROW))
		  can_throw = true;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}

      /* Copy back the region's nothrow state, which is shared by
	 all nodes in the region.  */
      w = node;
      while (w)
	{
	  funct_state w_l = get_function_state (w);
	  if (!can_throw && !TREE_NOTHROW (w->decl))
	    {
	      /* Inline clones share declaration with their offline copies;
		 do not modify their declarations since the offline copy may
		 be different.  */
	      if (!w->global.inlined_to)
		{
		  w->set_nothrow_flag (true);
		  if (dump_file)
		    fprintf (dump_file, "Function found to be nothrow: %s\n",
			     w->name ());
		}
	    }
	  else if (can_throw && !TREE_NOTHROW (w->decl))
	    w_l->can_throw = true;
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
    }

  ipa_free_postorder_info ();
  free (order);
}
1864
1865/* Debugging function to dump state of malloc lattice. */
1866
DEBUG_FUNCTION
static void
dump_malloc_lattice (FILE *dump_file, const char *s)
{
  if (!dump_file)
    return;

  fprintf (dump_file, "\n\nMALLOC LATTICE %s:\n", s);
  cgraph_node *node;
  FOR_EACH_FUNCTION (node)
    {
      funct_state fs = get_function_state (node);
      malloc_state_e state = fs->malloc_state;
      fprintf (dump_file, "%s: %s\n", node->name (),
               malloc_state_names[state]);
    }
}

/* Propagate the malloc attribute across the callgraph.  */
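
/* For instance (a hypothetical example), in

     void *my_alloc (size_t n) { return xmalloc (n); }

   the returned pointer comes straight from a malloc-like callee and is
   not captured on the way out, so my_alloc can be marked malloc-like
   as well.  */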

static void
propagate_malloc (void)
{
  cgraph_node *node;
  FOR_EACH_FUNCTION (node)
    {
      if (DECL_IS_MALLOC (node->decl))
        if (!has_function_state (node))
          {
            funct_state l = XCNEW (struct funct_state_d);
            *l = varying_state;
            l->malloc_state = STATE_MALLOC;
            set_function_state (node, l);
          }
    }

  dump_malloc_lattice (dump_file, "Initial");
  struct cgraph_node **order
    = XNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos = ipa_reverse_postorder (order);
  bool changed = true;

  while (changed)
    {
      changed = false;
      /* Walk in postorder.  */
      for (int i = order_pos - 1; i >= 0; --i)
        {
          cgraph_node *node = order[i];
          if (node->alias
              || !node->definition
              || !has_function_state (node))
            continue;

          funct_state l = get_function_state (node);

          /* FIXME: add support for indirect calls.  */
          if (node->indirect_calls)
            {
              l->malloc_state = STATE_MALLOC_BOTTOM;
              continue;
            }

          if (node->get_availability () <= AVAIL_INTERPOSABLE)
            {
              l->malloc_state = STATE_MALLOC_BOTTOM;
              continue;
            }

          if (l->malloc_state == STATE_MALLOC_BOTTOM)
            continue;

          vec<cgraph_node *> callees = vNULL;
          for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
            {
              ipa_call_summary *es = ipa_call_summaries->get (cs);
              if (es && es->is_return_callee_uncaptured)
                callees.safe_push (cs->callee);
            }

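          /* Meet over all callees whose return value the node returns
             uncaptured: the lattice is ordered STATE_MALLOC_TOP
             < STATE_MALLOC < STATE_MALLOC_BOTTOM, and we take the worst
             (numerically largest) state seen.  */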
          malloc_state_e new_state = l->malloc_state;
          for (unsigned j = 0; j < callees.length (); j++)
            {
              cgraph_node *callee = callees[j];
              if (!has_function_state (callee))
                {
                  new_state = STATE_MALLOC_BOTTOM;
                  break;
                }
              malloc_state_e callee_state
                = get_function_state (callee)->malloc_state;
              if (new_state < callee_state)
                new_state = callee_state;
            }
          if (new_state != l->malloc_state)
            {
              changed = true;
              l->malloc_state = new_state;
            }
        }
    }

  FOR_EACH_DEFINED_FUNCTION (node)
    if (has_function_state (node))
      {
        funct_state l = get_function_state (node);
        if (!node->alias
            && l->malloc_state == STATE_MALLOC
            && !node->global.inlined_to)
          {
            if (dump_file && (dump_flags & TDF_DETAILS))
              fprintf (dump_file, "Function %s found to be malloc\n",
                       node->name ());

            bool malloc_decl_p = DECL_IS_MALLOC (node->decl);
            node->set_malloc_flag (true);
            if (!malloc_decl_p && warn_suggest_attribute_malloc)
              warn_function_malloc (node->decl);
          }
      }

  dump_malloc_lattice (dump_file, "after propagation");
  ipa_free_postorder_info ();
  free (order);
}

/* Produce the global information by performing a transitive closure
   on the local information that was produced by generate_summary.  */

unsigned int
pass_ipa_pure_const::
execute (function *)
{
  struct cgraph_node *node;
  bool remove_p;

  symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  symtab->remove_cgraph_duplication_hook (node_duplication_hook_holder);
  symtab->remove_cgraph_removal_hook (node_removal_hook_holder);

  /* Computing nothrow first removes exception paths, which lets more
     functions be proven not to return and improves later analysis.  */
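  /* For instance (a hypothetical example), in

       void f () { try { g (); } catch (...) {} }

     where g () never returns normally, f () can be proven noreturn only
     once g () is also known nothrow, since that removes the path back
     through the exception handler.  */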
  propagate_nothrow ();
  propagate_malloc ();
  remove_p = propagate_pure_const ();

  /* Cleanup.  */
  FOR_EACH_FUNCTION (node)
    if (has_function_state (node))
      free (get_function_state (node));
  funct_state_vec.release ();

  /* In WPA we use inline summaries for the partitioning process.  */
  if (!flag_wpa)
    ipa_free_fn_summary ();
  return remove_p ? TODO_remove_functions : 0;
}

static bool
gate_pure_const (void)
{
  return flag_ipa_pure_const || in_lto_p;
}

pass_ipa_pure_const::pass_ipa_pure_const (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_pure_const, ctxt,
                      pure_const_generate_summary, /* generate_summary */
                      pure_const_write_summary, /* write_summary */
                      pure_const_read_summary, /* read_summary */
                      NULL, /* write_optimization_summary */
                      NULL, /* read_optimization_summary */
                      NULL, /* stmt_fixup */
                      0, /* function_transform_todo_flags_start */
                      NULL, /* function_transform */
                      NULL), /* variable_transform */
  init_p (false),
  function_insertion_hook_holder (NULL),
  node_duplication_hook_holder (NULL),
  node_removal_hook_holder (NULL)
{
}

ipa_opt_pass_d *
make_pass_ipa_pure_const (gcc::context *ctxt)
{
  return new pass_ipa_pure_const (ctxt);
}

/* Return true if NODE should be skipped by the local pure-const analysis.  */

static bool
skip_function_for_local_pure_const (struct cgraph_node *node)
{
  /* Because we do not schedule pass_fixup_cfg over the whole program after
     early optimizations, we must not promote functions that are called by
     already-processed functions.  */

  if (function_called_by_processed_nodes_p ())
    {
      if (dump_file)
        fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
      return true;
    }
  /* Save some work and do not analyze functions which are interposable and
     do not have any non-interposable aliases.  */
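  /* For instance, a function with default visibility exported from a
     shared library can be interposed at link or load time (e.g. via
     LD_PRELOAD) by a semantically different implementation, so its body
     alone proves nothing.  */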
  if (node->get_availability () <= AVAIL_INTERPOSABLE
      && !node->has_aliases_p ())
    {
      if (dump_file)
        fprintf (dump_file,
                 "Function is interposable; not analyzing.\n");
      return true;
    }
  return false;
}

/* Simple local pass for pure const discovery reusing the analysis from
   ipa_pure_const.  This pass is effective when executed together with
   other optimization passes in the early optimization pass queue.  */
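
/* For instance (a hypothetical example), the local pass can discover that

     static int sq (int i) { return i * i; }

   is const, while

     static int get (const int *p) { return *p; }

   is only pure, since it reads memory reachable through its argument.  */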

namespace {

const pass_data pass_data_local_pure_const =
{
  GIMPLE_PASS, /* type */
  "local-pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_local_pure_const : public gimple_opt_pass
{
public:
  pass_local_pure_const (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_local_pure_const, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_local_pure_const (m_ctxt); }
  virtual bool gate (function *) { return gate_pure_const (); }
  virtual unsigned int execute (function *);

}; // class pass_local_pure_const

unsigned int
pass_local_pure_const::execute (function *fun)
{
  bool changed = false;
  funct_state l;
  bool skip;
  struct cgraph_node *node;

  node = cgraph_node::get (current_function_decl);
  skip = skip_function_for_local_pure_const (node);

  if (!warn_suggest_attribute_const
      && !warn_suggest_attribute_pure
      && skip)
    return 0;

  l = analyze_function (node, false);

  /* Do NORETURN discovery.  */
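  /* For instance (a hypothetical example), a function such as

       void die (const char *msg) { print_it (msg); exit (1); }

     has no edges into its exit block, because exit () is itself
     noreturn, so it is diagnosed and marked noreturn here.  */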
  if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
    {
      warn_function_noreturn (fun->decl);
      if (dump_file)
        fprintf (dump_file, "Function found to be noreturn: %s\n",
                 current_function_name ());

      /* Update declaration and reduce profile to executed once.  */
      TREE_THIS_VOLATILE (current_function_decl) = 1;
      if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
        node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;

      changed = true;
    }

  switch (l->pure_const_state)
    {
    case IPA_CONST:
      if (!TREE_READONLY (current_function_decl))
        {
          warn_function_const (current_function_decl, !l->looping);
          if (dump_file)
            fprintf (dump_file, "Function found to be %sconst: %s\n",
                     l->looping ? "looping " : "",
                     current_function_name ());
        }
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
               && !l->looping)
        {
          if (dump_file)
            fprintf (dump_file, "Function found to be non-looping: %s\n",
                     current_function_name ());
        }
      if (!skip && node->set_const_flag (true, l->looping))
        {
          if (dump_file)
            fprintf (dump_file, "Declaration updated to be %sconst: %s\n",
                     l->looping ? "looping " : "",
                     current_function_name ());
          changed = true;
        }
      break;

    case IPA_PURE:
      if (!DECL_PURE_P (current_function_decl))
        {
          warn_function_pure (current_function_decl, !l->looping);
          if (dump_file)
            fprintf (dump_file, "Function found to be %spure: %s\n",
                     l->looping ? "looping " : "",
                     current_function_name ());
        }
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
               && !l->looping)
        {
          if (dump_file)
            fprintf (dump_file, "Function found to be non-looping: %s\n",
                     current_function_name ());
        }
      if (!skip && node->set_pure_flag (true, l->looping))
        {
          if (dump_file)
            fprintf (dump_file, "Declaration updated to be %spure: %s\n",
                     l->looping ? "looping " : "",
                     current_function_name ());
          changed = true;
        }
      break;

    default:
      break;
    }
  if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
    {
      node->set_nothrow_flag (true);
      changed = true;
      if (dump_file)
        fprintf (dump_file, "Function found to be nothrow: %s\n",
                 current_function_name ());
    }

  if (l->malloc_state == STATE_MALLOC
      && !DECL_IS_MALLOC (current_function_decl))
    {
      node->set_malloc_flag (true);
      if (warn_suggest_attribute_malloc)
        warn_function_malloc (node->decl);
      changed = true;
      if (dump_file)
        fprintf (dump_file, "Function found to be malloc: %s\n",
                 node->name ());
    }

  free (l);
  if (changed)
    return execute_fixup_cfg ();
  else
    return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_local_pure_const (gcc::context *ctxt)
{
  return new pass_local_pure_const (ctxt);
}

/* Emit noreturn warnings.  */

namespace {

const pass_data pass_data_warn_function_noreturn =
{
  GIMPLE_PASS, /* type */
  "*warn_function_noreturn", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_function_noreturn : public gimple_opt_pass
{
public:
  pass_warn_function_noreturn (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return warn_suggest_attribute_noreturn; }
  virtual unsigned int execute (function *fun)
  {
    if (!TREE_THIS_VOLATILE (current_function_decl)
        && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
      warn_function_noreturn (current_function_decl);
    return 0;
  }

}; // class pass_warn_function_noreturn

} // anon namespace

gimple_opt_pass *
make_pass_warn_function_noreturn (gcc::context *ctxt)
{
  return new pass_warn_function_noreturn (ctxt);
}

/* Simple local pass for nothrow discovery reusing the analysis from
   ipa_pure_const.  This pass is effective when executed together with
   other optimization passes in the early optimization pass queue.  */
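
/* For instance (a hypothetical example),

     int add (int a, int b) { return a + b; }

   contains no statement that can throw externally, so this local pass
   can set TREE_NOTHROW on it long before the IPA propagation runs.  */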

namespace {

const pass_data pass_data_nothrow =
{
  GIMPLE_PASS, /* type */
  "nothrow", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_nothrow : public gimple_opt_pass
{
public:
  pass_nothrow (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_nothrow, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_nothrow (m_ctxt); }
  virtual bool gate (function *) { return optimize; }
  virtual unsigned int execute (function *);

}; // class pass_nothrow

unsigned int
pass_nothrow::execute (function *)
{
  struct cgraph_node *node;
  basic_block this_block;

  if (TREE_NOTHROW (current_function_decl))
    return 0;

  node = cgraph_node::get (current_function_decl);

  /* We run during lowering; we cannot really use availability yet.  */
  if (cgraph_node::get (current_function_decl)->get_availability ()
      <= AVAIL_INTERPOSABLE)
    {
      if (dump_file)
        fprintf (dump_file, "Function is interposable;"
                 " not analyzing.\n");
      return 0;
    }

  FOR_EACH_BB_FN (this_block, cfun)
    {
      for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
           !gsi_end_p (gsi);
           gsi_next (&gsi))
        if (stmt_can_throw_external (gsi_stmt (gsi)))
          {
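            /* A self-recursive call cannot be the sole source of a
               throw: if every other statement is nothrow, then by
               induction the function as a whole cannot throw, so such
               calls are ignored here.  */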
            if (is_gimple_call (gsi_stmt (gsi)))
              {
                tree callee_t = gimple_call_fndecl (gsi_stmt (gsi));
                if (callee_t && recursive_call_p (current_function_decl,
                                                  callee_t))
                  continue;
              }

            if (dump_file)
              {
                fprintf (dump_file, "Statement can throw: ");
                print_gimple_stmt (dump_file, gsi_stmt (gsi), 0);
              }
            return 0;
          }
    }

  node->set_nothrow_flag (true);

  bool cfg_changed = false;
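  /* Now that the function is known nothrow, EH edges out of
     self-recursive calls are dead; purge them and clean up the CFG
     if that changes anything.  */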
  if (self_recursive_p (node))
    FOR_EACH_BB_FN (this_block, cfun)
      if (gimple *g = last_stmt (this_block))
        if (is_gimple_call (g))
          {
            tree callee_t = gimple_call_fndecl (g);
            if (callee_t
                && recursive_call_p (current_function_decl, callee_t)
                && maybe_clean_eh_stmt (g)
                && gimple_purge_dead_eh_edges (this_block))
              cfg_changed = true;
          }

  if (dump_file)
    fprintf (dump_file, "Function found to be nothrow: %s\n",
             current_function_name ());
  return cfg_changed ? TODO_cleanup_cfg : 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_nothrow (gcc::context *ctxt)
{
  return new pass_nothrow (ctxt);
}
