/* Interprocedural analyses.
   Copyright (C) 2005-2024 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "calls.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "symbol-summary.h"
#include "sreal.h"
#include "ipa-cp.h"
#include "ipa-prop.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-inline.h"
#include "ipa-fnsummary.h"
#include "gimple-pretty-print.h"
#include "ipa-utils.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"
#include "tree-cfgcleanup.h"
#include "options.h"
#include "symtab-clones.h"
#include "attr-fnspec.h"
#include "gimple-range.h"
#include "value-range-storage.h"

/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;

function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;

/* Edge summary for IPA-CP edge information.  */
ipa_edge_args_sum_t *ipa_edge_args_sum;

/* Traits for a hash table for reusing ranges.  */

struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <ipa_vr *>
{
  typedef ipa_vr *value_type;
  typedef const vrange *compare_type;
  static hashval_t
  hash (const ipa_vr *p)
  {
    // This never gets called, except in the verification code, as
    // ipa_get_value_range () calculates the hash itself.  This
    // function is mostly here for completeness' sake.
    Value_Range vr;
    p->get_vrange (vr);
    inchash::hash hstate;
    add_vrange (vr, hstate);
    return hstate.end ();
  }
  static bool
  equal (const ipa_vr *a, const vrange *b)
  {
    return a->equal_p (*b);
  }
  static const bool empty_zero_p = true;
  static void
  mark_empty (ipa_vr *&p)
  {
    p = NULL;
  }
  static bool
  is_empty (const ipa_vr *p)
  {
    return p == NULL;
  }
  static bool
  is_deleted (const ipa_vr *p)
  {
    return p == reinterpret_cast<const ipa_vr *> (1);
  }
  static void
  mark_deleted (ipa_vr *&p)
  {
    p = reinterpret_cast<ipa_vr *> (1);
  }
};

/* Hash table to avoid repeated allocations of equal ranges.  */
static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;

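/* For illustration only: a minimal sketch of how the traits above let
   hash_table deduplicate equal ranges (a hypothetical caller; the real
   lookup lives in ipa_get_value_range, which computes the hash itself):

     // Return a canonical, GC-allocated ipa_vr equal to TMP.
     static ipa_vr *
     canonical_ipa_vr (const vrange &tmp, hashval_t hash)
     {
       ipa_vr **slot
	 = ipa_vr_hash_table->find_slot_with_hash (&tmp, hash, INSERT);
       if (!*slot)
	 *slot = new (ggc_alloc<ipa_vr> ()) ipa_vr (tmp);
       return *slot;
     }
*/
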
/* Holders of ipa cgraph hooks: */
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");

ipa_vr::ipa_vr ()
  : m_storage (NULL),
    m_type (NULL)
{
}

ipa_vr::ipa_vr (const vrange &r)
  : m_storage (ggc_alloc_vrange_storage (r)),
    m_type (r.type ())
{
}

bool
ipa_vr::equal_p (const vrange &r) const
{
  gcc_checking_assert (!r.undefined_p ());
  return (types_compatible_p (m_type, r.type ()) && m_storage->equal_p (r));
}

bool
ipa_vr::equal_p (const ipa_vr &o) const
{
  if (!known_p ())
    return !o.known_p ();

  if (!types_compatible_p (m_type, o.m_type))
    return false;

  Value_Range r;
  o.get_vrange (r);
  return m_storage->equal_p (r);
}

void
ipa_vr::get_vrange (Value_Range &r) const
{
  r.set_type (m_type);
  m_storage->get_vrange (r, m_type);
}

void
ipa_vr::set_unknown ()
{
  if (m_storage)
    ggc_free (m_storage);

  m_storage = NULL;
}

void
ipa_vr::streamer_read (lto_input_block *ib, data_in *data_in)
{
  struct bitpack_d bp = streamer_read_bitpack (ib);
  bool known = bp_unpack_value (&bp, 1);
  if (known)
    {
      Value_Range vr;
      streamer_read_value_range (ib, data_in, vr);
      if (!m_storage || !m_storage->fits_p (vr))
	{
	  if (m_storage)
	    ggc_free (m_storage);
	  m_storage = ggc_alloc_vrange_storage (vr);
	}
      m_storage->set_vrange (vr);
      m_type = vr.type ();
    }
  else
    {
      m_storage = NULL;
      m_type = NULL;
    }
}

void
ipa_vr::streamer_write (output_block *ob) const
{
  struct bitpack_d bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, !!m_storage, 1);
  streamer_write_bitpack (&bp);
  if (m_storage)
    {
      Value_Range vr (m_type);
      m_storage->get_vrange (vr, m_type);
      streamer_write_vrange (ob, vr);
    }
}

void
ipa_vr::dump (FILE *out) const
{
  if (known_p ())
    {
      Value_Range vr (m_type);
      m_storage->get_vrange (vr, m_type);
      vr.dump (out);
    }
  else
    fprintf (out, "NO RANGE");
}

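/* For illustration only: a minimal sketch of the intended ipa_vr life
   cycle, assuming a caller that already has a Value_Range VR and a dump
   FILE *F (both hypothetical):

     ipa_vr ivr (vr);		// copy VR into GC'ed vrange storage
     if (ivr.known_p ())
       ivr.dump (f);		// print the stored range to F
     ivr.set_unknown ();	// release the storage again
*/
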
// These stubs are because we use an ipa_vr in a hash_traits and
// hash-traits.h defines an extern of gt_ggc_mx (T &) instead of
// picking up the gt_ggc_mx (T *) version.
void
gt_pch_nx (ipa_vr *&x)
{
  return gt_pch_nx ((ipa_vr *) x);
}

void
gt_ggc_mx (ipa_vr *&x)
{
  return gt_ggc_mx ((ipa_vr *) x);
}

/* Analysis summary of function call return value.  */
struct GTY(()) ipa_return_value_summary
{
  /* Known value range.
     This needs to be wrapped in a structure due to the specific way
     we allocate ipa_vr.  */
  ipa_vr *vr;
};

/* Function summary for return values.  */
class ipa_return_value_sum_t : public function_summary <ipa_return_value_summary *>
{
public:
  ipa_return_value_sum_t (symbol_table *table, bool ggc):
    function_summary <ipa_return_value_summary *> (table, ggc) { }

  /* Hook that is called by summary when a node is duplicated.  */
  void duplicate (cgraph_node *,
		  cgraph_node *,
		  ipa_return_value_summary *data,
		  ipa_return_value_summary *data2) final override
  {
    *data2 = *data;
  }
};

/* Variable holding the return value summary.  */
static GTY(()) function_summary <ipa_return_value_summary *> *ipa_return_value_sum;


/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize)
	 || !opt_for_fn (node->decl, flag_ipa_cp);
}

/* Return index of the formal whose tree is PTREE in the vector of parameter
   DESCRIPTORS.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
			    tree ptree)
{
  int i, count;

  count = vec_safe_length (descriptors);
  for (i = 0; i < count; i++)
    if ((*descriptors)[i].decl_or_type == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (class ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}

/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor, va_gc> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl_or_type = parm;
      unsigned int cost = estimate_move_cost (TREE_TYPE (parm), true);
      descriptors[param_num].move_cost = cost;
      /* Watch for overflow, move_cost is a bitfield.  */
      gcc_checking_assert (cost == descriptors[param_num].move_cost);
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Print information about the Ith formal parameter of the function
   corresponding to INFO to FILE.  */

void
ipa_dump_param (FILE *file, class ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if ((*info->descriptors)[i].decl_or_type)
    {
      fprintf (file, " ");
      print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
    }
}

/* If necessary, allocate vector of parameter descriptors in info of NODE.
   Return true if they were allocated, false if not.  */

static bool
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  ipa_node_params *info = ipa_node_params_sum->get_create (node);

  if (!info->descriptors && param_count)
    {
      vec_safe_grow_cleared (info->descriptors, param_count, true);
      return true;
    }
  else
    return false;
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  ipa_node_params *info = ipa_node_params_sum->get_create (node);

  if (!info->descriptors
      && ipa_alloc_node_params (node, count_formal_params (node->decl)))
    ipa_populate_param_decls (node, *info->descriptors);
}

/* Print VAL which is extracted from a jump function to F.  */

static void
ipa_print_constant_value (FILE *f, tree val)
{
  print_generic_expr (f, val);

  /* This is in keeping with values_equal_for_ipcp_p.  */
  if (TREE_CODE (val) == ADDR_EXPR
      && (TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL
	  || (TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
	      && DECL_IN_CONSTANT_POOL (TREE_OPERAND (val, 0)))))
    {
      fputs (" -> ", f);
      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
    }
}

/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  ipa_edge_args *args = ipa_edge_args_sum->get (cs);
  int count = ipa_get_cs_argument_count (args);

  for (int i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (args, i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  fprintf (f, "CONST: ");
	  ipa_print_constant_value (f, jump_func->value.constant.value);
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f, jump_func->value.pass_through.operand);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  if (jump_func->value.pass_through.refdesc_decremented)
	    fprintf (f, ", refdesc_decremented");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  if (jump_func->value.ancestor.keep_null)
	    fprintf (f, ", keep_null");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_ELT (*jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      fprintf (f, "type: ");
	      print_generic_expr (f, item->type);
	      fprintf (f, ", ");
	      if (item->jftype == IPA_JF_PASS_THROUGH)
		fprintf (f, "PASS THROUGH: %d,",
			 item->value.pass_through.formal_id);
	      else if (item->jftype == IPA_JF_LOAD_AGG)
		{
		  fprintf (f, "LOAD AGG: %d",
			   item->value.pass_through.formal_id);
		  fprintf (f, " [offset: " HOST_WIDE_INT_PRINT_DEC ", by %s],",
			   item->value.load_agg.offset,
			   item->value.load_agg.by_ref ? "reference"
						       : "value");
		}

	      if (item->jftype == IPA_JF_PASS_THROUGH
		  || item->jftype == IPA_JF_LOAD_AGG)
		{
		  fprintf (f, " op %s",
			   get_tree_code_name (item->value.pass_through.operation));
		  if (item->value.pass_through.operation != NOP_EXPR)
		    {
		      fprintf (f, " ");
		      print_generic_expr (f, item->value.pass_through.operand);
		    }
		}
	      else if (item->jftype == IPA_JF_CONST)
		{
		  fprintf (f, "CONST: ");
		  ipa_print_constant_value (f, item->value.constant);
		}
	      else if (item->jftype == IPA_JF_UNKNOWN)
		fprintf (f, "UNKNOWN: " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->type)));
	      fprintf (f, "\n");
	    }
	}

      class ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (args, i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, "         Context: ");
	  ctx->dump (dump_file);
	}

      if (jump_func->m_vr)
	{
	  jump_func->m_vr->dump (f);
	  fprintf (f, "\n");
	}
      else
	fprintf (f, "         Unknown VR\n");
    }
}


/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s:\n", node->dump_name ());
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      fprintf (f, "    callsite  %s -> %s : \n",
	       node->dump_name (),
	       cs->callee->dump_name ());
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	fprintf (f, "       no arg info\n");
      else
	ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      class cgraph_indirect_call_info *ii;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	fprintf (f, "       no arg info\n");
      else
	ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

/* Set JFUNC to be a jump function representing that we know really nothing
   about the actual argument.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
}

/* Set DST to be a copy of another jump function SRC (to be used by jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && (TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL
	  || (VAR_P (TREE_OPERAND (constant, 0))
	      && TREE_STATIC (TREE_OPERAND (constant, 0)))))
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}

/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
  jfunc->value.pass_through.refdesc_decremented = false;
}

/* Set JFUNC to be a unary pass through jump function.  */

static void
ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
  jfunc->value.pass_through.refdesc_decremented = false;
}

/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
  jfunc->value.pass_through.refdesc_decremented = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved, bool keep_null)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
  jfunc->value.ancestor.keep_null = keep_null;
}

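/* For illustration only: a hypothetical call site and the jump functions
   the setters above would typically be used to build for it (a sketch, not
   actual GCC output):

     foo (int a, struct S *s)
     {
       bar (a, 7, &s->inner);
     }

   argument 0: simple pass-through of formal 0
	       (ipa_set_jf_simple_pass_through),
   argument 1: constant 7 (ipa_set_jf_constant),
   argument 2: ancestor of formal 1 at the offset of member "inner"
	       (ipa_set_ancestor_jf).  */
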
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type of the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_ref_base_and_extent to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}

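/* For illustration only: the constructor shape assumed by the comment
   above, as a hypothetical C++ sketch:

     struct A { virtual void f (); };
     struct B : A
     {
       B ()		// section 1: ancestor ctors (A::A) run first
       {
	 // section 2: the compiler stores B's VMT pointer(s) here
	 init ();	// section 3: user code, may call virtuals
       }
       void init ();
     };
*/
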
/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}

/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return true if the dynamic type may change
   between the beginning of the function and the point CALL is invoked;
   return false only if it provably cannot.

   Generally functions are not allowed to change the type of such instances,
   but they call destructors.  We assume that methods cannot destroy the
   THIS pointer.  Also, as a special case, constructors and destructors
   may change the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that would require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inlined cdtor is actually working on ARG, but we do not have
     an easy way to do that, so punt on all non-pure cdtors.
     We may also record the types of cdtors and, once we know the type
     of the instance, match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call);
	       block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it has, return true.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (ipa_func_body_info *fbi, tree arg,
				       tree base, tree comp_type, gcall *call,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  if (fbi->aa_walk_budget == 0)
    return false;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  int walked
    = walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
			  &tci, NULL, NULL, fbi->aa_walk_budget);
  if (walked >= 0)
    fbi->aa_walk_budget -= walked;
  else
    fbi->aa_walk_budget = 0;

  if (walked >= 0 && !tci.type_maybe_changed)
    return false;

  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it may have, return true.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (ipa_func_body_info *fbi, tree arg, tree base,
		    tree comp_type, gcall *call,
		    HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (fbi, arg, base, comp_type,
						call, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (ipa_func_body_info *fbi, tree arg, tree comp_type,
			gcall *call)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (fbi, arg, arg, comp_type,
						call, 0);
}

/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}

/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count, true);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}

/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function that we have
   gathered so far but that does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  if (TREE_READONLY (base))
    return true;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  if (paa->parm_modified || fbi->aa_walk_budget == 0)
    return false;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget);
  if (walked < 0)
    {
      modified = true;
      fbi->aa_walk_budget = 0;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}

/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
			    vec<ipa_param_descriptor, va_gc> *descriptors,
			    gimple *stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}

/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  if (paa->ref_modified || fbi->aa_walk_budget == 0)
    return false;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget);
  if (walked < 0)
    {
      modified = true;
      fbi->aa_walk_budget = 0;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (modified)
    paa->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm)))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  if (paa->pt_modified || fbi->aa_walk_budget == 0)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget);
  if (walked < 0)
    {
      fbi->aa_walk_budget = 0;
      modified = true;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading
   data from an aggregate passed as a parameter.

   The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it
   returns false if it cannot prove that the value has not been modified
   before the load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will
   return true even if it cannot prove the value has not been modified; in
   that case it will store false to *GUARANTEED_UNMODIFIED, otherwise it will
   store true there.

   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index,
   offset within the aggregate and whether it is a load from a value passed by
   reference respectively.

   Return false if the offset divided by BITS_PER_UNIT would not fit into an
   unsigned int.  */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor, va_gc> *descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, poly_int64 *size_p,
			bool *by_ref_p, bool *guaranteed_unmodified)
{
  int index;
  HOST_WIDE_INT size;
  bool reverse;
  tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);

  if (!base
      || (*offset_p / BITS_PER_UNIT) > UINT_MAX)
    return false;

  /* We cannot propagate across volatile loads.  */
  if (TREE_THIS_VOLATILE (op))
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  if (guaranteed_unmodified)
	    *guaranteed_unmodified = true;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	   void (*<T2e4>) (struct S *) D.1867;
	   struct S * p.1;

	   <bb 2>:
	   p.1_1 = p;
	   D.1867_2 = p.1_1->f;
	   D.1867_2 ();
	   gdp = &p;
	 */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0)
    {
      bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
      if (!data_preserved && !guaranteed_unmodified)
	return false;

      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      if (guaranteed_unmodified)
	*guaranteed_unmodified = data_preserved;
      return true;
    }
  return false;
}

/* If STMT is an assignment that loads a value from a parameter declaration,
   or from an aggregate passed as the parameter either by value or reference,
   return the index of the parameter in ipa_node_params.  Otherwise return -1.

   FBI holds gathered information about the function.  INFO describes
   parameters of the function, STMT is the assignment statement.  If it is a
   memory load from an aggregate, *OFFSET_P is filled with offset within the
   aggregate, and *BY_REF_P specifies whether the aggregate is passed by
   reference.  */

static int
load_from_unmodified_param_or_agg (struct ipa_func_body_info *fbi,
				   class ipa_node_params *info,
				   gimple *stmt,
				   HOST_WIDE_INT *offset_p,
				   bool *by_ref_p)
{
  int index = load_from_unmodified_param (fbi, info->descriptors, stmt);
  poly_int64 size;

  /* Load value from a parameter declaration.  */
  if (index >= 0)
    {
      *offset_p = -1;
      return index;
    }

  if (!gimple_assign_load_p (stmt))
    return -1;

  tree rhs = gimple_assign_rhs1 (stmt);

  /* Skip memory reference containing VIEW_CONVERT_EXPR.  */
  for (tree t = rhs; handled_component_p (t); t = TREE_OPERAND (t, 0))
    if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
      return -1;

  /* Skip memory reference containing bit-field.  */
  if (TREE_CODE (rhs) == BIT_FIELD_REF
      || contains_bitfld_component_ref_p (rhs))
    return -1;

  if (!ipa_load_from_parm_agg (fbi, info->descriptors, stmt, rhs, &index,
			       offset_p, &size, by_ref_p))
    return -1;

  gcc_assert (!maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (rhs))),
			 size));
  if (!*by_ref_p)
    {
      tree param_type = ipa_get_type (info, index);

      if (!param_type || !AGGREGATE_TYPE_P (param_type))
	return -1;
    }
  else if (TREE_THIS_VOLATILE (rhs))
    return -1;

  return index;
}

/* Walk pointer adjustments from OP (such as POINTER_PLUS and ADDR_EXPR)
   to find the original pointer.  Initialize RET to the pointer which results
   from the walk.
   If the offset is known, return true and initialize OFFSET_RET.  */

bool
unadjusted_ptr_and_unit_offset (tree op, tree *ret, poly_int64 *offset_ret)
{
  poly_int64 offset = 0;
  bool offset_known = true;
  int i;

  for (i = 0; i < param_ipa_jump_function_lookups; i++)
    {
      if (TREE_CODE (op) == ADDR_EXPR)
	{
	  poly_int64 extra_offset = 0;
	  tree base = get_addr_base_and_unit_offset (TREE_OPERAND (op, 0),
						     &extra_offset);
	  if (!base)
	    {
	      base = get_base_address (TREE_OPERAND (op, 0));
	      if (TREE_CODE (base) != MEM_REF)
		break;
	      offset_known = false;
	    }
	  else
	    {
	      if (TREE_CODE (base) != MEM_REF)
		break;
	      offset += extra_offset;
	    }
	  op = TREE_OPERAND (base, 0);
	  if (mem_ref_offset (base).to_shwi (&extra_offset))
	    offset += extra_offset;
	  else
	    offset_known = false;
	}
      else if (TREE_CODE (op) == SSA_NAME
	       && !SSA_NAME_IS_DEFAULT_DEF (op))
	{
	  gimple *pstmt = SSA_NAME_DEF_STMT (op);

	  if (gimple_assign_single_p (pstmt))
	    op = gimple_assign_rhs1 (pstmt);
	  else if (is_gimple_assign (pstmt)
		   && gimple_assign_rhs_code (pstmt) == POINTER_PLUS_EXPR)
	    {
	      poly_int64 extra_offset = 0;
	      if (ptrdiff_tree_p (gimple_assign_rhs2 (pstmt),
				  &extra_offset))
		offset += extra_offset;
	      else
		offset_known = false;
	      op = gimple_assign_rhs1 (pstmt);
	    }
	  else
	    break;
	}
      else
	break;
    }
  *ret = op;
  *offset_ret = offset;
  return offset_known;
}

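/* For illustration only: given the hypothetical GIMPLE sequence

     b_2 = &a_1(D)->f;
     c_3 = b_2 + 16;

   walking from c_3 yields the unadjusted pointer a_1(D), and because every
   adjustment is constant here, *OFFSET_RET accumulates the unit offset of
   field f plus 16.  */
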
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
	int a.0;

	a.0_2 = a;
	bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
	int D.2064;

	D.2064_4 = a.1(D) + 4;
	bar (D.2064_4);

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
	int a.0;
	int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters as they are accessed
   in different stages of IPA optimizations.  PARMS_AINFO contains the
   information that is only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  class ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      switch (gimple_assign_rhs_class (stmt))
	{
	case GIMPLE_BINARY_RHS:
	  {
	    tree op2 = gimple_assign_rhs2 (stmt);
	    if (!is_gimple_ip_invariant (op2)
		|| ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
		     != tcc_comparison)
		    && !useless_type_conversion_p (TREE_TYPE (name),
						   TREE_TYPE (op1))))
	      return;

	    ipa_set_jf_arith_pass_through (jfunc, index, op2,
					   gimple_assign_rhs_code (stmt));
	    break;
	  }
	case GIMPLE_SINGLE_RHS:
	  {
	    bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
						       tc_ssa);
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	    break;
	  }
	case GIMPLE_UNARY_RHS:
	  if (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
	    ipa_set_jf_unary_pass_through (jfunc, index,
					   gimple_assign_rhs_code (stmt));
	default:;
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
  offset_int mem_offset;
  if (!base
      || TREE_CODE (base) != MEM_REF
      || !mem_ref_offset (base).is_constant (&mem_offset))
    return;
  offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa),
			 false);
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);

  offset_int mem_offset;
  if (!expr
      || TREE_CODE (expr) != MEM_REF
      || !mem_ref_offset (expr).is_constant (&mem_offset))
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}


/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

     <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

     <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    class ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  gcond *cond = safe_dyn_cast <gcond *> (*gsi_last_bb (cond_bb));
  if (!cond
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm),
		       true);
}

/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}

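/* For illustration only: the record layout this predicate matches is,
   roughly, the Itanium C++ ABI method pointer (hypothetical sketch):

     struct method_ptr
     {
       void (T::*__pfn) ();	// pointer-to-METHOD_TYPE field
       ptrdiff_t __delta;	// integral this-adjustment field
     };
*/
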
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement, and this statement is stored in
   *RHS_STMT.  Otherwise return RHS as it is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs, gimple **rhs_stmt)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
      *rhs_stmt = def_stmt;
    }
  return rhs;
}

/* Simple linked list, describing contents of an aggregate before call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;

  /* Type of the described part of the aggregate.  */
  tree type;

  /* Known constant value or jump function data describing contents.  */
  struct ipa_load_agg_data value;

  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};

/* Add an aggregate content item into a linked list of
   ipa_known_agg_contents_list structure, in which all elements
   are sorted ascendingly by offset.  */

static inline void
add_to_agg_contents_list (struct ipa_known_agg_contents_list **plist,
			  struct ipa_known_agg_contents_list *item)
{
  struct ipa_known_agg_contents_list *list = *plist;

  for (; list; list = list->next)
    {
      if (list->offset >= item->offset)
	break;

      plist = &list->next;
    }

  item->next = list;
  *plist = item;
}

/* Check whether a given aggregate content is clobbered by certain element in
   a linked list of ipa_known_agg_contents_list.  */

static inline bool
clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list *list,
				struct ipa_known_agg_contents_list *item)
{
  for (; list; list = list->next)
    {
      if (list->offset >= item->offset)
	return list->offset < item->offset + item->size;

      if (list->offset + list->size > item->offset)
	return true;
    }

  return false;
}

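/* For illustration only: a minimal sketch of how the two helpers above are
   meant to cooperate while scanning statements before a call (hypothetical
   driver loop; the names below are assumptions of the example):

     struct ipa_known_agg_contents_list *list = NULL, *item;
     ...
     item = XALLOCA (struct ipa_known_agg_contents_list);
     ...fill in item->offset, item->size, item->type and item->value...
     if (!clobber_by_agg_contents_list_p (list, item))
       add_to_agg_contents_list (&list, item);
*/
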
/* Build aggregate jump function from LIST, assuming there are exactly
   VALUE_COUNT entries there and that offset of the passed argument
   is ARG_OFFSET and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int value_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_safe_reserve (jfunc->agg.items, value_count, true);
  for (; list; list = list->next)
    {
      struct ipa_agg_jf_item item;
      tree operand = list->value.pass_through.operand;

      if (list->value.pass_through.formal_id >= 0)
	{
	  /* Content value is derived from some formal parameter.  */
	  if (list->value.offset >= 0)
	    item.jftype = IPA_JF_LOAD_AGG;
	  else
	    item.jftype = IPA_JF_PASS_THROUGH;

	  item.value.load_agg = list->value;
	  if (operand)
	    item.value.pass_through.operand
	      = unshare_expr_without_location (operand);
	}
      else if (operand)
	{
	  /* Content value is known constant.  */
	  item.jftype = IPA_JF_CONST;
	  item.value.constant = unshare_expr_without_location (operand);
	}
      else
	continue;

      item.type = list->type;
      gcc_assert (tree_to_shwi (TYPE_SIZE (list->type)) == list->size);

      item.offset = list->offset - arg_offset;
      gcc_assert ((item.offset % BITS_PER_UNIT) == 0);

      jfunc->agg.items->quick_push (item);
    }
}

/* Given an assignment statement STMT, try to collect information into
   AGG_VALUE that will be used to construct a jump function for the RHS of the
   assignment, from which the content value of an aggregate part comes.

   Besides constant and simple pass-through jump functions, also try to
   identify whether it matches the following pattern that can be described by
   a load-value-from-aggregate jump function, which is a derivative of simple
   pass-through jump function.

     foo (int *p)
     {
       ...

       *(q_5 + 4) = *(p_3(D) + 28) op 1;
       bar (q_5);
     }

   Here the IPA_LOAD_AGG_DATA data structure is informative enough to describe
   constant, simple pass-through and load-value-from-aggregate.  If the value
   is constant, it will be kept in field OPERAND, and field FORMAL_ID is
   set to -1.  For simple pass-through and load-value-from-aggregate, field
   FORMAL_ID specifies the related formal parameter index, and field
   OFFSET can be used to distinguish them, -1 means simple pass-through,
   otherwise means load-value-from-aggregate.  */

static void
analyze_agg_content_value (struct ipa_func_body_info *fbi,
			   struct ipa_load_agg_data *agg_value,
			   gimple *stmt)
{
  tree lhs = gimple_assign_lhs (stmt);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  enum tree_code code;
  int index = -1;

  /* Initialize jump function data for the aggregate part.  */
  memset (agg_value, 0, sizeof (*agg_value));
  agg_value->pass_through.operation = NOP_EXPR;
  agg_value->pass_through.formal_id = -1;
  agg_value->offset = -1;

  if (AGGREGATE_TYPE_P (TREE_TYPE (lhs)) /* TODO: Support aggregate type.  */
      || TREE_THIS_VOLATILE (lhs)
      || TREE_CODE (lhs) == BIT_FIELD_REF
      || contains_bitfld_component_ref_p (lhs))
    return;

  /* Skip SSA copies.  */
  while (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
    {
      if (TREE_CODE (rhs1) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (rhs1))
	break;

      stmt = SSA_NAME_DEF_STMT (rhs1);
      if (!is_gimple_assign (stmt))
	break;

      rhs1 = gimple_assign_rhs1 (stmt);
    }

  if (gphi *phi = dyn_cast <gphi *> (stmt))
    {
      /* Also special case like the following (a is a formal parameter):

	   _12 = *a_11(D).dim[0].stride;
	   ...
	   # iftmp.22_9 = PHI <_12(2), 1(3)>
	   ...
	   parm.6.dim[0].stride = iftmp.22_9;
	   ...
	   __x_MOD_foo (&parm.6, b_31(D));

	 The aggregate function describing parm.6.dim[0].stride is encoded as
	 a PASS-THROUGH jump function with ASSERT_EXPR operation with
	 operand 1 (the constant from the PHI node).  */

      if (gimple_phi_num_args (phi) != 2)
	return;
      tree arg0 = gimple_phi_arg_def (phi, 0);
1929 tree arg1 = gimple_phi_arg_def (gs: phi, index: 1);
1930 tree operand;
1931
1932 if (is_gimple_ip_invariant (arg1))
1933 {
1934 operand = arg1;
1935 rhs1 = arg0;
1936 }
1937 else if (is_gimple_ip_invariant (arg0))
1938 {
1939 operand = arg0;
1940 rhs1 = arg1;
1941 }
1942 else
1943 return;
1944
1945 rhs1 = get_ssa_def_if_simple_copy (rhs: rhs1, rhs_stmt: &stmt);
1946 if (!is_gimple_assign (gs: stmt))
1947 return;
1948
1949 code = ASSERT_EXPR;
1950 agg_value->pass_through.operand = operand;
1951 }
1952 else if (is_gimple_assign (gs: stmt))
1953 {
1954 code = gimple_assign_rhs_code (gs: stmt);
1955 switch (gimple_assign_rhs_class (gs: stmt))
1956 {
1957 case GIMPLE_SINGLE_RHS:
1958 if (is_gimple_ip_invariant (rhs1))
1959 {
1960 agg_value->pass_through.operand = rhs1;
1961 return;
1962 }
1963 code = NOP_EXPR;
1964 break;
1965
1966 case GIMPLE_UNARY_RHS:
1967 /* NOTE: A GIMPLE_UNARY_RHS operation might not be tcc_unary
1968 (truth_not_expr is example), GIMPLE_BINARY_RHS does not imply
1969 tcc_binary, this subtleness is somewhat misleading.
1970
1971 Since tcc_unary is widely used in IPA-CP code to check an operation
1972 with one operand, here we only allow tc_unary operation to avoid
1973 possible problem. Then we can use (opclass == tc_unary) or not to
1974 distinguish unary and binary. */
1975 if (TREE_CODE_CLASS (code) != tcc_unary || CONVERT_EXPR_CODE_P (code))
1976 return;
1977
1978 rhs1 = get_ssa_def_if_simple_copy (rhs: rhs1, rhs_stmt: &stmt);
1979 break;
1980
1981 case GIMPLE_BINARY_RHS:
1982 {
1983 gimple *rhs1_stmt = stmt;
1984 gimple *rhs2_stmt = stmt;
1985 tree rhs2 = gimple_assign_rhs2 (gs: stmt);
1986
1987 rhs1 = get_ssa_def_if_simple_copy (rhs: rhs1, rhs_stmt: &rhs1_stmt);
1988 rhs2 = get_ssa_def_if_simple_copy (rhs: rhs2, rhs_stmt: &rhs2_stmt);
1989
1990 if (is_gimple_ip_invariant (rhs2))
1991 {
1992 agg_value->pass_through.operand = rhs2;
1993 stmt = rhs1_stmt;
1994 }
1995 else if (is_gimple_ip_invariant (rhs1))
1996 {
1997 if (TREE_CODE_CLASS (code) == tcc_comparison)
1998 code = swap_tree_comparison (code);
1999 else if (!commutative_tree_code (code))
2000 return;
2001
2002 agg_value->pass_through.operand = rhs1;
2003 stmt = rhs2_stmt;
2004 rhs1 = rhs2;
2005 }
2006 else
2007 return;
2008
2009 if (TREE_CODE_CLASS (code) != tcc_comparison
2010 && !useless_type_conversion_p (TREE_TYPE (lhs),
2011 TREE_TYPE (rhs1)))
2012 return;
2013 }
2014 break;
2015
2016 default:
2017 return;
2018 }
2019 }
2020 else
2021 return;
2022
2023 if (TREE_CODE (rhs1) != SSA_NAME)
2024 index = load_from_unmodified_param_or_agg (fbi, info: fbi->info, stmt,
2025 offset_p: &agg_value->offset,
2026 by_ref_p: &agg_value->by_ref);
2027 else if (SSA_NAME_IS_DEFAULT_DEF (rhs1))
2028 index = ipa_get_param_decl_index (info: fbi->info, SSA_NAME_VAR (rhs1));
2029
2030 if (index >= 0)
2031 {
2032 if (agg_value->offset >= 0)
2033 agg_value->type = TREE_TYPE (rhs1);
2034 agg_value->pass_through.formal_id = index;
2035 agg_value->pass_through.operation = code;
2036 }
2037 else
2038 agg_value->pass_through.operand = NULL_TREE;
2039}
2040
2041/* If STMT is a memory store to the object whose address is BASE, extract
2042 information (offset, size, and value) into CONTENT, and return true,
2043 otherwise we conservatively assume the whole object is modified with
2044 unknown content, and return false. CHECK_REF means that access to object
2045 is expected to be in form of MEM_REF expression. */
2046
2047static bool
2048extract_mem_content (struct ipa_func_body_info *fbi,
2049 gimple *stmt, tree base, bool check_ref,
2050 struct ipa_known_agg_contents_list *content)
2051{
2052 HOST_WIDE_INT lhs_offset, lhs_size;
2053 bool reverse;
2054
2055 if (!is_gimple_assign (gs: stmt))
2056 return false;
2057
2058 tree lhs = gimple_assign_lhs (gs: stmt);
2059 tree lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset, &lhs_size,
2060 &reverse);
2061 if (!lhs_base)
2062 return false;
2063
2064 if (check_ref)
2065 {
2066 if (TREE_CODE (lhs_base) != MEM_REF
2067 || TREE_OPERAND (lhs_base, 0) != base
2068 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
2069 return false;
2070 }
2071 else if (lhs_base != base)
2072 return false;
2073
2074 content->offset = lhs_offset;
2075 content->size = lhs_size;
2076 content->type = TREE_TYPE (lhs);
2077 content->next = NULL;
2078
2079 analyze_agg_content_value (fbi, agg_value: &content->value, stmt);
2080 return true;
2081}
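
/* As an illustration, for a hypothetical call bar (&s) preceded by the store

     s.f = x_3;

   the function above records the bit offset and size of field f and lets
   analyze_agg_content_value describe x_3, whereas a store through an
   unrelated pointer makes it return false.  */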

/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled with constants or values that are derived from the caller's
   formal parameters in the way described by some kinds of jump functions.  FBI
   is the context of the caller function for interprocedural analysis.  ARG can
   either be an aggregate expression or a pointer to an aggregate.  ARG_TYPE is
   the type of the aggregate, JFUNC is the jump function for the aggregate.  */

static void
determine_known_aggregate_parts (struct ipa_func_body_info *fbi,
				 gcall *call, tree arg,
				 tree arg_type,
				 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL, *all_list = NULL;
  bitmap visited = NULL;
  int item_count = 0, value_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;
  int max_agg_items = opt_for_fn (fbi->node->decl, param_ipa_max_agg_items);

  if (max_agg_items == 0)
    return;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type)))
	      || !POINTER_TYPE_P (TREE_TYPE (arg)))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  bool reverse;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
						  &arg_size, &reverse);
	  if (!arg_base)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      bool reverse;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
					      &arg_size, &reverse);
      if (!arg_base)
	return;

      ao_ref_init (&r, arg);
    }

  /* The second stage traverses the virtual SSA web backwards starting from
     the call statement, looking only at individual dominating virtual
     operands (whose definitions dominate the call); as long as it is
     confident that the content of the aggregate is affected by the
     definition of a virtual operand, it builds a sorted linked list of
     ipa_known_agg_contents_list describing that.  */

  for (tree dom_vuse = gimple_vuse (call);
       dom_vuse && fbi->aa_walk_budget > 0;)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (dom_vuse);

      if (gimple_code (stmt) == GIMPLE_PHI)
	{
	  dom_vuse = get_continuation_for_phi (stmt, &r, true,
					       fbi->aa_walk_budget,
					       &visited, false, NULL, NULL);
	  continue;
	}

      fbi->aa_walk_budget--;
      if (stmt_may_clobber_ref_p_1 (stmt, &r))
	{
	  struct ipa_known_agg_contents_list *content
	    = XALLOCA (struct ipa_known_agg_contents_list);

	  if (!extract_mem_content (fbi, stmt, arg_base, check_ref, content))
	    break;

	  /* Now we get a dominating virtual operand, and need to check
	     whether its value is clobbered by any other dominating one.  */
	  if ((content->value.pass_through.formal_id >= 0
	       || content->value.pass_through.operand)
	      && !clobber_by_agg_contents_list_p (all_list, content)
	      /* Since IPA-CP stores results with unsigned int offsets, we can
		 discard those which would not fit now before we stream them to
		 WPA.  */
	      && (content->offset + content->size - arg_offset
		  <= (HOST_WIDE_INT) UINT_MAX * BITS_PER_UNIT))
	    {
	      struct ipa_known_agg_contents_list *copy
		= XALLOCA (struct ipa_known_agg_contents_list);

	      /* Add to the list consisting of only dominating virtual
		 operands, whose definitions can finally reach the call.  */
	      add_to_agg_contents_list (&list, (*copy = *content, copy));

	      if (++value_count == max_agg_items)
		break;
	    }

	  /* Add to the list consisting of all dominating virtual operands.  */
	  add_to_agg_contents_list (&all_list, content);

	  if (++item_count == 2 * max_agg_items)
	    break;
	}
      dom_vuse = gimple_vuse (stmt);
    }

  if (visited)
    BITMAP_FREE (visited);

  /* The third stage just goes over the list and creates an appropriate vector
     of ipa_agg_jf_item structures out of it, of course only if there are
     any meaningful items to begin with.  */

  if (value_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, value_count, arg_offset, jfunc);
    }
}
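
/* For instance, for a hypothetical caller fragment like

     s.a = 1;
     s.b = p_2(D);
     bar (&s);

   the walk above would record two items: a constant at the offset of field a
   and a pass-through of parameter p at the offset of field b (assuming
   nothing in between clobbers s).  */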


/* Return the Ith param type of the callee associated with call graph
   edge E.  */

tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t && t != void_list_node)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
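
/* For example, for a callee declared as int f (int a, double b), i == 1
   yields double.  For an unprototyped or variadic callee the fallback to
   DECL_ARGUMENTS above may still recover the type; otherwise NULL is
   returned.  */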

/* Return a pointer to an ipa_vr just like TMP, but either find it in
   ipa_vr_hash_table or allocate it in GC memory.  */

static ipa_vr *
ipa_get_value_range (const vrange &tmp)
{
  inchash::hash hstate;
  inchash::add_vrange (tmp, hstate);
  hashval_t hash = hstate.end ();
  ipa_vr **slot = ipa_vr_hash_table->find_slot_with_hash (&tmp, hash, INSERT);
  if (*slot)
    return *slot;

  ipa_vr *vr = new (ggc_alloc<ipa_vr> ()) ipa_vr (tmp);
  *slot = vr;
  return vr;
}
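
/* The net effect is that equal ranges are shared: two calls with ranges that
   compare equal return the very same GC-allocated ipa_vr, so a pointer
   comparison of the results is enough to test range equality.  */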

/* Assign to JF a pointer to a range just like TMP but either fetch a
   copy from ipa_vr_hash_table or allocate a new one in GC memory.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, const vrange &tmp)
{
  jf->m_vr = ipa_get_value_range (tmp);
}

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, const ipa_vr &vr)
{
  Value_Range tmp;
  vr.get_vrange (tmp);
  ipa_set_jfunc_vr (jf, tmp);
}

/* Compute jump functions for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  ipa_node_params *info = ipa_node_params_sum->get (cs->caller);
  ipa_edge_args *args = ipa_edge_args_sum->get_create (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num, true);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num, true);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  class ipa_polymorphic_call_context context (cs->caller->decl,
						      arg, cs->call_stmt,
						      &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt,
				    &fbi->aa_walk_budget);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      Value_Range vr (TREE_TYPE (arg));
      if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  bool addr_nonzero = false;
	  bool strict_overflow = false;

	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && get_range_query (cfun)->range_of_expr (vr, arg, cs->call_stmt)
	      && vr.nonzero_p ())
	    addr_nonzero = true;
	  else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
	    addr_nonzero = true;

	  if (addr_nonzero)
	    vr.set_nonzero (TREE_TYPE (arg));

	  unsigned HOST_WIDE_INT bitpos;
	  unsigned align, prec = TYPE_PRECISION (TREE_TYPE (arg));

	  get_pointer_alignment_1 (arg, &align, &bitpos);

	  if (align > BITS_PER_UNIT
	      && opt_for_fn (cs->caller->decl, flag_ipa_bit_cp))
	    {
	      wide_int mask
		= wi::bit_and_not (wi::mask (prec, false, prec),
				   wide_int::from (align / BITS_PER_UNIT - 1,
						   prec, UNSIGNED));
	      wide_int value = wide_int::from (bitpos / BITS_PER_UNIT, prec,
					       UNSIGNED);
	      irange_bitmask bm (value, mask);
	      if (!addr_nonzero)
		vr.set_varying (TREE_TYPE (arg));
	      irange &r = as_a <irange> (vr);
	      r.update_bitmask (bm);
	      ipa_set_jfunc_vr (jfunc, vr);
	    }
	  else if (addr_nonzero)
	    ipa_set_jfunc_vr (jfunc, vr);
	  else
	    gcc_assert (!jfunc->m_vr);
	}
      else
	{
	  if (param_type
	      && Value_Range::supports_type_p (TREE_TYPE (arg))
	      && Value_Range::supports_type_p (param_type)
	      && irange::supports_p (TREE_TYPE (arg))
	      && irange::supports_p (param_type)
	      && get_range_query (cfun)->range_of_expr (vr, arg, cs->call_stmt)
	      && !vr.undefined_p ())
	    {
	      Value_Range resvr (vr);
	      range_cast (resvr, param_type);
	      if (!resvr.undefined_p () && !resvr.varying_p ())
		ipa_set_jfunc_vr (jfunc, resvr);
	      else
		gcc_assert (!jfunc->m_vr);
	    }
	  else
	    gcc_assert (!jfunc->m_vr);
	}

      if (is_gimple_ip_invariant (arg)
	  || (VAR_P (arg)
	      && is_global_var (arg)
	      && TREE_READONLY (arg)))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >= 0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for-cycle.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
		}
	    }
	  else
	    {
	      gimple *stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call,
						    as_a <gphi *> (stmt));
	    }
	}

      /* If ARG is a pointer, we cannot use its type to determine the type of
	 the aggregate passed (because type conversions are ignored in
	 gimple).  Usually we can safely get the type from the function
	 declaration, but in the case of K&R prototypes or variadic functions
	 we can try our luck with the type of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object,
	 we might do better to work out the type based on the memory stores
	 we find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_known_aggregate_parts (fbi, call, arg, param_type, jfunc);
    }
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}
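
/* For instance, for a hypothetical call foo (x_1(D), 4, &g) in a function
   whose first parameter is x, the loop above would typically produce a
   simple pass-through jump function for the first argument, a constant jump
   function for the second, and, for the third, known aggregate parts of g
   whenever g is filled from constants or parameters before the call.  */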

/* Compute jump functions for all edges - both direct and indirect - outgoing
   from BB.  */

static void
ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
{
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  int i;
  struct cgraph_edge *cs;

  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
    {
      struct cgraph_node *callee = cs->callee;

      if (callee)
	{
	  callee = callee->ultimate_alias_target ();
	  /* We do not need to bother analyzing calls to unknown functions
	     unless they may become known during lto/whopr.  */
	  if (!callee->definition && !flag_lto
	      && !gimple_call_fnspec (cs->call_stmt).known_p ())
	    continue;
	}
      ipa_compute_jump_functions_for_edge (fbi, cs);
    }
}

/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, fld, ptr_field, delta_field;
  tree ref_field = NULL_TREE;
  tree ref_offset = NULL_TREE;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }

  if (TREE_CODE (rhs) == MEM_REF)
    {
      ref_offset = TREE_OPERAND (rhs, 1);
      if (ref_field && integer_nonzerop (ref_offset))
	return NULL_TREE;
    }
  else if (!ref_field)
    return NULL_TREE;

  if (TREE_CODE (rhs) == MEM_REF
      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (rhs, 0)))
    {
      rhs = TREE_OPERAND (rhs, 0);
      if (TREE_CODE (SSA_NAME_VAR (rhs)) != PARM_DECL
	  || !type_like_member_ptr_p (TREE_TYPE (TREE_TYPE (rhs)), &ptr_field,
				      &delta_field))
	return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (rhs) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (rhs, 0)) == ADDR_EXPR)
	rhs = TREE_OPERAND (TREE_OPERAND (rhs, 0), 0);
      if (TREE_CODE (rhs) != PARM_DECL
	  || !type_like_member_ptr_p (TREE_TYPE (rhs), &ptr_field,
				      &delta_field))
	return NULL_TREE;
    }

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;

  if (ref_field)
    {
      if (ref_field != fld)
	return NULL_TREE;
    }
  else if (!tree_int_cst_equal (byte_position (fld), ref_offset))
    return NULL_TREE;

  if (offset_p)
    *offset_p = int_bit_position (fld);
  return rhs;
}

/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}

/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.  If POLYMORPHIC is true, record it as a
   destination of a polymorphic call.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index,
		     gcall *stmt, bool polymorphic)
{
  struct cgraph_edge *cs;

  cs = node->get_edge (stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  cs->indirect_info->guaranteed_unmodified = 0;
  ipa_node_params *info = ipa_node_params_sum->get (node);
  ipa_set_param_used_by_indirect_call (info, param_index, true);
  if (cs->indirect_info->polymorphic || polymorphic)
    ipa_set_param_used_by_polymorphic_call (info, param_index, true);
  return cs;
}

/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  Look up the documentation of macro
   TARGET_PTRMEMFUNC_VBIT_LOCATION for details.  An example of such a pattern
   is the gimple dump below, the call is on the last line:

     <bb 2>:
       f$__delta_5 = f.__delta;
       f$__pfn_24 = f.__pfn;

   or
     <bb 2>:
       f$__delta_5 = MEM[(struct  *)&f];
       f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and a few lines below:

     <bb 5>
       D.2496_3 = (int) f$__pfn_24;
       D.2497_4 = D.2496_3 & 1;
       if (D.2497_4 != 0)
	 goto <bb 3>;
       else
	 goto <bb 4>;

     <bb 6>:
       D.2500_7 = (unsigned int) f$__delta_5;
       D.2501_8 = &S + D.2500_7;
       D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
       D.2503_10 = *D.2502_9;
       D.2504_12 = f$__pfn_24 + -1;
       D.2505_13 = (unsigned int) D.2504_12;
       D.2506_14 = D.2503_10 + D.2505_13;
       D.2507_15 = *D.2506_14;
       iftmp.11_16 = (String:: *) D.2507_15;

     <bb 7>:
       # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
       D.2500_19 = (unsigned int) f$__delta_5;
       D.2508_20 = &S + D.2500_19;
       D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */

static void
ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
				tree target)
{
  class ipa_node_params *info = fbi->info;
  HOST_WIDE_INT offset;
  bool by_ref;

  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      int index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
	ipa_note_param_call (fbi->node, index, call, false);
      return;
    }

  int index;
  gimple *def = SSA_NAME_DEF_STMT (target);
  bool guaranteed_unmodified;
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg (fbi, info->descriptors, def,
				 gimple_assign_rhs1 (def), &index, &offset,
				 NULL, &by_ref, &guaranteed_unmodified))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
						    call, false);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  tree n1 = PHI_ARG_DEF (def, 0);
  tree n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  gimple *d1 = SSA_NAME_DEF_STMT (n1);
  gimple *d2 = SSA_NAME_DEF_STMT (n2);

  tree rec;
  basic_block bb, virt_bb;
  basic_block join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
	return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_succ (virt_bb) != join)
    return;

  if (single_pred (virt_bb) != bb)
    {
      /* In cases when the distinction between a normal and a virtual
	 function is encoded in the delta field, the load of the
	 actual non-virtual function pointer can be in its own BB.  */

      if (!single_pred_p (bb) || !single_succ_p (bb))
	return;
      bb = single_pred (bb);
      if (bb != single_pred (virt_bb))
	return;
    }

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  gcond *branch = safe_dyn_cast <gcond *> (*gsi_last_bb (bb));
  if (!branch)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  tree cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  tree rec2;
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta),
					     NULL);
  if (rec != rec2)
    return;

  if (TREE_CODE (rec) == SSA_NAME)
    {
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (rec));
      if (index < 0
	  || !parm_ref_data_preserved_p (fbi, index, call,
					 gimple_assign_rhs1 (def)))
	return;
      by_ref = true;
    }
  else
    {
      index = ipa_get_param_decl_index (info, rec);
      if (index < 0
	  || !parm_preserved_before_stmt_p (fbi, index, call, rec))
	return;
      by_ref = false;
    }

  struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
						call, false);
  cs->indirect_info->offset = offset;
  cs->indirect_info->agg_contents = 1;
  cs->indirect_info->member_ptr = 1;
  cs->indirect_info->by_ref = by_ref;
  cs->indirect_info->guaranteed_unmodified = 1;

  return;
}

/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
   object referenced in the expression is a formal parameter of the caller
   FBI->node (described by FBI->info), create a call note for the
   statement.  */

static void
ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
			       gcall *call, tree target)
{
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  class ipa_node_params *info = fbi->info;
  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
	return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      if (detect_type_change_ssa (fbi, obj, obj_type_ref_class (target),
				  call))
	return;
    }
  else
    {
      gimple *stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
	return;
      index = ipa_get_param_decl_index (info,
					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (fbi, obj, expr, obj_type_ref_class (target),
			      call, anc_offset))
	return;
    }

  struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
						call, true);
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
  ii->otr_type = obj_type_ref_class (target);
  ii->polymorphic = 1;
}

/* Analyze a call statement CALL whether and how it utilizes formal parameters
   of the caller (described by INFO).  PARMS_AINFO is a pointer to a vector
   containing intermediate information about each formal parameter.  */

static void
ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
{
  tree target = gimple_call_fn (call);

  if (!target
      || (TREE_CODE (target) != SSA_NAME
	  && !virtual_method_call_p (target)))
    return;

  struct cgraph_edge *cs = fbi->node->get_edge (call);
  /* If we previously turned the call into a direct call, there is
     no need to analyze.  */
  if (cs && !cs->indirect_unknown_callee)
    return;

  if (cs->indirect_info->polymorphic && flag_devirtualize)
    {
      tree instance;
      tree target = gimple_call_fn (call);
      ipa_polymorphic_call_context context (current_function_decl,
					    target, call, &instance);

      gcc_checking_assert (cs->indirect_info->otr_type
			   == obj_type_ref_class (target));
      gcc_checking_assert (cs->indirect_info->otr_token
			   == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));

      cs->indirect_info->vptr_changed
	= !context.get_dynamic_type (instance,
				     OBJ_TYPE_REF_OBJECT (target),
				     obj_type_ref_class (target), call,
				     &fbi->aa_walk_budget);
      cs->indirect_info->context = context;
    }

  if (TREE_CODE (target) == SSA_NAME)
    ipa_analyze_indirect_call_uses (fbi, call, target);
  else if (virtual_method_call_p (target))
    ipa_analyze_virtual_call_uses (fbi, call, target);
}


/* Analyze the call statement STMT with respect to formal parameters (described
   in INFO) of caller given by FBI->NODE.  Currently it only checks whether
   formal parameters are called.  */

static void
ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
{
  if (is_gimple_call (stmt))
    ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
}

/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
   If OP is a parameter declaration, mark it as used in the info structure
   passed in DATA.  */

static bool
visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
{
  class ipa_node_params *info = (class ipa_node_params *) data;

  op = get_base_address (op);
  if (op
      && TREE_CODE (op) == PARM_DECL)
    {
      int index = ipa_get_param_decl_index (info, op);
      gcc_assert (index >= 0);
      ipa_set_param_used (info, index, true);
    }

  return false;
}

/* Scan the statements in BB and inspect the uses of formal parameters.  Store
   the findings in various structures of the associated ipa_node_params
   structure, such as parameter flags, notes etc.  FBI holds various data about
   the function being analyzed.  */

static void
ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
	continue;

      ipa_analyze_stmt_uses (fbi, stmt);
      walk_stmt_load_store_addr_ops (stmt, fbi->info,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis);
    }
  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis);
}

/* Return true if EXPR is a load from a dereference of SSA_NAME NAME.  */

static bool
load_from_dereferenced_name (tree expr, tree name)
{
  tree base = get_base_address (expr);
  return (TREE_CODE (base) == MEM_REF
	  && TREE_OPERAND (base, 0) == name);
}
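
/* For example, with NAME being p_1, the function above accepts loads such as
   *p_1 or MEM[(struct s *)p_1].f, but not p_1 itself or a load from a
   different pointer.  */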

/* Calculate controlled uses of parameters of NODE.  */

static void
ipa_analyze_controlled_uses (struct cgraph_node *node)
{
  ipa_node_params *info = ipa_node_params_sum->get (node);

  for (int i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      int call_uses = 0;
      bool load_dereferenced = false;

      /* For SSA regs see if parameter is used.  For non-SSA we compute
	 the flag during modification analysis.  */
      if (is_gimple_reg (parm))
	{
	  tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
				       parm);
	  if (ddef && !has_zero_uses (ddef))
	    {
	      imm_use_iterator imm_iter;
	      gimple *stmt;

	      ipa_set_param_used (info, i, true);
	      FOR_EACH_IMM_USE_STMT (stmt, imm_iter, ddef)
		{
		  if (is_gimple_debug (stmt))
		    continue;

		  int all_stmt_uses = 0;
		  use_operand_p use_p;
		  FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
		    all_stmt_uses++;

		  if (is_gimple_call (stmt))
		    {
		      if (gimple_call_internal_p (stmt))
			{
			  call_uses = IPA_UNDESCRIBED_USE;
			  break;
			}
		      int recognized_stmt_uses;
		      if (gimple_call_fn (stmt) == ddef)
			recognized_stmt_uses = 1;
		      else
			recognized_stmt_uses = 0;
		      unsigned arg_count = gimple_call_num_args (stmt);
		      for (unsigned i = 0; i < arg_count; i++)
			{
			  tree arg = gimple_call_arg (stmt, i);
			  if (arg == ddef)
			    recognized_stmt_uses++;
			  else if (load_from_dereferenced_name (arg, ddef))
			    {
			      load_dereferenced = true;
			      recognized_stmt_uses++;
			    }
			}

		      if (recognized_stmt_uses != all_stmt_uses)
			{
			  call_uses = IPA_UNDESCRIBED_USE;
			  break;
			}
		      if (call_uses >= 0)
			call_uses += all_stmt_uses;
		    }
		  else if (gimple_assign_single_p (stmt))
		    {
		      tree rhs = gimple_assign_rhs1 (stmt);
		      if (all_stmt_uses != 1
			  || !load_from_dereferenced_name (rhs, ddef))
			{
			  call_uses = IPA_UNDESCRIBED_USE;
			  break;
			}
		      load_dereferenced = true;
		    }
		  else
		    {
		      call_uses = IPA_UNDESCRIBED_USE;
		      break;
		    }
		}
	    }
	  else
	    call_uses = 0;
	}
      else
	call_uses = IPA_UNDESCRIBED_USE;
      ipa_set_controlled_uses (info, i, call_uses);
      ipa_set_param_load_dereferenced (info, i, load_dereferenced);
    }
}
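
/* For example, if a pointer parameter's default definition appears only as
   an argument of two calls and is never otherwise used, its controlled-use
   count is 2; any use the loop above cannot describe (an internal call, a
   store, arithmetic on the pointer) degrades it to IPA_UNDESCRIBED_USE.  */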

/* Free stuff in BI.  */

static void
free_ipa_bb_info (struct ipa_bb_info *bi)
{
  bi->cg_edges.release ();
  bi->param_aa_statuses.release ();
}

/* Dominator walker driving the analysis.  */

class analysis_dom_walker : public dom_walker
{
public:
  analysis_dom_walker (struct ipa_func_body_info *fbi)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}

  edge before_dom_children (basic_block) final override;

private:
  struct ipa_func_body_info *m_fbi;
};

edge
analysis_dom_walker::before_dom_children (basic_block bb)
{
  ipa_analyze_params_uses_in_bb (m_fbi, bb);
  ipa_compute_jump_functions_for_bb (m_fbi, bb);
  return NULL;
}

/* Release body info FBI.  */

void
ipa_release_body_info (struct ipa_func_body_info *fbi)
{
  int i;
  struct ipa_bb_info *bi;

  FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi->bb_infos.release ();
}

/* Initialize the array describing properties of formal parameters
   of NODE, analyze their uses and compute jump functions associated
   with actual arguments of calls from within NODE.  */

void
ipa_analyze_node (struct cgraph_node *node)
{
  struct ipa_func_body_info fbi;
  class ipa_node_params *info;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = ipa_node_params_sum->get_create (node);

  if (info->analysis_done)
    return;
  info->analysis_done = 1;

  if (ipa_func_spec_opts_forbid_analysis_p (node)
      || (count_formal_params (node->decl)
	  >= (1 << IPA_PROP_ARG_INDEX_LIMIT_BITS)))
    {
      gcc_assert (!ipa_get_param_count (info));
      return;
    }

  struct function *func = DECL_STRUCT_FUNCTION (node->decl);
  push_cfun (func);
  calculate_dominance_info (CDI_DOMINATORS);
  ipa_initialize_node_params (node);
  ipa_analyze_controlled_uses (node);

  fbi.node = node;
  fbi.info = info;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
  fbi.param_count = ipa_get_param_count (info);
  fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);

  for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  enable_ranger (cfun, false);
  analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  disable_ranger (cfun);

  ipa_release_body_info (&fbi);
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();
}

/* Update the jump functions associated with call graph edge E when the call
   graph edge CS is being inlined, assuming that E->caller is already (possibly
   indirectly) inlined into CS->callee and that E has not been inlined.  */

static void
update_jump_functions_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_edge *e)
{
  ipa_edge_args *top = ipa_edge_args_sum->get (cs);
  ipa_edge_args *args = ipa_edge_args_sum->get (e);
  if (!args)
    return;
  int count = ipa_get_cs_argument_count (args);
  int i;

  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
      class ipa_polymorphic_call_context *dst_ctx
	= ipa_get_ith_polymorhic_call_context (args, i);

      if (dst->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  FOR_EACH_VEC_ELT (*dst->agg.items, j, item)
	    {
	      int dst_fid;
	      struct ipa_jump_func *src;

	      if (item->jftype != IPA_JF_PASS_THROUGH
		  && item->jftype != IPA_JF_LOAD_AGG)
		continue;

	      dst_fid = item->value.pass_through.formal_id;
	      if (!top || dst_fid >= ipa_get_cs_argument_count (top))
		{
		  item->jftype = IPA_JF_UNKNOWN;
		  continue;
		}

	      item->value.pass_through.formal_id = -1;
	      src = ipa_get_ith_jump_func (top, dst_fid);
	      if (src->type == IPA_JF_CONST)
		{
		  if (item->jftype == IPA_JF_PASS_THROUGH
		      && item->value.pass_through.operation == NOP_EXPR)
		    {
		      item->jftype = IPA_JF_CONST;
		      item->value.constant = src->value.constant.value;
		      continue;
		    }
		}
	      else if (src->type == IPA_JF_PASS_THROUGH
		       && src->value.pass_through.operation == NOP_EXPR)
		{
		  if (item->jftype == IPA_JF_PASS_THROUGH
		      || !item->value.load_agg.by_ref
		      || src->value.pass_through.agg_preserved)
		    item->value.pass_through.formal_id
		      = src->value.pass_through.formal_id;
		}
	      else if (src->type == IPA_JF_ANCESTOR)
		{
		  if (item->jftype == IPA_JF_PASS_THROUGH)
		    {
		      if (!src->value.ancestor.offset)
			item->value.pass_through.formal_id
			  = src->value.ancestor.formal_id;
		    }
		  else if (src->value.ancestor.agg_preserved)
		    {
		      gcc_checking_assert (item->value.load_agg.by_ref);

		      item->value.pass_through.formal_id
			= src->value.ancestor.formal_id;
		      item->value.load_agg.offset
			+= src->value.ancestor.offset;
		    }
		}

	      if (item->value.pass_through.formal_id < 0)
		item->jftype = IPA_JF_UNKNOWN;
	    }
	}

      if (!top)
	{
	  ipa_set_jf_unknown (dst);
	  continue;
	}

      if (dst->type == IPA_JF_ANCESTOR)
	{
	  struct ipa_jump_func *src;
	  int dst_fid = dst->value.ancestor.formal_id;
	  class ipa_polymorphic_call_context *src_ctx
	    = ipa_get_ith_polymorhic_call_context (top, dst_fid);

	  /* Variable number of arguments can cause havoc if we try to access
	     one that does not exist in the inlined edge.  So make sure we
	     don't.  */
	  if (dst_fid >= ipa_get_cs_argument_count (top))
	    {
	      ipa_set_jf_unknown (dst);
	      continue;
	    }

	  src = ipa_get_ith_jump_func (top, dst_fid);

	  if (src_ctx && !src_ctx->useless_p ())
	    {
	      class ipa_polymorphic_call_context ctx = *src_ctx;

	      /* TODO: Make type preserved safe WRT contexts.  */
	      if (!ipa_get_jf_ancestor_type_preserved (dst))
		ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
	      ctx.offset_by (dst->value.ancestor.offset);
	      if (!ctx.useless_p ())
		{
		  if (!dst_ctx)
		    {
		      vec_safe_grow_cleared (args->polymorphic_call_contexts,
					     count, true);
		      dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
		    }

		  dst_ctx->combine_with (ctx);
		}
	    }

	  /* Parameter and argument in ancestor jump function must be pointer
	     type, which means access to aggregate must be by-reference.  */
	  gcc_assert (!src->agg.items || src->agg.by_ref);

	  if (src->agg.items && dst->value.ancestor.agg_preserved)
	    {
	      struct ipa_agg_jf_item *item;
	      int j;

	      /* Currently we do not produce clobber aggregate jump functions,
		 replace with merging when we do.  */
	      gcc_assert (!dst->agg.items);

	      dst->agg.items = vec_safe_copy (src->agg.items);
	      dst->agg.by_ref = src->agg.by_ref;
	      FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
		item->offset -= dst->value.ancestor.offset;
	    }

	  if (src->type == IPA_JF_PASS_THROUGH
	      && src->value.pass_through.operation == NOP_EXPR)
	    {
	      dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
	      dst->value.ancestor.agg_preserved &=
		src->value.pass_through.agg_preserved;
	    }
	  else if (src->type == IPA_JF_ANCESTOR)
	    {
	      dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
	      dst->value.ancestor.offset += src->value.ancestor.offset;
	      dst->value.ancestor.agg_preserved &=
		src->value.ancestor.agg_preserved;
	      dst->value.ancestor.keep_null |= src->value.ancestor.keep_null;
	    }
	  else
	    ipa_set_jf_unknown (dst);
	}
      else if (dst->type == IPA_JF_PASS_THROUGH)
	{
	  struct ipa_jump_func *src;
	  /* We must check range due to calls with variable number of arguments
	     and we cannot combine jump functions with operations.  */
	  if (dst->value.pass_through.operation == NOP_EXPR
	      && (top && dst->value.pass_through.formal_id
		  < ipa_get_cs_argument_count (top)))
	    {
	      int dst_fid = dst->value.pass_through.formal_id;
	      src = ipa_get_ith_jump_func (top, dst_fid);
	      bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
	      class ipa_polymorphic_call_context *src_ctx
		= ipa_get_ith_polymorhic_call_context (top, dst_fid);

	      if (src_ctx && !src_ctx->useless_p ())
		{
		  class ipa_polymorphic_call_context ctx = *src_ctx;

		  /* TODO: Make type preserved safe WRT contexts.  */
		  if (!ipa_get_jf_pass_through_type_preserved (dst))
		    ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
		  if (!ctx.useless_p ())
		    {
		      if (!dst_ctx)
			{
			  vec_safe_grow_cleared
			    (args->polymorphic_call_contexts, count, true);
			  dst_ctx
			    = ipa_get_ith_polymorhic_call_context (args, i);
			}
		      dst_ctx->combine_with (ctx);
		    }
		}
	      switch (src->type)
		{
		case IPA_JF_UNKNOWN:
		  ipa_set_jf_unknown (dst);
		  break;
		case IPA_JF_CONST:
		  {
		    bool rd = ipa_get_jf_pass_through_refdesc_decremented (dst);
		    ipa_set_jf_cst_copy (dst, src);
		    if (rd)
		      ipa_zap_jf_refdesc (dst);
		  }
		  break;

		case IPA_JF_PASS_THROUGH:
		  {
		    int formal_id = ipa_get_jf_pass_through_formal_id (src);
		    enum tree_code operation;
		    operation = ipa_get_jf_pass_through_operation (src);

		    if (operation == NOP_EXPR)
		      {
			bool agg_p;
			agg_p = dst_agg_p
			  && ipa_get_jf_pass_through_agg_preserved (src);
			ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
		      }
		    else if (TREE_CODE_CLASS (operation) == tcc_unary)
		      ipa_set_jf_unary_pass_through (dst, formal_id, operation);
		    else
		      {
			tree operand = ipa_get_jf_pass_through_operand (src);
			ipa_set_jf_arith_pass_through (dst, formal_id, operand,
						       operation);
		      }
		    break;
		  }
		case IPA_JF_ANCESTOR:
		  {
		    bool agg_p;
		    agg_p = dst_agg_p
		      && ipa_get_jf_ancestor_agg_preserved (src);
		    ipa_set_ancestor_jf (dst,
					 ipa_get_jf_ancestor_offset (src),
					 ipa_get_jf_ancestor_formal_id (src),
					 agg_p,
					 ipa_get_jf_ancestor_keep_null (src));
		    break;
		  }
		default:
		  gcc_unreachable ();
		}

	      if (src->agg.items
		  && (dst_agg_p || !src->agg.by_ref))
		{
		  /* Currently we do not produce clobber aggregate jump
		     functions, replace with merging when we do.  */
		  gcc_assert (!dst->agg.items);

		  dst->agg.by_ref = src->agg.by_ref;
		  dst->agg.items = vec_safe_copy (src->agg.items);
		}
	    }
	  else
	    ipa_set_jf_unknown (dst);
	}
    }
}
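
/* As an example of the composition above: if the inlined edge CS passes its
   caller's parameter 2 unchanged to the callee's parameter 0, and E (inside
   the callee) had a simple pass-through of parameter 0, then after inlining
   E's jump function becomes a pass-through of the caller's parameter 2; had
   CS instead passed a constant, E's pass-through would collapse to that
   constant.  */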
3486
3487/* If TARGET is an addr_expr of a function declaration, make it the
3488 (SPECULATIVE)destination of an indirect edge IE and return the edge.
3489 Otherwise, return NULL. */
3490
3491struct cgraph_edge *
3492ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
3493 bool speculative)
3494{
3495 struct cgraph_node *callee;
3496 bool unreachable = false;
3497
3498 if (TREE_CODE (target) == ADDR_EXPR)
3499 target = TREE_OPERAND (target, 0);
3500 if (TREE_CODE (target) != FUNCTION_DECL)
3501 {
3502 target = canonicalize_constructor_val (target, NULL);
3503 if (!target || TREE_CODE (target) != FUNCTION_DECL)
3504 {
3505 /* Member pointer call that goes through a VMT lookup. */
3506 if (ie->indirect_info->member_ptr
3507 /* Or if target is not an invariant expression and we do not
3508 know if it will evaulate to function at runtime.
		 This can happen when folding through &VAR, where &VAR
		 is IP invariant, but VAR itself is not.

		 TODO: Revisit this when GCC 5 is branched.  It seems that
		 the member_ptr check is not needed and that we may try to
		 fold the expression and see if VAR is readonly.  */
	      || !is_gimple_ip_invariant (target))
	    {
	      if (dump_enabled_p ())
		{
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
				   "discovered direct call non-invariant %s\n",
				   ie->caller->dump_name ());
		}
	      return NULL;
	    }

	  if (dump_enabled_p ())
	    {
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
			       "discovered direct call to non-function in %s, "
			       "making it __builtin_unreachable\n",
			       ie->caller->dump_name ());
	    }

	  target = builtin_decl_unreachable ();
	  callee = cgraph_node::get_create (target);
	  unreachable = true;
	}
      else
	callee = cgraph_node::get (target);
    }
  else
    callee = cgraph_node::get (target);

  /* Because may-edges are not explicitly represented and the vtable may be
     external, we may create the first reference to the object in the
     unit.  */
  if (!callee || callee->inlined_to)
    {
      /* We had better make sure we can refer to it.  In the case of static
	 functions we are out of luck, since we already removed its body.  In
	 the case of public functions we may or may not introduce the
	 reference.  */
      if (!canonicalize_constructor_val (target, NULL)
	  || !TREE_PUBLIC (target))
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a known target "
		     "(%s -> %s) but cannot refer to it.  Giving up.\n",
		     ie->caller->dump_name (),
		     ie->callee->dump_name ());
	  return NULL;
	}
      callee = cgraph_node::get_create (target);
    }

  /* If the edge is already speculated, keep it and only report agreement or
     disagreement with the previous speculation in the dump file.  */
  if (speculative && ie->speculative)
    {
      if (dump_file)
	{
	  cgraph_edge *e2 = ie->speculative_call_for_target (callee);
	  if (!e2)
	    fprintf (dump_file, "ipa-prop: Discovered call to a "
		     "speculative target (%s -> %s) but the call is "
		     "already speculated to a different target.  "
		     "Giving up.\n",
		     ie->caller->dump_name (), callee->dump_name ());
	  else
	    fprintf (dump_file,
		     "ipa-prop: Discovered call to a speculative target "
		     "(%s -> %s) which agrees with previous speculation.\n",
		     ie->caller->dump_name (), callee->dump_name ());
	}
      return NULL;
    }

  if (!dbg_cnt (devirt))
    return NULL;

  ipa_check_create_node_params ();

  /* We cannot make edges to inline clones.  It is a bug if someone removed
     the cgraph node too early.  */
  gcc_assert (!callee->inlined_to);

  if (dump_file && !unreachable)
    {
      fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
	       "(%s -> %s), for stmt ",
	       ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
	       speculative ? "speculative" : "known",
	       ie->caller->dump_name (),
	       callee->dump_name ());
      if (ie->call_stmt)
	print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
      else
	fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
    }
  if (dump_enabled_p ())
    {
      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
		       "converting indirect call in %s to direct call to %s\n",
		       ie->caller->dump_name (), callee->dump_name ());
    }
  if (!speculative)
    {
      struct cgraph_edge *orig = ie;
      ie = cgraph_edge::make_direct (ie, callee);
      /* If we resolved a speculative edge, the cost is already up to date for
	 the direct call (adjusted by inline_edge_duplication_hook).  */
      if (ie == orig)
	{
	  ipa_call_summary *es = ipa_call_summaries->get (ie);
	  es->call_stmt_size -= (eni_size_weights.indirect_call_cost
				 - eni_size_weights.call_cost);
	  es->call_stmt_time -= (eni_time_weights.indirect_call_cost
				 - eni_time_weights.call_cost);
	}
    }
  else
    {
      if (!callee->can_be_discarded_p ())
	{
	  cgraph_node *alias;
	  alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
	  if (alias)
	    callee = alias;
	}
      /* make_speculative will update ie's cost to direct call cost.  */
      ie = ie->make_speculative (callee, ie->count.apply_scale (8, 10));
    }

  return ie;
}
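
/* Note that the apply_scale (8, 10) above encodes a heuristic assumption that
   a speculative edge hits its predicted target 80% of the time; it is a fixed
   constant, not something derived from profile feedback.  */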

/* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
   CONSTRUCTOR and return it.  Return NULL if the search fails for some
   reason.  */

static tree
find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
{
  tree type = TREE_TYPE (constructor);
  if (TREE_CODE (type) != ARRAY_TYPE
      && TREE_CODE (type) != RECORD_TYPE)
    return NULL;

  unsigned ix;
  tree index, val;
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
    {
      HOST_WIDE_INT elt_offset;
      if (TREE_CODE (type) == ARRAY_TYPE)
	{
	  offset_int off;
	  tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
	  gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);

	  if (index)
	    {
	      if (TREE_CODE (index) == RANGE_EXPR)
		off = wi::to_offset (TREE_OPERAND (index, 0));
	      else
		off = wi::to_offset (index);
	      if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
		{
		  tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
		  gcc_assert (TREE_CODE (low_bound) == INTEGER_CST);
		  off = wi::sext (off - wi::to_offset (low_bound),
				  TYPE_PRECISION (TREE_TYPE (index)));
		}
	      off *= wi::to_offset (unit_size);
	      /* ??? Handle more than just the first index of a
		 RANGE_EXPR.  */
	    }
	  else
	    off = wi::to_offset (unit_size) * ix;

	  off = wi::lshift (off, LOG2_BITS_PER_UNIT);
	  if (!wi::fits_shwi_p (off) || wi::neg_p (off))
	    continue;
	  elt_offset = off.to_shwi ();
	}
      else if (TREE_CODE (type) == RECORD_TYPE)
	{
	  gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
	  if (DECL_BIT_FIELD (index))
	    continue;
	  elt_offset = int_bit_position (index);
	}
      else
	gcc_unreachable ();

      if (elt_offset > req_offset)
	return NULL;

      if (TREE_CODE (val) == CONSTRUCTOR)
	return find_constructor_constant_at_offset (val,
						    req_offset - elt_offset);

      if (elt_offset == req_offset
	  && is_gimple_reg_type (TREE_TYPE (val))
	  && is_gimple_ip_invariant (val))
	return val;
    }
  return NULL;
}
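
/* For example, given the purely illustrative static initializer

     struct S { long a; void (*f) (void); };
     static const struct S s = { 42, foo };

   on a 64-bit target the field F sits at bit position 64, so a REQ_OFFSET of
   64 returns the ADDR_EXPR &foo, whereas a REQ_OFFSET of 32 points into the
   middle of A and the search above returns NULL.  */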

/* Check whether SCALAR could be used to look up an aggregate interprocedural
   invariant from a static constructor and if so, return it.  Otherwise return
   NULL.  */

tree
ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
{
  if (by_ref)
    {
      if (TREE_CODE (scalar) != ADDR_EXPR)
	return NULL;
      scalar = TREE_OPERAND (scalar, 0);
    }

  if (!VAR_P (scalar)
      || !is_global_var (scalar)
      || !TREE_READONLY (scalar)
      || !DECL_INITIAL (scalar)
      || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
    return NULL;

  return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
}
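
/* Continuing the illustration above, a by-ref lookup such as

     tree val = ipa_find_agg_cst_from_init (build_fold_addr_expr (s_decl),
					     64, true);

   (where S_DECL is the hypothetical VAR_DECL of the variable S) strips the
   ADDR_EXPR, verifies that the variable is a read-only global with a
   CONSTRUCTOR initializer, and delegates the offset search to
   find_constructor_constant_at_offset.  */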

/* Retrieve the value stored in AGG_JFUNC for the given OFFSET, or return
   NULL_TREE if there is none.  BY_REF specifies whether the value has to be
   passed by reference or by value.  SRC_INFO and SRC_NODE describe the node
   whose lattices the items of AGG_JFUNC are relative to.  */

static tree
ipa_find_agg_cst_from_jfunc_items (struct ipa_agg_jump_function *agg_jfunc,
				   ipa_node_params *src_info,
				   cgraph_node *src_node,
				   HOST_WIDE_INT offset, bool by_ref)
{
  if (by_ref != agg_jfunc->by_ref)
    return NULL_TREE;

  for (const ipa_agg_jf_item &item : agg_jfunc->items)
    if (item.offset == offset)
      return ipa_agg_value_from_jfunc (src_info, src_node, &item);

  return NULL_TREE;
}

/* Remove a reference to SYMBOL from the list of references of a node given by
   reference description RDESC.  Return true if the reference has been
   successfully found and removed.  */

static bool
remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
{
  struct ipa_ref *to_del;
  struct cgraph_edge *origin;

  origin = rdesc->cs;
  if (!origin)
    return false;
  to_del = origin->caller->find_reference (symbol, origin->call_stmt,
					   origin->lto_stmt_uid, IPA_REF_ADDR);
  if (!to_del)
    return false;

  to_del->remove_reference ();
  if (dump_file)
    fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
	     origin->caller->dump_name (), symbol->dump_name ());
  return true;
}

/* If JFUNC has a reference description with refcount different from
   IPA_UNDESCRIBED_USE, return the reference description, otherwise return
   NULL.  JFUNC must be a constant jump function.  */

static struct ipa_cst_ref_desc *
jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
  if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
    return rdesc;
  else
    return NULL;
}

/* If the value of constant jump function JFUNC is an address of a function or
   variable declaration, return the associated symbol table node.  Otherwise
   return NULL.  */

static symtab_node *
symtab_node_for_jfunc (struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (jfunc->type == IPA_JF_CONST);
  tree cst = ipa_get_jf_constant (jfunc);
  if (TREE_CODE (cst) != ADDR_EXPR
      || (TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL
	  && TREE_CODE (TREE_OPERAND (cst, 0)) != VAR_DECL))
    return NULL;

  return symtab_node::get (TREE_OPERAND (cst, 0));
}

/* If JFUNC is a constant jump function with a usable rdesc, decrement its
   refcount, and if it hits zero, remove the reference to the described symbol
   from the caller of the edge specified in the rdesc.  Return false if either
   the symbol or the reference could not be found, otherwise return true.  */

static bool
try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc;
  if (jfunc->type == IPA_JF_CONST
      && (rdesc = jfunc_rdesc_usable (jfunc))
      && --rdesc->refcount == 0)
    {
      symtab_node *symbol = symtab_node_for_jfunc (jfunc);
      if (!symbol)
	return false;

      return remove_described_reference (symbol, rdesc);
    }
  return true;
}
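
/* In other words, IPA_UNDESCRIBED_USE effectively means "not tracked":
   jfunc_rdesc_usable refuses such descriptors, so the function above only
   decrements counts, and only removes the IPA_REF_ADDR reference, for
   constants whose uses are fully described.  */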

/* Try to find a destination for indirect edge IE that corresponds to a simple
   call or a call of a member function pointer and where the destination is a
   pointer formal parameter described by jump function JFUNC.  TARGET_TYPE is
   the type of the parameter to which the result of JFUNC is passed.  If it
   can be determined, return the newly direct edge, otherwise return NULL.
   NEW_ROOT and NEW_ROOT_INFO are the node and its info that JFUNC lattices
   are relative to.  */

static struct cgraph_edge *
try_make_edge_direct_simple_call (struct cgraph_edge *ie,
				  struct ipa_jump_func *jfunc, tree target_type,
				  struct cgraph_node *new_root,
				  class ipa_node_params *new_root_info)
{
  struct cgraph_edge *cs;
  tree target = NULL_TREE;
  bool agg_contents = ie->indirect_info->agg_contents;
  tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
  if (agg_contents)
    {
      if (scalar)
	target = ipa_find_agg_cst_from_init (scalar, ie->indirect_info->offset,
					     ie->indirect_info->by_ref);
      if (!target && ie->indirect_info->guaranteed_unmodified)
	target = ipa_find_agg_cst_from_jfunc_items (&jfunc->agg, new_root_info,
						    new_root,
						    ie->indirect_info->offset,
						    ie->indirect_info->by_ref);
    }
  else
    target = scalar;
  if (!target)
    return NULL;
  cs = ipa_make_edge_direct_to_target (ie, target);

  if (cs && !agg_contents)
    {
      bool ok;
      gcc_checking_assert (cs->callee
			   && (cs != ie
			       || jfunc->type != IPA_JF_CONST
			       || !symtab_node_for_jfunc (jfunc)
			       || cs->callee == symtab_node_for_jfunc (jfunc)));
      ok = try_decrement_rdesc_refcount (jfunc);
      gcc_checking_assert (ok);
    }

  return cs;
}

/* Return the target to be used in cases of impossible devirtualization.  IE
   and TARGET (the latter can be NULL) are dumped when dumping is enabled.  */

tree
ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
{
  if (dump_file)
    {
      if (target)
	fprintf (dump_file,
		 "Type inconsistent devirtualization: %s->%s\n",
		 ie->caller->dump_name (),
		 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
      else
	fprintf (dump_file,
		 "No devirtualization target in %s\n",
		 ie->caller->dump_name ());
    }
  tree new_target = builtin_decl_unreachable ();
  cgraph_node::get_create (new_target);
  return new_target;
}

/* Try to find a destination for indirect edge IE that corresponds to a
   virtual call based on a formal parameter which is described by jump
   function JFUNC and if it can be determined, make it direct and return the
   direct edge.  Otherwise, return NULL.  CTX describes the polymorphic
   context that the parameter the call is based on brings along with it.
   NEW_ROOT and NEW_ROOT_INFO are the node and its info that JFUNC lattices
   are relative to.  */

static struct cgraph_edge *
try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
				   struct ipa_jump_func *jfunc,
				   class ipa_polymorphic_call_context ctx,
				   struct cgraph_node *new_root,
				   class ipa_node_params *new_root_info)
{
  tree target = NULL;
  bool speculative = false;

  if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
    return NULL;

  gcc_assert (!ie->indirect_info->by_ref);

  /* Try to do a lookup via a known virtual table pointer value.  */
  if (!ie->indirect_info->vptr_changed
      || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
    {
      tree vtable;
      unsigned HOST_WIDE_INT offset;
      tree t = NULL_TREE;
      if (jfunc->type == IPA_JF_CONST)
	t = ipa_find_agg_cst_from_init (ipa_get_jf_constant (jfunc),
					ie->indirect_info->offset, true);
      if (!t)
	t = ipa_find_agg_cst_from_jfunc_items (&jfunc->agg, new_root_info,
					       new_root,
					       ie->indirect_info->offset, true);
      if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
	{
	  bool can_refer;
	  t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
						 vtable, offset, &can_refer);
	  if (can_refer)
	    {
	      if (!t
		  || fndecl_built_in_p (t, BUILT_IN_UNREACHABLE,
					BUILT_IN_UNREACHABLE_TRAP)
		  || !possible_polymorphic_call_target_p
		       (ie, cgraph_node::get (t)))
		{
		  /* Do not speculate builtin_unreachable, it is stupid!  */
		  if (!ie->indirect_info->vptr_changed)
		    target = ipa_impossible_devirt_target (ie, target);
		  else
		    target = NULL;
		}
	      else
		{
		  target = t;
		  speculative = ie->indirect_info->vptr_changed;
		}
	    }
	}
    }

  ipa_polymorphic_call_context ie_context (ie);
  vec <cgraph_node *> targets;
  bool final;

  ctx.offset_by (ie->indirect_info->offset);
  if (ie->indirect_info->vptr_changed)
    ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
				      ie->indirect_info->otr_type);
  ctx.combine_with (ie_context, ie->indirect_info->otr_type);
  targets = possible_polymorphic_call_targets
    (ie->indirect_info->otr_type,
     ie->indirect_info->otr_token,
     ctx, &final);
  if (final && targets.length () <= 1)
    {
      speculative = false;
      if (targets.length () == 1)
	target = targets[0]->decl;
      else
	target = ipa_impossible_devirt_target (ie, NULL_TREE);
    }
  else if (!target
	   && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
	   && !ie->speculative && ie->maybe_hot_p ())
    {
      cgraph_node *n;
      n = try_speculative_devirtualization (ie->indirect_info->otr_type,
					    ie->indirect_info->otr_token,
					    ie->indirect_info->context);
      if (n)
	{
	  target = n->decl;
	  speculative = true;
	}
    }

  if (target)
    {
      if (!possible_polymorphic_call_target_p
	    (ie, cgraph_node::get_create (target)))
	{
	  if (speculative)
	    return NULL;
	  target = ipa_impossible_devirt_target (ie, target);
	}
      return ipa_make_edge_direct_to_target (ie, target, speculative);
    }
  else
    return NULL;
}
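
/* To summarize, the function above attempts three strategies in order: a
   lookup through a known vtable pointer value, resolution through the
   combined polymorphic call context (which may prove there is at most one
   possible target), and, for hot calls only, purely speculative
   devirtualization.  */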

/* Update the param called notes associated with NODE when CS is being inlined,
   assuming NODE is (potentially indirectly) inlined into CS->callee.
   Moreover, if the callee is discovered to be constant, create a new cgraph
   edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
   unless NEW_EDGES is NULL.  Return true iff any new edges were created.  */

static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_node *node,
				      vec<cgraph_edge *> *new_edges)
{
  class ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  struct cgraph_node *new_root;
  class ipa_node_params *new_root_info, *inlined_node_info;
  bool res = false;

  ipa_check_create_edge_args ();
  top = ipa_edge_args_sum->get (cs);
  new_root = cs->caller->inlined_to
	     ? cs->caller->inlined_to : cs->caller;
  new_root_info = ipa_node_params_sum->get (new_root);
  inlined_node_info = ipa_node_params_sum->get (cs->callee->function_symbol ());

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      class cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;
      int param_index;

      next_ie = ie->next_callee;

      if (ici->param_index == -1)
	continue;

      /* We must check the argument count because of calls with a variable
	 number of arguments.  */
      if (!top || ici->param_index >= ipa_get_cs_argument_count (top))
	{
	  ici->param_index = -1;
	  continue;
	}

      param_index = ici->param_index;
      jfunc = ipa_get_ith_jump_func (top, param_index);

      auto_vec<cgraph_node *, 4> spec_targets;
      if (ie->speculative)
	for (cgraph_edge *direct = ie->first_speculative_call_target ();
	     direct;
	     direct = direct->next_speculative_call_target ())
	  spec_targets.safe_push (direct->callee);

      if (!opt_for_fn (node->decl, flag_indirect_inlining))
	new_direct_edge = NULL;
      else if (ici->polymorphic)
	{
	  ipa_polymorphic_call_context ctx;
	  ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
	  new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx,
							       new_root,
							       new_root_info);
	}
      else
	{
	  tree target_type = ipa_get_type (inlined_node_info, param_index);
	  new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
							      target_type,
							      new_root,
							      new_root_info);
	}

      /* If speculation was removed, then we need to do nothing.  */
      if (new_direct_edge && new_direct_edge != ie
	  && spec_targets.contains (new_direct_edge->callee))
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  res = true;
	  if (!new_direct_edge->speculative)
	    continue;
	}
      else if (new_direct_edge)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  if (new_edges)
	    {
	      new_edges->safe_push (new_direct_edge);
	      res = true;
	    }
	  /* If a speculative edge was introduced, we still need to update
	     the call info of the indirect edge.  */
	  if (!new_direct_edge->speculative)
	    continue;
	}
      if (jfunc->type == IPA_JF_PASS_THROUGH
	  && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_pass_through_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_pass_through_type_preserved (jfunc))
		ici->vptr_changed = true;
	      ipa_set_param_used_by_indirect_call (new_root_info,
						   ici->param_index, true);
	      if (ici->polymorphic)
		ipa_set_param_used_by_polymorphic_call (new_root_info,
							ici->param_index, true);
	    }
	}
      else if (jfunc->type == IPA_JF_ANCESTOR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_ancestor_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
	      ici->offset += ipa_get_jf_ancestor_offset (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_ancestor_type_preserved (jfunc))
		ici->vptr_changed = true;
	      ipa_set_param_used_by_indirect_call (new_root_info,
						   ici->param_index, true);
	      if (ici->polymorphic)
		ipa_set_param_used_by_polymorphic_call (new_root_info,
							ici->param_index, true);
	    }
	}
      else
	/* Either we can find a destination for this edge now or never.  */
	ici->param_index = -1;
    }

  return res;
}

/* Recursively traverse the subtree of NODE (including NODE itself) made up of
   inlined cgraph_edges when CS has been inlined and invoke
   update_indirect_edges_after_inlining on all nodes and
   update_jump_functions_after_inlining on all non-inlined edges that lead out
   of this subtree.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff any new edges were
   created.  */

static bool
propagate_info_to_inlined_callees (struct cgraph_edge *cs,
				   struct cgraph_node *node,
				   vec<cgraph_edge *> *new_edges)
{
  struct cgraph_edge *e;
  bool res;

  res = update_indirect_edges_after_inlining (cs, node, new_edges);

  for (e = node->callees; e; e = e->next_callee)
    if (!e->inline_failed)
      res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
    else
      update_jump_functions_after_inlining (cs, e);
  for (e = node->indirect_calls; e; e = e->next_callee)
    update_jump_functions_after_inlining (cs, e);

  return res;
}

/* Combine two controlled uses counts as done during inlining.  */

static int
combine_controlled_uses_counters (int c, int d)
{
  if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
    return IPA_UNDESCRIBED_USE;
  else
    return c + d - 1;
}
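
/* For example, if a caller value with two described controlled uses (C == 2)
   is passed through to a callee parameter with three described uses (D == 3),
   the inlined call accounted for exactly one use in each of the two counters,
   so the combined count is 2 + 3 - 1 == 4.  If either count is
   IPA_UNDESCRIBED_USE, nothing is known about the uses and the result stays
   undescribed.  */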

/* Propagate the number of controlled users from CS->callee to the new root of
   the tree of inlined nodes.  */

static void
propagate_controlled_uses (struct cgraph_edge *cs)
{
  ipa_edge_args *args = ipa_edge_args_sum->get (cs);
  if (!args)
    return;
  struct cgraph_node *new_root = cs->caller->inlined_to
				 ? cs->caller->inlined_to : cs->caller;
  ipa_node_params *new_root_info = ipa_node_params_sum->get (new_root);
  ipa_node_params *old_root_info = ipa_node_params_sum->get (cs->callee);
  int count, i;

  if (!old_root_info)
    return;

  count = MIN (ipa_get_cs_argument_count (args),
	       ipa_get_param_count (old_root_info));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
      struct ipa_cst_ref_desc *rdesc;

      if (jf->type == IPA_JF_PASS_THROUGH
	  && !ipa_get_jf_pass_through_refdesc_decremented (jf))
	{
	  int src_idx, c, d;
	  src_idx = ipa_get_jf_pass_through_formal_id (jf);
	  c = ipa_get_controlled_uses (new_root_info, src_idx);
	  d = ipa_get_controlled_uses (old_root_info, i);

	  gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
			       == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
	  c = combine_controlled_uses_counters (c, d);
	  ipa_set_controlled_uses (new_root_info, src_idx, c);
	  bool lderef = true;
	  if (c != IPA_UNDESCRIBED_USE)
	    {
	      lderef = (ipa_get_param_load_dereferenced (new_root_info, src_idx)
			|| ipa_get_param_load_dereferenced (old_root_info, i));
	      ipa_set_param_load_dereferenced (new_root_info, src_idx, lderef);
	    }

	  if (c == 0 && !lderef && new_root_info->ipcp_orig_node)
	    {
	      struct cgraph_node *n;
	      struct ipa_ref *ref;
	      tree t = new_root_info->known_csts[src_idx];

	      if (t && TREE_CODE (t) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
		  && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
		  && (ref = new_root->find_reference (n, NULL, 0,
						      IPA_REF_ADDR)))
		{
		  if (dump_file)
		    fprintf (dump_file, "ipa-prop: Removing cloning-created "
			     "reference from %s to %s.\n",
			     new_root->dump_name (),
			     n->dump_name ());
		  ref->remove_reference ();
		}
	    }
	}
      else if (jf->type == IPA_JF_CONST
	       && (rdesc = jfunc_rdesc_usable (jf)))
	{
	  int d = ipa_get_controlled_uses (old_root_info, i);
	  int c = rdesc->refcount;
	  tree cst = ipa_get_jf_constant (jf);
	  rdesc->refcount = combine_controlled_uses_counters (c, d);
	  if (rdesc->refcount != IPA_UNDESCRIBED_USE
	      && ipa_get_param_load_dereferenced (old_root_info, i)
	      && TREE_CODE (cst) == ADDR_EXPR
	      && VAR_P (TREE_OPERAND (cst, 0)))
	    {
	      symtab_node *n = symtab_node::get (TREE_OPERAND (cst, 0));
	      new_root->create_reference (n, IPA_REF_LOAD, NULL);
	      if (dump_file)
		fprintf (dump_file, "ipa-prop: Address IPA constant will reach "
			 "a load so adding LOAD reference from %s to %s.\n",
			 new_root->dump_name (), n->dump_name ());
	    }
	  if (rdesc->refcount == 0)
	    {
	      gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
				   && ((TREE_CODE (TREE_OPERAND (cst, 0))
					== FUNCTION_DECL)
				       || VAR_P (TREE_OPERAND (cst, 0))));

	      symtab_node *n = symtab_node::get (TREE_OPERAND (cst, 0));
	      if (n)
		{
		  remove_described_reference (n, rdesc);
		  cgraph_node *clone = cs->caller;
		  while (clone->inlined_to
			 && clone->ipcp_clone
			 && clone != rdesc->cs->caller)
		    {
		      struct ipa_ref *ref;
		      ref = clone->find_reference (n, NULL, 0, IPA_REF_ADDR);
		      if (ref)
			{
			  if (dump_file)
			    fprintf (dump_file, "ipa-prop: Removing "
				     "cloning-created reference "
				     "from %s to %s.\n",
				     clone->dump_name (),
				     n->dump_name ());
			  ref->remove_reference ();
			}
		      clone = clone->callers->caller;
		    }
		}
	    }
	}
    }

  for (i = ipa_get_param_count (old_root_info);
       i < ipa_get_cs_argument_count (args);
       i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);

      if (jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
	  if (rdesc)
	    rdesc->refcount = IPA_UNDESCRIBED_USE;
	}
      else if (jf->type == IPA_JF_PASS_THROUGH)
	ipa_set_controlled_uses (new_root_info,
				 jf->value.pass_through.formal_id,
				 IPA_UNDESCRIBED_USE);
    }
}

/* Update jump functions and call note functions on inlining the call site CS.
   CS is expected to lead to a node already cloned by
   cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added
   to *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff any new edges
   were created.  */

bool
ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
				   vec<cgraph_edge *> *new_edges)
{
  bool changed;
  /* Do nothing if the preparation phase has not been carried out yet
     (i.e. during early inlining).  */
  if (!ipa_node_params_sum)
    return false;
  gcc_assert (ipa_edge_args_sum);

  propagate_controlled_uses (cs);
  changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
  ipa_node_params_sum->remove (cs->callee);

  ipa_edge_args *args = ipa_edge_args_sum->get (cs);
  if (args)
    {
      bool ok = true;
      if (args->jump_functions)
	{
	  struct ipa_jump_func *jf;
	  int i;
	  FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
	    if (jf->type == IPA_JF_CONST
		&& ipa_get_jf_constant_rdesc (jf))
	      {
		ok = false;
		break;
	      }
	}
      if (ok)
	ipa_edge_args_sum->remove (cs);
    }
  if (ipcp_transformation_sum)
    ipcp_transformation_sum->remove (cs->callee);

  return changed;
}

/* Ensure that the array of edge argument infos is big enough to accommodate a
   structure for all edges and reallocate it if it is not.  Also allocate the
   associated hash tables if they do not already exist.  */

void
ipa_check_create_edge_args (void)
{
  if (!ipa_edge_args_sum)
    ipa_edge_args_sum
      = (new (ggc_alloc_no_dtor<ipa_edge_args_sum_t> ())
	 ipa_edge_args_sum_t (symtab, true));
  if (!ipa_vr_hash_table)
    ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
}

/* Free all ipa_edge structures.  */

void
ipa_free_all_edge_args (void)
{
  if (!ipa_edge_args_sum)
    return;

  ggc_delete (ipa_edge_args_sum);
  ipa_edge_args_sum = NULL;
}

/* Free all ipa_node_params structures.  */

void
ipa_free_all_node_params (void)
{
  if (ipa_node_params_sum)
    ggc_delete (ipa_node_params_sum);
  ipa_node_params_sum = NULL;
}

/* Initialize the IPA-CP transformation summary and also allocate any
   necessary hash tables if they do not already exist.  */

void
ipcp_transformation_initialize (void)
{
  if (!ipa_vr_hash_table)
    ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
  if (ipcp_transformation_sum == NULL)
    {
      ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
      ipcp_transformation_sum->disable_insertion_hook ();
    }
}

/* Release the IPA-CP transformation summary.  */

void
ipcp_free_transformation_sum (void)
{
  if (!ipcp_transformation_sum)
    return;

  ipcp_transformation_sum->~function_summary<ipcp_transformation *> ();
  ggc_free (ipcp_transformation_sum);
  ipcp_transformation_sum = NULL;
}

/* Set the aggregate replacements of NODE to be AGGS.  */

void
ipa_set_node_agg_value_chain (struct cgraph_node *node,
			      vec<ipa_argagg_value, va_gc> *aggs)
{
  ipcp_transformation_initialize ();
  ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
  s->m_agg_values = aggs;
}

/* Hook that is called by cgraph.cc when an edge is removed.  Adjust reference
   count data structures accordingly.  */

void
ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
{
  if (args->jump_functions)
    {
      struct ipa_jump_func *jf;
      int i;
      FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
	{
	  struct ipa_cst_ref_desc *rdesc;
	  try_decrement_rdesc_refcount (jf);
	  if (jf->type == IPA_JF_CONST
	      && (rdesc = ipa_get_jf_constant_rdesc (jf))
	      && rdesc->cs == cs)
	    rdesc->cs = NULL;
	}
    }
}
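
/* Clearing rdesc->cs above is what later makes remove_described_reference
   bail out gracefully: a descriptor whose owning edge has been removed can no
   longer identify which reference to delete.  */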

/* Method invoked when an edge is duplicated.  Copy ipa_edge_args and adjust
   reference count data structures accordingly.  */

void
ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
				ipa_edge_args *old_args,
				ipa_edge_args *new_args)
{
  unsigned int i;

  new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
  if (old_args->polymorphic_call_contexts)
    new_args->polymorphic_call_contexts
      = vec_safe_copy (old_args->polymorphic_call_contexts);

  for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
    {
      struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
      struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);

      dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);

      if (src_jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);

	  if (!src_rdesc)
	    dst_jf->value.constant.rdesc = NULL;
	  else if (src->caller == dst->caller)
	    {
	      /* Creation of a speculative edge.  If the source edge is the
		 one grabbing a reference, we must create a new (duplicate)
		 reference description.  Otherwise they refer to the same
		 description corresponding to a reference taken in a function
		 src->caller is inlined to.  In that case we just need to
		 increment the refcount.  */
	      if (src_rdesc->cs == src)
		{
		  symtab_node *n = symtab_node_for_jfunc (src_jf);
		  gcc_checking_assert (n);
		  ipa_ref *ref
		    = src->caller->find_reference (n, src->call_stmt,
						   src->lto_stmt_uid,
						   IPA_REF_ADDR);
		  gcc_checking_assert (ref);
		  dst->caller->clone_reference (ref, ref->stmt);

		  ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
		  dst_rdesc->cs = dst;
		  dst_rdesc->refcount = src_rdesc->refcount;
		  dst_rdesc->next_duplicate = NULL;
		  dst_jf->value.constant.rdesc = dst_rdesc;
		}
	      else
		{
		  src_rdesc->refcount++;
		  dst_jf->value.constant.rdesc = src_rdesc;
		}
	    }
	  else if (src_rdesc->cs == src)
	    {
	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
	      src_rdesc->next_duplicate = dst_rdesc;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else
	    {
	      struct ipa_cst_ref_desc *dst_rdesc;
	      /* This can happen during inlining, when a JFUNC can refer to a
		 reference taken in a function up in the tree of inline
		 clones.  We need to find the duplicate that refers to our
		 tree of inline clones.  */

	      gcc_assert (dst->caller->inlined_to);
	      for (dst_rdesc = src_rdesc->next_duplicate;
		   dst_rdesc;
		   dst_rdesc = dst_rdesc->next_duplicate)
		{
		  struct cgraph_node *top;
		  top = dst_rdesc->cs->caller->inlined_to
			? dst_rdesc->cs->caller->inlined_to
			: dst_rdesc->cs->caller;
		  if (dst->caller->inlined_to == top)
		    break;
		}
	      gcc_assert (dst_rdesc);
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	}
      else if (dst_jf->type == IPA_JF_PASS_THROUGH
	       && src->caller == dst->caller)
	{
	  struct cgraph_node *inline_root = dst->caller->inlined_to
					    ? dst->caller->inlined_to
					    : dst->caller;
	  ipa_node_params *root_info = ipa_node_params_sum->get (inline_root);
	  int idx = ipa_get_jf_pass_through_formal_id (dst_jf);

	  int c = ipa_get_controlled_uses (root_info, idx);
	  if (c != IPA_UNDESCRIBED_USE)
	    {
	      c++;
	      ipa_set_controlled_uses (root_info, idx, c);
	    }
	}
    }
}
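
/* To summarize the constant-rdesc handling above, there are three cases:
   duplication within the same caller (creation of a speculative edge) either
   clones the reference and its descriptor, if the source edge owned it, or
   merely bumps the shared refcount; duplication of the owning edge into a
   different caller (inlining) clones the descriptor and chains it on the
   next_duplicate list; otherwise the duplicate matching our tree of inline
   clones is looked up on that list.  */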

/* Analyze a function that has been newly added to the call graph.  */

static void
ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (node->has_gimple_body_p ())
    ipa_analyze_node (node);
}

/* Hook that is called by summary when a node is duplicated.  */

void
ipa_node_params_t::duplicate (cgraph_node *, cgraph_node *,
			      ipa_node_params *old_info,
			      ipa_node_params *new_info)
{
  new_info->descriptors = vec_safe_copy (old_info->descriptors);
  gcc_assert (new_info->lattices.is_empty ());
  new_info->ipcp_orig_node = old_info->ipcp_orig_node;
  new_info->known_csts = old_info->known_csts.copy ();
  new_info->known_contexts = old_info->known_contexts.copy ();

  new_info->analysis_done = old_info->analysis_done;
  new_info->node_enqueued = old_info->node_enqueued;
  new_info->versionable = old_info->versionable;
}

/* Duplication of IPA-CP transformation summaries.  */

void
ipcp_transformation_t::duplicate (cgraph_node *, cgraph_node *dst,
				  ipcp_transformation *src_trans,
				  ipcp_transformation *dst_trans)
{
  /* Avoid redundant work of duplicating vectors we will never use.  */
  if (dst->inlined_to)
    return;
  dst_trans->m_agg_values = vec_safe_copy (src_trans->m_agg_values);
  dst_trans->m_vr = vec_safe_copy (src_trans->m_vr);
}

/* Register our cgraph hooks if they are not already there.  */

void
ipa_register_cgraph_hooks (void)
{
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();

  function_insertion_hook_holder
    = symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
}

/* Unregister our cgraph hooks if they are registered.  */

static void
ipa_unregister_cgraph_hooks (void)
{
  if (function_insertion_hook_holder)
    symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  function_insertion_hook_holder = NULL;
}

/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after ipa-cp.  */

void
ipa_free_all_structures_after_ipa_cp (void)
{
  if (!optimize && !in_lto_p)
    {
      ipa_free_all_edge_args ();
      ipa_free_all_node_params ();
      ipcp_sources_pool.release ();
      ipcp_cst_values_pool.release ();
      ipcp_poly_ctx_values_pool.release ();
      ipcp_agg_lattice_pool.release ();
      ipa_unregister_cgraph_hooks ();
      ipa_refdesc_pool.release ();
    }
}

/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after indirect inlining.  */

void
ipa_free_all_structures_after_iinln (void)
{
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
  ipcp_sources_pool.release ();
  ipcp_cst_values_pool.release ();
  ipcp_poly_ctx_values_pool.release ();
  ipcp_agg_lattice_pool.release ();
  ipa_refdesc_pool.release ();
}

/* Print the parameter descriptors of function NODE to F.  */

void
ipa_print_node_params (FILE *f, struct cgraph_node *node)
{
  int i, count;
  class ipa_node_params *info;

  if (!node->definition)
    return;
  info = ipa_node_params_sum->get (node);
  fprintf (f, " function %s parameter descriptors:\n", node->dump_name ());
  if (!info)
    {
      fprintf (f, " no params return\n");
      return;
    }
  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    {
      int c;

      fprintf (f, " ");
      ipa_dump_param (f, info, i);
      if (ipa_is_param_used (info, i))
	fprintf (f, " used");
      if (ipa_is_param_used_by_ipa_predicates (info, i))
	fprintf (f, " used_by_ipa_predicates");
      if (ipa_is_param_used_by_indirect_call (info, i))
	fprintf (f, " used_by_indirect_call");
      if (ipa_is_param_used_by_polymorphic_call (info, i))
	fprintf (f, " used_by_polymorphic_call");
      c = ipa_get_controlled_uses (info, i);
      if (c == IPA_UNDESCRIBED_USE)
	fprintf (f, " undescribed_use");
      else
	fprintf (f, " controlled_uses=%i %s", c,
		 ipa_get_param_load_dereferenced (info, i)
		 ? "(load_dereferenced)" : "");
      fprintf (f, "\n");
    }
}

/* Print ipa_tree_map data structures of all functions in the
   callgraph to F.  */

void
ipa_print_all_params (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nFunction parameters:\n");
  FOR_EACH_FUNCTION (node)
    ipa_print_node_params (f, node);
}

/* Stream out jump function JUMP_FUNC to OB.  */

static void
ipa_write_jump_function (struct output_block *ob,
			 struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;
  int flag = 0;

  /* ADDR_EXPRs are very common IP invariants; save some streamer data
     as well as WPA memory by handling them specially.  */
  if (jump_func->type == IPA_JF_CONST
      && TREE_CODE (jump_func->value.constant.value) == ADDR_EXPR)
    flag = 1;

  streamer_write_uhwi (ob, jump_func->type * 2 + flag);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_CONST:
      gcc_assert (
	EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
      stream_write_tree (ob,
			 flag
			 ? TREE_OPERAND (jump_func->value.constant.value, 0)
			 : jump_func->value.constant.value, true);
      break;
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      if (jump_func->value.pass_through.operation == NOP_EXPR)
	{
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
	  gcc_assert (!jump_func->value.pass_through.refdesc_decremented);
	  streamer_write_bitpack (&bp);
	}
      else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
	       == tcc_unary)
	streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
      else
	{
	  stream_write_tree (ob, jump_func->value.pass_through.operand, true);
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	}
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      bp_pack_value (&bp, jump_func->value.ancestor.keep_null, 1);
      streamer_write_bitpack (&bp);
      break;
    default:
      fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
    }

  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);
  if (count)
    {
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->agg.by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
    {
      stream_write_tree (ob, item->type, true);
      streamer_write_uhwi (ob, item->offset);
      streamer_write_uhwi (ob, item->jftype);
      switch (item->jftype)
	{
	case IPA_JF_UNKNOWN:
	  break;
	case IPA_JF_CONST:
	  stream_write_tree (ob, item->value.constant, true);
	  break;
	case IPA_JF_PASS_THROUGH:
	case IPA_JF_LOAD_AGG:
	  streamer_write_uhwi (ob, item->value.pass_through.operation);
	  streamer_write_uhwi (ob, item->value.pass_through.formal_id);
	  if (TREE_CODE_CLASS (item->value.pass_through.operation)
	      != tcc_unary)
	    stream_write_tree (ob, item->value.pass_through.operand, true);
	  if (item->jftype == IPA_JF_LOAD_AGG)
	    {
	      stream_write_tree (ob, item->value.load_agg.type, true);
	      streamer_write_uhwi (ob, item->value.load_agg.offset);
	      bp = bitpack_create (ob->main_stream);
	      bp_pack_value (&bp, item->value.load_agg.by_ref, 1);
	      streamer_write_bitpack (&bp);
	    }
	  break;
	default:
	  fatal_error (UNKNOWN_LOCATION,
		       "invalid jump function in LTO stream");
	}
    }

  bp = bitpack_create (ob->main_stream);
  if (jump_func->m_vr)
    jump_func->m_vr->streamer_write (ob);
  else
    {
      bp_pack_value (&bp, false, 1);
      streamer_write_bitpack (&bp);
    }
}
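
/* The writer above packs the jump function type and the ADDR_EXPR flag into a
   single word as TYPE * 2 + FLAG; ipa_read_jump_function below recovers them
   as VAL / 2 and VAL & 1.  For instance, an IPA_JF_CONST whose value is an
   ADDR_EXPR is streamed as 2 * IPA_JF_CONST + 1.  */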

/* Read in jump function JUMP_FUNC from IB.  */

static void
ipa_read_jump_function (class lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct cgraph_edge *cs,
			class data_in *data_in,
			bool prevails)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;
  int val = streamer_read_uhwi (ib);
  bool flag = val & 1;

  jftype = (enum jump_func_type) (val / 2);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      ipa_set_jf_unknown (jump_func);
      break;
    case IPA_JF_CONST:
      {
	tree t = stream_read_tree (ib, data_in);
	if (flag && prevails)
	  t = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (t)), t);
	ipa_set_jf_constant (jump_func, t, cs);
      }
      break;
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
	{
	  int formal_id = streamer_read_uhwi (ib);
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool agg_preserved = bp_unpack_value (&bp, 1);
	  ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
	}
      else if (TREE_CODE_CLASS (operation) == tcc_unary)
	{
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
	}
      else
	{
	  tree operand = stream_read_tree (ib, data_in);
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
					 operation);
	}
      break;
    case IPA_JF_ANCESTOR:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	int formal_id = streamer_read_uhwi (ib);
	struct bitpack_d bp = streamer_read_bitpack (ib);
	bool agg_preserved = bp_unpack_value (&bp, 1);
	bool keep_null = bp_unpack_value (&bp, 1);
	ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved,
			     keep_null);
	break;
      }
    default:
      fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
    }

  count = streamer_read_uhwi (ib);
  if (prevails)
    {
      jump_func->agg.items = NULL;
      vec_safe_reserve (jump_func->agg.items, count, true);
    }
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item item;
      item.type = stream_read_tree (ib, data_in);
      item.offset = streamer_read_uhwi (ib);
      item.jftype = (enum jump_func_type) streamer_read_uhwi (ib);

      switch (item.jftype)
	{
	case IPA_JF_UNKNOWN:
	  break;
	case IPA_JF_CONST:
	  item.value.constant = stream_read_tree (ib, data_in);
	  break;
	case IPA_JF_PASS_THROUGH:
	case IPA_JF_LOAD_AGG:
	  operation = (enum tree_code) streamer_read_uhwi (ib);
	  item.value.pass_through.operation = operation;
	  item.value.pass_through.formal_id = streamer_read_uhwi (ib);
	  if (TREE_CODE_CLASS (operation) == tcc_unary)
	    item.value.pass_through.operand = NULL_TREE;
	  else
	    item.value.pass_through.operand = stream_read_tree (ib, data_in);
	  if (item.jftype == IPA_JF_LOAD_AGG)
	    {
	      struct bitpack_d bp;
	      item.value.load_agg.type = stream_read_tree (ib, data_in);
	      item.value.load_agg.offset = streamer_read_uhwi (ib);
	      bp = streamer_read_bitpack (ib);
	      item.value.load_agg.by_ref = bp_unpack_value (&bp, 1);
	    }
	  break;
	default:
	  fatal_error (UNKNOWN_LOCATION,
		       "invalid jump function in LTO stream");
	}
      if (prevails)
	jump_func->agg.items->quick_push (item);
    }

  ipa_vr vr;
  vr.streamer_read (ib, data_in);
  if (vr.known_p ())
    {
      if (prevails)
	ipa_set_jfunc_vr (jump_func, vr);
    }
  else
    jump_func->m_vr = NULL;
}

/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
{
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->member_ptr, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
  bp_pack_value (&bp, ii->vptr_changed, 1);
  streamer_write_bitpack (&bp);
  if (ii->agg_contents || ii->polymorphic)
    streamer_write_hwi (ob, ii->offset);
  else
    gcc_assert (ii->offset == 0);

  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
      ii->context.stream_out (ob);
    }
}

/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining from IB.  */

static void
ipa_read_indirect_edge_info (class lto_input_block *ib,
			     class data_in *data_in,
			     struct cgraph_edge *cs,
			     class ipa_node_params *info)
{
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  ii->param_index = (int) streamer_read_hwi (ib);
  bp = streamer_read_bitpack (ib);
  ii->polymorphic = bp_unpack_value (&bp, 1);
  ii->agg_contents = bp_unpack_value (&bp, 1);
  ii->member_ptr = bp_unpack_value (&bp, 1);
  ii->by_ref = bp_unpack_value (&bp, 1);
  ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
  ii->vptr_changed = bp_unpack_value (&bp, 1);
  if (ii->agg_contents || ii->polymorphic)
    ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  else
    ii->offset = 0;
  if (ii->polymorphic)
    {
      ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
      ii->otr_type = stream_read_tree (ib, data_in);
      ii->context.stream_in (ib, data_in);
    }
  if (info && ii->param_index >= 0)
    {
      if (ii->polymorphic)
	ipa_set_param_used_by_polymorphic_call (info,
						ii->param_index, true);
      ipa_set_param_used_by_indirect_call (info,
					   ii->param_index, true);
    }
}

/* Stream out NODE info to OB.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  ipa_node_params *info = ipa_node_params_sum->get (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, ipa_get_param_count (info));
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
  bp = bitpack_create (ob->main_stream);
  gcc_assert (info->analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    {
      /* TODO: We could just not stream the bit in the undescribed case.  */
      bool d = (ipa_get_controlled_uses (info, j) != IPA_UNDESCRIBED_USE)
	       ? ipa_get_param_load_dereferenced (info, j) : true;
      bp_pack_value (&bp, d, 1);
      bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
    }
  streamer_write_bitpack (&bp);
  for (j = 0; j < ipa_get_param_count (info); j++)
    {
      streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
      stream_write_tree (ob, ipa_get_type (info, j), true);
    }
  for (e = node->callees; e; e = e->next_callee)
    {
      ipa_edge_args *args = ipa_edge_args_sum->get (e);

      if (!args)
	{
	  streamer_write_uhwi (ob, 0);
	  continue;
	}

      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      ipa_edge_args *args = ipa_edge_args_sum->get (e);
      if (!args)
	streamer_write_uhwi (ob, 0);
      else
	{
	  streamer_write_uhwi (ob,
			       ipa_get_cs_argument_count (args) * 2
			       + (args->polymorphic_call_contexts != NULL));
	  for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	    {
	      ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	      if (args->polymorphic_call_contexts != NULL)
		ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	    }
	}
      ipa_write_indirect_edge_info (ob, e);
    }
}
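
/* Note that the per-edge count word written above again uses the low bit as a
   flag: it stores the argument count times two plus one bit saying whether
   polymorphic call contexts are present.  ipa_read_edge_info below therefore
   tests COUNT & 1 and then halves the value, and a stored zero means the edge
   had no ipa_edge_args at all.  */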

/* Stream in edge E from IB.  */

static void
ipa_read_edge_info (class lto_input_block *ib,
		    class data_in *data_in,
		    struct cgraph_edge *e, bool prevails)
{
  int count = streamer_read_uhwi (ib);
  bool contexts_computed = count & 1;

  count /= 2;
  if (!count)
    return;
  if (prevails
      && (e->possibly_call_in_translation_unit_p ()
	  /* Also stream in jump functions to builtins in hope that they
	     will get fnspecs.  */
	  || fndecl_built_in_p (e->callee->decl, BUILT_IN_NORMAL)))
    {
      ipa_edge_args *args = ipa_edge_args_sum->get_create (e);
      vec_safe_grow_cleared (args->jump_functions, count, true);
      if (contexts_computed)
	vec_safe_grow_cleared (args->polymorphic_call_contexts, count, true);
      for (int k = 0; k < count; k++)
	{
	  ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				  data_in, prevails);
	  if (contexts_computed)
	    ipa_get_ith_polymorhic_call_context (args, k)->stream_in
	      (ib, data_in);
	}
    }
  else
    {
      for (int k = 0; k < count; k++)
	{
	  struct ipa_jump_func dummy;
	  ipa_read_jump_function (ib, &dummy, e,
				  data_in, prevails);
	  if (contexts_computed)
	    {
	      class ipa_polymorphic_call_context ctx;
	      ctx.stream_in (ib, data_in);
	    }
	}
    }
}

/* Stream in NODE info from IB.  */

static void
ipa_read_node_info (class lto_input_block *ib, struct cgraph_node *node,
		    class data_in *data_in)
{
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;
  bool prevails = node->prevailing_p ();
  ipa_node_params *info
    = prevails ? ipa_node_params_sum->get_create (node) : NULL;

  int param_count = streamer_read_uhwi (ib);
  if (prevails)
    {
      ipa_alloc_node_params (node, param_count);
      for (k = 0; k < param_count; k++)
	(*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
      if (ipa_get_param_count (info) != 0)
	info->analysis_done = true;
      info->node_enqueued = false;
    }
  else
    for (k = 0; k < param_count; k++)
      streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);
  for (k = 0; k < param_count; k++)
    {
      bool load_dereferenced = bp_unpack_value (&bp, 1);
      bool used = bp_unpack_value (&bp, 1);

      if (prevails)
	{
	  ipa_set_param_load_dereferenced (info, k, load_dereferenced);
	  ipa_set_param_used (info, k, used);
	}
    }
  for (k = 0; k < param_count; k++)
    {
      int nuses = streamer_read_hwi (ib);
      tree type = stream_read_tree (ib, data_in);

      if (prevails)
	{
	  ipa_set_controlled_uses (info, k, nuses);
	  (*info->descriptors)[k].decl_or_type = type;
	}
    }
  for (e = node->callees; e; e = e->next_callee)
    ipa_read_edge_info (ib, data_in, e, prevails);
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      ipa_read_edge_info (ib, data_in, e, prevails);
      ipa_read_indirect_edge_info (ib, data_in, e, info);
    }
}

/* Write jump functions for all functions in the current partition.  */

void
ipa_prop_write_jump_functions (void)
{
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  if (!ipa_node_params_sum || !ipa_edge_args_sum)
    return;

  ob = create_output_block (LTO_section_jump_functions);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      cgraph_node *node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && ipa_node_params_sum->get (node) != NULL)
	count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      cgraph_node *node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && ipa_node_params_sum->get (node) != NULL)
	ipa_write_node_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}

/* Read section in file FILE_DATA of length LEN with data DATA.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
		       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  class data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}

/* Read IPA-CP jump functions.  */

void
ipa_prop_read_jump_functions (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  ipa_register_cgraph_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data
	= lto_get_summary_section_data (file_data, LTO_section_jump_functions,
					&len);
      if (data)
	ipa_prop_read_section (file_data, data, len);
    }
}

/* Return true if the IPA-CP transformation summary TS is non-NULL and contains
   useful info.  */

static bool
useful_ipcp_transformation_info_p (ipcp_transformation *ts)
{
  if (!ts)
    return false;
  if (!vec_safe_is_empty (ts->m_agg_values)
      || !vec_safe_is_empty (ts->m_vr))
    return true;
  return false;
}

/* Write into OB IPA-CP transformation summary TS describing NODE.  */

void
write_ipcp_transformation_info (output_block *ob, cgraph_node *node,
                                ipcp_transformation *ts)
{
  lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
  int node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, vec_safe_length (ts->m_agg_values));
  for (const ipa_argagg_value &av : ts->m_agg_values)
    {
      struct bitpack_d bp;

      stream_write_tree (ob, av.value, true);
      streamer_write_uhwi (ob, av.unit_offset);
      streamer_write_uhwi (ob, av.index);

      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, av.by_ref, 1);
      bp_pack_value (&bp, av.killed, 1);
      streamer_write_bitpack (&bp);
    }

  streamer_write_uhwi (ob, vec_safe_length (ts->m_vr));
  for (const ipa_vr &parm_vr : ts->m_vr)
    parm_vr.streamer_write (ob);
}
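
/* A sketch of the per-node record written above and read back by
   read_ipcp_transformation_info (informal, assuming the standard streamer
   encodings):

     uhwi  node reference
     uhwi  N = number of aggregate value replacements
     N times:
       tree     value
       uhwi     unit_offset
       uhwi     index
       bitpack  { by_ref:1, killed:1 }
     uhwi  M = number of parameter value ranges
     M times: an ipa_vr streamed by ipa_vr::streamer_write.  */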

/* Stream in the IPA-CP transformation summary, i.e. the aggregate value
   replacements and parameter value ranges, for NODE from IB.  */

static void
read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
                               data_in *data_in)
{
  unsigned int count, i;
  ipcp_transformation_initialize ();
  ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);

  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      vec_safe_grow_cleared (ts->m_agg_values, count, true);
      for (i = 0; i < count; i++)
        {
          ipa_argagg_value *av = &(*ts->m_agg_values)[i];

          av->value = stream_read_tree (ib, data_in);
          av->unit_offset = streamer_read_uhwi (ib);
          av->index = streamer_read_uhwi (ib);

          bitpack_d bp = streamer_read_bitpack (ib);
          av->by_ref = bp_unpack_value (&bp, 1);
          av->killed = bp_unpack_value (&bp, 1);
        }
    }

  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      vec_safe_grow_cleared (ts->m_vr, count, true);
      for (i = 0; i < count; i++)
        {
          ipa_vr *parm_vr;
          parm_vr = &(*ts->m_vr)[i];
          parm_vr->streamer_read (ib, data_in);
        }
    }
}

/* Write all useful IPA-CP transformation summaries for nodes in the current
   LTO partition.  */

void
ipcp_write_transformation_summaries (void)
{
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_t encoder;

  ob = create_output_block (LTO_section_ipcp_transform);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;

  for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
      cgraph_node *cnode = dyn_cast <cgraph_node *> (snode);
      if (!cnode)
        continue;
      ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
      if (useful_ipcp_transformation_info_p (ts)
          && lto_symtab_encoder_encode_body_p (encoder, cnode))
        count++;
    }

  streamer_write_uhwi (ob, count);

  for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
      cgraph_node *cnode = dyn_cast <cgraph_node *> (snode);
      if (!cnode)
        continue;
      ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
      if (useful_ipcp_transformation_info_p (ts)
          && lto_symtab_encoder_encode_body_p (encoder, cnode))
        write_ipcp_transformation_info (ob, cnode, ts);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}

/* Read replacements section in file FILE_DATA of length LEN with data
   DATA.  */

static void
read_replacements_section (struct lto_file_decl_data *file_data,
                           const char *data,
                           size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  class data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
                           header->main_size, file_data);

  data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
                                header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
                                                                index));
      read_ipcp_transformation_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
                         len);
  lto_data_in_delete (data_in);
}

/* Read IPA-CP transformation summaries.  */

void
ipcp_read_transformation_summaries (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data
        = lto_get_summary_section_data (file_data, LTO_section_ipcp_transform,
                                        &len);
      if (data)
        read_replacements_section (file_data, data, len);
    }
}

/* Adjust the aggregate replacements in TS to reflect any parameter removals
   which might have already taken place.  If after adjustments there are no
   aggregate replacements left, the m_agg_values will be set to NULL.  In other
   cases, it may be shrunk.  */

static void
adjust_agg_replacement_values (cgraph_node *node, ipcp_transformation *ts)
{
  clone_info *cinfo = clone_info::get (node);
  if (!cinfo || !cinfo->param_adjustments)
    return;

  auto_vec<int, 16> new_indices;
  cinfo->param_adjustments->get_updated_indices (&new_indices);
  bool removed_item = false;
  unsigned dst_index = 0;
  unsigned count = ts->m_agg_values->length ();
  for (unsigned i = 0; i < count; i++)
    {
      ipa_argagg_value *v = &(*ts->m_agg_values)[i];
      gcc_checking_assert (v->index >= 0);

      int new_idx = -1;
      if ((unsigned) v->index < new_indices.length ())
        new_idx = new_indices[v->index];

      if (new_idx >= 0)
        {
          v->index = new_idx;
          if (removed_item)
            (*ts->m_agg_values)[dst_index] = *v;
          dst_index++;
        }
      else
        removed_item = true;
    }

  if (dst_index == 0)
    {
      ggc_free (ts->m_agg_values);
      ts->m_agg_values = NULL;
    }
  else if (removed_item)
    ts->m_agg_values->truncate (dst_index);

  return;
}
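
/* As a hypothetical example of the remapping above: if the original function
   had three parameters and the clone dropped the middle one,
   get_updated_indices yields { 0, -1, 1 }.  A replacement recorded for
   parameter 2 is then renumbered to index 1, while one recorded for
   parameter 1 maps to -1 and is dropped, shrinking the vector.  */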

/* Dominator walker driving the ipcp modification phase.  */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
                         vec<ipa_param_descriptor, va_gc> *descs,
                         ipcp_transformation *ts, bool *sc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_ts (ts), m_something_changed (sc) {}

  edge before_dom_children (basic_block) final override;
  bool cleanup_eh ()
    { return gimple_purge_all_dead_eh_edges (m_need_eh_cleanup); }

private:
  struct ipa_func_body_info *m_fbi;
  vec<ipa_param_descriptor, va_gc> *m_descriptors;
  ipcp_transformation *m_ts;
  bool *m_something_changed;
  auto_bitmap m_need_eh_cleanup;
};

edge
ipcp_modif_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      tree rhs, val, t;
      HOST_WIDE_INT bit_offset;
      poly_int64 size;
      int index;
      bool by_ref, vce;

      if (!gimple_assign_load_p (stmt))
        continue;
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs)))
        continue;

      vce = false;
      t = rhs;
      while (handled_component_p (t))
        {
          /* V_C_E can do things like convert an array of integers to one
             bigger integer and similar things we do not handle below.  */
          if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
            {
              vce = true;
              break;
            }
          t = TREE_OPERAND (t, 0);
        }
      if (vce)
        continue;

      if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
                                   &bit_offset, &size, &by_ref))
        continue;
      unsigned unit_offset = bit_offset / BITS_PER_UNIT;
      ipa_argagg_value_list avl (m_ts);
      tree v = avl.get_value (index, unit_offset, by_ref);

      if (!v
          || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v))), size))
        continue;

      gcc_checking_assert (is_gimple_ip_invariant (v));
      if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v)))
        {
          if (fold_convertible_p (TREE_TYPE (rhs), v))
            val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v);
          else if (TYPE_SIZE (TREE_TYPE (rhs))
                   == TYPE_SIZE (TREE_TYPE (v)))
            val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v);
          else
            {
              if (dump_file)
                {
                  fprintf (dump_file, " const ");
                  print_generic_expr (dump_file, v);
                  fprintf (dump_file, " can't be converted to type of ");
                  print_generic_expr (dump_file, rhs);
                  fprintf (dump_file, "\n");
                }
              continue;
            }
        }
      else
        val = v;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Modifying stmt:\n ");
          print_gimple_stmt (dump_file, stmt, 0);
        }
      gimple_assign_set_rhs_from_tree (&gsi, val);
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "into:\n ");
          print_gimple_stmt (dump_file, stmt, 0);
          fprintf (dump_file, "\n");
        }

      *m_something_changed = true;
      if (maybe_clean_eh_stmt (stmt))
        bitmap_set_bit (m_need_eh_cleanup, bb->index);
    }
  return NULL;
}
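
/* For illustration, if IPA-CP recorded that the aggregate pointed to by
   parameter P contains the constant 42 at unit offset 8, the walker above
   rewrites a load such as

     _5 = MEM[(const struct S *)p_2(D) + 8B].f;

   into

     _5 = 42;

   (a made-up example; the real offset, type and replacement value come from
   ipa_load_from_parm_agg and the recorded ipa_argagg_value list).  */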

/* If IPA-CP discovered a constant in parameter PARM at OFFSET of a given SIZE
   - whether passed by reference or not is given by BY_REF - return that
   constant.  Otherwise return NULL_TREE.  This is supposed to be used only
   after clone materialization and transformation is done (because it asserts
   that killed constants have been pruned).  */

tree
ipcp_get_aggregate_const (struct function *func, tree parm, bool by_ref,
                          HOST_WIDE_INT bit_offset, HOST_WIDE_INT bit_size)
{
  cgraph_node *node = cgraph_node::get (func->decl);
  ipcp_transformation *ts = ipcp_get_transformation_summary (node);

  if (!ts || !ts->m_agg_values)
    return NULL_TREE;

  int index = ts->get_param_index (func->decl, parm);
  if (index < 0)
    return NULL_TREE;

  ipa_argagg_value_list avl (ts);
  unsigned unit_offset = bit_offset / BITS_PER_UNIT;
  const ipa_argagg_value *av = avl.get_elt (index, unit_offset);
  if (!av || av->by_ref != by_ref)
    return NULL_TREE;
  gcc_assert (!av->killed);
  tree v = av->value;
  if (!v
      || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v))), bit_size))
    return NULL_TREE;

  return v;
}
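
/* A minimal usage sketch (hypothetical caller; PARM is assumed to be a
   PARM_DECL of CFUN and the access a by-reference load of a 32-bit field at
   byte offset 4):

     if (tree cst = ipcp_get_aggregate_const (cfun, parm, true,
                                              4 * BITS_PER_UNIT, 32))
       ... fold the load to CST ...

   Callers may only do this after IPA-CP clone materialization, as the assert
   on AV->killed above enforces.  */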

/* Return true if we have recorded VALUE and MASK about PARM.
   Set VALUE and MASK accordingly.  */

bool
ipcp_get_parm_bits (tree parm, tree *value, widest_int *mask)
{
  cgraph_node *cnode = cgraph_node::get (current_function_decl);
  ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
  if (!ts
      || vec_safe_length (ts->m_vr) == 0
      || !irange::supports_p (TREE_TYPE (parm)))
    return false;

  int i = ts->get_param_index (current_function_decl, parm);
  if (i < 0)
    return false;
  clone_info *cinfo = clone_info::get (cnode);
  if (cinfo && cinfo->param_adjustments)
    {
      i = cinfo->param_adjustments->get_original_index (i);
      if (i < 0)
        return false;
    }

  vec<ipa_vr, va_gc> &vr = *ts->m_vr;
  if (!vr[i].known_p ())
    return false;
  Value_Range tmp;
  vr[i].get_vrange (tmp);
  if (tmp.undefined_p () || tmp.varying_p ())
    return false;
  irange &r = as_a <irange> (tmp);
  irange_bitmask bm = r.get_bitmask ();
  *mask = widest_int::from (bm.mask (), TYPE_SIGN (TREE_TYPE (parm)));
  *value = wide_int_to_tree (TREE_TYPE (parm), bm.value ());
  return true;
}
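
/* In the VALUE/MASK pair returned above, a set bit in MASK means the
   corresponding bit of the parameter is unknown, while a clear bit means it
   is known and equal to the corresponding bit of VALUE.  E.g. a hypothetical
   result of MASK == 0xfffffff0 with VALUE == 0x8 says that the low four bits
   are known to be 0b1000 and nothing is known about the rest.  */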

/* Update value range of formal parameters of NODE as described in TS.  */

static void
ipcp_update_vr (struct cgraph_node *node, ipcp_transformation *ts)
{
  if (vec_safe_is_empty (ts->m_vr))
    return;
  const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
  unsigned count = vr.length ();
  if (!count)
    return;

  auto_vec<int, 16> new_indices;
  bool need_remapping = false;
  clone_info *cinfo = clone_info::get (node);
  if (cinfo && cinfo->param_adjustments)
    {
      cinfo->param_adjustments->get_updated_indices (&new_indices);
      need_remapping = true;
    }
  auto_vec <tree, 16> parm_decls;
  push_function_arg_decls (&parm_decls, node->decl);

  for (unsigned i = 0; i < count; ++i)
    {
      tree parm;
      int remapped_idx;
      if (need_remapping)
        {
          if (i >= new_indices.length ())
            continue;
          remapped_idx = new_indices[i];
          if (remapped_idx < 0)
            continue;
        }
      else
        remapped_idx = i;

      parm = parm_decls[remapped_idx];

      gcc_checking_assert (parm);
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);

      if (!ddef || !is_gimple_reg (parm))
        continue;

      if (vr[i].known_p ())
        {
          Value_Range tmp;
          vr[i].get_vrange (tmp);

          if (!tmp.undefined_p () && !tmp.varying_p ())
            {
              if (dump_file)
                {
                  fprintf (dump_file, "Setting value range of param %u "
                           "(now %i) ", i, remapped_idx);
                  tmp.dump (dump_file);
                  fprintf (dump_file, "\n");
                }
              set_range_info (ddef, tmp);

              if (POINTER_TYPE_P (TREE_TYPE (parm))
                  && opt_for_fn (node->decl, flag_ipa_bit_cp))
                {
                  irange &r = as_a<irange> (tmp);
                  irange_bitmask bm = r.get_bitmask ();
                  unsigned tem = bm.mask ().to_uhwi ();
                  unsigned HOST_WIDE_INT bitpos = bm.value ().to_uhwi ();
                  unsigned align = tem & -tem;
                  unsigned misalign = bitpos & (align - 1);

                  if (align > 1)
                    {
                      if (dump_file)
                        {
                          fprintf (dump_file,
                                   "Adjusting mask for param %u to ", i);
                          print_hex (bm.mask (), dump_file);
                          fprintf (dump_file, "\n");
                        }

                      if (dump_file)
                        fprintf (dump_file,
                                 "Adjusting align: %u, misalign: %u\n",
                                 align, misalign);

                      unsigned old_align, old_misalign;
                      struct ptr_info_def *pi = get_ptr_info (ddef);
                      bool old_known = get_ptr_info_alignment (pi, &old_align,
                                                               &old_misalign);

                      if (old_known && old_align > align)
                        {
                          if (dump_file)
                            {
                              fprintf (dump_file,
                                       "But alignment was already %u.\n",
                                       old_align);
                              if ((old_misalign & (align - 1)) != misalign)
                                fprintf (dump_file,
                                         "old_misalign (%u) and misalign "
                                         "(%u) mismatch\n",
                                         old_misalign, misalign);
                            }
                          continue;
                        }

                      if (dump_file
                          && old_known
                          && ((misalign & (old_align - 1)) != old_misalign))
                        fprintf (dump_file,
                                 "old_misalign (%u) and misalign (%u) "
                                 "mismatch\n",
                                 old_misalign, misalign);

                      set_ptr_info_alignment (pi, align, misalign);
                    }
                }
              else if (dump_file && INTEGRAL_TYPE_P (TREE_TYPE (parm)))
                {
                  irange &r = as_a<irange> (tmp);
                  irange_bitmask bm = r.get_bitmask ();
                  unsigned prec = TYPE_PRECISION (TREE_TYPE (parm));
                  if (wi::ne_p (bm.mask (), wi::shwi (-1, prec)))
                    {
                      fprintf (dump_file,
                               "Adjusting mask for param %u to ", i);
                      print_hex (bm.mask (), dump_file);
                      fprintf (dump_file, "\n");
                    }
                }
            }
        }
    }
}
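
/* To illustrate the alignment derivation above with made-up numbers: if the
   recorded bitmask of a pointer parameter has mask 0xf0 (low four bits
   known) and value 0x4, the lowest set bit of the mask gives align == 16 and
   misalign == (0x4 & 15) == 4, i.e. the pointer is known to be equal to 4
   modulo 16.  */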

/* IPCP transformation phase applying the recorded aggregate constants and
   parameter value ranges to the body of NODE.  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  struct ipa_func_body_info fbi;
  int param_count;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s\n",
             node->dump_name ());

  ipcp_transformation *ts = ipcp_get_transformation_summary (node);
  if (!ts
      || (vec_safe_is_empty (ts->m_agg_values)
          && vec_safe_is_empty (ts->m_vr)))
    return 0;

  ts->maybe_create_parm_idx_map (cfun->decl);
  ipcp_update_vr (node, ts);
  if (vec_safe_is_empty (ts->m_agg_values))
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;

  adjust_agg_replacement_values (node, ts);
  if (vec_safe_is_empty (ts->m_agg_values))
    {
      if (dump_file)
        fprintf (dump_file, "  All affected aggregate parameters were either "
                 "removed or converted into scalars, phase done.\n");
      return 0;
    }
  if (dump_file)
    {
      fprintf (dump_file, "  Aggregate replacements:");
      ipa_argagg_value_list avs (ts);
      avs.dump (dump_file);
    }

  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
  fbi.param_count = param_count;
  fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);

  vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
  vec_safe_grow_cleared (descriptors, param_count, true);
  ipa_populate_param_decls (node, *descriptors);
  bool modified_mem_access = false;
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker walker (&fbi, descriptors, ts, &modified_mem_access);
  walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  free_dominance_info (CDI_DOMINATORS);
  bool cfg_changed = walker.cleanup_eh ();

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();

  /* Prune the aggregate values marked as killed, they must not be used past
     this point (ipcp_get_aggregate_const asserts they have been removed).  */
  ts->remove_argaggs_if ([](const ipa_argagg_value &v)
    {
      return v.killed;
    });

  vec_free (descriptors);
  if (cfg_changed)
    delete_unreachable_blocks_update_callgraph (node, false);

  return modified_mem_access ? TODO_update_ssa_only_virtuals : 0;
}

/* Record that current function return value range is VAL.  */

void
ipa_record_return_value_range (Value_Range val)
{
  cgraph_node *n = cgraph_node::get (current_function_decl);
  if (!ipa_return_value_sum)
    {
      if (!ipa_vr_hash_table)
        ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
      ipa_return_value_sum = new (ggc_alloc_no_dtor <ipa_return_value_sum_t> ())
        ipa_return_value_sum_t (symtab, true);
      ipa_return_value_sum->disable_insertion_hook ();
    }
  ipa_return_value_sum->get_create (n)->vr = ipa_get_value_range (val);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Recording return range ");
      val.dump (dump_file);
      fprintf (dump_file, "\n");
    }
}

/* Return true if value range of DECL is known and if so initialize RANGE.  */

bool
ipa_return_value_range (Value_Range &range, tree decl)
{
  cgraph_node *n = cgraph_node::get (decl);
  if (!n || !ipa_return_value_sum)
    return false;
  enum availability avail;
  n = n->ultimate_alias_target (&avail);
  if (avail < AVAIL_AVAILABLE)
    return false;
  if (n->decl != decl
      && !useless_type_conversion_p (TREE_TYPE (decl), TREE_TYPE (n->decl)))
    return false;
  ipa_return_value_summary *v = ipa_return_value_sum->get (n);
  if (!v)
    return false;
  v->vr->get_vrange (range);
  return true;
}
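
/* A minimal usage sketch (hypothetical caller folding a call to FNDECL whose
   return type is an integer):

     Value_Range rng (TREE_TYPE (TREE_TYPE (fndecl)));
     if (ipa_return_value_range (rng, fndecl))
       ... intersect RNG into the range of the call's LHS ...

   The range recorded by ipa_record_return_value_range above is only valid
   for the function body that was analyzed, which is why the lookup insists
   on an available ultimate alias target with a compatible type.  */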

/* Reset all state within ipa-prop.cc so that we can rerun the compiler
   within the same process.  For use by toplev::finalize.  */

void
ipa_prop_cc_finalize (void)
{
  if (function_insertion_hook_holder)
    symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  function_insertion_hook_holder = NULL;

  if (ipa_edge_args_sum)
    ggc_delete (ipa_edge_args_sum);
  ipa_edge_args_sum = NULL;

  if (ipa_node_params_sum)
    ggc_delete (ipa_node_params_sum);
  ipa_node_params_sum = NULL;
}

/* Return true if the two pass_through components of two jump functions are
   known to be equivalent.  AGG_JF denotes whether they are part of aggregate
   jump functions or not.  The function can be used before the IPA phase of
   IPA-CP or inlining because it cannot cope with refdesc changes these passes
   can carry out.  */

static bool
ipa_agg_pass_through_jf_equivalent_p (ipa_pass_through_data *ipt1,
                                      ipa_pass_through_data *ipt2,
                                      bool agg_jf)
{
  gcc_assert (agg_jf
              || (!ipt1->refdesc_decremented && !ipt2->refdesc_decremented));
  if (ipt1->operation != ipt2->operation
      || ipt1->formal_id != ipt2->formal_id
      || (!agg_jf && (ipt1->agg_preserved != ipt2->agg_preserved)))
    return false;
  if (((ipt1->operand != NULL_TREE) != (ipt2->operand != NULL_TREE))
      || (ipt1->operand
          && !values_equal_for_ipcp_p (ipt1->operand, ipt2->operand)))
    return false;
  return true;
}

/* Return true if the two aggregate jump functions are known to be equivalent.
   The function can be used before the IPA phase of IPA-CP or inlining because
   it cannot cope with refdesc changes these passes can carry out.  */

static bool
ipa_agg_jump_functions_equivalent_p (ipa_agg_jf_item *ajf1,
                                     ipa_agg_jf_item *ajf2)
{
  if (ajf1->offset != ajf2->offset
      || ajf1->jftype != ajf2->jftype
      || !types_compatible_p (ajf1->type, ajf2->type))
    return false;

  switch (ajf1->jftype)
    {
    case IPA_JF_CONST:
      if (!values_equal_for_ipcp_p (ajf1->value.constant,
                                    ajf2->value.constant))
        return false;
      break;
    case IPA_JF_PASS_THROUGH:
      {
        ipa_pass_through_data *ipt1 = &ajf1->value.pass_through;
        ipa_pass_through_data *ipt2 = &ajf2->value.pass_through;
        if (!ipa_agg_pass_through_jf_equivalent_p (ipt1, ipt2, true))
          return false;
      }
      break;
    case IPA_JF_LOAD_AGG:
      {
        ipa_load_agg_data *ila1 = &ajf1->value.load_agg;
        ipa_load_agg_data *ila2 = &ajf2->value.load_agg;
        if (!ipa_agg_pass_through_jf_equivalent_p (&ila1->pass_through,
                                                   &ila2->pass_through, true))
          return false;
        if (ila1->offset != ila2->offset
            || ila1->by_ref != ila2->by_ref
            || !types_compatible_p (ila1->type, ila2->type))
          return false;
      }
      break;
    default:
      gcc_unreachable ();
    }
  return true;
}

/* Return true if the two jump functions are known to be equivalent.  The
   function can be used before the IPA phase of IPA-CP or inlining because it
   cannot cope with refdesc changes these passes can carry out.  */

bool
ipa_jump_functions_equivalent_p (ipa_jump_func *jf1, ipa_jump_func *jf2)
{
  if (jf1->type != jf2->type)
    return false;

  switch (jf1->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_CONST:
      {
        tree cst1 = ipa_get_jf_constant (jf1);
        tree cst2 = ipa_get_jf_constant (jf2);
        if (!values_equal_for_ipcp_p (cst1, cst2))
          return false;

        ipa_cst_ref_desc *rd1 = jfunc_rdesc_usable (jf1);
        ipa_cst_ref_desc *rd2 = jfunc_rdesc_usable (jf2);
        if (rd1 && rd2)
          {
            gcc_assert (rd1->refcount == 1
                        && rd2->refcount == 1);
            gcc_assert (!rd1->next_duplicate && !rd2->next_duplicate);
          }
        else if (rd1)
          return false;
        else if (rd2)
          return false;
      }
      break;
    case IPA_JF_PASS_THROUGH:
      {
        ipa_pass_through_data *ipt1 = &jf1->value.pass_through;
        ipa_pass_through_data *ipt2 = &jf2->value.pass_through;
        if (!ipa_agg_pass_through_jf_equivalent_p (ipt1, ipt2, false))
          return false;
      }
      break;
    case IPA_JF_ANCESTOR:
      {
        ipa_ancestor_jf_data *ia1 = &jf1->value.ancestor;
        ipa_ancestor_jf_data *ia2 = &jf2->value.ancestor;

        if (ia1->formal_id != ia2->formal_id
            || ia1->agg_preserved != ia2->agg_preserved
            || ia1->keep_null != ia2->keep_null
            || ia1->offset != ia2->offset)
          return false;
      }
      break;
    default:
      gcc_unreachable ();
    }

  if (((jf1->m_vr != nullptr) != (jf2->m_vr != nullptr))
      || (jf1->m_vr && !jf1->m_vr->equal_p (*jf2->m_vr)))
    return false;

  unsigned alen = vec_safe_length (jf1->agg.items);
  if (vec_safe_length (jf2->agg.items) != alen)
    return false;

  if (!alen)
    return true;

  if (jf1->agg.by_ref != jf2->agg.by_ref)
    return false;

  for (unsigned i = 0; i < alen; i++)
    if (!ipa_agg_jump_functions_equivalent_p (&(*jf1->agg.items)[i],
                                              &(*jf2->agg.items)[i]))
      return false;

  return true;
}

#include "gt-ipa-prop.h"
