1/* Manipulation of formal and actual parameters of functions and function
2 calls.
3 Copyright (C) 2017-2024 Free Software Foundation, Inc.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 3, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#define INCLUDE_ALGORITHM
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "backend.h"
26#include "tree.h"
27#include "gimple.h"
28#include "ssa.h"
29#include "cgraph.h"
30#include "fold-const.h"
31#include "tree-eh.h"
32#include "stor-layout.h"
33#include "gimplify.h"
34#include "gimple-iterator.h"
35#include "gimplify-me.h"
36#include "tree-cfg.h"
37#include "tree-dfa.h"
38#include "ipa-param-manipulation.h"
39#include "print-tree.h"
40#include "gimple-pretty-print.h"
41#include "builtins.h"
42#include "tree-ssa.h"
43#include "tree-inline.h"
44#include "alloc-pool.h"
45#include "symbol-summary.h"
46#include "symtab-clones.h"
47#include "tree-phinodes.h"
48#include "cfgexpand.h"
49#include "attribs.h"
50#include "sreal.h"
51#include "ipa-cp.h"
52#include "ipa-prop.h"
53
/* Actual prefixes of different newly synthesized parameters.  Keep in sync
   with IPA_PARAM_PREFIX_* defines.  */
56
57static const char *ipa_param_prefixes[IPA_PARAM_PREFIX_COUNT]
58 = {"SYNTH",
59 "ISRA",
60 "simd",
61 "mask"};
62
63/* Names of parameters for dumping. Keep in sync with enum ipa_parm_op. */
64
65static const char *ipa_param_op_names[IPA_PARAM_PREFIX_COUNT]
66 = {"IPA_PARAM_OP_UNDEFINED",
67 "IPA_PARAM_OP_COPY",
68 "IPA_PARAM_OP_NEW",
69 "IPA_PARAM_OP_SPLIT"};
70
/* Structure describing pass-through IPA-SRA splits.  In essence, it gives the
   new index for a combination of the original index and offset.  */
74
75struct pass_through_split_map
76{
77 /* Original argument index. */
78 unsigned base_index;
79 /* Offset of the split part in the original argument. */
80 unsigned unit_offset;
81 /* Index of the split part in the call statement - where clone
82 materialization put it. */
83 int new_index;
84};
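
/* For illustration only (a hypothetical scenario, not data used anywhere): if
   IPA-SRA split the aggregate originally passed as the second argument
   (base_index 1) and the piece at unit offset 8 ended up at position 3 in the
   call statement after clone materialization, the corresponding entry would
   look like

       pass_through_split_map map = {1, 8, 3};

   that is base_index 1, unit_offset 8 and new_index 3.  */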
85
86/* Information about some call statements that needs to be conveyed from clone
87 materialization to edge redirection. */
88
89class ipa_edge_modification_info
90{
91 public:
92 ipa_edge_modification_info ()
93 {}
94
95 /* Mapping of original argument indices to where those arguments sit in the
96 call statement now or to a negative index if they were removed. */
97 auto_vec<int> index_map;
98 /* Information about ISRA replacements put into the call statement at the
99 clone materialization stages. */
100 auto_vec<pass_through_split_map> pass_through_map;
101 /* Necessary adjustment to ipa_param_adjustments::m_always_copy_start when
102 redirecting the call. */
103 int always_copy_delta = 0;
104};
105
/* Class for storing and retrieving summaries about call statement
   modifications.  */
108
109class ipa_edge_modification_sum
110 : public call_summary <ipa_edge_modification_info *>
111{
112 public:
113 ipa_edge_modification_sum (symbol_table *table)
114 : call_summary<ipa_edge_modification_info *> (table)
115 {
116 }
117
118 /* Hook that is called by summary when an edge is duplicated. */
119
120 void duplicate (cgraph_edge *,
121 cgraph_edge *,
122 ipa_edge_modification_info *old_info,
123 ipa_edge_modification_info *new_info) final override
124 {
125 new_info->index_map.safe_splice (src: old_info->index_map);
126 new_info->pass_through_map.safe_splice (src: old_info->pass_through_map);
127 new_info->always_copy_delta = old_info->always_copy_delta;
128 }
129};
130
131/* Call summary to store information about edges which have had their arguments
132 partially modified already. */
133
134static ipa_edge_modification_sum *ipa_edge_modifications;
135
136/* Fail compilation if CS has any summary associated with it in
137 ipa_edge_modifications. */
138
139DEBUG_FUNCTION void
140ipa_verify_edge_has_no_modifications (cgraph_edge *cs)
141{
142 gcc_assert (!ipa_edge_modifications || !ipa_edge_modifications->get (cs));
143}
144
145/* Fill an empty vector ARGS with PARM_DECLs representing formal parameters of
146 FNDECL. The function should not be called during LTO WPA phase except for
147 thunks (or functions with bodies streamed in). */
148
149void
150push_function_arg_decls (vec<tree> *args, tree fndecl)
151{
152 int count;
153 tree parm;
154
  /* Safety check that we do not attempt to use the function in WPA, except
     when the function is a thunk and then we have DECL_ARGUMENTS or when we
     have already explicitly loaded its body.  */
158 gcc_assert (!flag_wpa
159 || DECL_ARGUMENTS (fndecl)
160 || gimple_has_body_p (fndecl));
161 count = 0;
162 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
163 count++;
164
165 args->reserve_exact (nelems: count);
166 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
167 args->quick_push (obj: parm);
168}
169
170/* Fill an empty vector TYPES with trees representing formal parameters of
171 function type FNTYPE. */
172
173void
174push_function_arg_types (vec<tree> *types, tree fntype)
175{
176 int count = 0;
177 tree t;
178
179 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
180 count++;
181
182 types->reserve_exact (nelems: count);
183 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
184 types->quick_push (TREE_VALUE (t));
185}
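
/* A minimal usage sketch (purely illustrative; fndecl stands for any function
   declaration whose DECL_ARGUMENTS or body are available) showing how the two
   helpers above are typically combined, mirroring what common_initialization
   further below does when the function type lacks a prototype:

       auto_vec<tree, 16> parms, types;
       push_function_arg_decls (&parms, fndecl);
       if (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
	 push_function_arg_types (&types, TREE_TYPE (fndecl));
       else
	 for (tree p : parms)
	   types.safe_push (TREE_TYPE (p));  */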
186
/* Dump the adjustments in the vector ADJ_PARAMS to file F in a
   human-friendly way.  */
189
190void
191ipa_dump_adjusted_parameters (FILE *f,
192 vec<ipa_adjusted_param, va_gc> *adj_params)
193{
194 unsigned i, len = vec_safe_length (v: adj_params);
195 bool first = true;
196
197 if (!len)
198 return;
199
200 fprintf (stream: f, format: " IPA adjusted parameters: ");
201 for (i = 0; i < len; i++)
202 {
203 struct ipa_adjusted_param *apm;
204 apm = &(*adj_params)[i];
205
206 if (!first)
207 fprintf (stream: f, format: " ");
208 else
209 first = false;
210
211 fprintf (stream: f, format: "%i. %s %s", i, ipa_param_op_names[apm->op],
212 apm->prev_clone_adjustment ? "prev_clone_adjustment " : "");
213 switch (apm->op)
214 {
215 case IPA_PARAM_OP_UNDEFINED:
216 break;
217
218 case IPA_PARAM_OP_COPY:
219 fprintf (stream: f, format: ", base_index: %u", apm->base_index);
220 fprintf (stream: f, format: ", prev_clone_index: %u", apm->prev_clone_index);
221 break;
222
223 case IPA_PARAM_OP_SPLIT:
224 fprintf (stream: f, format: ", offset: %u", apm->unit_offset);
225 /* fall-through */
226 case IPA_PARAM_OP_NEW:
227 fprintf (stream: f, format: ", base_index: %u", apm->base_index);
228 fprintf (stream: f, format: ", prev_clone_index: %u", apm->prev_clone_index);
229 print_node_brief (f, ", type: ", apm->type, 0);
230 print_node_brief (f, ", alias type: ", apm->alias_ptr_type, 0);
231 fprintf (stream: f, format: " prefix: %s",
232 ipa_param_prefixes[apm->param_prefix_index]);
233 if (apm->reverse)
234 fprintf (stream: f, format: ", reverse");
235 break;
236 }
237 fprintf (stream: f, format: "\n");
238 }
239}
240
/* Fill NEW_TYPES with types of a function after its current OTYPES have been
   modified as described in ADJ_PARAMS.  When USE_PREV_INDICES is true, use
   prev_clone_index from ADJ_PARAMS, otherwise use base_index.  */
245
246static void
247fill_vector_of_new_param_types (vec<tree> *new_types, vec<tree> *otypes,
248 vec<ipa_adjusted_param, va_gc> *adj_params,
249 bool use_prev_indices)
250{
251 unsigned adj_len = vec_safe_length (v: adj_params);
252 new_types->reserve_exact (nelems: adj_len);
253 for (unsigned i = 0; i < adj_len ; i++)
254 {
255 ipa_adjusted_param *apm = &(*adj_params)[i];
256 if (apm->op == IPA_PARAM_OP_COPY)
257 {
258 unsigned index
259 = use_prev_indices ? apm->prev_clone_index : apm->base_index;
260 /* The following needs to be handled gracefully because of type
261 mismatches. This happens with LTO but apparently also in Fortran
262 with -fcoarray=lib -O2 -lcaf_single -latomic. */
263 if (index >= otypes->length ())
264 continue;
265 new_types->quick_push (obj: (*otypes)[index]);
266 }
267 else if (apm->op == IPA_PARAM_OP_NEW
268 || apm->op == IPA_PARAM_OP_SPLIT)
269 {
270 tree ntype = apm->type;
271 if (is_gimple_reg_type (type: ntype)
272 && TYPE_MODE (ntype) != BLKmode)
273 {
274 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ntype));
275 if (TYPE_ALIGN (ntype) != malign)
276 ntype = build_aligned_type (ntype, malign);
277 }
278 new_types->quick_push (obj: ntype);
279 }
280 else
281 gcc_unreachable ();
282 }
283}
284
285/* Return false if given attribute should prevent type adjustments. */
286
287bool
288ipa_param_adjustments::type_attribute_allowed_p (tree name)
289{
290 if ((is_attribute_p (attr_name: "fn spec", ident: name) && flag_ipa_modref)
291 || is_attribute_p (attr_name: "access", ident: name)
292 || is_attribute_p (attr_name: "returns_nonnull", ident: name)
293 || is_attribute_p (attr_name: "assume_aligned", ident: name)
294 || is_attribute_p (attr_name: "nocf_check", ident: name)
295 || is_attribute_p (attr_name: "warn_unused_result", ident: name))
296 return true;
297 return false;
298}
299
300/* Return true if attribute should be dropped if parameter changed. */
301
302static bool
303drop_type_attribute_if_params_changed_p (tree name)
304{
305 if (is_attribute_p (attr_name: "fn spec", ident: name)
306 || is_attribute_p (attr_name: "access", ident: name))
307 return true;
308 return false;
309}
310
/* Build and return a function type just like ORIG_TYPE but with parameter
   types given in NEW_PARAM_TYPES - which can be NULL if, but only if,
   ORIG_TYPE itself has NULL TYPE_ARG_TYPES.  If METHOD2FUNC is true, also
   make it a FUNCTION_TYPE instead of a METHOD_TYPE.  If SKIP_RETURN is true,
   make the new type return void.  If ARGS_MODIFIED is true, drop attributes
   that are no longer up to date.  */
316
317static tree
318build_adjusted_function_type (tree orig_type, vec<tree> *new_param_types,
319 bool method2func, bool skip_return,
320 bool args_modified)
321{
322 tree new_arg_types = NULL;
323 if (TYPE_ARG_TYPES (orig_type))
324 {
325 gcc_checking_assert (new_param_types);
326 bool last_parm_void = (TREE_VALUE (tree_last (TYPE_ARG_TYPES (orig_type)))
327 == void_type_node);
328 unsigned len = new_param_types->length ();
329 for (unsigned i = 0; i < len; i++)
330 new_arg_types = tree_cons (NULL_TREE, (*new_param_types)[i],
331 new_arg_types);
332
333 tree new_reversed = nreverse (new_arg_types);
334 if (last_parm_void)
335 {
336 if (new_reversed)
337 TREE_CHAIN (new_arg_types) = void_list_node;
338 else
339 new_reversed = void_list_node;
340 }
341 new_arg_types = new_reversed;
342 }
343
344 /* Use build_distinct_type_copy to preserve as much as possible from original
345 type (debug info, attribute lists etc.). The one exception is
346 METHOD_TYPEs which must have THIS argument and when we are asked to remove
347 it, we need to build new FUNCTION_TYPE instead. */
348 tree new_type = NULL;
349 if (method2func)
350 {
351 tree ret_type;
352 if (skip_return)
353 ret_type = void_type_node;
354 else
355 ret_type = TREE_TYPE (orig_type);
356
357 new_type
358 = build_distinct_type_copy (build_function_type (ret_type,
359 new_arg_types));
360 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
361 }
362 else
363 {
364 new_type = build_distinct_type_copy (orig_type);
365 TYPE_ARG_TYPES (new_type) = new_arg_types;
366 if (skip_return)
367 TREE_TYPE (new_type) = void_type_node;
368 }
369 if (args_modified && TYPE_ATTRIBUTES (new_type))
370 {
371 tree t = TYPE_ATTRIBUTES (new_type);
372 tree *last = &TYPE_ATTRIBUTES (new_type);
373 TYPE_ATTRIBUTES (new_type) = NULL;
374 for (;t; t = TREE_CHAIN (t))
375 if (!drop_type_attribute_if_params_changed_p
376 (name: get_attribute_name (t)))
377 {
378 *last = copy_node (t);
379 TREE_CHAIN (*last) = NULL;
380 last = &TREE_CHAIN (*last);
381 }
382 }
383
384 return new_type;
385}
386
387/* Return the maximum index in any IPA_PARAM_OP_COPY adjustment or -1 if there
388 is none. */
389
390int
391ipa_param_adjustments::get_max_base_index ()
392{
393 unsigned adj_len = vec_safe_length (v: m_adj_params);
394 int max_index = -1;
395 for (unsigned i = 0; i < adj_len ; i++)
396 {
397 ipa_adjusted_param *apm = &(*m_adj_params)[i];
398 if (apm->op == IPA_PARAM_OP_COPY
399 && max_index < apm->base_index)
400 max_index = apm->base_index;
401 }
402 return max_index;
403}
404
405
406/* Fill SURVIVING_PARAMS with an array of bools where each one says whether a
407 parameter that originally was at that position still survives in the given
408 clone or is removed/replaced. If the final array is smaller than an index
409 of an original parameter, that parameter also did not survive. That a
410 parameter survives does not mean it has the same index as before. */
411
412void
413ipa_param_adjustments::get_surviving_params (vec<bool> *surviving_params)
414{
415 unsigned adj_len = vec_safe_length (v: m_adj_params);
416 int max_index = get_max_base_index ();
417
418 if (max_index < 0)
419 return;
420 surviving_params->reserve_exact (nelems: max_index + 1);
421 surviving_params->quick_grow_cleared (len: max_index + 1);
422 for (unsigned i = 0; i < adj_len ; i++)
423 {
424 ipa_adjusted_param *apm = &(*m_adj_params)[i];
425 if (apm->op == IPA_PARAM_OP_COPY)
426 (*surviving_params)[apm->base_index] = true;
427 }
428}
429
/* Fill NEW_INDICES with new indices of each surviving parameter or -1 for
   those which do not survive.  Any parameter with an index beyond the length
   of the vector does not survive.  There is currently no support for a
   parameter to be copied to two distinct new parameters.  */
434
435void
436ipa_param_adjustments::get_updated_indices (vec<int> *new_indices)
437{
438 unsigned adj_len = vec_safe_length (v: m_adj_params);
439 int max_index = get_max_base_index ();
440
441 if (max_index < 0)
442 return;
443 unsigned res_len = max_index + 1;
444 new_indices->reserve_exact (nelems: res_len);
445 for (unsigned i = 0; i < res_len ; i++)
446 new_indices->quick_push (obj: -1);
447 for (unsigned i = 0; i < adj_len ; i++)
448 {
449 ipa_adjusted_param *apm = &(*m_adj_params)[i];
450 if (apm->op == IPA_PARAM_OP_COPY)
451 (*new_indices)[apm->base_index] = i;
452 }
453}
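
/* An illustrative example (hypothetical adjustments object): if the original
   function had three parameters and the adjustments drop the first one while
   copying the remaining two, then

       auto_vec<bool, 16> survivors;
       auto_vec<int, 16> new_idx;
       adjustments->get_surviving_params (&survivors);
       adjustments->get_updated_indices (&new_idx);

   leaves survivors containing {false, true, true} and new_idx containing
   {-1, 0, 1}, because the two survivors move to positions 0 and 1 of the new
   signature.  */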
454
455/* Return the original index for the given new parameter index. Return a
456 negative number if not available. */
457
458int
459ipa_param_adjustments::get_original_index (int newidx)
460{
461 const ipa_adjusted_param *adj = &(*m_adj_params)[newidx];
462 if (adj->op != IPA_PARAM_OP_COPY)
463 return -1;
464 return adj->base_index;
465}
466
467/* Return true if the first parameter (assuming there was one) survives the
468 transformation intact and remains the first one. */
469
470bool
471ipa_param_adjustments::first_param_intact_p ()
472{
473 return (!vec_safe_is_empty (v: m_adj_params)
474 && (*m_adj_params)[0].op == IPA_PARAM_OP_COPY
475 && (*m_adj_params)[0].base_index == 0);
476}
477
478/* Return true if we have to change what has formerly been a method into a
479 function. */
480
481bool
482ipa_param_adjustments::method2func_p (tree orig_type)
483{
484 return ((TREE_CODE (orig_type) == METHOD_TYPE) && !first_param_intact_p ());
485}
486
/* Given function type OLD_TYPE, return a new type derived from it after
   performing all stored modifications.  TYPE_ORIGINAL_P should be true when
   OLD_TYPE refers to the type before any IPA transformations, as opposed to a
   type that can be an intermediate one in between various IPA
   transformations.  */
492
493tree
494ipa_param_adjustments::build_new_function_type (tree old_type,
495 bool type_original_p)
496{
497 auto_vec<tree,16> new_param_types, *new_param_types_p;
498 if (prototype_p (old_type))
499 {
500 auto_vec<tree, 16> otypes;
501 push_function_arg_types (types: &otypes, fntype: old_type);
502 fill_vector_of_new_param_types (new_types: &new_param_types, otypes: &otypes, adj_params: m_adj_params,
503 use_prev_indices: !type_original_p);
504 new_param_types_p = &new_param_types;
505 }
506 else
507 new_param_types_p = NULL;
508
  /* Check whether any parameters that existing type attributes care about
     have been modified.  In that case we will need to drop some of those
     attributes.  */
511 bool modified = false;
512 size_t index = 0;
513 if (m_adj_params)
514 for (tree t = TYPE_ARG_TYPES (old_type);
515 t && (int)index < m_always_copy_start && !modified;
516 t = TREE_CHAIN (t), index++)
517 if (index >= m_adj_params->length ()
518 || get_original_index (newidx: index) != (int)index)
519 modified = true;
520
521
522 return build_adjusted_function_type (orig_type: old_type, new_param_types: new_param_types_p,
523 method2func: method2func_p (orig_type: old_type), skip_return: m_skip_return,
524 args_modified: modified);
525}
526
/* Build a variant of function decl ORIG_DECL which has no return value if
   M_SKIP_RETURN is true and, if the types of ORIG_DECL's parameters are
   known, has its type adjusted as indicated in M_ADJ_PARAMS.  Arguments from
   the DECL_ARGUMENTS list are not processed now, since they are linked by
   TREE_CHAIN directly and not accessible in LTO during WPA.  The caller is
   responsible for eliminating them when clones are properly materialized.  */
533
534tree
535ipa_param_adjustments::adjust_decl (tree orig_decl)
536{
537 tree new_decl = copy_node (orig_decl);
538 tree orig_type = TREE_TYPE (orig_decl);
539 if (prototype_p (orig_type)
540 || (m_skip_return && !VOID_TYPE_P (TREE_TYPE (orig_type))))
541 {
542 tree new_type = build_new_function_type (old_type: orig_type, type_original_p: false);
543 TREE_TYPE (new_decl) = new_type;
544 }
545 if (method2func_p (orig_type))
546 DECL_VINDEX (new_decl) = NULL_TREE;
547
548 /* When signature changes, we need to clear builtin info. */
549 if (fndecl_built_in_p (node: new_decl))
550 set_decl_built_in_function (decl: new_decl, fclass: NOT_BUILT_IN, fcode: 0);
551
552 DECL_VIRTUAL_P (new_decl) = 0;
553 DECL_LANG_SPECIFIC (new_decl) = NULL;
554
555 /* Drop MALLOC attribute for a void function. */
556 if (m_skip_return)
557 DECL_IS_MALLOC (new_decl) = 0;
558
559 return new_decl;
560}
561
562/* Wrapper around get_base_ref_and_offset for cases interesting for IPA-SRA
563 transformations. Return true if EXPR has an interesting form and fill in
564 *BASE_P and *UNIT_OFFSET_P with the appropriate info. */
565
566static bool
567isra_get_ref_base_and_offset (tree expr, tree *base_p, unsigned *unit_offset_p)
568{
569 HOST_WIDE_INT offset, size;
570 bool reverse;
571 tree base
572 = get_ref_base_and_extent_hwi (expr, &offset, &size, &reverse);
573 if (!base || size < 0)
574 return false;
575
576 if ((offset % BITS_PER_UNIT) != 0)
577 return false;
578
579 if (TREE_CODE (base) == MEM_REF)
580 {
581 poly_int64 plmoff = mem_ref_offset (base).force_shwi ();
582 HOST_WIDE_INT moff;
583 bool is_cst = plmoff.is_constant (const_value: &moff);
584 if (!is_cst)
585 return false;
586 offset += moff * BITS_PER_UNIT;
587 base = TREE_OPERAND (base, 0);
588 }
589
590 if (offset < 0 || (offset / BITS_PER_UNIT) > UINT_MAX)
591 return false;
592
593 *base_p = base;
594 *unit_offset_p = offset / BITS_PER_UNIT;
595 return true;
596}
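
/* For example (illustration only): for an EXPR such as the dereference
   p_1(D)->inner.f, where the accessed field lies 12 bytes into the pointed-to
   object, the function returns true, sets *BASE_P to the SSA name p_1(D) and
   *UNIT_OFFSET_P to 12, having stripped the MEM_REF and folded its constant
   offset into the result:

       tree base;
       unsigned off;
       if (isra_get_ref_base_and_offset (expr, &base, &off))
	 ...  */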
597
598/* Remove all statements that use NAME directly or indirectly. KILLED_SSAS
599 contains the SSA_NAMEs that are already being or have been processed and new
600 ones need to be added to it. The function only has to process situations
601 handled by ssa_name_only_returned_p in ipa-sra.cc with the exception that it
602 can assume it must never reach a use in a return statement. */
603
604static void
605purge_all_uses (tree name, hash_set <tree> *killed_ssas)
606{
607 imm_use_iterator imm_iter;
608 gimple *stmt;
609 auto_vec <tree, 4> worklist;
610
611 worklist.safe_push (obj: name);
612 while (!worklist.is_empty ())
613 {
614 tree cur_name = worklist.pop ();
615 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, cur_name)
616 {
617 if (gimple_debug_bind_p (s: stmt))
618 {
	      /* When running within tree-inline, we will never end up here but
		 adding the SSAs to killed_ssas will do the trick in this case
		 and the respective debug statements will get reset.  */
622 gimple_debug_bind_reset_value (dbg: stmt);
623 update_stmt (s: stmt);
624 continue;
625 }
626
627 tree lhs = NULL_TREE;
628 if (is_gimple_assign (gs: stmt))
629 lhs = gimple_assign_lhs (gs: stmt);
630 else if (gimple_code (g: stmt) == GIMPLE_PHI)
631 lhs = gimple_phi_result (gs: stmt);
632 gcc_assert (lhs
633 && (TREE_CODE (lhs) == SSA_NAME)
634 && !gimple_vdef (stmt));
635 if (!killed_ssas->add (k: lhs))
636 {
637 worklist.safe_push (obj: lhs);
638 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
639 gsi_remove (&gsi, true);
640 }
641 }
642 }
643}
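
/* Illustration on hypothetical IL: if the SSA name being purged, say lhs_2,
   were only used in

       a_3 = lhs_2 + 1;
       # DEBUG D#1 => a_3

   then purging lhs_2 removes the assignment defining a_3, adds a_3 to
   KILLED_SSAS and resets the debug bind; the caller is expected to have added
   lhs_2 itself to KILLED_SSAS beforehand.  */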
644
645/* Modify actual arguments of a function call in statement currently belonging
646 to CS, and make it call CS->callee->decl. Return the new statement that
647 replaced the old one. When invoked, cfun and current_function_decl have to
648 be set to the caller. When called from within tree-inline, KILLED_SSAs has
649 to contain the pointer to killed_new_ssa_names within the copy_body_data
650 structure and SSAs discovered to be useless (if LHS is removed) will be
651 added to it, otherwise it needs to be NULL. */
652
653gcall *
654ipa_param_adjustments::modify_call (cgraph_edge *cs,
655 bool update_references,
656 hash_set <tree> *killed_ssas)
657{
658 gcall *stmt = cs->call_stmt;
659 tree callee_decl = cs->callee->decl;
660
661 ipa_edge_modification_info *mod_info
662 = ipa_edge_modifications ? ipa_edge_modifications->get (edge: cs) : NULL;
663 if (mod_info && symtab->dump_file)
664 {
      fprintf (symtab->dump_file, "Information about pre-existing "
	       "modifications.\n Index map:");
667 unsigned idx_len = mod_info->index_map.length ();
668 for (unsigned i = 0; i < idx_len; i++)
669 fprintf (stream: symtab->dump_file, format: " %i", mod_info->index_map[i]);
670 fprintf (stream: symtab->dump_file, format: "\n Pass-through split map: ");
671 unsigned ptm_len = mod_info->pass_through_map.length ();
672 for (unsigned i = 0; i < ptm_len; i++)
673 fprintf (stream: symtab->dump_file,
674 format: " (base_index: %u, offset: %u, new_index: %i)",
675 mod_info->pass_through_map[i].base_index,
676 mod_info->pass_through_map[i].unit_offset,
677 mod_info->pass_through_map[i].new_index);
678 fprintf (stream: symtab->dump_file, format: "\n Always-copy delta: %i\n",
679 mod_info->always_copy_delta);
680 }
681
682 unsigned len = vec_safe_length (v: m_adj_params);
683 auto_vec<tree, 16> vargs (len);
684 unsigned old_nargs = gimple_call_num_args (gs: stmt);
685 unsigned orig_nargs = mod_info ? mod_info->index_map.length () : old_nargs;
686 auto_vec<bool, 16> kept (old_nargs);
687 kept.quick_grow_cleared (len: old_nargs);
688
689 cgraph_node *current_node = cgraph_node::get (decl: current_function_decl);
690 if (update_references)
691 current_node->remove_stmt_references (stmt);
692
693 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
694 gimple_stmt_iterator prev_gsi = gsi;
695 gsi_prev (i: &prev_gsi);
696 for (unsigned i = 0; i < len; i++)
697 {
698 ipa_adjusted_param *apm = &(*m_adj_params)[i];
699 if (apm->op == IPA_PARAM_OP_COPY)
700 {
701 int index = apm->base_index;
702 if ((unsigned) index >= orig_nargs)
703 /* Can happen if the original call has argument mismatch,
704 ignore. */
705 continue;
706 if (mod_info)
707 {
708 index = mod_info->index_map[apm->base_index];
709 gcc_assert (index >= 0);
710 }
711
712 tree arg = gimple_call_arg (gs: stmt, index);
713
714 vargs.quick_push (obj: arg);
715 kept[index] = true;
716 continue;
717 }
718
719 /* At the moment the only user of IPA_PARAM_OP_NEW modifies calls itself.
720 If we ever want to support it during WPA IPA stage, we'll need a
721 mechanism to call into the IPA passes that introduced them. Currently
722 we simply mandate that IPA infrastructure understands all argument
723 modifications. Remember, edge redirection/modification is done only
724 once, not in steps for each pass modifying the callee like clone
725 materialization. */
726 gcc_assert (apm->op == IPA_PARAM_OP_SPLIT);
727
      /* We have to handle pass-through changes differently using the map
	 clone materialization might have left behind.  */
730 tree repl = NULL_TREE;
731 unsigned ptm_len = mod_info ? mod_info->pass_through_map.length () : 0;
732 for (unsigned j = 0; j < ptm_len; j++)
733 if (mod_info->pass_through_map[j].base_index == apm->base_index
734 && mod_info->pass_through_map[j].unit_offset == apm->unit_offset)
735 {
736 int repl_idx = mod_info->pass_through_map[j].new_index;
737 gcc_assert (repl_idx >= 0);
738 repl = gimple_call_arg (gs: stmt, index: repl_idx);
739 break;
740 }
741 if (repl)
742 {
743 if (!useless_type_conversion_p(apm->type, repl->typed.type))
744 {
745 repl = force_value_to_type (type: apm->type, value: repl);
746 repl = force_gimple_operand_gsi (&gsi, repl,
747 true, NULL, true, GSI_SAME_STMT);
748 }
749 vargs.quick_push (obj: repl);
750 continue;
751 }
752
753 int index = apm->base_index;
754 if ((unsigned) index >= orig_nargs)
755 /* Can happen if the original call has argument mismatch, ignore. */
756 continue;
757 if (mod_info)
758 {
759 index = mod_info->index_map[apm->base_index];
760 gcc_assert (index >= 0);
761 }
762 tree base = gimple_call_arg (gs: stmt, index);
763
      /* Because we create a new parameter out of the value of the old one, we
	 can do the following kinds of transformations:

	 - A scalar passed by reference, potentially as a part of a larger
	   aggregate, is converted to a scalar passed by value.

	 - A part of an aggregate is passed instead of the whole aggregate.  */
771
772 location_t loc = gimple_location (g: stmt);
773 tree off;
774 bool deref_base = false;
775 unsigned int deref_align = 0;
776 if (TREE_CODE (base) != ADDR_EXPR
777 && is_gimple_reg_type (TREE_TYPE (base)))
778 {
779 /* Detect type mismatches in calls in invalid programs and make a
780 poor attempt to gracefully convert them so that we don't ICE. */
781 if (!POINTER_TYPE_P (TREE_TYPE (base)))
782 base = force_value_to_type (ptr_type_node, value: base);
783
784 off = build_int_cst (apm->alias_ptr_type, apm->unit_offset);
785 }
786 else
787 {
788 bool addrof;
789 if (TREE_CODE (base) == ADDR_EXPR)
790 {
791 base = TREE_OPERAND (base, 0);
792 addrof = true;
793 }
794 else
795 addrof = false;
796
797 tree prev_base = base;
798 poly_int64 base_offset;
799 base = get_addr_base_and_unit_offset (base, &base_offset);
800
801 /* Aggregate arguments can have non-invariant addresses. */
802 if (!base)
803 {
804 base = build_fold_addr_expr (prev_base);
805 off = build_int_cst (apm->alias_ptr_type, apm->unit_offset);
806 }
807 else if (TREE_CODE (base) == MEM_REF)
808 {
809 if (!addrof)
810 {
811 deref_base = true;
812 deref_align = TYPE_ALIGN (TREE_TYPE (base));
813 }
814 off = build_int_cst (apm->alias_ptr_type,
815 base_offset + apm->unit_offset);
816 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
817 off);
818 base = TREE_OPERAND (base, 0);
819 }
820 else
821 {
822 off = build_int_cst (apm->alias_ptr_type,
823 base_offset + apm->unit_offset);
824 base = build_fold_addr_expr (base);
825 }
826 }
827
828 tree type = apm->type;
829 unsigned int align;
830 unsigned HOST_WIDE_INT misalign;
831
832 if (deref_base)
833 {
834 align = deref_align;
835 misalign = 0;
836 }
837 else
838 {
839 get_pointer_alignment_1 (base, &align, &misalign);
840 /* All users must make sure that we can be optimistic when it
841 comes to alignment in this case (by inspecting the final users
842 of these new parameters). */
843 if (TYPE_ALIGN (type) > align)
844 align = TYPE_ALIGN (type);
845 }
846 misalign
847 += (offset_int::from (x: wi::to_wide (t: off), sgn: SIGNED).to_short_addr ()
848 * BITS_PER_UNIT);
849 misalign = misalign & (align - 1);
850 if (misalign != 0)
851 align = least_bit_hwi (x: misalign);
852 if (align < TYPE_ALIGN (type))
853 type = build_aligned_type (type, align);
854 base = force_gimple_operand_gsi (&gsi, base,
855 true, NULL, true, GSI_SAME_STMT);
856 tree expr = fold_build2_loc (loc, MEM_REF, type, base, off);
857 REF_REVERSE_STORAGE_ORDER (expr) = apm->reverse;
858 /* If expr is not a valid gimple call argument emit
859 a load into a temporary. */
860 if (is_gimple_reg_type (TREE_TYPE (expr)))
861 {
862 gimple *tem = gimple_build_assign (NULL_TREE, expr);
863 if (gimple_in_ssa_p (cfun))
864 {
865 gimple_set_vuse (g: tem, vuse: gimple_vuse (g: stmt));
866 expr = make_ssa_name (TREE_TYPE (expr), stmt: tem);
867 }
868 else
869 expr = create_tmp_reg (TREE_TYPE (expr));
870 gimple_assign_set_lhs (gs: tem, lhs: expr);
871 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
872 }
873 vargs.quick_push (obj: expr);
874 }
875
876 if (m_always_copy_start >= 0)
877 {
878 int always_copy_start = m_always_copy_start;
879 if (mod_info)
880 {
881 always_copy_start += mod_info->always_copy_delta;
882 gcc_assert (always_copy_start >= 0);
883 }
884 for (unsigned i = always_copy_start; i < old_nargs; i++)
885 vargs.safe_push (obj: gimple_call_arg (gs: stmt, index: i));
886 }
887
888 /* For optimized away parameters, add on the caller side
889 before the call
890 DEBUG D#X => parm_Y(D)
891 stmts and associate D#X with parm in decl_debug_args_lookup
892 vector to say for debug info that if parameter parm had been passed,
893 it would have value parm_Y(D). */
894 tree old_decl = gimple_call_fndecl (gs: stmt);
895 if (MAY_HAVE_DEBUG_BIND_STMTS && old_decl && callee_decl)
896 {
897 vec<tree, va_gc> **debug_args = NULL;
898 unsigned i = 0;
899 cgraph_node *callee_node = cgraph_node::get (decl: callee_decl);
900
901 /* FIXME: we don't seem to be able to insert debug args before clone
902 is materialized. Materializing them early leads to extra memory
903 use. */
904 if (callee_node->clone_of)
905 callee_node->get_untransformed_body ();
906 for (tree old_parm = DECL_ARGUMENTS (old_decl);
907 old_parm && i < old_nargs && ((int) i) < m_always_copy_start;
908 old_parm = DECL_CHAIN (old_parm), i++)
909 {
910 if (!is_gimple_reg (old_parm) || kept[i])
911 continue;
912 tree arg;
913 if (mod_info)
914 {
915 if (mod_info->index_map[i] < 0)
916 continue;
917 arg = gimple_call_arg (gs: stmt, index: mod_info->index_map[i]);
918 }
919 else
920 arg = gimple_call_arg (gs: stmt, index: i);
921
922 tree origin = DECL_ORIGIN (old_parm);
923 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
924 {
925 if (!fold_convertible_p (TREE_TYPE (origin), arg))
926 continue;
927 tree rhs1;
928 if (TREE_CODE (arg) == SSA_NAME
929 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (arg))
930 && (rhs1
931 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (arg)))
932 && useless_type_conversion_p (TREE_TYPE (origin),
933 TREE_TYPE (rhs1)))
934 arg = rhs1;
935 else
936 arg = fold_convert_loc (gimple_location (g: stmt),
937 TREE_TYPE (origin), arg);
938 }
939 if (debug_args == NULL)
940 debug_args = decl_debug_args_insert (callee_decl);
941 unsigned int ix;
942 tree ddecl = NULL_TREE;
943 for (ix = 0; vec_safe_iterate (v: *debug_args, ix, ptr: &ddecl); ix += 2)
944 if (ddecl == origin)
945 {
946 ddecl = (**debug_args)[ix + 1];
947 break;
948 }
949 if (ddecl == NULL)
950 {
951 ddecl = build_debug_expr_decl (TREE_TYPE (origin));
952 /* FIXME: Is setting the mode really necessary? */
953 SET_DECL_MODE (ddecl, DECL_MODE (origin));
954
955 vec_safe_push (v&: *debug_args, obj: origin);
956 vec_safe_push (v&: *debug_args, obj: ddecl);
957 }
958 gimple *def_temp = gimple_build_debug_bind (ddecl,
959 unshare_expr (arg), stmt);
960 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
961 }
962 }
963
964 if (dump_file && (dump_flags & TDF_DETAILS))
965 {
966 fprintf (stream: dump_file, format: "replacing stmt:");
967 print_gimple_stmt (dump_file, gsi_stmt (i: gsi), 0);
968 }
969
970 gcall *new_stmt = gimple_build_call_vec (callee_decl, vargs);
971
972 hash_set <tree> *ssas_to_remove = NULL;
973 if (tree lhs = gimple_call_lhs (gs: stmt))
974 {
975 if (!m_skip_return)
976 gimple_call_set_lhs (gs: new_stmt, lhs);
977 else if (TREE_CODE (lhs) == SSA_NAME)
978 {
979 if (!killed_ssas)
980 {
981 ssas_to_remove = new hash_set<tree> (8);
982 killed_ssas = ssas_to_remove;
983 }
984 killed_ssas->add (k: lhs);
985 purge_all_uses (name: lhs, killed_ssas);
986 }
987 }
988
989 gimple_set_block (g: new_stmt, block: gimple_block (g: stmt));
990 if (gimple_has_location (g: stmt))
991 gimple_set_location (g: new_stmt, location: gimple_location (g: stmt));
992 gimple_call_set_chain (call_stmt: new_stmt, chain: gimple_call_chain (gs: stmt));
993 gimple_call_copy_flags (dest_call: new_stmt, orig_call: stmt);
994 if (gimple_in_ssa_p (cfun))
995 gimple_move_vops (new_stmt, stmt);
996
997 if (dump_file && (dump_flags & TDF_DETAILS))
998 {
999 fprintf (stream: dump_file, format: "with stmt:");
1000 print_gimple_stmt (dump_file, new_stmt, 0);
1001 fprintf (stream: dump_file, format: "\n");
1002 }
1003 gsi_replace (&gsi, new_stmt, true);
1004 if (ssas_to_remove)
1005 {
1006 ipa_release_ssas_in_hash (killed_ssas: ssas_to_remove);
1007 delete ssas_to_remove;
1008 }
1009 if (update_references)
1010 do
1011 {
1012 current_node->record_stmt_references (stmt: gsi_stmt (i: gsi));
1013 gsi_prev (i: &gsi);
1014 }
1015 while (gsi_stmt (i: gsi) != gsi_stmt (i: prev_gsi));
1016
1017 if (mod_info)
1018 ipa_edge_modifications->remove (edge: cs);
1019 return new_stmt;
1020}
1021
1022/* Dump information contained in the object in textual form to F. */
1023
1024void
1025ipa_param_adjustments::dump (FILE *f)
1026{
1027 fprintf (stream: f, format: " m_always_copy_start: %i\n", m_always_copy_start);
1028 ipa_dump_adjusted_parameters (f, adj_params: m_adj_params);
1029 if (m_skip_return)
1030 fprintf (stream: f, format: " Will SKIP return.\n");
1031}
1032
1033/* Dump information contained in the object in textual form to stderr. */
1034
1035void
1036ipa_param_adjustments::debug ()
1037{
1038 dump (stderr);
1039}
1040
1041/* Register a REPLACEMENT for accesses to BASE at UNIT_OFFSET. */
1042
1043void
1044ipa_param_body_adjustments::register_replacement (tree base,
1045 unsigned unit_offset,
1046 tree replacement)
1047{
1048 ipa_param_body_replacement psr;
1049 psr.base = base;
1050 psr.repl = replacement;
1051 psr.dummy = NULL_TREE;
1052 psr.unit_offset = unit_offset;
1053 m_replacements.safe_push (obj: psr);
1054 m_sorted_replacements_p = false;
1055}
1056
1057/* Register that REPLACEMENT should replace parameter described in APM. */
1058
1059void
1060ipa_param_body_adjustments::register_replacement (ipa_adjusted_param *apm,
1061 tree replacement)
1062{
1063 gcc_checking_assert (apm->op == IPA_PARAM_OP_SPLIT
1064 || apm->op == IPA_PARAM_OP_NEW);
1065 gcc_checking_assert (!apm->prev_clone_adjustment);
1066 register_replacement (base: m_oparms[apm->prev_clone_index], unit_offset: apm->unit_offset,
1067 replacement);
1068}
1069
1070/* Comparator for sorting and searching
1071 ipa_param_body_adjustments::m_replacements. */
1072
1073static int
1074compare_param_body_replacement (const void *va, const void *vb)
1075{
1076 const ipa_param_body_replacement *a = (const ipa_param_body_replacement *) va;
1077 const ipa_param_body_replacement *b = (const ipa_param_body_replacement *) vb;
1078
1079 if (DECL_UID (a->base) < DECL_UID (b->base))
1080 return -1;
1081 if (DECL_UID (a->base) > DECL_UID (b->base))
1082 return 1;
1083 if (a->unit_offset < b->unit_offset)
1084 return -1;
1085 if (a->unit_offset > b->unit_offset)
1086 return 1;
1087 return 0;
1088}
1089
1090/* Sort m_replacements and set m_sorted_replacements_p to true. */
1091
1092void
1093ipa_param_body_adjustments::sort_replacements ()
1094{
1095 if (m_sorted_replacements_p)
1096 return;
1097 m_replacements.qsort (compare_param_body_replacement);
1098 m_sorted_replacements_p = true;
1099}
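
/* A minimal sketch (hypothetical caller) of how the replacement table is
   meant to be driven: all registrations must precede sorting, and sorting
   must precede the first lookup, because lookup_replacement_1 below asserts
   that m_replacements has been sorted:

       body_adj->register_replacement (apm, new_parm);
       ...
       body_adj->sort_replacements ();
       ipa_param_body_replacement *pbr
	 = body_adj->lookup_replacement_1 (old_parm_decl, unit_offset);

   where body_adj, apm, new_parm, old_parm_decl and unit_offset stand for
   whatever the caller has at hand.  */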
1100
1101/* Copy or not, as appropriate given m_id and decl context, a pre-existing
1102 PARM_DECL T so that it can be included in the parameters of the modified
1103 function. */
1104
1105tree
1106ipa_param_body_adjustments::carry_over_param (tree t)
1107{
1108 tree new_parm;
1109 if (m_id)
1110 {
1111 new_parm = remap_decl (decl: t, id: m_id);
1112 if (TREE_CODE (new_parm) != PARM_DECL)
1113 new_parm = m_id->copy_decl (t, m_id);
1114 }
1115 else if (DECL_CONTEXT (t) != m_fndecl)
1116 {
1117 new_parm = copy_node (t);
1118 DECL_CONTEXT (new_parm) = m_fndecl;
1119 }
1120 else
1121 new_parm = t;
1122 return new_parm;
1123}
1124
1125/* If DECL is a gimple register that has a default definition SSA name and that
1126 has some uses, return the default definition, otherwise return NULL_TREE. */
1127
1128tree
1129ipa_param_body_adjustments::get_ddef_if_exists_and_is_used (tree decl)
1130{
1131 if (!is_gimple_reg (decl))
1132 return NULL_TREE;
1133 tree ddef = ssa_default_def (m_id->src_cfun, decl);
1134 if (!ddef || has_zero_uses (var: ddef))
1135 return NULL_TREE;
1136 return ddef;
1137}
1138
/* Populate m_dead_stmts given that DEAD_PARAM is going to be removed without
   any replacement or splitting.  Push all removed SSA names that are used
   within debug statements to DEBUGSTACK.  */
1143
1144void
1145ipa_param_body_adjustments::mark_dead_statements (tree dead_param,
1146 vec<tree> *debugstack)
1147{
  /* Current IPA analyses which remove unused parameters never remove
     non-gimple-register ones which have any use except as parameters in other
     calls, so we can safely leave them as they are.  */
1151 tree parm_ddef = get_ddef_if_exists_and_is_used (decl: dead_param);
1152 if (!parm_ddef)
1153 return;
1154
1155 auto_vec<tree, 4> stack;
1156 hash_set<tree> used_in_debug;
1157 m_dead_ssas.add (k: parm_ddef);
1158 stack.safe_push (obj: parm_ddef);
1159 while (!stack.is_empty ())
1160 {
1161 imm_use_iterator imm_iter;
1162 use_operand_p use_p;
1163 tree t = stack.pop ();
1164
1165 insert_decl_map (m_id, t, error_mark_node);
1166 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, t)
1167 {
1168 gimple *stmt = USE_STMT (use_p);
1169
	  /* Calls containing dead arguments cannot be deleted;
	     modify_call_stmt will instead remove just the argument later on.
	     If isra_track_scalar_value_uses in ipa-sra.cc is extended to look
	     through const functions, we will need to do so here too.  */
1174 if (is_gimple_call (gs: stmt)
1175 || (m_id->blocks_to_copy
1176 && !bitmap_bit_p (m_id->blocks_to_copy,
1177 gimple_bb (g: stmt)->index)))
1178 continue;
1179
1180 if (is_gimple_debug (gs: stmt))
1181 {
1182 m_dead_stmts.add (k: stmt);
1183 gcc_assert (gimple_debug_bind_p (stmt));
1184 if (!used_in_debug.contains (k: t))
1185 {
1186 used_in_debug.add (k: t);
1187 debugstack->safe_push (obj: t);
1188 }
1189 }
1190 else if (gimple_code (g: stmt) == GIMPLE_PHI)
1191 {
1192 gphi *phi = as_a <gphi *> (p: stmt);
1193 int ix = PHI_ARG_INDEX_FROM_USE (use_p);
1194
1195 if (!m_id->blocks_to_copy
1196 || bitmap_bit_p (m_id->blocks_to_copy,
1197 gimple_phi_arg_edge (phi, i: ix)->src->index))
1198 {
1199 m_dead_stmts.add (k: phi);
1200 tree res = gimple_phi_result (gs: phi);
1201 if (!m_dead_ssas.add (k: res))
1202 stack.safe_push (obj: res);
1203 }
1204 }
1205 else if (is_gimple_assign (gs: stmt))
1206 {
1207 m_dead_stmts.add (k: stmt);
1208 if (!gimple_clobber_p (s: stmt))
1209 {
1210 tree lhs = gimple_assign_lhs (gs: stmt);
1211 gcc_assert (TREE_CODE (lhs) == SSA_NAME);
1212 if (!m_dead_ssas.add (k: lhs))
1213 stack.safe_push (obj: lhs);
1214 }
1215 }
1216 else if (gimple_code (g: stmt) == GIMPLE_RETURN)
1217 gcc_assert (m_adjustments && m_adjustments->m_skip_return);
1218 else
1219 /* IPA-SRA does not analyze other types of statements. */
1220 gcc_unreachable ();
1221 }
1222 }
1223
1224 if (!MAY_HAVE_DEBUG_STMTS)
1225 {
1226 gcc_assert (debugstack->is_empty ());
1227 return;
1228 }
1229
1230 tree dp_ddecl = build_debug_expr_decl (TREE_TYPE (dead_param));
1231 /* FIXME: Is setting the mode really necessary? */
1232 SET_DECL_MODE (dp_ddecl, DECL_MODE (dead_param));
1233 m_dead_ssa_debug_equiv.put (k: parm_ddef, v: dp_ddecl);
1234}
1235
/* Put all clobbers of the dereference of the default definition of PARAM into
   m_dead_stmts.  If there are returns among uses of the default definition of
   PARAM, verify they will be stripped off the return value.  */
1239
1240void
1241ipa_param_body_adjustments::mark_clobbers_dead (tree param)
1242{
1243 if (!is_gimple_reg (param))
1244 return;
1245 tree ddef = get_ddef_if_exists_and_is_used (decl: param);
1246 if (!ddef)
1247 return;
1248
1249 imm_use_iterator imm_iter;
1250 use_operand_p use_p;
1251 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
1252 {
1253 gimple *stmt = USE_STMT (use_p);
1254 if (gimple_clobber_p (s: stmt))
1255 m_dead_stmts.add (k: stmt);
1256 else if (gimple_code (g: stmt) == GIMPLE_RETURN)
1257 gcc_assert (m_adjustments && m_adjustments->m_skip_return);
1258 }
1259}
1260
1261/* Callback to walk_tree. If REMAP is an SSA_NAME that is present in hash_map
1262 passed in DATA, replace it with unshared version of what it was mapped to.
1263 If an SSA argument would be remapped to NULL, the whole operation needs to
1264 abort which is signaled by returning error_mark_node. */
1265
1266static tree
1267replace_with_mapped_expr (tree *remap, int *walk_subtrees, void *data)
1268{
1269 if (TYPE_P (*remap))
1270 {
1271 *walk_subtrees = 0;
1272 return 0;
1273 }
1274 if (TREE_CODE (*remap) != SSA_NAME)
1275 return 0;
1276
1277 *walk_subtrees = 0;
1278
1279 hash_map<tree, tree> *equivs = (hash_map<tree, tree> *) data;
1280 if (tree *p = equivs->get (k: *remap))
1281 {
1282 if (!*p)
1283 return error_mark_node;
1284 *remap = unshare_expr (*p);
1285 }
1286 return 0;
1287}
1288
/* Replace all occurrences of SSAs in m_dead_ssa_debug_equiv in *T with what
   they are mapped to.  */
1291
1292void
1293ipa_param_body_adjustments::remap_with_debug_expressions (tree *t)
1294{
1295 /* If *t is an SSA_NAME which should have its debug statements reset, it is
1296 mapped to NULL in the hash_map.
1297
1298 It is perhaps simpler to handle the SSA_NAME cases directly and only
1299 invoke walk_tree on more complex expressions. When
1300 remap_with_debug_expressions is called from tree-inline.cc, a to-be-reset
1301 SSA_NAME can be an operand to such expressions and the entire debug
1302 variable we are remapping should be reset. This is signaled by walk_tree
1303 returning error_mark_node and done by setting *t to NULL. */
1304 if (TREE_CODE (*t) == SSA_NAME)
1305 {
1306 if (tree *p = m_dead_ssa_debug_equiv.get (k: *t))
1307 *t = *p;
1308 }
1309 else if (walk_tree (t, replace_with_mapped_expr,
1310 &m_dead_ssa_debug_equiv, NULL) == error_mark_node)
1311 *t = NULL_TREE;
1312}
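
/* For instance (hypothetical mapping): with m_dead_ssa_debug_equiv containing
   x_1 -> D#5 and *T being the expression x_1 + 4, the function above rewrites
   *T into D#5 + 4; had x_1 been mapped to NULL instead, *T would have been
   set to NULL_TREE so that the containing debug bind gets reset.  */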
1313
/* For an SSA_NAME DEAD_SSA which is about to be DCEd because it is based on a
   useless parameter, prepare an expression that should represent it in
   debug binds in the cloned function and add a mapping for DEAD_SSA into
   m_dead_ssa_debug_equiv.  The mapping is to NULL when the associated debug
   statement has to be reset instead.  In that case return false, otherwise
   return true.  If DEAD_SSA comes from a basic block which is not about to be
   copied, ignore it and return true.  */
1321
1322bool
1323ipa_param_body_adjustments::prepare_debug_expressions (tree dead_ssa)
1324{
1325 gcc_checking_assert (m_dead_ssas.contains (dead_ssa));
1326 if (tree *d = m_dead_ssa_debug_equiv.get (k: dead_ssa))
1327 return (*d != NULL_TREE);
1328
1329 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (dead_ssa));
1330 gimple *def = SSA_NAME_DEF_STMT (dead_ssa);
1331 if (m_id->blocks_to_copy
1332 && !bitmap_bit_p (m_id->blocks_to_copy, gimple_bb (g: def)->index))
1333 return true;
1334
1335 if (gimple_code (g: def) == GIMPLE_PHI)
1336 {
      /* In theory, we could ignore all SSAs coming from BBs not in
	 m_id->blocks_to_copy but at the time of writing this code that should
	 never really be the case because only fnsplit uses that bitmap, so
	 don't bother.  */
1341 tree value = degenerate_phi_result (as_a <gphi *> (p: def));
1342 if (!value
1343 || (m_dead_ssas.contains (k: value)
1344 && !prepare_debug_expressions (dead_ssa: value)))
1345 {
1346 m_dead_ssa_debug_equiv.put (k: dead_ssa, NULL_TREE);
1347 return false;
1348 }
1349
1350 gcc_assert (TREE_CODE (value) == SSA_NAME);
1351 tree *d = m_dead_ssa_debug_equiv.get (k: value);
1352 m_dead_ssa_debug_equiv.put (k: dead_ssa, v: *d);
1353 return true;
1354 }
1355
1356 bool lost = false;
1357 use_operand_p use_p;
1358 ssa_op_iter oi;
1359 FOR_EACH_PHI_OR_STMT_USE (use_p, def, oi, SSA_OP_USE)
1360 {
1361 tree use = USE_FROM_PTR (use_p);
1362 if (m_dead_ssas.contains (k: use)
1363 && !prepare_debug_expressions (dead_ssa: use))
1364 {
1365 lost = true;
1366 break;
1367 }
1368 }
1369
1370 if (lost)
1371 {
1372 m_dead_ssa_debug_equiv.put (k: dead_ssa, NULL_TREE);
1373 return false;
1374 }
1375
1376 if (is_gimple_assign (gs: def))
1377 {
1378 gcc_assert (!gimple_clobber_p (def));
1379 if (gimple_assign_copy_p (def)
1380 && TREE_CODE (gimple_assign_rhs1 (def)) == SSA_NAME)
1381 {
1382 tree d = *m_dead_ssa_debug_equiv.get (k: gimple_assign_rhs1 (gs: def));
1383 gcc_assert (d);
1384 m_dead_ssa_debug_equiv.put (k: dead_ssa, v: d);
1385 return true;
1386 }
1387
1388 tree val
1389 = unshare_expr_without_location (gimple_assign_rhs_to_tree (def));
1390 remap_with_debug_expressions (t: &val);
1391
1392 tree vexpr = build_debug_expr_decl (TREE_TYPE (val));
1393 m_dead_stmt_debug_equiv.put (k: def, v: val);
1394 m_dead_ssa_debug_equiv.put (k: dead_ssa, v: vexpr);
1395 return true;
1396 }
1397 else
1398 gcc_unreachable ();
1399}
1400
/* Common initialization performed by all ipa_param_body_adjustments
   constructors.  OLD_FNDECL is the declaration we take original arguments
   from (it may be the same as M_FNDECL).  VARS, if non-NULL, is a pointer to
   a chained list of new local variables.  TREE_MAP is the IPA-CP produced
   mapping of trees to constants.

   The function is rather long but it really only initializes all data members
   of the class.  It creates new param DECLs, finds their new types, and
   registers the replacements required by IPA-SRA splits and by aggregate
   constants discovered by IPA-CP.  */
1409
1410void
1411ipa_param_body_adjustments::common_initialization (tree old_fndecl,
1412 tree *vars,
1413 vec<ipa_replace_map *,
1414 va_gc> *tree_map)
1415{
1416 push_function_arg_decls (args: &m_oparms, fndecl: old_fndecl);
1417 auto_vec<tree,16> otypes;
1418 if (TYPE_ARG_TYPES (TREE_TYPE (old_fndecl)) != NULL_TREE)
1419 push_function_arg_types (types: &otypes, TREE_TYPE (old_fndecl));
1420 else
1421 {
1422 auto_vec<tree,16> oparms;
1423 push_function_arg_decls (args: &oparms, fndecl: old_fndecl);
1424 unsigned ocount = oparms.length ();
1425 otypes.reserve_exact (nelems: ocount);
1426 for (unsigned i = 0; i < ocount; i++)
1427 otypes.quick_push (TREE_TYPE (oparms[i]));
1428 }
1429 fill_vector_of_new_param_types (new_types: &m_new_types, otypes: &otypes, adj_params: m_adj_params, use_prev_indices: true);
1430
1431 auto_vec<bool, 16> kept;
1432 kept.reserve_exact (nelems: m_oparms.length ());
1433 kept.quick_grow_cleared (len: m_oparms.length ());
1434 auto_vec<bool, 16> split;
1435 split.reserve_exact (nelems: m_oparms.length ());
1436 split.quick_grow_cleared (len: m_oparms.length ());
1437
1438 unsigned adj_len = vec_safe_length (v: m_adj_params);
1439 m_method2func = ((TREE_CODE (TREE_TYPE (m_fndecl)) == METHOD_TYPE)
1440 && (adj_len == 0
1441 || (*m_adj_params)[0].op != IPA_PARAM_OP_COPY
1442 || (*m_adj_params)[0].base_index != 0));
1443
  /* The main job of this function is to go over the vector of adjusted
     parameters and create declarations or find corresponding old ones and
     push them to m_new_decls.  For IPA-SRA replacements it also registers the
     corresponding entries in m_replacements.  */
1448
1449 m_new_decls.reserve_exact (nelems: adj_len);
1450 for (unsigned i = 0; i < adj_len ; i++)
1451 {
1452 ipa_adjusted_param *apm = &(*m_adj_params)[i];
1453 unsigned prev_index = apm->prev_clone_index;
1454 tree new_parm;
1455 if (apm->op == IPA_PARAM_OP_COPY
1456 || apm->prev_clone_adjustment)
1457 {
1458 kept[prev_index] = true;
1459 new_parm = carry_over_param (t: m_oparms[prev_index]);
1460 m_new_decls.quick_push (obj: new_parm);
1461 }
1462 else if (apm->op == IPA_PARAM_OP_NEW
1463 || apm->op == IPA_PARAM_OP_SPLIT)
1464 {
1465 tree new_type = m_new_types[i];
1466 gcc_checking_assert (new_type);
1467 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
1468 new_type);
1469 const char *prefix = ipa_param_prefixes[apm->param_prefix_index];
1470 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
1471 DECL_ARTIFICIAL (new_parm) = 1;
1472 DECL_ARG_TYPE (new_parm) = new_type;
1473 DECL_CONTEXT (new_parm) = m_fndecl;
1474 TREE_USED (new_parm) = 1;
1475 DECL_IGNORED_P (new_parm) = 1;
1476 layout_decl (new_parm, 0);
1477 m_new_decls.quick_push (obj: new_parm);
1478
1479 if (apm->op == IPA_PARAM_OP_SPLIT)
1480 {
1481 split[prev_index] = true;
1482 register_replacement (apm, replacement: new_parm);
1483 }
1484 }
1485 else
1486 gcc_unreachable ();
1487 }
1488
1489 auto_vec <int, 16> index_mapping;
1490 bool need_remap = false;
1491 if (m_id)
1492 {
1493 clone_info *cinfo = clone_info::get (node: m_id->src_node);
1494 if (cinfo && cinfo->param_adjustments)
1495 {
1496 cinfo->param_adjustments->get_updated_indices (new_indices: &index_mapping);
1497 need_remap = true;
1498 }
1499
1500 if (ipcp_transformation *ipcp_ts
1501 = ipcp_get_transformation_summary (node: m_id->src_node))
1502 {
1503 for (const ipa_argagg_value &av : ipcp_ts->m_agg_values)
1504 {
1505 int parm_num = av.index;
1506
1507 if (need_remap)
1508 {
1509 /* FIXME: We cannot handle the situation when IPA-CP
1510 identified that a parameter is a pointer to a global
1511 variable and at the same time the variable has some known
1512 constant contents (PR 107640). The best place to make
1513 sure we don't drop such constants on the floor probably is
1514 not here, but we have to make sure that it does not
1515 confuse the remapping. */
1516 if (parm_num >= (int) index_mapping.length ())
1517 continue;
1518 parm_num = index_mapping[parm_num];
1519 if (parm_num < 0)
1520 continue;
1521 }
1522
1523 if (!kept[parm_num])
1524 {
1525 /* IPA-CP has detected an aggregate constant in a parameter
1526 that will not be kept, which means that IPA-SRA would have
1527 split it if there wasn't a constant. Because we are about
1528 to remove the original, this is the last chance where we
1529 can substitute the uses with a constant (for values passed
1530 by reference) or do the split but initialize the
1531 replacement with a constant (for split aggregates passed
1532 by value). */
1533
1534 if (split[parm_num])
1535 {
1536 /* We must be careful not to add a duplicate
1537 replacement. */
1538 sort_replacements ();
1539 ipa_param_body_replacement *pbr
1540 = lookup_replacement_1 (base: m_oparms[parm_num],
1541 unit_offset: av.unit_offset);
1542 if (pbr)
1543 {
1544 /* Otherwise IPA-SRA should have bailed out. */
1545 gcc_assert (AGGREGATE_TYPE_P (TREE_TYPE (pbr->repl)));
1546 continue;
1547 }
1548 }
1549
1550 tree repl;
1551 if (av.by_ref)
1552 repl = av.value;
1553 else
1554 {
1555 repl = create_tmp_var (TREE_TYPE (av.value),
1556 "removed_ipa_cp");
1557 gimple *init_stmt = gimple_build_assign (repl, av.value);
1558 m_split_agg_csts_inits.safe_push (obj: init_stmt);
1559 }
1560 register_replacement (base: m_oparms[parm_num], unit_offset: av.unit_offset,
1561 replacement: repl);
1562 split[parm_num] = true;
1563 }
1564 }
1565 }
1566 }
1567 sort_replacements ();
1568
1569 if (tree_map)
1570 {
1571 /* Do not treat parameters which were replaced with a constant as
1572 completely vanished. */
1573 for (unsigned i = 0; i < tree_map->length (); i++)
1574 {
1575 int parm_num = (*tree_map)[i]->parm_num;
1576 gcc_assert (parm_num >= 0);
1577 if (need_remap)
1578 parm_num = index_mapping[parm_num];
1579 kept[parm_num] = true;
1580 }
1581 }
1582
  /* As part of body modifications, we will also have to replace remaining
     uses of removed PARM_DECLs (which do not however use the initial value)
     with their VAR_DECL copies.

     We do this differently with and without m_id.  With m_id, we rely on its
     mapping and create a replacement straight away.  Without it, we have our
     own mechanism for which we have to populate the m_removed_decls vector.
     Just don't mix them, that is why you should not call
     replace_removed_params_ssa_names or perform_cfun_body_modifications when
     you construct with ID not equal to NULL.  */
1593
1594 auto_vec<tree, 8> ssas_to_process_debug;
1595 unsigned op_len = m_oparms.length ();
1596 for (unsigned i = 0; i < op_len; i++)
1597 if (!kept[i])
1598 {
1599 if (m_id)
1600 {
1601 gcc_assert (!m_id->decl_map->get (m_oparms[i]));
1602 tree var = copy_decl_to_var (m_oparms[i], m_id);
1603 insert_decl_map (m_id, m_oparms[i], var);
1604 /* Declare this new variable. */
1605 DECL_CHAIN (var) = *vars;
1606 *vars = var;
1607
1608 /* If this is not a split but a real removal, init hash sets
1609 that will guide what not to copy to the new body. */
1610 if (!split[i])
1611 mark_dead_statements (dead_param: m_oparms[i], debugstack: &ssas_to_process_debug);
1612 else
1613 mark_clobbers_dead (param: m_oparms[i]);
1614 if (MAY_HAVE_DEBUG_STMTS
1615 && is_gimple_reg (m_oparms[i]))
1616 m_reset_debug_decls.safe_push (obj: m_oparms[i]);
1617 }
1618 else
1619 {
1620 m_removed_decls.safe_push (obj: m_oparms[i]);
1621 m_removed_map.put (k: m_oparms[i], v: m_removed_decls.length () - 1);
1622 if (MAY_HAVE_DEBUG_STMTS
1623 && !kept[i]
1624 && is_gimple_reg (m_oparms[i]))
1625 m_reset_debug_decls.safe_push (obj: m_oparms[i]);
1626 }
1627 }
1628
1629 while (!ssas_to_process_debug.is_empty ())
1630 prepare_debug_expressions (dead_ssa: ssas_to_process_debug.pop ());
1631}
1632
/* Constructor of ipa_param_body_adjustments from a simple list of
   modifications to parameters listed in ADJ_PARAMS which will prepare ground
   for modification of parameters of FNDECL.  The return value of the function
   will not be removed and the object will assume it does not run as a part of
   tree_function_versioning.  */
1638
1639ipa_param_body_adjustments
1640::ipa_param_body_adjustments (vec<ipa_adjusted_param, va_gc> *adj_params,
1641 tree fndecl)
1642 : m_adj_params (adj_params), m_adjustments (NULL), m_reset_debug_decls (),
1643 m_dead_stmts (), m_dead_ssas (), m_dead_ssa_debug_equiv (),
1644 m_dead_stmt_debug_equiv (), m_fndecl (fndecl), m_id (NULL), m_oparms (),
1645 m_new_decls (), m_new_types (), m_replacements (),
1646 m_split_agg_csts_inits (), m_removed_decls (), m_removed_map (),
1647 m_method2func (false), m_sorted_replacements_p (true)
1648{
1649 common_initialization (old_fndecl: fndecl, NULL, NULL);
1650}
1651
/* Constructor of ipa_param_body_adjustments from ipa_param_adjustments in
   ADJUSTMENTS which will prepare ground for modification of parameters of
   FNDECL.  The object will assume it does not run as a part of
   tree_function_versioning.  */
1656
1657ipa_param_body_adjustments
1658::ipa_param_body_adjustments (ipa_param_adjustments *adjustments,
1659 tree fndecl)
1660 : m_adj_params (adjustments->m_adj_params), m_adjustments (adjustments),
1661 m_reset_debug_decls (), m_dead_stmts (), m_dead_ssas (),
1662 m_dead_ssa_debug_equiv (), m_dead_stmt_debug_equiv (), m_fndecl (fndecl),
1663 m_id (NULL), m_oparms (), m_new_decls (), m_new_types (), m_replacements (),
1664 m_split_agg_csts_inits (), m_removed_decls (), m_removed_map (),
1665 m_method2func (false), m_sorted_replacements_p (true)
1666{
1667 common_initialization (old_fndecl: fndecl, NULL, NULL);
1668}
1669
/* Constructor of ipa_param_body_adjustments which sets it up as a part of
   running tree_function_versioning.  Planned modifications to the function
   are in ADJUSTMENTS.  FNDECL designates the new function clone which is
   being modified.  OLD_FNDECL is the function of which FNDECL is a clone (and
   which at the time of invocation still shares DECL_ARGUMENTS with it).  ID
   is the copy_body_data structure driving the whole body copying process.
   VARS is a pointer to the head of the list of new local variables, TREE_MAP
   is the map that drives tree substitution in the cloning process.  */
1678
1679ipa_param_body_adjustments
1680::ipa_param_body_adjustments (ipa_param_adjustments *adjustments,
1681 tree fndecl, tree old_fndecl,
1682 copy_body_data *id, tree *vars,
1683 vec<ipa_replace_map *, va_gc> *tree_map)
1684 : m_adj_params (adjustments->m_adj_params), m_adjustments (adjustments),
1685 m_reset_debug_decls (), m_dead_stmts (), m_dead_ssas (),
1686 m_dead_ssa_debug_equiv (), m_dead_stmt_debug_equiv (), m_fndecl (fndecl),
1687 m_id (id), m_oparms (), m_new_decls (), m_new_types (), m_replacements (),
1688 m_split_agg_csts_inits (), m_removed_decls (), m_removed_map (),
1689 m_method2func (false), m_sorted_replacements_p (true)
1690{
1691 common_initialization (old_fndecl, vars, tree_map);
1692}
1693
1694/* Chain new param decls up and return them. */
1695
1696tree
1697ipa_param_body_adjustments::get_new_param_chain ()
1698{
1699 tree result;
1700 tree *link = &result;
1701
1702 unsigned len = vec_safe_length (v: m_adj_params);
1703 for (unsigned i = 0; i < len; i++)
1704 {
1705 tree new_decl = m_new_decls[i];
1706 *link = new_decl;
1707 link = &DECL_CHAIN (new_decl);
1708 }
1709 *link = NULL_TREE;
1710 return result;
1711}

/* Modify the parameters of function FNDECL and its type according to the
   plan in ADJUSTMENTS.  This function needs to be called when the decl has
   not already been processed with ipa_param_adjustments::adjust_decl,
   otherwise just setting DECL_ARGUMENTS to whatever get_new_param_chain
   returns is enough.  */

void
ipa_param_body_adjustments::modify_formal_parameters ()
{
  tree orig_type = TREE_TYPE (m_fndecl);
  DECL_ARGUMENTS (m_fndecl) = get_new_param_chain ();

  /* When the signature changes, we need to clear builtin info.  */
  if (fndecl_built_in_p (m_fndecl))
    set_decl_built_in_function (m_fndecl, NOT_BUILT_IN, 0);

  bool modified = false;
  size_t index = 0;
  if (m_adj_params)
    for (tree t = TYPE_ARG_TYPES (orig_type);
         t && !modified;
         t = TREE_CHAIN (t), index++)
      if (index >= m_adj_params->length ()
          || (*m_adj_params)[index].op != IPA_PARAM_OP_COPY
          || (*m_adj_params)[index].base_index != index)
        modified = true;

  /* At this point, removing the return value is only implemented when going
     through tree_function_versioning, not when modifying the function body
     directly.  */
  gcc_assert (!m_adjustments || !m_adjustments->m_skip_return);
  tree new_type = build_adjusted_function_type (orig_type, &m_new_types,
                                                m_method2func, false,
                                                modified);

  TREE_TYPE (m_fndecl) = new_type;
  DECL_VIRTUAL_P (m_fndecl) = 0;
  DECL_LANG_SPECIFIC (m_fndecl) = NULL;
  if (m_method2func)
    DECL_VINDEX (m_fndecl) = NULL_TREE;
}

/* Given BASE and UNIT_OFFSET, find the corresponding record among replacement
   structures.  */

ipa_param_body_replacement *
ipa_param_body_adjustments::lookup_replacement_1 (tree base,
                                                  unsigned unit_offset)
{
  gcc_assert (m_sorted_replacements_p);
  ipa_param_body_replacement key;
  key.base = base;
  key.unit_offset = unit_offset;
  ipa_param_body_replacement *res
    = std::lower_bound (m_replacements.begin (), m_replacements.end (), key,
                        [] (const ipa_param_body_replacement &elt,
                            const ipa_param_body_replacement &val)
                        {
                          return (compare_param_body_replacement (&elt, &val)
                                  < 0);
                        });

  if (res == m_replacements.end ()
      || res->base != base
      || res->unit_offset != unit_offset)
    return NULL;
  return res;
}

/* Find the first replacement for BASE among m_replacements and return a
   pointer to it, or NULL if there is none.  */

ipa_param_body_replacement *
ipa_param_body_adjustments::lookup_first_base_replacement (tree base)
{
  gcc_assert (m_sorted_replacements_p);
  ipa_param_body_replacement key;
  key.base = base;
  ipa_param_body_replacement *res
    = std::lower_bound (m_replacements.begin (), m_replacements.end (), key,
                        [] (const ipa_param_body_replacement &elt,
                            const ipa_param_body_replacement &val)
                        {
                          if (DECL_UID (elt.base) < DECL_UID (val.base))
                            return true;
                          return false;
                        });

  if (res == m_replacements.end ()
      || res->base != base)
    return NULL;
  return res;
}

/* Given BASE and UNIT_OFFSET, find the corresponding replacement expression
   and return it, assuming it is known that it does not hold a value by
   reference or in reverse storage order.  */

tree
ipa_param_body_adjustments::lookup_replacement (tree base, unsigned unit_offset)
{
  ipa_param_body_replacement *pbr = lookup_replacement_1 (base, unit_offset);
  if (!pbr)
    return NULL;
  return pbr->repl;
}

/* If T is an SSA_NAME, return NULL if it is not a default def or
   return its base variable if it is.  If IGNORE_DEFAULT_DEF is true,
   the base variable is always returned, regardless of whether it is a
   default def.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t, bool ignore_default_def)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
        return SSA_NAME_VAR (t);
      else
        return NULL_TREE;
    }
  return t;
}

/* Given an expression, return the structure describing how it should be
   replaced if it accesses a part of a split parameter, or NULL otherwise.

   Do not free the result, it will be deallocated when the object is
   destroyed.

   If IGNORE_DEFAULT_DEF is cleared, consider only SSA_NAMEs of PARM_DECLs
   which are default definitions; if it is set, consider all SSA_NAMEs of
   PARM_DECLs.  */

ipa_param_body_replacement *
ipa_param_body_adjustments::get_expr_replacement (tree expr,
                                                  bool ignore_default_def)
{
  tree base;
  unsigned unit_offset;

  if (!isra_get_ref_base_and_offset (expr, &base, &unit_offset))
    return NULL;

  base = get_ssa_base_param (base, ignore_default_def);
  if (!base || TREE_CODE (base) != PARM_DECL)
    return NULL;
  return lookup_replacement_1 (base, unit_offset);
}

/* Given OLD_DECL, which is a PARM_DECL of a parameter that is being removed
   (which includes it being split or replaced), return a new variable that
   should be used for any SSA names that will remain in the function that
   previously belonged to OLD_DECL.  */

tree
ipa_param_body_adjustments::get_replacement_ssa_base (tree old_decl)
{
  unsigned *idx = m_removed_map.get (old_decl);
  if (!idx)
    return NULL;

  tree repl;
  if (TREE_CODE (m_removed_decls[*idx]) == PARM_DECL)
    {
      gcc_assert (m_removed_decls[*idx] == old_decl);
      repl = copy_var_decl (old_decl, DECL_NAME (old_decl),
                            TREE_TYPE (old_decl));
      m_removed_decls[*idx] = repl;
    }
  else
    repl = m_removed_decls[*idx];
  return repl;
}

/* If OLD_NAME, which is being defined by statement STMT, is an SSA_NAME of a
   parameter which is to be removed because its value is not used, create a
   new SSA_NAME relating to a replacement VAR_DECL, replace all uses of the
   original with it and return it.  If there is no need to re-map, return
   NULL.  */

tree
ipa_param_body_adjustments::replace_removed_params_ssa_names (tree old_name,
                                                              gimple *stmt)
{
  gcc_assert (!m_id);
  if (TREE_CODE (old_name) != SSA_NAME)
    return NULL;

  tree decl = SSA_NAME_VAR (old_name);
  if (decl == NULL_TREE
      || TREE_CODE (decl) != PARM_DECL)
    return NULL;

  tree repl = get_replacement_ssa_base (decl);
  if (!repl)
    return NULL;

  tree new_name = make_ssa_name (repl, stmt);
  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_name)
    = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (old_name);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "replacing an SSA name of a removed param ");
      print_generic_expr (dump_file, old_name);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, new_name);
      fprintf (dump_file, "\n");
    }

  replace_uses_by (old_name, new_name);
  return new_name;
}

/* If the expression *EXPR_P should be replaced, do so.  CONVERT specifies
   whether the function should care about type incompatibility of the current
   and new expressions.  If it is false, the function will leave
   incompatibility issues to the caller - note that when the function
   encounters a BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR, it will modify
   their bases instead of the expressions themselves and then also perform
   any necessary conversions.  */

bool
ipa_param_body_adjustments::modify_expression (tree *expr_p, bool convert,
                                               gimple_seq *extra_stmts)
{
  tree expr = *expr_p;

  if (m_replacements.is_empty ())
    return false;
  if (TREE_CODE (expr) == BIT_FIELD_REF
      || TREE_CODE (expr) == IMAGPART_EXPR
      || TREE_CODE (expr) == REALPART_EXPR)
    {
      /* For a BIT_FIELD_REF do not bother to VIEW_CONVERT the base,
         instead reference the replacement directly.  */
      convert = TREE_CODE (expr) != BIT_FIELD_REF;
      expr_p = &TREE_OPERAND (expr, 0);
      expr = *expr_p;
    }

  ipa_param_body_replacement *pbr = get_expr_replacement (expr, false);
  if (!pbr)
    return false;

  tree repl = pbr->repl;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "About to replace expr ");
      print_generic_expr (dump_file, expr);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, repl);
      fprintf (dump_file, "\n");
    }

  if (convert && !useless_type_conversion_p (TREE_TYPE (expr),
                                             TREE_TYPE (repl)))
    {
      gcc_checking_assert (tree_to_shwi (TYPE_SIZE (TREE_TYPE (expr)))
                           == tree_to_shwi (TYPE_SIZE (TREE_TYPE (repl))));
      tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expr), repl);
      if (is_gimple_reg (repl)
          && is_gimple_reg_type (TREE_TYPE (expr)))
        {
          gcc_assert (extra_stmts);
          vce = force_gimple_operand (vce, extra_stmts, true, NULL_TREE);
        }
      *expr_p = vce;
    }
  else
    *expr_p = repl;
  return true;
}

/* If the assignment statement STMT contains any expressions that need to be
   replaced with a different one as noted by ADJUSTMENTS, do so.  Handle any
   potential type incompatibilities.  If any conversion statements have to be
   pre-pended to STMT, they will be added to EXTRA_STMTS.  Return true iff the
   statement was modified.  */

bool
ipa_param_body_adjustments::modify_assignment (gimple *stmt,
                                               gimple_seq *extra_stmts)
{
  tree *lhs_p, *rhs_p;
  bool any;

  if (m_replacements.is_empty () || !gimple_assign_single_p (stmt))
    return false;

  rhs_p = gimple_assign_rhs1_ptr (stmt);
  lhs_p = gimple_assign_lhs_ptr (stmt);

  any = modify_expression (lhs_p, false);
  any |= modify_expression (rhs_p, false, extra_stmts);
  if (any
      && !useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
    {
      if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
        {
          /* V_C_Es of constructors can cause trouble (PR 42714).  */
          if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
            *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
          else
            *rhs_p = build_constructor (TREE_TYPE (*lhs_p), NULL);
        }
      else
        {
          gcc_checking_assert (tree_to_shwi (TYPE_SIZE (TREE_TYPE (*lhs_p)))
                               == tree_to_shwi (TYPE_SIZE (TREE_TYPE (*rhs_p))));
          tree new_rhs = fold_build1_loc (gimple_location (stmt),
                                          VIEW_CONVERT_EXPR,
                                          TREE_TYPE (*lhs_p), *rhs_p);
          tree tmp = force_gimple_operand (new_rhs, extra_stmts, true,
                                           NULL_TREE);
          gimple_assign_set_rhs1 (stmt, tmp);
        }
      return true;
    }

  return any;
}

/* Record information about what modifications to call arguments have already
   been done by clone materialization into a summary describing CS.  The
   information is stored in NEW_INDEX_MAP, NEW_PT_MAP and
   NEW_ALWAYS_COPY_DELTA and corresponds to the equivalent fields in
   ipa_edge_modification_info.  Return the edge summary.  */

static ipa_edge_modification_info *
record_argument_state_1 (cgraph_edge *cs, const vec<int> &new_index_map,
                         const vec<pass_through_split_map> &new_pt_map,
                         int new_always_copy_delta)
{
  ipa_edge_modification_info *sum = ipa_edge_modifications->get_create (cs);

  unsigned len = sum->pass_through_map.length ();
  for (unsigned i = 0; i < len; i++)
    {
      unsigned oldnew = sum->pass_through_map[i].new_index;
      sum->pass_through_map[i].new_index = new_index_map[oldnew];
    }

  len = sum->index_map.length ();
  if (len > 0)
    {
      unsigned nptlen = new_pt_map.length ();
      for (unsigned j = 0; j < nptlen; j++)
        {
          int inverse = -1;
          for (unsigned i = 0; i < len; i++)
            if ((unsigned) sum->index_map[i] == new_pt_map[j].base_index)
              {
                inverse = i;
                break;
              }
          gcc_assert (inverse >= 0);
          pass_through_split_map ptm_item;

          ptm_item.base_index = inverse;
          ptm_item.unit_offset = new_pt_map[j].unit_offset;
          ptm_item.new_index = new_pt_map[j].new_index;
          sum->pass_through_map.safe_push (ptm_item);
        }

      for (unsigned i = 0; i < len; i++)
        {
          int idx = sum->index_map[i];
          if (idx < 0)
            continue;
          sum->index_map[i] = new_index_map[idx];
        }
    }
  else
    {
      sum->pass_through_map.safe_splice (new_pt_map);
      sum->index_map.safe_splice (new_index_map);
    }
  sum->always_copy_delta += new_always_copy_delta;
  return sum;
}

/* Record information about what modifications to call arguments have already
   been done by clone materialization into a summary of an edge describing
   the call in this clone and all its clones.  NEW_INDEX_MAP, NEW_PT_MAP and
   NEW_ALWAYS_COPY_DELTA have the same meaning as in record_argument_state_1.

   In order to associate the info with the right edge summaries, we need the
   address of the ORIG_STMT in the function from which we are cloning
   (because the edges have not yet been re-assigned to the new statement that
   has just been created) and ID, the structure governing function body
   copying.  */

static void
record_argument_state (copy_body_data *id, gimple *orig_stmt,
                       const vec<int> &new_index_map,
                       const vec<pass_through_split_map> &new_pt_map,
                       int new_always_copy_delta)
{
  if (!ipa_edge_modifications)
    ipa_edge_modifications = new ipa_edge_modification_sum (symtab);

  struct cgraph_node *this_node = id->dst_node;
  ipa_edge_modification_info *first_sum = NULL;
  cgraph_edge *cs = this_node->get_edge (orig_stmt);
  if (cs)
    first_sum = record_argument_state_1 (cs, new_index_map, new_pt_map,
                                         new_always_copy_delta);
  else
    gcc_assert (this_node->clones);

  if (!this_node->clones)
    return;
  for (cgraph_node *subclone = this_node->clones; subclone != this_node;)
    {
      cs = subclone->get_edge (orig_stmt);
      if (cs)
        {
          if (!first_sum)
            first_sum = record_argument_state_1 (cs, new_index_map,
                                                 new_pt_map,
                                                 new_always_copy_delta);
          else
            {
              ipa_edge_modification_info *s2
                = ipa_edge_modifications->get_create (cs);
              s2->index_map.truncate (0);
              s2->index_map.safe_splice (first_sum->index_map);
              s2->pass_through_map.truncate (0);
              s2->pass_through_map.safe_splice (first_sum->pass_through_map);
              s2->always_copy_delta = first_sum->always_copy_delta;
            }
        }
      else
        gcc_assert (subclone->clones);

      if (subclone->clones)
        subclone = subclone->clones;
      else if (subclone->next_sibling_clone)
        subclone = subclone->next_sibling_clone;
      else
        {
          while (subclone != this_node && !subclone->next_sibling_clone)
            subclone = subclone->clone_of;
          if (subclone != this_node)
            subclone = subclone->next_sibling_clone;
        }
    }
}

/* If the call statement pointed at by STMT_P contains any expressions that
   need to be replaced with a different one as noted by ADJUSTMENTS, do so.
   If the statement needs to be rebuilt, do so.  Return true if any
   modifications have been performed.  ORIG_STMT, if not NULL, is the
   original statement in the function that is being cloned from, which at
   this point can be used to look up call_graph edges.

   If the method is invoked as a part of IPA clone materialization and if any
   parameter split is pass-through, i.e. it applies to the function that is
   being modified and also to the callee of the statement, replace the
   parameter passed to the old callee with all of the replacements a callee
   might possibly want and record the performed argument modifications in
   ipa_edge_modifications.  Likewise if any argument has already been left
   out because it is not necessary.  */

bool
ipa_param_body_adjustments::modify_call_stmt (gcall **stmt_p,
                                              gimple *orig_stmt)
{
  auto_vec <unsigned, 4> pass_through_args;
  auto_vec <unsigned, 4> pass_through_pbr_indices;
  auto_vec <HOST_WIDE_INT, 4> pass_through_offsets;
  gcall *stmt = *stmt_p;
  unsigned nargs = gimple_call_num_args (stmt);
  bool recreate = false;
  gcc_assert (m_sorted_replacements_p);

  for (unsigned i = 0; i < gimple_call_num_args (stmt); i++)
    {
      tree t = gimple_call_arg (stmt, i);
      gcc_assert (TREE_CODE (t) != BIT_FIELD_REF
                  && TREE_CODE (t) != IMAGPART_EXPR
                  && TREE_CODE (t) != REALPART_EXPR);

      if (TREE_CODE (t) == SSA_NAME
          && m_dead_ssas.contains (t))
        recreate = true;

      if (m_replacements.is_empty ())
        continue;

      tree base;
      unsigned agg_arg_offset;
      if (!isra_get_ref_base_and_offset (t, &base, &agg_arg_offset))
        continue;

      bool by_ref = false;
      if (TREE_CODE (base) == SSA_NAME)
        {
          if (!SSA_NAME_IS_DEFAULT_DEF (base))
            continue;
          base = SSA_NAME_VAR (base);
          gcc_checking_assert (base);
          by_ref = true;
        }
      if (TREE_CODE (base) != PARM_DECL)
        continue;

      ipa_param_body_replacement *first_rep
        = lookup_first_base_replacement (base);
      if (!first_rep)
        continue;
      unsigned first_rep_index = first_rep - m_replacements.begin ();

      /* We still have to distinguish between an end-use that we have to
         transform now and a pass-through, which happens in the following
         two cases.  */

      /* TODO: After we adjust ptr_parm_has_nonarg_uses to also consider
         &MEM_REF[ssa_name + offset], we will also have to detect that case
         here.  */

      if (TREE_CODE (t) == SSA_NAME
          && SSA_NAME_IS_DEFAULT_DEF (t)
          && SSA_NAME_VAR (t)
          && TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL)
        {
          /* This must be a by_reference pass-through.  */
          recreate = true;
          gcc_assert (POINTER_TYPE_P (TREE_TYPE (t)));
          pass_through_args.safe_push (i);
          pass_through_pbr_indices.safe_push (first_rep_index);
          pass_through_offsets.safe_push (agg_arg_offset);
        }
      else if (!by_ref && AGGREGATE_TYPE_P (TREE_TYPE (t)))
        {
          /* Currently IPA-SRA guarantees the aggregate access type
             exactly matches in this case.  So if it does not match, it is
             a pass-through argument that will be sorted out at edge
             redirection time.  */
          ipa_param_body_replacement *pbr
            = lookup_replacement_1 (base, agg_arg_offset);

          if (!pbr
              || (TYPE_MAIN_VARIANT (TREE_TYPE (t))
                  != TYPE_MAIN_VARIANT (TREE_TYPE (pbr->repl))))
            {
              recreate = true;
              pass_through_args.safe_push (i);
              pass_through_pbr_indices.safe_push (first_rep_index);
              pass_through_offsets.safe_push (agg_arg_offset);
            }
        }
    }

  if (!recreate)
    {
      /* No need to rebuild the statement, let's just modify arguments
         and the LHS if/as appropriate.  */
      bool modified = false;
      for (unsigned i = 0; i < nargs; i++)
        {
          tree *t = gimple_call_arg_ptr (stmt, i);
          modified |= modify_expression (t, true);
        }
      if (gimple_call_lhs (stmt))
        {
          tree *t = gimple_call_lhs_ptr (stmt);
          modified |= modify_expression (t, false);
        }
      return modified;
    }

  auto_vec<int, 16> index_map;
  auto_vec<pass_through_split_map, 4> pass_through_map;
  auto_vec<tree, 16> vargs;
  int always_copy_delta = 0;
  unsigned pt_idx = 0;
  int new_arg_idx = 0;
  for (unsigned i = 0; i < nargs; i++)
    {
      if (pt_idx < pass_through_args.length ()
          && i == pass_through_args[pt_idx])
        {
          unsigned j = pass_through_pbr_indices[pt_idx];
          unsigned agg_arg_offset = pass_through_offsets[pt_idx];
          pt_idx++;
          always_copy_delta--;
          tree base = m_replacements[j].base;

          /* In order to be put into SSA form, we have to push all
             replacements pertaining to this parameter as parameters to the
             call statement.  Edge redirection will need to use the edge
             summary to weed out the unnecessary ones.  */
          unsigned repl_list_len = m_replacements.length ();
          for (; j < repl_list_len; j++)
            {
              if (m_replacements[j].base != base)
                break;
              if (m_replacements[j].unit_offset < agg_arg_offset)
                continue;
              pass_through_split_map pt_map;
              pt_map.base_index = i;
              pt_map.unit_offset
                = m_replacements[j].unit_offset - agg_arg_offset;
              pt_map.new_index = new_arg_idx;
              pass_through_map.safe_push (pt_map);
              vargs.safe_push (m_replacements[j].repl);
              new_arg_idx++;
              always_copy_delta++;
            }
          index_map.safe_push (-1);
        }
      else
        {
          tree t = gimple_call_arg (stmt, i);
          if (TREE_CODE (t) == SSA_NAME
              && m_dead_ssas.contains (t))
            {
              always_copy_delta--;
              index_map.safe_push (-1);
            }
          else
            {
              modify_expression (&t, true);
              vargs.safe_push (t);
              index_map.safe_push (new_arg_idx);
              new_arg_idx++;
            }
        }
    }

  gcall *new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);
  if (gimple_has_location (stmt))
    gimple_set_location (new_stmt, gimple_location (stmt));
  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
  gimple_call_copy_flags (new_stmt, stmt);
  if (tree lhs = gimple_call_lhs (stmt))
    {
      modify_expression (&lhs, false);
      /* Avoid adjusting SSA_NAME_DEF_STMT of an SSA lhs, SSA names
         have not yet been remapped.  */
      *gimple_call_lhs_ptr (new_stmt) = lhs;
    }
  *stmt_p = new_stmt;

  if (orig_stmt)
    record_argument_state (m_id, orig_stmt, index_map, pass_through_map,
                           always_copy_delta);
  return true;
}

/* If the statement STMT contains any expressions that need to be replaced
   with a different one as noted by ADJUSTMENTS, do so.  Handle any potential
   type incompatibilities.  If any conversion statements have to be
   pre-pended to STMT, they will be added to EXTRA_STMTS.  Return true iff
   the statement was modified.  */

bool
ipa_param_body_adjustments::modify_gimple_stmt (gimple **stmt,
                                                gimple_seq *extra_stmts,
                                                gimple *orig_stmt)
{
  bool modified = false;
  tree *t;

  switch (gimple_code (*stmt))
    {
    case GIMPLE_RETURN:
      t = gimple_return_retval_ptr (as_a <greturn *> (*stmt));
      if (m_adjustments && m_adjustments->m_skip_return)
        *t = NULL_TREE;
      else if (*t != NULL_TREE)
        modified |= modify_expression (t, true);
      break;

    case GIMPLE_ASSIGN:
      modified |= modify_assignment (*stmt, extra_stmts);
      break;

    case GIMPLE_CALL:
      modified |= modify_call_stmt ((gcall **) stmt, orig_stmt);
      break;

    case GIMPLE_ASM:
      {
        gasm *asm_stmt = as_a <gasm *> (*stmt);
        for (unsigned i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
          {
            t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
            modified |= modify_expression (t, true);
          }
        for (unsigned i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
          {
            t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
            modified |= modify_expression (t, false);
          }
      }
      break;

    default:
      break;
    }
  return modified;
}


/* Traverse the body of the current function and perform the requested
   adjustments on its statements.  Return true iff the CFG has been
   changed.  */

bool
ipa_param_body_adjustments::modify_cfun_body ()
{
  bool cfg_changed = false;
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gphi *phi = as_a <gphi *> (gsi_stmt (gsi));
          tree new_lhs, old_lhs = gimple_phi_result (phi);
          new_lhs = replace_removed_params_ssa_names (old_lhs, phi);
          if (new_lhs)
            {
              gimple_phi_set_result (phi, new_lhs);
              release_ssa_name (old_lhs);
            }
        }

      gsi = gsi_start_bb (bb);
      while (!gsi_end_p (gsi))
        {
          gimple *stmt = gsi_stmt (gsi);
          gimple *stmt_copy = stmt;
          gimple_seq extra_stmts = NULL;
          bool modified = modify_gimple_stmt (&stmt, &extra_stmts, NULL);
          if (stmt != stmt_copy)
            {
              gcc_checking_assert (modified);
              gsi_replace (&gsi, stmt, false);
            }
          if (!gimple_seq_empty_p (extra_stmts))
            gsi_insert_seq_before (&gsi, extra_stmts, GSI_SAME_STMT);

          def_operand_p defp;
          ssa_op_iter iter;
          FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
            {
              tree old_def = DEF_FROM_PTR (defp);
              if (tree new_def = replace_removed_params_ssa_names (old_def,
                                                                   stmt))
                {
                  SET_DEF (defp, new_def);
                  release_ssa_name (old_def);
                  modified = true;
                }
            }

          if (modified)
            {
              update_stmt (stmt);
              if (maybe_clean_eh_stmt (stmt)
                  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
                cfg_changed = true;
            }
          gsi_next (&gsi);
        }
    }

  return cfg_changed;
}

/* Call gimple_debug_bind_reset_value on all debug statements describing
   gimple register parameters that are being removed or replaced.  */

void
ipa_param_body_adjustments::reset_debug_stmts ()
{
  int i, len;
  gimple_stmt_iterator *gsip = NULL, gsi;

  if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
    {
      gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
      gsip = &gsi;
    }
  len = m_reset_debug_decls.length ();
  for (i = 0; i < len; i++)
    {
      imm_use_iterator ui;
      gimple *stmt;
      gdebug *def_temp;
      tree name, vexpr, copy = NULL_TREE;
      use_operand_p use_p;
      tree decl = m_reset_debug_decls[i];

      gcc_checking_assert (is_gimple_reg (decl));
      name = ssa_default_def (cfun, decl);
      vexpr = NULL;
      if (name)
        FOR_EACH_IMM_USE_STMT (stmt, ui, name)
          {
            if (gimple_clobber_p (stmt))
              {
                gimple_stmt_iterator cgsi = gsi_for_stmt (stmt);
                unlink_stmt_vdef (stmt);
                gsi_remove (&cgsi, true);
                release_defs (stmt);
                continue;
              }
            /* All other users must have been removed by function body
               modification.  */
            gcc_assert (is_gimple_debug (stmt));
            if (vexpr == NULL && gsip != NULL)
              {
                vexpr = build_debug_expr_decl (TREE_TYPE (name));
                /* FIXME: Is setting the mode really necessary?  */
                SET_DECL_MODE (vexpr, DECL_MODE (decl));
                def_temp = gimple_build_debug_source_bind (vexpr, decl, NULL);
                gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
              }
            if (vexpr)
              {
                FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
                  SET_USE (use_p, vexpr);
              }
            else
              gimple_debug_bind_reset_value (stmt);
            update_stmt (stmt);
          }
      /* Create a VAR_DECL for debug info purposes.  */
      if (!DECL_IGNORED_P (decl))
        {
          copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                             VAR_DECL, DECL_NAME (decl),
                             TREE_TYPE (decl));
          if (DECL_PT_UID_SET_P (decl))
            SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
          TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
          TREE_READONLY (copy) = TREE_READONLY (decl);
          TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
          DECL_NOT_GIMPLE_REG_P (copy) = DECL_NOT_GIMPLE_REG_P (decl);
          DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
          DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
          DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
          DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
          SET_DECL_RTL (copy, 0);
          TREE_USED (copy) = 1;
          DECL_CONTEXT (copy) = current_function_decl;
          add_local_decl (cfun, copy);
          DECL_CHAIN (copy)
            = BLOCK_VARS (DECL_INITIAL (current_function_decl));
          BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
        }
      if (gsip != NULL && copy && target_for_debug_bind (decl))
        {
          gcc_assert (TREE_CODE (decl) == PARM_DECL);
          if (vexpr)
            def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
          else
            def_temp = gimple_build_debug_source_bind (copy, decl, NULL);
          gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
        }
    }
}

/* Perform all necessary body changes to change the signature, body and debug
   info of the function according to the adjustments passed at construction.
   Return true if the CFG was changed in any way.  This is the main entry
   point for modification of standalone functions that is not part of IPA
   clone materialization.  */

bool
ipa_param_body_adjustments::perform_cfun_body_modifications ()
{
  bool cfg_changed;
  modify_formal_parameters ();
  cfg_changed = modify_cfun_body ();
  reset_debug_stmts ();

  return cfg_changed;
}


/* If there are any initialization statements that need to be emitted into
   the basic block BB right at the start of the new function, do so.  */

void
ipa_param_body_adjustments::append_init_stmts (basic_block bb)
{
  gimple_stmt_iterator si = gsi_last_bb (bb);
  while (!m_split_agg_csts_inits.is_empty ())
    gsi_insert_after (&si, m_split_agg_csts_inits.pop (), GSI_NEW_STMT);
}

/* Deallocate summaries which otherwise stay alive until the end of
   compilation.  */

void
ipa_edge_modifications_finalize ()
{
  if (!ipa_edge_modifications)
    return;
  delete ipa_edge_modifications;
  ipa_edge_modifications = NULL;
}

/* Helper used to sort a vector of SSA_NAMEs.  */

static int
compare_ssa_versions (const void *va, const void *vb)
{
  const_tree const a = *(const_tree const *) va;
  const_tree const b = *(const_tree const *) vb;

  if (SSA_NAME_VERSION (a) < SSA_NAME_VERSION (b))
    return -1;
  if (SSA_NAME_VERSION (a) > SSA_NAME_VERSION (b))
    return 1;
  return 0;
}

/* Call release_ssa_name on all elements in KILLED_SSAS in a defined
   order.  */

void
ipa_release_ssas_in_hash (hash_set <tree> *killed_ssas)
{
  auto_vec<tree, 16> ssas_to_release;
  for (tree sn : *killed_ssas)
    ssas_to_release.safe_push (sn);
  ssas_to_release.qsort (compare_ssa_versions);
  for (tree sn : ssas_to_release)
    release_ssa_name (sn);
}