/* Tree inlining.
   Copyright (C) 2001-2017 Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "gimple-predict.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "cfganal.h"
#include "tree-iterator.h"
#include "intl.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "debug.h"
#include "params.h"
#include "value-prof.h"
#include "cfgloop.h"
#include "builtins.h"
#include "tree-chkp.h"
#include "stringpool.h"
#include "attribs.h"
#include "sreal.h"

/* I'm not real happy about this, but we need to handle gimple and
   non-gimple trees.  */

/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX statements is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated and the result is a new
   function, rather than being inlined into blocks of an existing
   function as with inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined) those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */

/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */


/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;

/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree, tree,
				     basic_block);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
static void insert_init_stmt (copy_body_data *, basic_block, gimple *);

/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, it is used for more than that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  id->decl_map->put (key, value);

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    id->decl_map->put (value, value);
}

/* Insert a tree->tree mapping for ID.  This is only used for
   variables.  */

static void
insert_debug_decl_map (copy_body_data *id, tree key, tree value)
{
  if (!gimple_in_ssa_p (id->src_cfun))
    return;

  if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
    return;

  if (!target_for_debug_bind (key))
    return;

  gcc_assert (TREE_CODE (key) == PARM_DECL);
  gcc_assert (VAR_P (value));

  if (!id->debug_map)
    id->debug_map = new hash_map<tree, tree>;

  id->debug_map->put (key, value);
}

/* If nonzero, we're remapping the contents of inlined debug
   statements.  If negative, an error has occurred, such as a
   reference to a variable that isn't available in the inlined
   context.  */
static int processing_debug_stmt = 0;

/* Construct new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree, var;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = id->decl_map->get (name);
  if (n)
    return unshare_expr (*n);

  if (processing_debug_stmt)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (name)
	  && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
	  && id->entry_bb == NULL
	  && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
	{
	  tree vexpr = make_node (DEBUG_EXPR_DECL);
	  gimple *def_temp;
	  gimple_stmt_iterator gsi;
	  tree val = SSA_NAME_VAR (name);

	  n = id->decl_map->get (val);
	  if (n != NULL)
	    val = *n;
	  if (TREE_CODE (val) != PARM_DECL)
	    {
	      processing_debug_stmt = -1;
	      return name;
	    }
	  def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
	  DECL_ARTIFICIAL (vexpr) = 1;
	  TREE_TYPE (vexpr) = TREE_TYPE (name);
	  SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
	  gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
	  return vexpr;
	}

      processing_debug_stmt = -1;
      return name;
    }

  /* Remap anonymous SSA names or SSA names of anonymous decls.  */
  var = SSA_NAME_VAR (name);
  if (!var
      || (!SSA_NAME_IS_DEFAULT_DEF (name)
	  && VAR_P (var)
	  && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
	  && DECL_ARTIFICIAL (var)
	  && DECL_IGNORED_P (var)
	  && !DECL_NAME (var)))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
      if (!var && SSA_NAME_IDENTIFIER (name))
	SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
	  && id->src_cfun->gimple_df->ipa_pta
	  && POINTER_TYPE_P (TREE_TYPE (name))
	  && (pi = SSA_NAME_PTR_INFO (name))
	  && !pi->pt.anything)
	{
	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
	  new_pi->pt = pi->pt;
	}
      return new_tree;
    }

  /* Do not set DEF_STMT yet as statement is not copied yet.  We do that
     in copy_bb.  */
  new_tree = remap_decl (var, id);

  /* We might've substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing the RESULT_DECL by the variable
     during inlining: this saves us from the need to introduce a PHI node
     when the return value is only partly initialized.  */
  if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
      && (!SSA_NAME_VAR (name)
	  || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
	  || !id->transform_return_to_modify))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (new_tree);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
	  && id->src_cfun->gimple_df->ipa_pta
	  && POINTER_TYPE_P (TREE_TYPE (name))
	  && (pi = SSA_NAME_PTR_INFO (name))
	  && !pi->pt.anything)
	{
	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
	  new_pi->pt = pi->pt;
	}
      if (SSA_NAME_IS_DEFAULT_DEF (name))
	{
	  /* By inlining a function that has an uninitialized variable, we
	     might extend its lifetime (the variable might get reused).
	     This causes an ICE when we end up extending the lifetime of an
	     SSA name across an abnormal edge, and it also increases
	     register pressure.

	     We simply initialize all uninitialized vars to 0, except when
	     we are inlining into the very first BB.  We could avoid this
	     for all BBs that are not inside strongly connected regions of
	     the CFG, but that is expensive to test.  */
	  if (id->entry_bb
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
	      && (!SSA_NAME_VAR (name)
		  || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
	      && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
					     0)->dest
		  || EDGE_COUNT (id->entry_bb->preds) != 1))
	    {
	      gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
	      gimple *init_stmt;
	      tree zero = build_zero_cst (TREE_TYPE (new_tree));

	      init_stmt = gimple_build_assign (new_tree, zero);
	      gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
	      SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
	    }
	  else
	    {
	      SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
	      set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
	    }
	}
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}

/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;

  /* We only remap local variables in the current function.  */

  /* See if we have remapped this declaration.  */

  n = id->decl_map->get (decl);

  if (!n && processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return decl;
    }

  /* When remapping a type within copy_gimple_seq_and_replace_locals, all
     necessary DECLs have already been remapped and we do not want to duplicate
     a decl coming from outside of the sequence we are copying.  */
  if (!n
      && id->prevent_decl_creation_for_types
      && id->remapping_type_depth > 0
      && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
    return decl;

  /* If we didn't already have an equivalent for this declaration, create one
     now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
	 we can reuse this copy.  Do this early because remap_type may
	 need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
	return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
	{
	  DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

	  /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
	     which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
	     is not set on the TYPE_DECL, for example in LTO mode.  */
	  if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
	    {
	      tree x = build_variant_type_copy (TREE_TYPE (t));
	      TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
	      TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
	      DECL_ORIGINAL_TYPE (t) = x;
	    }
	}

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
	{
	  walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
	  if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
	    walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
	}

      return t;
    }

  if (id->do_not_unshare)
    return *n;
  else
    return unshare_expr (*n);
}

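/* Helper for remap_type.  TYPE is known to need remapping (it is
   variably modified); build a remapped copy, register the mapping,
   and recurse into the parts of TYPE that may refer to local
   variables.  */
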
static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy: build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
					      TYPE_MODE (type),
					      TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
	new_tree = build_type_attribute_qual_variant (new_tree,
						      TYPE_ATTRIBUTES (type),
						      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
						TYPE_MODE (type),
						TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
	new_tree = build_type_attribute_qual_variant (new_tree,
						      TYPE_ATTRIBUTES (type),
						      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  /* Copy all types that may contain references to local variables; be sure to
     preserve sharing between the type and its main variant when possible.  */
  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
	{
	  gcc_checking_assert (TYPE_MIN_VALUE (type)
			       == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
	  gcc_checking_assert (TYPE_MAX_VALUE (type)
			       == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));

	  TYPE_MIN_VALUE (new_tree)
	    = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
	  TYPE_MAX_VALUE (new_tree)
	    = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
	}
      else
	{
	  t = TYPE_MIN_VALUE (new_tree);
	  if (t && TREE_CODE (t) != INTEGER_CST)
	    walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

	  t = TYPE_MAX_VALUE (new_tree);
	  if (t && TREE_CODE (t) != INTEGER_CST)
	    walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
	}
      return new_tree;

    case FUNCTION_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
      else
	TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
	  && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
	TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
      else
	walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
      else
	TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);

      if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
	{
	  gcc_checking_assert (TYPE_DOMAIN (type)
			       == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
	  TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
	}
      else
	TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      if (TYPE_MAIN_VARIANT (type) != type
	  && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
	TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
      else
	{
	  tree f, nf = NULL;

	  for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
	    {
	      t = remap_decl (f, id);
	      DECL_CONTEXT (t) = new_tree;
	      DECL_CHAIN (t) = nf;
	      nf = t;
	    }
	  TYPE_FIELDS (new_tree) = nreverse (nf);
	}
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  /* All variants of a type share the same size, so use the already
     remapped data.  */
  if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
    {
      tree s = TYPE_SIZE (type);
      tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
      tree su = TYPE_SIZE_UNIT (type);
      tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
      gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
			    && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
			   || s == mvs);
      gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
			    && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
			   || su == mvsu);
      TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
      TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
    }
  else
    {
      walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
      walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
    }

  return new_tree;
}

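/* Remap TYPE using the mapping information in ID.  Types that are not
   variably modified need no remapping and map to themselves; the rest
   are handled by remap_type_1.  */
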
tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = id->decl_map->get (type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}

/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */

static bool
can_be_nonlocal (tree decl, copy_body_data *id)
{
  /* We cannot duplicate function decls.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return true;

  /* Local static vars must be non-local or we get multiple declaration
     problems.  */
  if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
    return true;

  return false;
}

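/* Remap the chain of declarations DECLS, pushing declarations that are
   only nonlocalized (not remapped) onto *NONLOCALIZED_LIST.  Return the
   new chain of remapped declarations.  */
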
static tree
remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
	     copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
    {
      tree new_var;

      if (can_be_nonlocal (old_var, id))
	{
	  /* We need to add this variable to the local decls as otherwise
	     nothing else will do so.  */
	  if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
	    add_local_decl (cfun, old_var);
	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
	      && !DECL_IGNORED_P (old_var)
	      && nonlocalized_list)
	    vec_safe_push (*nonlocalized_list, old_var);
	  continue;
	}

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
	 TREE_CHAIN.  If we remapped this variable to the return slot, it's
	 already declared somewhere else, so don't declare it here.  */

      if (new_var == id->retvar)
	;
      else if (!new_var)
	{
	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
	      && !DECL_IGNORED_P (old_var)
	      && nonlocalized_list)
	    vec_safe_push (*nonlocalized_list, old_var);
	}
      else
	{
	  gcc_assert (DECL_P (new_var));
	  DECL_CHAIN (new_var) = new_decls;
	  new_decls = new_var;

	  /* Also copy value-expressions.  */
	  if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
	    {
	      tree tem = DECL_VALUE_EXPR (new_var);
	      bool old_regimplify = id->regimplify;
	      id->remapping_type_depth++;
	      walk_tree (&tem, copy_tree_body_r, id, NULL);
	      id->remapping_type_depth--;
	      id->regimplify = old_regimplify;
	      SET_DECL_VALUE_EXPR (new_var, tem);
	    }
	}
    }

  return nreverse (new_decls);
}

/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  BLOCK_NONLOCALIZED_VARS (new_block)
    = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
					&BLOCK_NONLOCALIZED_VARS (new_block),
					id);

  if (id->transform_lang_insert_block)
    id->transform_lang_insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}

/* Copy the whole block tree and root it in id->block.  */
static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new_tree = block;

  if (!block)
    return NULL;

  remap_block (&new_tree, id);
  gcc_assert (new_tree != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    prepend_lexical_block (new_tree, remap_blocks (t, id));
  /* Blocks are in arbitrary order, but make things slightly prettier and do
     not swap order when producing a copy.  */
  BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
  return new_tree;
}

/* Remap the block tree rooted at BLOCK to nothing.  */
static void
remap_blocks_to_null (tree block, copy_body_data *id)
{
  tree t;
  insert_decl_map (id, block, NULL_TREE);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    remap_blocks_to_null (t, id);
}

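/* Replace the STATEMENT_LIST at *TP with a deep copy; statement lists
   are mutated in place by tsi_link_after, so sharing one with the
   original body is not safe.  */
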
static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new_tree;

  new_tree = alloc_stmt_list ();
  ni = tsi_start (new_tree);
  oi = tsi_start (*tp);
  TREE_TYPE (new_tree) = TREE_TYPE (*tp);
  *tp = new_tree;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    {
      tree stmt = tsi_stmt (oi);
      if (TREE_CODE (stmt) == STATEMENT_LIST)
	/* This copy is not redundant; tsi_link_after will smash this
	   STATEMENT_LIST into the end of the one we're building, and we
	   don't want to do that with the original.  */
	copy_statement_list (&stmt);
      tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
    }
}

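/* Copy the BIND_EXPR at *TP, remapping its block and the variables it
   declares using the mapping information in ID.  */
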
static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
}


/* Create a new gimple_seq by remapping all the statements in BODY
   using the inlining information in ID.  */

static gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
  gimple_stmt_iterator si;
  gimple_seq new_body = NULL;

  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
    {
      gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
      gimple_seq_add_seq (&new_body, new_stmts);
    }

  return new_body;
}


/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
   block using the mapping information in ID.  */

static gimple *
copy_gimple_bind (gbind *stmt, copy_body_data *id)
{
  gimple *new_bind;
  tree new_block, new_vars;
  gimple_seq body, new_body;

  /* Copy the statement.  Note that we purposely don't use copy_stmt
     here because we need to remap statements as we copy.  */
  body = gimple_bind_body (stmt);
  new_body = remap_gimple_seq (body, id);

  new_block = gimple_bind_block (stmt);
  if (new_block)
    remap_block (&new_block, id);

  /* This will remap a lot of the same decls again, but this should be
     harmless.  */
  new_vars = gimple_bind_vars (stmt);
  if (new_vars)
    new_vars = remap_decls (new_vars, NULL, id);

  new_bind = gimple_build_bind (new_vars, new_body, new_block);

  return new_bind;
}

/* Return true if DECL is a parameter or a SSA_NAME for a parameter.  */

static bool
is_parm (tree decl)
{
  if (TREE_CODE (decl) == SSA_NAME)
    {
      decl = SSA_NAME_VAR (decl);
      if (!decl)
	return false;
    }

  return (TREE_CODE (decl) == PARM_DECL);
}

/* Remap the dependence CLIQUE from the source to the destination function
   as specified in ID.  */

static unsigned short
remap_dependence_clique (copy_body_data *id, unsigned short clique)
{
  if (clique == 0 || processing_debug_stmt)
    return 0;
  if (!id->dependence_map)
    id->dependence_map = new hash_map<dependence_hash, unsigned short>;
  bool existed;
  unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
  if (!existed)
    newc = ++cfun->last_clique;
  return newc;
}

/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to tell walk_gimple_op whether to keep
   recursing into the children nodes of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  /* For recursive invocations this is no longer the LHS itself.  */
  bool is_lhs = wi_p->is_lhs;
  wi_p->is_lhs = false;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      if (is_lhs)
	SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
	 variables.  We don't want to copy static variables; there's
	 only one of those, no matter how many times we inline the
	 containing function.  Similarly for globals from an outer
	 function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      /* ??? The C++ frontend uses void * pointer zero to initialize
	 any other type.  This confuses the middle-end type verification.
	 As cloned bodies do not go through gimplification again the fixup
	 there doesn't trigger.  */
      if (TREE_CODE (new_decl) == INTEGER_CST
	  && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
	new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (!DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == FIELD_DECL)
    {
      /* If the enclosing record type is variably_modified_type_p, the field
	 has already been remapped.  Otherwise, it need not be.  */
      tree *n = id->decl_map->get (*tp);
      if (n)
	*tp = *n;
      *walk_subtrees = 0;
    }
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
	 will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
	 knows not to copy VAR_DECLs, etc., so this is safe.  */

      if (TREE_CODE (*tp) == MEM_REF)
	{
	  /* We need to re-canonicalize MEM_REFs from inline substitutions
	     that can happen when a pointer argument is an ADDR_EXPR.
	     Recurse here manually to allow that.  */
	  tree ptr = TREE_OPERAND (*tp, 0);
	  tree type = remap_type (TREE_TYPE (*tp), id);
	  tree old = *tp;
	  walk_tree (&ptr, remap_gimple_op_r, data, NULL);
	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
	    {
	      MR_DEPENDENCE_CLIQUE (*tp)
		= remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
	    }
	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
	     remapped a parameter as the property might be valid only
	     for the parameter itself.  */
	  if (TREE_THIS_NOTRAP (old)
	      && (!is_parm (TREE_OPERAND (old, 0))
		  || (!id->transform_parameter && is_parm (ptr))))
	    TREE_THIS_NOTRAP (*tp) = 1;
	  REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
	  *walk_subtrees = 0;
	  return NULL;
	}

      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      if (TREE_CODE (*tp) != OMP_CLAUSE)
	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  /* The copied TARGET_EXPR has never been expanded, even if the
	     original node was expanded already.  */
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  /* Variable substitution need not be simple.  In particular,
	     the MEM_REF substitution above.  Make sure that
	     TREE_CONSTANT and friends are up-to-date.  */
	  int invariant = is_gimple_min_invariant (*tp);
	  walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
	  recompute_tree_invariant_for_addr_expr (*tp);

	  /* If this used to be invariant, but is not any longer,
	     then regimplification is probably needed.  */
	  if (invariant && !is_gimple_min_invariant (*tp))
	    id->regimplify = true;

	  *walk_subtrees = 0;
	}
    }

  /* Update the TREE_BLOCK for the cloned expr.  */
  if (EXPR_P (*tp))
    {
      tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
      tree old_block = TREE_BLOCK (*tp);
      if (old_block)
	{
	  tree *n;
	  n = id->decl_map->get (TREE_BLOCK (*tp));
	  if (n)
	    new_block = *n;
	}
      TREE_SET_BLOCK (*tp, new_block);
    }

  /* Keep iterating.  */
  return NULL_TREE;
}


/* Called from copy_body_id via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
	 assignment into the equivalent of the original RESULT_DECL.
	 If the "assignment" is just the result decl, the result
	 decl has already been set (e.g. a recent "foo (&result_decl,
	 ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
	{
	  /* Replace the RETURN_EXPR with (a copy of) the
	     MODIFY_EXPR hanging underneath.  */
	  *tp = copy_node (assignment);
	}
      else /* Else the RETURN_EXPR returns no value.  */
	{
	  *tp = NULL;
	  return (tree) (void *)1;
	}
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR
	   || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (! DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
	 First we detect some inlining-induced bogosities for
	 discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
	  && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
	  && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
	{
	  /* Some assignments VAR = VAR; don't generate any rtl code
	     and thus don't count as variable modification.  Avoid
	     keeping bogosities like 0 = 0.  */
	  tree decl = TREE_OPERAND (*tp, 0), value;
	  tree *n;

	  n = id->decl_map->get (decl);
	  if (n)
	    {
	      value = *n;
	      STRIP_TYPE_NOPS (value);
	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
		{
		  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
		  return copy_tree_body_r (tp, walk_subtrees, data);
		}
	    }
	}
      else if (TREE_CODE (*tp) == INDIRECT_REF)
	{
	  /* Get rid of *& from inline substitutions that can happen when a
	     pointer argument is an ADDR_EXPR.  */
	  tree decl = TREE_OPERAND (*tp, 0);
	  tree *n = id->decl_map->get (decl);
	  if (n)
	    {
	      /* If we happen to get an ADDR_EXPR in n->value, strip
		 it manually here as we'll eventually get ADDR_EXPRs
		 which lie about their types pointed to.  In this case
		 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
		 but we absolutely rely on that.  As fold_indirect_ref
		 does other useful transformations, try that first, though.  */
	      tree type = TREE_TYPE (*tp);
	      tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
	      tree old = *tp;
	      *tp = gimple_fold_indirect_ref (ptr);
	      if (! *tp)
		{
		  if (TREE_CODE (ptr) == ADDR_EXPR)
		    {
		      *tp
			= fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
		      /* ??? We should either assert here or build
			 a VIEW_CONVERT_EXPR instead of blindly leaking
			 incompatible types to our IL.  */
		      if (! *tp)
			*tp = TREE_OPERAND (ptr, 0);
		    }
		  else
		    {
		      *tp = build1 (INDIRECT_REF, type, ptr);
		      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
		      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
		      TREE_READONLY (*tp) = TREE_READONLY (old);
		      /* We cannot propagate the TREE_THIS_NOTRAP flag if we
			 have remapped a parameter as the property might be
			 valid only for the parameter itself.  */
		      if (TREE_THIS_NOTRAP (old)
			  && (!is_parm (TREE_OPERAND (old, 0))
			      || (!id->transform_parameter && is_parm (ptr))))
			TREE_THIS_NOTRAP (*tp) = 1;
		    }
		}
	      *walk_subtrees = 0;
	      return NULL;
	    }
	}
      else if (TREE_CODE (*tp) == MEM_REF)
	{
	  /* We need to re-canonicalize MEM_REFs from inline substitutions
	     that can happen when a pointer argument is an ADDR_EXPR.
	     Recurse here manually to allow that.  */
	  tree ptr = TREE_OPERAND (*tp, 0);
	  tree type = remap_type (TREE_TYPE (*tp), id);
	  tree old = *tp;
	  walk_tree (&ptr, copy_tree_body_r, data, NULL);
	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
	    {
	      MR_DEPENDENCE_CLIQUE (*tp)
		= remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
	    }
	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
	     remapped a parameter as the property might be valid only
	     for the parameter itself.  */
	  if (TREE_THIS_NOTRAP (old)
	      && (!is_parm (TREE_OPERAND (old, 0))
		  || (!id->transform_parameter && is_parm (ptr))))
	    TREE_THIS_NOTRAP (*tp) = 1;
	  REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
	  *walk_subtrees = 0;
	  return NULL;
	}

      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* If EXPR has a block defined, map it to the newly constructed block.
	 When inlining we want EXPRs without a block to appear in the block
	 of the function call if we are not remapping a type.  */
      if (EXPR_P (*tp))
	{
	  new_block = id->remapping_type_depth == 0 ? id->block : NULL;
	  if (TREE_BLOCK (*tp))
	    {
	      tree *n;
	      n = id->decl_map->get (TREE_BLOCK (*tp));
	      if (n)
		new_block = *n;
	    }
	  TREE_SET_BLOCK (*tp, new_block);
	}

      if (TREE_CODE (*tp) != OMP_CLAUSE)
	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
	 original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}

      /* Variable substitution need not be simple.  In particular, the
	 INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
	 and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  int invariant = is_gimple_min_invariant (*tp);
	  walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

	  /* Handle the case where we substituted an INDIRECT_REF
	     into the operand of the ADDR_EXPR.  */
	  if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
	    {
	      tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
	      if (TREE_TYPE (t) != TREE_TYPE (*tp))
		t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
	      *tp = t;
	    }
	  else
	    recompute_tree_invariant_for_addr_expr (*tp);

	  /* If this used to be invariant, but is not any longer,
	     then regimplification is probably needed.  */
	  if (invariant && !is_gimple_min_invariant (*tp))
	    id->regimplify = true;

	  *walk_subtrees = 0;
	}
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

/* Helper for remap_gimple_stmt.  Given an EH region number for the
   source function, map that to the duplicate EH region number in
   the destination function.  */

static int
remap_eh_region_nr (int old_nr, copy_body_data *id)
{
  eh_region old_r, new_r;

  old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
  new_r = static_cast<eh_region> (*id->eh_map->get (old_r));

  return new_r->index;
}

/* Similar, but operate on INTEGER_CSTs.  */

static tree
remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
{
  int old_nr, new_nr;

  old_nr = tree_to_shwi (old_t_nr);
  new_nr = remap_eh_region_nr (old_nr, id);

  return build_int_cst (integer_type_node, new_nr);
}

/* Helper for copy_bb.  Remap statement STMT using the inlining
   information in ID.  Return the new statement copy.  */

static gimple_seq
remap_gimple_stmt (gimple *stmt, copy_body_data *id)
{
  gimple *copy = NULL;
  struct walk_stmt_info wi;
  bool skip_first = false;
  gimple_seq stmts = NULL;

  if (is_gimple_debug (stmt)
      && (gimple_debug_nonbind_marker_p (stmt)
	  ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
	  : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
    return stmts;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, GIMPLE_RETURNs should be transformed to just the
     contained GIMPLE_ASSIGN.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than the
     statement.  */
  if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
    {
      tree retval = gimple_return_retval (as_a <greturn *> (stmt));
      tree retbnd = gimple_return_retbnd (stmt);
      tree bndslot = id->retbnd;

      if (retbnd && bndslot)
	{
	  gimple *bndcopy = gimple_build_assign (bndslot, retbnd);
	  memset (&wi, 0, sizeof (wi));
	  wi.info = id;
	  walk_gimple_op (bndcopy, remap_gimple_op_r, &wi);
	  gimple_seq_add_stmt (&stmts, bndcopy);
	}

      /* If we're returning something, just turn that into an
	 assignment into the equivalent of the original RESULT_DECL.
	 If RETVAL is just the result decl, the result decl has
	 already been set (e.g. a recent "foo (&result_decl, ...)");
	 just toss the entire GIMPLE_RETURN.  */
      if (retval
	  && (TREE_CODE (retval) != RESULT_DECL
	      && (TREE_CODE (retval) != SSA_NAME
		  || ! SSA_NAME_VAR (retval)
		  || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
	{
	  copy = gimple_build_assign (id->do_not_unshare
				      ? id->retvar : unshare_expr (id->retvar),
				      retval);
	  /* id->retvar is already substituted.  Skip it on later remapping.  */
	  skip_first = true;

	  /* We need to copy bounds if we return a structure with pointers
	     from an instrumented function.  */
	  if (chkp_function_instrumented_p (id->dst_fn)
	      && !bndslot
	      && !BOUNDED_P (id->retvar)
	      && chkp_type_has_pointer (TREE_TYPE (id->retvar)))
	    id->assign_stmts.safe_push (copy);

	}
      else
	return stmts;
    }
  else if (gimple_has_substatements (stmt))
    {
      gimple_seq s1, s2;

      /* When cloning bodies from the C++ front end, we will be handed bodies
	 in High GIMPLE form.  Handle here all the High GIMPLE statements that
	 have embedded statements.  */
      switch (gimple_code (stmt))
	{
	case GIMPLE_BIND:
	  copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
	  break;

	case GIMPLE_CATCH:
	  {
	    gcatch *catch_stmt = as_a <gcatch *> (stmt);
	    s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
	    copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
	  }
	  break;

	case GIMPLE_EH_FILTER:
	  s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
	  copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
	  break;

	case GIMPLE_TRY:
	  s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
	  s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
	  copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
	  break;

	case GIMPLE_WITH_CLEANUP_EXPR:
	  s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
	  copy = gimple_build_wce (s1);
	  break;

	case GIMPLE_OMP_PARALLEL:
	  {
	    gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
	    s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
	    copy = gimple_build_omp_parallel
		     (s1,
		      gimple_omp_parallel_clauses (omp_par_stmt),
		      gimple_omp_parallel_child_fn (omp_par_stmt),
		      gimple_omp_parallel_data_arg (omp_par_stmt));
	  }
	  break;

	case GIMPLE_OMP_TASK:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_task
		   (s1,
		    gimple_omp_task_clauses (stmt),
		    gimple_omp_task_child_fn (stmt),
		    gimple_omp_task_data_arg (stmt),
		    gimple_omp_task_copy_fn (stmt),
		    gimple_omp_task_arg_size (stmt),
		    gimple_omp_task_arg_align (stmt));
	  break;

	case GIMPLE_OMP_FOR:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
	  copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
				       gimple_omp_for_clauses (stmt),
				       gimple_omp_for_collapse (stmt), s2);
	  {
	    size_t i;
	    for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	      {
		gimple_omp_for_set_index (copy, i,
					  gimple_omp_for_index (stmt, i));
		gimple_omp_for_set_initial (copy, i,
					    gimple_omp_for_initial (stmt, i));
		gimple_omp_for_set_final (copy, i,
					  gimple_omp_for_final (stmt, i));
		gimple_omp_for_set_incr (copy, i,
					 gimple_omp_for_incr (stmt, i));
		gimple_omp_for_set_cond (copy, i,
					 gimple_omp_for_cond (stmt, i));
	      }
	  }
	  break;

	case GIMPLE_OMP_MASTER:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_master (s1);
	  break;

	case GIMPLE_OMP_TASKGROUP:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_taskgroup (s1);
	  break;

	case GIMPLE_OMP_ORDERED:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_ordered
		   (s1,
		    gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
	  break;

	case GIMPLE_OMP_SECTION:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_section (s1);
	  break;

	case GIMPLE_OMP_SECTIONS:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_sections
		   (s1, gimple_omp_sections_clauses (stmt));
	  break;

	case GIMPLE_OMP_SINGLE:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_single
		   (s1, gimple_omp_single_clauses (stmt));
	  break;

	case GIMPLE_OMP_TARGET:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_target
		   (s1, gimple_omp_target_kind (stmt),
		    gimple_omp_target_clauses (stmt));
	  break;

	case GIMPLE_OMP_TEAMS:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_teams
		   (s1, gimple_omp_teams_clauses (stmt));
	  break;

	case GIMPLE_OMP_CRITICAL:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_critical (s1,
					    gimple_omp_critical_name
					      (as_a <gomp_critical *> (stmt)),
					    gimple_omp_critical_clauses
					      (as_a <gomp_critical *> (stmt)));
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
	    gtransaction *new_trans_stmt;
	    s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
				   id);
	    copy = new_trans_stmt = gimple_build_transaction (s1);
	    gimple_transaction_set_subcode (new_trans_stmt,
	      gimple_transaction_subcode (old_trans_stmt));
	    gimple_transaction_set_label_norm (new_trans_stmt,
	      gimple_transaction_label_norm (old_trans_stmt));
	    gimple_transaction_set_label_uninst (new_trans_stmt,
	      gimple_transaction_label_uninst (old_trans_stmt));
	    gimple_transaction_set_label_over (new_trans_stmt,
	      gimple_transaction_label_over (old_trans_stmt));
	  }
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      if (gimple_assign_copy_p (stmt)
	  && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
	  && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
	{
	  /* Here we handle statements that are not completely rewritten.
	     First we detect some inlining-induced bogosities for
	     discarding.  */

	  /* Some assignments VAR = VAR; don't generate any rtl code
	     and thus don't count as variable modification.  Avoid
	     keeping bogosities like 0 = 0.  */
	  tree decl = gimple_assign_lhs (stmt), value;
	  tree *n;

	  n = id->decl_map->get (decl);
	  if (n)
	    {
	      value = *n;
	      STRIP_TYPE_NOPS (value);
	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
		return NULL;
	    }
	}

      /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
	 in a block that we aren't copying during tree_function_versioning,
	 just drop the clobber stmt.  */
      if (id->blocks_to_copy && gimple_clobber_p (stmt))
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  if (TREE_CODE (lhs) == MEM_REF
	      && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
	    {
	      gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
	      if (gimple_bb (def_stmt)
		  && !bitmap_bit_p (id->blocks_to_copy,
				    gimple_bb (def_stmt)->index))
		return NULL;
	    }
	}

      if (gimple_debug_bind_p (stmt))
	{
	  gdebug *copy
	    = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
				       gimple_debug_bind_get_value (stmt),
				       stmt);
	  id->debug_stmts.safe_push (copy);
	  gimple_seq_add_stmt (&stmts, copy);
	  return stmts;
	}
      if (gimple_debug_source_bind_p (stmt))
	{
	  gdebug *copy = gimple_build_debug_source_bind
			   (gimple_debug_source_bind_get_var (stmt),
			    gimple_debug_source_bind_get_value (stmt),
			    stmt);
	  id->debug_stmts.safe_push (copy);
	  gimple_seq_add_stmt (&stmts, copy);
	  return stmts;
	}
      if (gimple_debug_nonbind_marker_p (stmt))
	{
	  /* If the inlined function has too many debug markers,
	     don't copy them.  */
	  if (id->src_cfun->debug_marker_count
	      > PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
	    return stmts;

	  gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
	  id->debug_stmts.safe_push (copy);
	  gimple_seq_add_stmt (&stmts, copy);
	  return stmts;
	}
      gcc_checking_assert (!is_gimple_debug (stmt));

      /* Create a new deep copy of the statement.  */
      copy = gimple_copy (stmt);

      /* Clear flags that need revisiting.  */
      if (gcall *call_stmt = dyn_cast <gcall *> (copy))
	{
	  if (gimple_call_tail_p (call_stmt))
	    gimple_call_set_tail (call_stmt, false);
	  if (gimple_call_from_thunk_p (call_stmt))
	    gimple_call_set_from_thunk (call_stmt, false);
	  if (gimple_call_internal_p (call_stmt))
	    switch (gimple_call_internal_fn (call_stmt))
	      {
	      case IFN_GOMP_SIMD_LANE:
	      case IFN_GOMP_SIMD_VF:
	      case IFN_GOMP_SIMD_LAST_LANE:
	      case IFN_GOMP_SIMD_ORDERED_START:
	      case IFN_GOMP_SIMD_ORDERED_END:
		DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
		break;
	      default:
		break;
	      }
	}

      /* Remap the region numbers for __builtin_eh_{pointer,filter},
	 RESX and EH_DISPATCH.  */
      if (id->eh_map)
	switch (gimple_code (copy))
	  {
	  case GIMPLE_CALL:
	    {
	      tree r, fndecl = gimple_call_fndecl (copy);
	      if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
		switch (DECL_FUNCTION_CODE (fndecl))
		  {
		  case BUILT_IN_EH_COPY_VALUES:
		    r = gimple_call_arg (copy, 1);
		    r = remap_eh_region_tree_nr (r, id);
		    gimple_call_set_arg (copy, 1, r);
		    /* FALLTHRU */

		  case BUILT_IN_EH_POINTER:
		  case BUILT_IN_EH_FILTER:
		    r = gimple_call_arg (copy, 0);
		    r = remap_eh_region_tree_nr (r, id);
		    gimple_call_set_arg (copy, 0, r);
		    break;

		  default:
		    break;
		  }

	      /* Reset alias info if we didn't apply measures to
		 keep it valid over inlining by setting DECL_PT_UID.  */
	      if (!id->src_cfun->gimple_df
		  || !id->src_cfun->gimple_df->ipa_pta)
		gimple_call_reset_alias_info (as_a <gcall *> (copy));
	    }
	    break;

	  case GIMPLE_RESX:
	    {
	      gresx *resx_stmt = as_a <gresx *> (copy);
	      int r = gimple_resx_region (resx_stmt);
	      r = remap_eh_region_nr (r, id);
	      gimple_resx_set_region (resx_stmt, r);
	    }
	    break;

	  case GIMPLE_EH_DISPATCH:
	    {
	      geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
	      int r = gimple_eh_dispatch_region (eh_dispatch);
	      r = remap_eh_region_nr (r, id);
	      gimple_eh_dispatch_set_region (eh_dispatch, r);
	    }
	    break;

	  default:
	    break;
	  }
    }

  /* If STMT has a block defined, map it to the newly constructed
     block.  */
  if (gimple_block (copy))
    {
      tree *n;
      n = id->decl_map->get (gimple_block (copy));
      gcc_assert (n);
      gimple_set_block (copy, *n);
    }

  if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy)
      || gimple_debug_nonbind_marker_p (copy))
    {
      gimple_seq_add_stmt (&stmts, copy);
      return stmts;
    }

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;
  if (skip_first)
    walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
  else
    walk_gimple_op (copy, remap_gimple_op_r, &wi);

  /* Clear the copied virtual operands.  We are not remapping them here
     but are going to recreate them from scratch.  */
  if (gimple_has_mem_ops (copy))
    {
      gimple_set_vdef (copy, NULL_TREE);
      gimple_set_vuse (copy, NULL_TREE);
    }

  gimple_seq_add_stmt (&stmts, copy);
  return stmts;
}


/* Copy a basic block and scale its profile accordingly.  Edges will be
   taken care of later.  */

static basic_block
copy_bb (copy_body_data *id, basic_block bb,
	 profile_count num, profile_count den)
{
  gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
  basic_block copy_basic_block;
  tree decl;
  basic_block prev;

  profile_count::adjust_for_ipa_scaling (&num, &den);
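
  /* Each copied block's count is scaled by NUM/DEN below.  E.g. (with
     illustrative numbers) if the call site count is NUM = 50 and the
     callee entry count is DEN = 200, a callee block with count 100 is
     copied with count 100 * 50 / 200 = 25.  */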

  /* Search for previous copied basic block.  */
  prev = bb->prev_bb;
  while (!prev->aux)
    prev = prev->prev_bb;

  /* create_basic_block () will append every new block to
     basic_block_info automatically.  */
  copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
  copy_basic_block->count = bb->count.apply_scale (num, den);

  copy_gsi = gsi_start_bb (copy_basic_block);

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple_seq stmts;
      gimple *stmt = gsi_stmt (gsi);
      gimple *orig_stmt = stmt;
      gimple_stmt_iterator stmts_gsi;
      bool stmt_added = false;

      id->regimplify = false;
      stmts = remap_gimple_stmt (stmt, id);

      if (gimple_seq_empty_p (stmts))
	continue;

      seq_gsi = copy_gsi;

      for (stmts_gsi = gsi_start (stmts);
	   !gsi_end_p (stmts_gsi); )
	{
	  stmt = gsi_stmt (stmts_gsi);

	  /* Advance iterator now before stmt is moved to seq_gsi.  */
	  gsi_next (&stmts_gsi);

	  if (gimple_nop_p (stmt))
	    continue;

	  gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
					    orig_stmt);

	  /* With return slot optimization we can end up with
	     non-gimple (foo *)&this->m, fix that here.  */
	  if (is_gimple_assign (stmt)
	      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
	      && !is_gimple_val (gimple_assign_rhs1 (stmt)))
	    {
	      tree new_rhs;
	      new_rhs = force_gimple_operand_gsi (&seq_gsi,
						  gimple_assign_rhs1 (stmt),
						  true, NULL, false,
						  GSI_CONTINUE_LINKING);
	      gimple_assign_set_rhs1 (stmt, new_rhs);
	      id->regimplify = false;
	    }

	  gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);

	  if (id->regimplify)
	    gimple_regimplify_operands (stmt, &seq_gsi);

	  stmt_added = true;
	}

      if (!stmt_added)
	continue;

      /* If copy_basic_block was empty at the start of this iteration,
	 call gsi_start_bb again to get at the newly added statements.  */
      if (gsi_end_p (copy_gsi))
	copy_gsi = gsi_start_bb (copy_basic_block);
      else
	gsi_next (&copy_gsi);

      /* Process the new statement.  The call to gimple_regimplify_operands
	 possibly turned the statement into multiple statements; we
	 need to process all of them.  */
      do
	{
	  tree fn;
	  gcall *call_stmt;

	  stmt = gsi_stmt (copy_gsi);
	  call_stmt = dyn_cast <gcall *> (stmt);
	  if (call_stmt
	      && gimple_call_va_arg_pack_p (call_stmt)
	      && id->call_stmt
	      && ! gimple_call_va_arg_pack_p (id->call_stmt))
	    {
	      /* __builtin_va_arg_pack () should be replaced by
		 all arguments corresponding to ... in the caller.  */
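	      /* For instance (an illustrative example, not from these
		 sources): inlining
		   int f (int x, ...)
		   { return g (x, __builtin_va_arg_pack ()); }
		 at the call f (1, 2, 3) rewrites the copied call into
		   g (1, 2, 3)
		 i.e. the pack expands to the caller's anonymous
		 arguments 2 and 3.  */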
	      tree p;
	      gcall *new_call;
	      vec<tree> argarray;
	      size_t nargs = gimple_call_num_args (id->call_stmt);
	      size_t n, i, nargs_to_copy;
	      bool remove_bounds = false;

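	      /* Count the anonymous arguments: each named parameter of
		 the inlined function cancels one argument of the call in
		 ID->call_stmt, leaving NARGS as the number of arguments
		 corresponding to the '...'.  */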
	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
		nargs--;

	      /* Bounds should be removed from the arg pack in case
		 we handle a non-instrumented call in an instrumented
		 function.  */
	      nargs_to_copy = nargs;
	      if (gimple_call_with_bounds_p (id->call_stmt)
		  && !gimple_call_with_bounds_p (stmt))
		{
		  for (i = gimple_call_num_args (id->call_stmt) - nargs;
		       i < gimple_call_num_args (id->call_stmt);
		       i++)
		    if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
		      nargs_to_copy--;
		  remove_bounds = true;
		}

	      /* Create the new array of arguments.  */
	      n = nargs_to_copy + gimple_call_num_args (call_stmt);
	      argarray.create (n);
	      argarray.safe_grow_cleared (n);

	      /* Copy all the arguments before '...'  */
	      memcpy (argarray.address (),
		      gimple_call_arg_ptr (call_stmt, 0),
		      gimple_call_num_args (call_stmt) * sizeof (tree));

	      if (remove_bounds)
		{
		  /* Append the rest of arguments removing bounds.  */
		  unsigned cur = gimple_call_num_args (call_stmt);
		  for (i = gimple_call_num_args (id->call_stmt) - nargs;
		       i < gimple_call_num_args (id->call_stmt);
		       i++)
		    if (!POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
		      argarray[cur++] = gimple_call_arg (id->call_stmt, i);
		  gcc_assert (cur == n);
		}
	      else
		{
		  /* Append the arguments passed in '...'  */
		  memcpy (argarray.address () + gimple_call_num_args (call_stmt),
			  gimple_call_arg_ptr (id->call_stmt, 0)
			  + (gimple_call_num_args (id->call_stmt) - nargs),
			  nargs * sizeof (tree));
		}

	      new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
						argarray);

	      argarray.release ();

	      /* Copy all GIMPLE_CALL flags, location and block, except
		 GF_CALL_VA_ARG_PACK.  */
	      gimple_call_copy_flags (new_call, call_stmt);
	      gimple_call_set_va_arg_pack (new_call, false);
	      gimple_set_location (new_call, gimple_location (stmt));
	      gimple_set_block (new_call, gimple_block (stmt));
	      gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));

	      gsi_replace (&copy_gsi, new_call, false);
	      stmt = new_call;
	    }
	  else if (call_stmt
		   && id->call_stmt
		   && (decl = gimple_call_fndecl (stmt))
		   && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
		   && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN
		   && ! gimple_call_va_arg_pack_p (id->call_stmt))
	    {
	      /* __builtin_va_arg_pack_len () should be replaced by
		 the number of anonymous arguments.  */
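	      /* E.g. (an illustrative example): for int f (int x, ...)
		 inlined at the call f (1, 2, 3),
		 __builtin_va_arg_pack_len () evaluates to 2.  */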
	      size_t nargs = gimple_call_num_args (id->call_stmt), i;
	      tree count, p;
	      gimple *new_stmt;

	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
		nargs--;

	      /* For instrumented calls we should ignore bounds.  */
	      for (i = gimple_call_num_args (id->call_stmt) - nargs;
		   i < gimple_call_num_args (id->call_stmt);
		   i++)
		if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
		  nargs--;

	      count = build_int_cst (integer_type_node, nargs);
	      new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
	      gsi_replace (&copy_gsi, new_stmt, false);
	      stmt = new_stmt;
	    }
	  else if (call_stmt
		   && id->call_stmt
		   && gimple_call_internal_p (stmt)
		   && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
	    {
	      /* Drop TSAN_FUNC_EXIT () internal calls during inlining.  */
	      gsi_remove (&copy_gsi, false);
	      continue;
	    }

	  /* Statements produced by inlining can be unfolded, especially
	     when we constant propagated some operands.  We can't fold
	     them right now for two reasons:
	     1) folding requires SSA_NAME_DEF_STMTs to be correct
	     2) we can't change function calls to builtins.
	     So we just mark the statement for later folding.  We mark
	     all new statements, instead of just the statements that have
	     changed by some nontrivial substitution, so that even
	     statements made foldable indirectly are updated.  If this
	     turns out to be expensive, copy_body can be told to watch
	     for nontrivial changes.  */
	  if (id->statements_to_fold)
	    id->statements_to_fold->add (stmt);

	  /* We're duplicating a CALL_EXPR.  Find any corresponding
	     callgraph edges and update or duplicate them.  */
	  if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
	    {
	      struct cgraph_edge *edge;

	      switch (id->transform_call_graph_edges)
		{
		case CB_CGE_DUPLICATE:
		  edge = id->src_node->get_edge (orig_stmt);
		  if (edge)
		    {
		      struct cgraph_edge *old_edge = edge;
		      profile_count old_cnt = edge->count;
		      edge = edge->clone (id->dst_node, call_stmt,
					  gimple_uid (stmt),
					  num, den,
					  true);

		      /* Speculative calls consist of two edges - direct and
			 indirect.  Duplicate the whole thing and distribute
			 frequencies accordingly.  */
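		      /* The split below preserves the original
			 direct/indirect ratio: e.g. (with illustrative
			 counts) for an old direct count of 30 and an old
			 indirect count of 10, a copy whose block count is
			 20 gets indirect count 20 * 10 / 40 = 5 and
			 direct count 20 - 5 = 15.  */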
		      if (edge->speculative)
			{
			  struct cgraph_edge *direct, *indirect;
			  struct ipa_ref *ref;

			  gcc_assert (!edge->indirect_unknown_callee);
			  old_edge->speculative_call_info (direct, indirect,
							   ref);

			  profile_count indir_cnt = indirect->count;
			  indirect = indirect->clone (id->dst_node, call_stmt,
						      gimple_uid (stmt),
						      num, den,
						      true);

			  profile_probability prob
			    = indir_cnt.probability_in (old_cnt + indir_cnt);
			  indirect->count
			    = copy_basic_block->count.apply_probability (prob);
			  edge->count
			    = copy_basic_block->count - indirect->count;
			  id->dst_node->clone_reference (ref, stmt);
			}
		      else
			edge->count = copy_basic_block->count;
		    }
		  break;

		case CB_CGE_MOVE_CLONES:
		  id->dst_node->set_call_stmt_including_clones (orig_stmt,
								call_stmt);
		  edge = id->dst_node->get_edge (stmt);
		  break;

		case CB_CGE_MOVE:
		  edge = id->dst_node->get_edge (orig_stmt);
		  if (edge)
		    edge->set_call_stmt (call_stmt);
		  break;

		default:
		  gcc_unreachable ();
		}

	      /* Constant propagation on arguments done during inlining
		 may create a new direct call.  Produce an edge for it.  */
	      if ((!edge
		   || (edge->indirect_inlining_edge
		       && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
		  && id->dst_node->definition
		  && (fn = gimple_call_fndecl (stmt)) != NULL)
		{
		  struct cgraph_node *dest = cgraph_node::get_create (fn);

		  /* We have a missing edge in the callgraph.  This can
		     happen when previous inlining turned an indirect call
		     into a direct call by constant propagating arguments
		     or when we are producing a dead clone (for further
		     cloning).  In all other cases we hit a bug (incorrect
		     node sharing is the most common reason for missing
		     edges).  */
		  gcc_assert (!dest->definition
			      || dest->address_taken
			      || !id->src_node->definition
			      || !id->dst_node->definition);
		  if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
		    id->dst_node->create_edge_including_clones
		      (dest, orig_stmt, call_stmt, bb->count,
		       CIF_ORIGINALLY_INDIRECT_CALL);
		  else
		    id->dst_node->create_edge (dest, call_stmt,
					       bb->count)->inline_failed
		      = CIF_ORIGINALLY_INDIRECT_CALL;
		  if (dump_file)
		    {
		      fprintf (dump_file, "Created new direct edge to %s\n",
			       dest->name ());
		    }
		}

	      notice_special_calls (as_a <gcall *> (stmt));
	    }

	  maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
				      id->eh_map, id->eh_lp_nr);

	  gsi_next (&copy_gsi);
	}
      while (!gsi_end_p (copy_gsi));

      copy_gsi = gsi_last_bb (copy_basic_block);
    }

  return copy_basic_block;
}

/* Inserting a Single Entry Multiple Exit region in SSA form into code in
   SSA form is quite easy, since the dominator relationship for the old
   basic blocks does not change.

   There is, however, an exception: inlining might change the dominator
   relation across EH edges going from basic blocks within the inlined
   function to landing pads in the function we inline into.

   The function fills in PHI_RESULTs of such PHI nodes if they refer
   to gimple regs.  Otherwise, the function marks the PHI_RESULT of
   such PHI nodes for renaming.  For non-gimple regs, renaming is safe:
   the EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must
   be set, and this means that there will be no overlapping live ranges
   for the underlying symbol.

   This might change in the future if we allow redirecting of EH edges
   and we might then want to change the way the CFG is built pre-inlining
   to include all the possible edges.  */
static void
update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
				  bool can_throw, bool nonlocal_goto)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!e->dest->aux
	|| ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
      {
	gphi *phi;
	gphi_iterator si;

	if (!nonlocal_goto)
	  gcc_assert (e->flags & EDGE_EH);

	if (!can_throw)
	  gcc_assert (!(e->flags & EDGE_EH));

	for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
	  {
	    edge re;

	    phi = si.phi ();

	    /* For abnormal goto/call edges the receiver can be the
	       ENTRY_BLOCK.  Do not assert this cannot happen.  */

	    gcc_assert ((e->flags & EDGE_EH)
			|| SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));

	    re = find_edge (ret_bb, e->dest);
	    gcc_checking_assert (re);
	    gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
			== (e->flags & (EDGE_EH | EDGE_ABNORMAL)));

	    SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
		     USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
	  }
      }
}


/* Copy edges from BB into its copy constructed earlier, scale profile
   accordingly.  Assume aux pointers point to the copies of each BB.
   Return true if any debug stmts are left after a statement that must
   end the basic block.  */

static bool
copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
		   basic_block ret_bb, basic_block abnormal_goto_dest)
{
  basic_block new_bb = (basic_block) bb->aux;
  edge_iterator ei;
  edge old_edge;
  gimple_stmt_iterator si;
  int flags;
  bool need_debug_cleanup = false;

  /* Use the indices from the original blocks to create edges for the
     new ones.  */
  FOR_EACH_EDGE (old_edge, ei, bb->succs)
    if (!(old_edge->flags & EDGE_EH))
      {
	edge new_edge;

	flags = old_edge->flags;

	/* Return edges do get a FALLTHRU flag when they get inlined.  */
	if (old_edge->dest->index == EXIT_BLOCK
	    && !(old_edge->flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
	    && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
	  flags |= EDGE_FALLTHRU;
	new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
	new_edge->probability = old_edge->probability;
      }

  if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
    return false;

  /* When doing function splitting, we must decrease the count of the
     return block which was previously reachable by blocks we did not
     copy.  */
  if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
    FOR_EACH_EDGE (old_edge, ei, bb->preds)
      if (old_edge->src->index != ENTRY_BLOCK
	  && !old_edge->src->aux)
	new_bb->count -= old_edge->count ().apply_scale (num, den);

  for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
    {
      gimple *copy_stmt;
      bool can_throw, nonlocal_goto;

      copy_stmt = gsi_stmt (si);
      if (!is_gimple_debug (copy_stmt))
	update_stmt (copy_stmt);

      /* Do this before the possible split_block.  */
      gsi_next (&si);

      /* If this tree could throw an exception, there are two
	 cases where we need to add abnormal edge(s): the
	 tree wasn't in a region and there is a "current
	 region" in the caller; or the original tree had
	 EH edges.  In both cases split the block after the tree,
	 and add abnormal edge(s) as needed; we need both
	 those from the callee and the caller.
	 We check whether the copy can throw, because the const
	 propagation can change an INDIRECT_REF which throws
	 into a COMPONENT_REF which doesn't.  If the copy
	 can throw, the original could also throw.  */
      can_throw = stmt_can_throw_internal (copy_stmt);
      nonlocal_goto
	= (stmt_can_make_abnormal_goto (copy_stmt)
	   && !computed_goto_p (copy_stmt));

      if (can_throw || nonlocal_goto)
	{
	  if (!gsi_end_p (si))
	    {
	      while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
		gsi_next (&si);
	      if (gsi_end_p (si))
		need_debug_cleanup = true;
	    }
	  if (!gsi_end_p (si))
	    /* Note that bb's predecessor edges aren't necessarily
	       right at this point; split_block doesn't care.  */
	    {
	      edge e = split_block (new_bb, copy_stmt);

	      new_bb = e->dest;
	      new_bb->aux = e->src->aux;
	      si = gsi_start_bb (new_bb);
	    }
	}

      bool update_probs = false;

      if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
	{
	  make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
	  update_probs = true;
	}
      else if (can_throw)
	{
	  make_eh_edges (copy_stmt);
	  update_probs = true;
	}

      /* EH edges may not match old edges.  Copy as much as possible.  */
      if (update_probs)
	{
	  edge e;
	  edge_iterator ei;
	  basic_block copy_stmt_bb = gimple_bb (copy_stmt);

	  FOR_EACH_EDGE (old_edge, ei, bb->succs)
	    if ((old_edge->flags & EDGE_EH)
		&& (e = find_edge (copy_stmt_bb,
				   (basic_block) old_edge->dest->aux))
		&& (e->flags & EDGE_EH))
	      e->probability = old_edge->probability;

	  FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
	    if ((e->flags & EDGE_EH) && !e->probability.initialized_p ())
	      e->probability = profile_probability::never ();
	}

      /* If the call we inline cannot make an abnormal goto, do not add
	 additional abnormal edges but only retain those already present
	 in the original function body.  */
      if (abnormal_goto_dest == NULL)
	nonlocal_goto = false;
      if (nonlocal_goto)
	{
	  basic_block copy_stmt_bb = gimple_bb (copy_stmt);

	  if (get_abnormal_succ_dispatcher (copy_stmt_bb))
	    nonlocal_goto = false;
	  /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
	     in OpenMP regions which aren't allowed to be left abnormally.
	     So, no need to add abnormal edge in that case.  */
	  else if (is_gimple_call (copy_stmt)
		   && gimple_call_internal_p (copy_stmt)
		   && (gimple_call_internal_fn (copy_stmt)
		       == IFN_ABNORMAL_DISPATCHER)
		   && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
	    nonlocal_goto = false;
	  else
	    make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
				   EDGE_ABNORMAL);
	}

      if ((can_throw || nonlocal_goto)
	  && gimple_in_ssa_p (cfun))
	update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
					  can_throw, nonlocal_goto);
    }
  return need_debug_cleanup;
}

/* Copy the PHIs.  All blocks and edges have been copied, some blocks
   were possibly split and new outgoing EH edges inserted.
   BB points to the block of the original function and AUX pointers
   link the original and newly copied blocks.  */

static void
copy_phis_for_bb (basic_block bb, copy_body_data *id)
{
  basic_block const new_bb = (basic_block) bb->aux;
  edge_iterator ei;
  gphi *phi;
  gphi_iterator si;
  edge new_edge;
  bool inserted = false;

  for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
    {
      tree res, new_res;
      gphi *new_phi;

      phi = si.phi ();
      res = PHI_RESULT (phi);
      new_res = res;
      if (!virtual_operand_p (res))
	{
	  walk_tree (&new_res, copy_tree_body_r, id, NULL);
	  if (EDGE_COUNT (new_bb->preds) == 0)
	    {
	      /* Technically we'd want a SSA_DEFAULT_DEF here...  */
	      SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
	    }
	  else
	    {
	      new_phi = create_phi_node (new_res, new_bb);
	      FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
		{
		  edge old_edge = find_edge ((basic_block) new_edge->src->aux,
					     bb);
		  tree arg;
		  tree new_arg;
		  edge_iterator ei2;
		  location_t locus;

		  /* When doing partial cloning, we allow PHIs on the entry
		     block as long as all the arguments are the same.
		     Find any input edge to see which argument to copy.  */
		  if (!old_edge)
		    FOR_EACH_EDGE (old_edge, ei2, bb->preds)
		      if (!old_edge->src->aux)
			break;

		  arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
		  new_arg = arg;
		  walk_tree (&new_arg, copy_tree_body_r, id, NULL);
		  gcc_assert (new_arg);
		  /* With return slot optimization we can end up with
		     non-gimple (foo *)&this->m, fix that here.  */
		  if (TREE_CODE (new_arg) != SSA_NAME
		      && TREE_CODE (new_arg) != FUNCTION_DECL
		      && !is_gimple_val (new_arg))
		    {
		      gimple_seq stmts = NULL;
		      new_arg = force_gimple_operand (new_arg, &stmts, true,
						      NULL);
		      gsi_insert_seq_on_edge (new_edge, stmts);
		      inserted = true;
		    }
		  locus = gimple_phi_arg_location_from_edge (phi, old_edge);
		  if (LOCATION_BLOCK (locus))
		    {
		      tree *n;
		      n = id->decl_map->get (LOCATION_BLOCK (locus));
		      gcc_assert (n);
		      locus = set_block (locus, *n);
		    }
		  else
		    locus = LOCATION_LOCUS (locus);

		  add_phi_arg (new_phi, new_arg, new_edge, locus);
		}
	    }
	}
    }

  /* Commit the delayed edge insertions.  */
  if (inserted)
    FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
      gsi_commit_one_edge_insert (new_edge, NULL);
}


/* Wrapper for remap_decl so it can be used as a callback.  */

static tree
remap_decl_1 (tree decl, void *data)
{
  return remap_decl (decl, (copy_body_data *) data);
}

/* Build the struct function and associated data structures for the new
   clone NEW_FNDECL to be built.  CALLEE_FNDECL is the original.  This
   function changes cfun to the function of NEW_FNDECL (and
   current_function_decl too).  */

static void
initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
{
  struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);

  if (!DECL_ARGUMENTS (new_fndecl))
    DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
  if (!DECL_RESULT (new_fndecl))
    DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);

  /* Register specific tree functions.  */
  gimple_register_cfg_hooks ();

  /* Get clean struct function.  */
  push_struct_function (new_fndecl);

  /* We will rebuild these, so just sanity check that they are empty.  */
  gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
  gcc_assert (cfun->local_decls == NULL);
  gcc_assert (cfun->cfg == NULL);
  gcc_assert (cfun->decl == new_fndecl);

  /* Copy items we preserve during cloning.  */
  cfun->static_chain_decl = src_cfun->static_chain_decl;
  cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
  cfun->function_end_locus = src_cfun->function_end_locus;
  cfun->curr_properties = src_cfun->curr_properties;
  cfun->last_verified = src_cfun->last_verified;
  cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
  cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
  cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
  cfun->stdarg = src_cfun->stdarg;
  cfun->after_inlining = src_cfun->after_inlining;
  cfun->can_throw_non_call_exceptions
    = src_cfun->can_throw_non_call_exceptions;
  cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
  cfun->returns_struct = src_cfun->returns_struct;
  cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;

  init_empty_tree_cfg ();

  profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);

  profile_count num = count;
  profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
  profile_count::adjust_for_ipa_scaling (&num, &den);
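
  /* Scale the entry and exit block counts of the source function down
     to the expected entry count of the clone.  adjust_for_ipa_scaling
     above forces NUM and DEN nonzero when the denominator would
     otherwise be zero, so a missing profile does not push all counts
     to zero.  */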
  ENTRY_BLOCK_PTR_FOR_FN (cfun)->count
    = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (num, den);
  EXIT_BLOCK_PTR_FOR_FN (cfun)->count
    = EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (num, den);
  if (src_cfun->eh)
    init_eh_for_function ();

  if (src_cfun->gimple_df)
    {
      init_tree_ssa (cfun);
      cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
      if (cfun->gimple_df->in_ssa_p)
	init_ssa_operands (cfun);
    }
}

/* Helper function for copy_cfg_body.  Move debug stmts from the end
   of NEW_BB to the beginning of successor basic blocks when needed.  If
   the successor has multiple predecessors, reset the debug stmts'
   values; otherwise keep them.  */
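
/* E.g. (an illustrative GIMPLE sketch) if NEW_BB ends with

     x_1 = may_throw ();
     # DEBUG y => x_1 + 1

   the debug bind cannot stay after the block-ending statement, so it is
   moved (on the last outgoing edge) or copied (on the other edges) to
   the start of each successor, losing its value when the successor has
   multiple predecessors.  */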

static void
maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);

  if (gsi_end_p (si)
      || gsi_one_before_end_p (si)
      || !(stmt_can_throw_internal (gsi_stmt (si))
	   || stmt_can_make_abnormal_goto (gsi_stmt (si))))
    return;

  FOR_EACH_EDGE (e, ei, new_bb->succs)
    {
      gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
      gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
      while (is_gimple_debug (gsi_stmt (ssi)))
	{
	  gimple *stmt = gsi_stmt (ssi);
	  gdebug *new_stmt;
	  tree var;
	  tree value;

	  /* For the last edge move the debug stmts instead of copying
	     them.  */
	  if (ei_one_before_end_p (ei))
	    {
	      si = ssi;
	      gsi_prev (&ssi);
	      if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
		gimple_debug_bind_reset_value (stmt);
	      gsi_remove (&si, false);
	      gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
	      continue;
	    }

	  if (gimple_debug_bind_p (stmt))
	    {
	      var = gimple_debug_bind_get_var (stmt);
	      if (single_pred_p (e->dest))
		{
		  value = gimple_debug_bind_get_value (stmt);
		  value = unshare_expr (value);
		}
	      else
		value = NULL_TREE;
	      new_stmt = gimple_build_debug_bind (var, value, stmt);
	    }
	  else if (gimple_debug_source_bind_p (stmt))
	    {
	      var = gimple_debug_source_bind_get_var (stmt);
	      value = gimple_debug_source_bind_get_value (stmt);
	      new_stmt = gimple_build_debug_source_bind (var, value, stmt);
	    }
	  else if (gimple_debug_nonbind_marker_p (stmt))
	    new_stmt = as_a <gdebug *> (gimple_copy (stmt));
	  else
	    gcc_unreachable ();
	  gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
	  id->debug_stmts.safe_push (new_stmt);
	  gsi_prev (&ssi);
	}
    }
}

/* Make a copy of the sub-loops of SRC_PARENT and place them
   as children of DEST_PARENT.  */

static void
copy_loops (copy_body_data *id,
	    struct loop *dest_parent, struct loop *src_parent)
{
  struct loop *src_loop = src_parent->inner;
  while (src_loop)
    {
      if (!id->blocks_to_copy
	  || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
	{
	  struct loop *dest_loop = alloc_loop ();

	  /* Assign the new loop its header and latch and associate
	     those with the new loop.  */
	  dest_loop->header = (basic_block)src_loop->header->aux;
	  dest_loop->header->loop_father = dest_loop;
	  if (src_loop->latch != NULL)
	    {
	      dest_loop->latch = (basic_block)src_loop->latch->aux;
	      dest_loop->latch->loop_father = dest_loop;
	    }

	  /* Copy loop meta-data.  */
	  copy_loop_info (src_loop, dest_loop);

	  /* Finally place it into the loop array and the loop tree.  */
	  place_new_loop (cfun, dest_loop);
	  flow_loop_tree_node_add (dest_parent, dest_loop);

	  dest_loop->safelen = src_loop->safelen;
	  if (src_loop->unroll)
	    {
	      dest_loop->unroll = src_loop->unroll;
	      cfun->has_unroll = true;
	    }
	  dest_loop->dont_vectorize = src_loop->dont_vectorize;
	  if (src_loop->force_vectorize)
	    {
	      dest_loop->force_vectorize = true;
	      cfun->has_force_vectorize_loops = true;
	    }
	  if (src_loop->simduid)
	    {
	      dest_loop->simduid = remap_decl (src_loop->simduid, id);
	      cfun->has_simduid_loops = true;
	    }

	  /* Recurse.  */
	  copy_loops (id, dest_loop, src_loop);
	}
      src_loop = src_loop->next;
    }
}

/* Call redirect_call_stmt_to_callee on all calls in BB.  */

void
redirect_all_calls (copy_body_data * id, basic_block bb)
{
  gimple_stmt_iterator si;
  gimple *last = last_stmt (bb);
  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
    {
      gimple *stmt = gsi_stmt (si);
      if (is_gimple_call (stmt))
	{
	  struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
	  if (edge)
	    {
	      edge->redirect_call_stmt_to_callee ();
	      if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
		gimple_purge_dead_eh_edges (bb);
	    }
	}
    }
}

/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  Walks FN via CFG, returns new fndecl.  */

static tree
copy_cfg_body (copy_body_data * id,
	       basic_block entry_block_map, basic_block exit_block_map,
	       basic_block new_entry)
{
  tree callee_fndecl = id->src_fn;
  /* Original cfun for the callee, doesn't change.  */
  struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
  struct function *cfun_to_copy;
  basic_block bb;
  tree new_fndecl = NULL;
  bool need_debug_cleanup = false;
  int last;
  profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
  profile_count num = entry_block_map->count;

  profile_count::adjust_for_ipa_scaling (&num, &den);

  cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);

  /* Register specific tree functions.  */
  gimple_register_cfg_hooks ();

  /* If we are inlining just a region of the function, make sure to connect
     the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun).  Since the new entry
     can be part of a loop, we must compute the frequency and probability
     of ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
     probabilities of edges incoming from the nonduplicated region.  */
  if (new_entry)
    {
      edge e;
      edge_iterator ei;
      den = profile_count::zero ();

      FOR_EACH_EDGE (e, ei, new_entry->preds)
	if (!e->src->aux)
	  den += e->count ();
      ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
    }

  /* Must have a CFG at this point.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
	      (DECL_STRUCT_FUNCTION (callee_fndecl)));

  ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
  EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
  entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
  exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);

  /* Duplicate any exception-handling regions.  */
  if (cfun->eh)
    id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
				       remap_decl_1, id);

  /* Use aux pointers to map the original blocks to their copies.  */
  FOR_EACH_BB_FN (bb, cfun_to_copy)
    if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
      {
	basic_block new_bb = copy_bb (id, bb, num, den);
	bb->aux = new_bb;
	new_bb->aux = bb;
	new_bb->loop_father = entry_block_map->loop_father;
      }

  last = last_basic_block_for_fn (cfun);

  /* Now that we've duplicated the blocks, duplicate their edges.  */
  basic_block abnormal_goto_dest = NULL;
  if (id->call_stmt
      && stmt_can_make_abnormal_goto (id->call_stmt))
    {
      gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);

      bb = gimple_bb (id->call_stmt);
      gsi_next (&gsi);
      if (gsi_end_p (gsi))
	abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
    }
  FOR_ALL_BB_FN (bb, cfun_to_copy)
    if (!id->blocks_to_copy
	|| (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
      need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
					       abnormal_goto_dest);

  if (new_entry)
    {
      edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
			  EDGE_FALLTHRU);
      e->probability = profile_probability::always ();
    }

  /* Duplicate the loop tree, if available and wanted.  */
  if (loops_for_fn (src_cfun) != NULL
      && current_loops != NULL)
    {
      copy_loops (id, entry_block_map->loop_father,
		  get_loop (src_cfun, 0));
      /* Defer to cfgcleanup to update loop-father fields of basic-blocks.  */
      loops_state_set (LOOPS_NEED_FIXUP);
    }

  /* If the loop tree in the source function needed fixup, mark the
     destination loop tree for fixup, too.  */
  if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
    loops_state_set (LOOPS_NEED_FIXUP);

  if (gimple_in_ssa_p (cfun))
    FOR_ALL_BB_FN (bb, cfun_to_copy)
      if (!id->blocks_to_copy
	  || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
	copy_phis_for_bb (bb, id);

  FOR_ALL_BB_FN (bb, cfun_to_copy)
    if (bb->aux)
      {
	if (need_debug_cleanup
	    && bb->index != ENTRY_BLOCK
	    && bb->index != EXIT_BLOCK)
	  maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
	/* Update call edge destinations.  This cannot be done before loop
	   info is updated, because we may split basic blocks.  */
	if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
	    && bb->index != ENTRY_BLOCK
	    && bb->index != EXIT_BLOCK)
	  redirect_all_calls (id, (basic_block)bb->aux);
	((basic_block)bb->aux)->aux = NULL;
	bb->aux = NULL;
      }

  /* Zero out AUX fields of newly created blocks during EH edge
     insertion.  */
  for (; last < last_basic_block_for_fn (cfun); last++)
    {
      if (need_debug_cleanup)
	maybe_move_debug_stmts_to_successors (id,
					      BASIC_BLOCK_FOR_FN (cfun, last));
      BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
      /* Update call edge destinations.  This cannot be done before loop
	 info is updated, because we may split basic blocks.  */
      if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
	redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
    }
  entry_block_map->aux = NULL;
  exit_block_map->aux = NULL;

  if (id->eh_map)
    {
      delete id->eh_map;
      id->eh_map = NULL;
    }
  if (id->dependence_map)
    {
      delete id->dependence_map;
      id->dependence_map = NULL;
    }

  return new_fndecl;
}

/* Copy the debug STMT using ID.  We deal with these statements in a
   special way: if any variable in their VALUE expression wasn't
   remapped yet, we won't remap it, because that would get decl uids
   out of sync, causing codegen differences between -g and -g0.  If
   this arises, we drop the VALUE expression altogether.  */

static void
copy_debug_stmt (gdebug *stmt, copy_body_data *id)
{
  tree t, *n;
  struct walk_stmt_info wi;

  if (gimple_block (stmt))
    {
      n = id->decl_map->get (gimple_block (stmt));
      gimple_set_block (stmt, n ? *n : id->block);
    }

  if (gimple_debug_nonbind_marker_p (stmt))
    return;

  /* Remap all the operands in STMT.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;

  processing_debug_stmt = 1;

  if (gimple_debug_source_bind_p (stmt))
    t = gimple_debug_source_bind_get_var (stmt);
  else if (gimple_debug_bind_p (stmt))
    t = gimple_debug_bind_get_var (stmt);
  else
    gcc_unreachable ();

  if (TREE_CODE (t) == PARM_DECL && id->debug_map
      && (n = id->debug_map->get (t)))
    {
      gcc_assert (VAR_P (*n));
      t = *n;
    }
  else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
    /* T is a non-localized variable.  */;
  else
    walk_tree (&t, remap_gimple_op_r, &wi, NULL);

  if (gimple_debug_bind_p (stmt))
    {
      gimple_debug_bind_set_var (stmt, t);

      if (gimple_debug_bind_has_value_p (stmt))
	walk_tree (gimple_debug_bind_get_value_ptr (stmt),
		   remap_gimple_op_r, &wi, NULL);

      /* Punt if any decl couldn't be remapped.  */
      if (processing_debug_stmt < 0)
	gimple_debug_bind_reset_value (stmt);
    }
  else if (gimple_debug_source_bind_p (stmt))
    {
      gimple_debug_source_bind_set_var (stmt, t);
      /* When inlining and the source bind refers to one of the optimized
	 away parameters, change the source bind into a normal debug bind
	 referring to the corresponding DEBUG_EXPR_DECL that should have
	 been bound before the call stmt.  */
      t = gimple_debug_source_bind_get_value (stmt);
      if (t != NULL_TREE
	  && TREE_CODE (t) == PARM_DECL
	  && id->call_stmt)
	{
	  vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
	  unsigned int i;
	  if (debug_args != NULL)
	    {
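	      /* DEBUG_ARGS is a flat vector of pairs: even slots hold
		 the DECL_ORIGINs of the eliminated PARM_DECLs, odd slots
		 the DEBUG_EXPR_DECLs standing in for them.  */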
	      for (i = 0; i < vec_safe_length (*debug_args); i += 2)
		if ((**debug_args)[i] == DECL_ORIGIN (t)
		    && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
		  {
		    t = (**debug_args)[i + 1];
		    stmt->subcode = GIMPLE_DEBUG_BIND;
		    gimple_debug_bind_set_value (stmt, t);
		    break;
		  }
	    }
	}
      if (gimple_debug_source_bind_p (stmt))
	walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
		   remap_gimple_op_r, &wi, NULL);
    }

  processing_debug_stmt = 0;

  update_stmt (stmt);
}

/* Process deferred debug stmts.  In order to give values better odds
   of being successfully remapped, we delay the processing of debug
   stmts until all other stmts that might require remapping are
   processed.  */

static void
copy_debug_stmts (copy_body_data *id)
{
  size_t i;
  gdebug *stmt;

  if (!id->debug_stmts.exists ())
    return;

  FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
    copy_debug_stmt (stmt, id);

  id->debug_stmts.release ();
}

/* Make a copy of the body of SRC_FN so that it can be inserted inline in
   another function.  */

static tree
copy_tree_body (copy_body_data *id)
{
  tree fndecl = id->src_fn;
  tree body = DECL_SAVED_TREE (fndecl);

  walk_tree (&body, copy_tree_body_r, id, NULL);

  return body;
}

/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  */

static tree
copy_body (copy_body_data *id,
	   basic_block entry_block_map, basic_block exit_block_map,
	   basic_block new_entry)
{
  tree fndecl = id->src_fn;
  tree body;

  /* If this body has a CFG, walk CFG and copy.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
  body = copy_cfg_body (id, entry_block_map, exit_block_map,
			new_entry);
  copy_debug_stmts (id);

  return body;
}

/* Return true if VALUE is an ADDR_EXPR of an automatic variable
   defined in function FN, or of a data member thereof.  */
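
/* E.g. when recursively inlining FN into itself, an argument such as
   &local, where LOCAL is an auto variable of FN, names the caller's
   instance of LOCAL; if it were propagated into the inlined body, decl
   remapping would rebind it to the inlined copy of LOCAL, changing its
   meaning.  Hence such values are not propagated.  */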

static bool
self_inlining_addr_expr (tree value, tree fn)
{
  tree var;

  if (TREE_CODE (value) != ADDR_EXPR)
    return false;

  var = get_base_address (TREE_OPERAND (value, 0));

  return var && auto_var_in_fn_p (var, fn);
}

/* Append to BB a debug annotation that binds VAR to VALUE, inheriting
   lexical block and line number information from BASE_STMT, if given,
   or from the last stmt of the block otherwise.  */

static gimple *
insert_init_debug_bind (copy_body_data *id,
			basic_block bb, tree var, tree value,
			gimple *base_stmt)
{
  gimple *note;
  gimple_stmt_iterator gsi;
  tree tracked_var;

  if (!gimple_in_ssa_p (id->src_cfun))
    return NULL;

  if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
    return NULL;

  tracked_var = target_for_debug_bind (var);
  if (!tracked_var)
    return NULL;

  if (bb)
    {
      gsi = gsi_last_bb (bb);
      if (!base_stmt && !gsi_end_p (gsi))
	base_stmt = gsi_stmt (gsi);
    }

  note = gimple_build_debug_bind (tracked_var, unshare_expr (value),
				  base_stmt);

  if (bb)
    {
      if (!gsi_end_p (gsi))
	gsi_insert_after (&gsi, note, GSI_SAME_STMT);
      else
	gsi_insert_before (&gsi, note, GSI_SAME_STMT);
    }

  return note;
}

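/* Insert INIT_STMT, the initialization of a parameter copy, at the end
   of BB, regimplifying its operands as needed, and emit a matching
   debug bind for the initialized variable.  */
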
static void
insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
{
  /* If the initialized variable is zero-sized, it's possible that the
     assignment statement resulted in no gimple statements, in which
     case INIT_STMT is NULL.  */
  if (init_stmt)
    {
      gimple_stmt_iterator si = gsi_last_bb (bb);

      /* We can end up with init statements that store to a non-register
	 from a rhs with a conversion.  Handle that here by forcing the
	 rhs into a temporary.  gimple_regimplify_operands is not
	 prepared to do this for us.  */
      if (!is_gimple_debug (init_stmt)
	  && !is_gimple_reg (gimple_assign_lhs (init_stmt))
	  && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
	  && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
	{
	  tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
			     gimple_expr_type (init_stmt),
			     gimple_assign_rhs1 (init_stmt));
	  rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
					  GSI_NEW_STMT);
	  gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
	  gimple_assign_set_rhs1 (init_stmt, rhs);
	}
      gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
      gimple_regimplify_operands (init_stmt, &si);

      if (!is_gimple_debug (init_stmt))
	{
	  tree def = gimple_assign_lhs (init_stmt);
	  insert_init_debug_bind (id, bb, def, def, init_stmt);
	}
    }
}

/* Initialize parameter P with VALUE.  If needed, produce an init
   statement at the end of BB.  When BB is NULL, we return the init
   statement to be output later.  */
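/* E.g. (an illustrative sketch) when inlining void f (int p) at the
   call f (42), a local VAR_DECL is created as a copy of P, P is mapped
   to it (or, when safe, directly to 42), and in SSA form the default
   definition of P is remapped to the actual argument, emitting an init
   statement into BB when one is required.  */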
static gimple *
setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
		     basic_block bb, tree *vars)
{
  gimple *init_stmt = NULL;
  tree var;
  tree rhs = value;
  tree def = (gimple_in_ssa_p (cfun)
	      ? ssa_default_def (id->src_cfun, p) : NULL);

  if (value
      && value != error_mark_node
      && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
    {
      /* If we can match up types by promotion/demotion do so.  */
      if (fold_convertible_p (TREE_TYPE (p), value))
	rhs = fold_convert (TREE_TYPE (p), value);
      else
	{
	  /* ??? For valid programs we should not end up here.
	     Still if we end up with truly mismatched types here, fall back
	     to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
	     GIMPLE to the following passes.  */
	  if (!is_gimple_reg_type (TREE_TYPE (value))
	      || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
	    rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
	  else
	    rhs = build_zero_cst (TREE_TYPE (p));
	}
    }

  /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
     here since the type of this decl must be visible to the calling
     function.  */
  var = copy_decl_to_var (p, id);

  /* Declare this new variable.  */
  DECL_CHAIN (var) = *vars;
  *vars = var;

  /* Make gimplifier happy about this variable.  */
  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;

  /* If the parameter is never assigned to and has no SSA_NAMEs created,
     we would not need to create a new variable here at all, if it
     weren't for debug info.  Still, we can just use the argument
     value.  */
  if (TREE_READONLY (p)
      && !TREE_ADDRESSABLE (p)
      && value && !TREE_SIDE_EFFECTS (value)
      && !def)
    {
      /* We may produce non-gimple trees by adding NOPs or introduce
	 invalid sharing when the operand is not really constant.
	 It is not a big deal to prohibit constant propagation here as
	 we will constant propagate in the DOM1 pass anyway.  */
      if (is_gimple_min_invariant (value)
	  && useless_type_conversion_p (TREE_TYPE (p),
					TREE_TYPE (value))
	  /* We have to be very careful about ADDR_EXPR.  Make sure
	     the base variable isn't a local variable of the inlined
	     function, e.g., when doing recursive inlining, direct or
	     mutually-recursive or whatever, which is why we don't
	     just test whether fn == current_function_decl.  */
	  && ! self_inlining_addr_expr (value, fn))
	{
	  insert_decl_map (id, p, value);
	  insert_debug_decl_map (id, p, var);
	  return insert_init_debug_bind (id, bb, var, value, NULL);
	}
    }

  /* Register the VAR_DECL as the equivalent for the PARM_DECL;
     that way, when the PARM_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, p, var);

  /* Even if P was TREE_READONLY, the new VAR should not be.
     In the original code, we would have constructed a
     temporary, and then the function body would have never
     changed the value of P.  However, now, we will be
     constructing VAR directly.  The constructor body may
     change its value multiple times as it is being
     constructed.  Therefore, it must not be TREE_READONLY;
     the back-end assumes that a TREE_READONLY variable is
     assigned to only once.  */
  if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
    TREE_READONLY (var) = 0;

  /* If there is no setup required and we are in SSA, take the easy route
     replacing all SSA names representing the function parameter by the
     SSA name passed to the function.

     We need to construct a map for the variable anyway as it might be
     used in different SSA names when the parameter is set in the
     function.

     Do the replacement at -O0 for const arguments replaced by a constant.
     This is important for builtin_constant_p and other constructs
     requiring a constant argument to be visible in the inlined function
     body.  */
  if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
      && (optimize
	  || (TREE_READONLY (p)
	      && is_gimple_min_invariant (rhs)))
      && (TREE_CODE (rhs) == SSA_NAME
	  || is_gimple_min_invariant (rhs))
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
    {
      insert_decl_map (id, def, rhs);
      return insert_init_debug_bind (id, bb, var, rhs, NULL);
    }

  /* If the value of the argument is never used, don't bother
     initializing it.  */
  if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
    {
      gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
      return insert_init_debug_bind (id, bb, var, rhs, NULL);
    }

  /* Initialize this VAR_DECL from the equivalent argument.  Convert
     the argument to the proper type in case it was promoted.  */
  if (value)
    {
      if (rhs == error_mark_node)
	{
	  insert_decl_map (id, p, var);
	  return insert_init_debug_bind (id, bb, var, rhs, NULL);
	}

      STRIP_USELESS_TYPE_CONVERSION (rhs);

      /* If we are in SSA form properly remap the default definition
	 or assign to a dummy SSA name if the parameter is unused and
	 we are not optimizing.  */
      if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
	{
	  if (def)
	    {
	      def = remap_ssa_name (def, id);
	      init_stmt = gimple_build_assign (def, rhs);
	      SSA_NAME_IS_DEFAULT_DEF (def) = 0;
	      set_ssa_default_def (cfun, var, NULL);
	    }
	  else if (!optimize)
	    {
	      def = make_ssa_name (var);
	      init_stmt = gimple_build_assign (def, rhs);
	    }
	}
      else
	init_stmt = gimple_build_assign (var, rhs);

      if (bb && init_stmt)
	insert_init_stmt (id, bb, init_stmt);
    }
  return init_stmt;
}

/* Generate code to initialize the parameters of the function at the
   top of the stack in ID from the GIMPLE_CALL STMT.  */

static void
initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
			       tree fn, basic_block bb)
{
  tree parms;
  size_t i;
  tree p;
  tree vars = NULL_TREE;
  tree static_chain = gimple_call_chain (stmt);

  /* Figure out what the parameters are.  */
  parms = DECL_ARGUMENTS (fn);

  /* Loop through the parameter declarations, replacing each with an
     equivalent VAR_DECL, appropriately initialized.  */
  for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
    {
      tree val;
      val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
      setup_one_parameter (id, p, val, fn, bb, &vars);
    }
  /* After remapping parameters remap their types.  This has to be done
     in a second loop over all parameters to appropriately remap
     variable sized arrays when the size is specified in a
     parameter following the array.  */
  for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
    {
      tree *varp = id->decl_map->get (p);
      if (varp && VAR_P (*varp))
	{
	  tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
		      ? ssa_default_def (id->src_cfun, p) : NULL);
	  tree var = *varp;
	  TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
	  /* Also remap the default definition if it was remapped
	     to the default definition of the parameter replacement
	     by the parameter setup.  */
	  if (def)
	    {
	      tree *defp = id->decl_map->get (def);
	      if (defp
		  && TREE_CODE (*defp) == SSA_NAME
		  && SSA_NAME_VAR (*defp) == var)
		TREE_TYPE (*defp) = TREE_TYPE (var);
	    }
	}
    }

  /* Initialize the static chain.  */
  p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
  gcc_assert (fn != current_function_decl);
  if (p)
    {
      /* No static chain?  Seems like a bug in tree-nested.c.  */
      gcc_assert (static_chain);

      setup_one_parameter (id, p, static_chain, fn, bb, &vars);
    }

  declare_inline_vars (id->block, vars);
}


/* Declare a return variable to replace the RESULT_DECL for the
   function we are calling.  An appropriate DECL_STMT is returned.
   The USE_STMT is filled to contain a use of the declaration to
   indicate the return value of the function.

   RETURN_SLOT, if non-null, is the place where to store the result.  It
   is set only for CALL_EXPR_RETURN_SLOT_OPT.  MODIFY_DEST, if non-null,
   was the LHS of the MODIFY_EXPR to which this call is the RHS.

   RETURN_BOUNDS holds a destination for returned bounds.

   The return value is a (possibly null) value that holds the result
   as seen by the caller.  */
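
/* E.g. (an illustrative sketch) for a call t = foo () inlined with
   MODIFY_DEST = t, the callee's RESULT_DECL can often be mapped
   directly to t, avoiding a temporary; for s = bar () using the return
   slot optimization, RETURN_SLOT is s and the RESULT_DECL is mapped to
   s (or to *&s when the result is returned by reference).  */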

static tree
declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
			 tree return_bounds, basic_block entry_bb)
{
  tree callee = id->src_fn;
  tree result = DECL_RESULT (callee);
  tree callee_type = TREE_TYPE (result);
  tree caller_type;
  tree var, use;

  /* Handle type-mismatches in the function declaration return type
     vs. the call expression.  */
  if (modify_dest)
    caller_type = TREE_TYPE (modify_dest);
  else
    caller_type = TREE_TYPE (TREE_TYPE (callee));

  /* We don't need to do anything for functions that don't return
     anything.  */
  if (VOID_TYPE_P (callee_type))
    return NULL_TREE;

  /* If there was a return slot, then the return value is the
     dereferenced address of that object.  */
  if (return_slot)
    {
      /* The front end shouldn't have used both return_slot and
	 a modify expression.  */
      gcc_assert (!modify_dest);
      if (DECL_BY_REFERENCE (result))
	{
	  tree return_slot_addr = build_fold_addr_expr (return_slot);
	  STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);

	  /* We are going to construct *&return_slot and we can't do that
	     for variables believed to be not addressable.

	     FIXME: This check possibly can match, because values returned
	     via return slot optimization are not believed to have address
	     taken by alias analysis.  */
	  gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
	  var = return_slot_addr;
	}
      else
	{
	  var = return_slot;
	  gcc_assert (TREE_CODE (var) != SSA_NAME);
	  if (TREE_ADDRESSABLE (result))
	    mark_addressable (var);
	}
      if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
	  && !DECL_GIMPLE_REG_P (result)
	  && DECL_P (var))
	DECL_GIMPLE_REG_P (var) = 0;
      use = NULL;
      goto done;
    }

  /* All types requiring non-trivial constructors should have been handled.  */
  gcc_assert (!TREE_ADDRESSABLE (callee_type));

  /* Attempt to avoid creating a new temporary variable.  */
  if (modify_dest
      && TREE_CODE (modify_dest) != SSA_NAME)
    {
      bool use_it = false;

      /* We can't use MODIFY_DEST if there's type promotion involved.  */
      if (!useless_type_conversion_p (callee_type, caller_type))
	use_it = false;

      /* ??? If we're assigning to a variable sized type, then we must
	 reuse the destination variable, because we've no good way to
	 create variable sized temporaries at this point.  */
      else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
	use_it = true;

      /* If the callee cannot possibly modify MODIFY_DEST, then we can
	 reuse it as the result of the call directly.  Don't do this if
	 it would promote MODIFY_DEST to addressable.  */
      else if (TREE_ADDRESSABLE (result))
	use_it = false;
      else
	{
	  tree base_m = get_base_address (modify_dest);

	  /* If the base isn't a decl, then it's a pointer, and we don't
	     know where that's going to go.  */
	  if (!DECL_P (base_m))
	    use_it = false;
	  else if (is_global_var (base_m))
	    use_it = false;
	  else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
		    || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
		   && !DECL_GIMPLE_REG_P (result)
		   && DECL_GIMPLE_REG_P (base_m))
	    use_it = false;
	  else if (!TREE_ADDRESSABLE (base_m))
	    use_it = true;
	}

      if (use_it)
	{
	  var = modify_dest;
	  use = NULL;
	  goto done;
	}
    }

  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);

  var = copy_result_decl_to_var (result, id);
  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;

  /* Do not have the rest of GCC warn about this variable as it should
     not be visible to the user.  */
  TREE_NO_WARNING (var) = 1;

  declare_inline_vars (id->block, var);

  /* Build the use expr.  If the return type of the function was
     promoted, convert it back to the expected type.  */
  use = var;
  if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
    {
      /* If we can match up types by promotion/demotion do so.  */
      if (fold_convertible_p (caller_type, var))
	use = fold_convert (caller_type, var);
      else
	{
	  /* ??? For valid programs we should not end up here.
	     Still if we end up with truly mismatched types here, fall back
	     to using a MEM_REF to not leak invalid GIMPLE to the following
	     passes.  */
	  /* Prevent var from being written into SSA form.  */
	  if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
	      || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
	    DECL_GIMPLE_REG_P (var) = false;
	  else if (is_gimple_reg_type (TREE_TYPE (var)))
	    TREE_ADDRESSABLE (var) = true;
	  use = fold_build2 (MEM_REF, caller_type,
			     build_fold_addr_expr (var),
			     build_int_cst (ptr_type_node, 0));
	}
    }

  STRIP_USELESS_TYPE_CONVERSION (use);

  if (DECL_BY_REFERENCE (result))
    {
      TREE_ADDRESSABLE (var) = 1;
      var = build_fold_addr_expr (var);
    }

 done:
  /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
     way, when the RESULT_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.

     When returning by reference, ensure that RESULT_DECL remaps to
     gimple_val.  */
  if (DECL_BY_REFERENCE (result)
      && !is_gimple_val (var))
    {
      tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
      insert_decl_map (id, result, temp);
3481 /* When RESULT_DECL is in SSA form, we need to remap and initialize
         its default_def SSA_NAME. */
      if (gimple_in_ssa_p (id->src_cfun)
          && is_gimple_reg (result))
        {
          temp = make_ssa_name (temp);
          insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
        }
      insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
    }
  else
    insert_decl_map (id, result, var);

  /* Remember this so we can ignore it in remap_decls. */
  id->retvar = var;

  /* If returned bounds are used, then make a variable for them. */
  if (return_bounds)
    {
      tree bndtemp = create_tmp_var (pointer_bounds_type_node, "retbnd");
      DECL_SEEN_IN_BIND_EXPR_P (bndtemp) = 1;
      TREE_NO_WARNING (bndtemp) = 1;
      declare_inline_vars (id->block, bndtemp);

      id->retbnd = bndtemp;
      insert_init_stmt (id, entry_bb,
                        gimple_build_assign (bndtemp,
                                             chkp_get_zero_bounds_var ()));
    }

  return use;
}
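
/* As an illustration (hypothetical code, not from the sources): inlining

     int f (void) { return 42; }

   at a call site

     x = f ();

   maps f's RESULT_DECL to the variable created above, so the inlined
   body ends up storing 42 into that variable, which is then assigned to
   x (or directly into x when the destination can safely be reused). */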

/* Determine if the function can be copied. If so return NULL. If
   not return a string describing the reason for failure. */

const char *
copy_forbidden (struct function *fun)
{
  const char *reason = fun->cannot_be_copied_reason;

  /* Only examine the function once. */
  if (fun->cannot_be_copied_set)
    return reason;

  /* We cannot copy a function that receives a non-local goto
     because we cannot remap the destination label used in the
     function that is performing the non-local goto. */
  /* ??? Actually, this should be possible, if we work at it.
     No doubt there's just a handful of places that simply
     assume it doesn't happen and don't substitute properly. */
  if (fun->has_nonlocal_label)
    {
      reason = G_("function %q+F can never be copied "
                  "because it receives a non-local goto");
      goto fail;
    }
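
  /* An illustrative (hypothetical) example of the construct above:

       void f (void)
       {
         __label__ out;
         void g (void) { goto out; }
         h (g);
        out:;
       }

     f receives a non-local goto whenever h invokes g, and the label OUT
     could not be remapped when copying f's body. */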

  if (fun->has_forced_label_in_static)
    {
      reason = G_("function %q+F can never be copied because it saves "
                  "address of local label in a static variable");
      goto fail;
    }

 fail:
  fun->cannot_be_copied_reason = reason;
  fun->cannot_be_copied_set = true;
  return reason;
}


static const char *inline_forbidden_reason;

/* A callback for walk_gimple_seq to handle statements. Returns non-null
   iff a function cannot be inlined. Also sets the reason why. */

static tree
inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
                         struct walk_stmt_info *wip)
{
  tree fn = (tree) wip->info;
  tree t;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* Refuse to inline an alloca call unless the user explicitly forced
         it, as inlining may change the program's memory overhead
         drastically when the function using alloca is called in a loop.
         In the GCC benchmark present in SPEC2000, inlining into
         schedule_block caused it to require 2GB of RAM instead of 256MB.
         Don't refuse for alloca calls emitted for VLA objects, as those
         can't cause unbounded growth (they're always wrapped inside
         stack_save/stack_restore regions). */
      if (gimple_maybe_alloca_call_p (stmt)
          && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
          && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
        {
          inline_forbidden_reason
            = G_("function %q+F can never be inlined because it uses "
                 "alloca (override using the always_inline attribute)");
          *handled_ops_p = true;
          return fn;
        }
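
      /* Illustrative example (hypothetical code): after inlining

           void g (int n) { char *p = __builtin_alloca (n); use (p); }

         into a caller's loop, each iteration's allocation lives until
         the caller returns, so stack usage grows with the trip count
         instead of staying bounded by a single frame. */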

      t = gimple_call_fndecl (stmt);
      if (t == NULL_TREE)
        break;

      /* We cannot inline functions that call setjmp. */
      if (setjmp_call_p (t))
        {
          inline_forbidden_reason
            = G_("function %q+F can never be inlined because it uses setjmp");
          *handled_ops_p = true;
          return t;
        }
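
      /* Hypothetical illustration: a function such as

           int g (void) { if (setjmp (buf)) return 1; f (); return 0; }

         may "return twice" through BUF. setjmp records the environment
         of the frame that calls it; after inlining that would be the
         caller's frame, changing which function a later longjmp resumes
         in. */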

      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
        switch (DECL_FUNCTION_CODE (t))
          {
          /* We cannot inline functions that take a variable number of
             arguments. */
          case BUILT_IN_VA_START:
          case BUILT_IN_NEXT_ARG:
          case BUILT_IN_VA_END:
            inline_forbidden_reason
              = G_("function %q+F can never be inlined because it "
                   "uses variable argument lists");
            *handled_ops_p = true;
            return t;

          case BUILT_IN_LONGJMP:
            /* We can't inline functions that call __builtin_longjmp at
               all. The non-local goto machinery really requires the
               destination be in a different function. If we allow the
               function calling __builtin_longjmp to be inlined into the
               function calling __builtin_setjmp, Things will Go Awry. */
            inline_forbidden_reason
              = G_("function %q+F can never be inlined because "
                   "it uses setjmp-longjmp exception handling");
            *handled_ops_p = true;
            return t;

          case BUILT_IN_NONLOCAL_GOTO:
            /* Similarly. */
            inline_forbidden_reason
              = G_("function %q+F can never be inlined because "
                   "it uses non-local goto");
            *handled_ops_p = true;
            return t;

          case BUILT_IN_RETURN:
          case BUILT_IN_APPLY_ARGS:
            /* If a __builtin_apply_args caller were inlined, it would be
               saving arguments of the function it has been inlined into.
               Similarly, __builtin_return would return from the function
               into which its caller has been inlined. */
            inline_forbidden_reason
              = G_("function %q+F can never be inlined because "
                   "it uses __builtin_return or __builtin_apply_args");
            *handled_ops_p = true;
            return t;

          default:
            break;
          }
      break;

    case GIMPLE_GOTO:
      t = gimple_goto_dest (stmt);

      /* We will not inline a function which uses computed goto. The
         addresses of its local labels, which may be tucked into
         global storage, are of course not constant across
         instantiations, which causes unexpected behavior. */
      if (TREE_CODE (t) != LABEL_DECL)
        {
          inline_forbidden_reason
            = G_("function %q+F can never be inlined "
                 "because it contains a computed goto");
          *handled_ops_p = true;
          return t;
        }
      break;
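
      /* Illustrative example (hypothetical code) of a computed goto:

           static void *tab[] = { &&a, &&b };
           goto *tab[i];

         the addresses stored in TAB name labels of this specific body;
         in an inlined copy the labels are different decls, so the stored
         addresses would jump into the wrong instantiation. */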

    default:
      break;
    }

  *handled_ops_p = false;
  return NULL_TREE;
}

/* Return true if FNDECL is a function that cannot be inlined into
   another one. */

static bool
inline_forbidden_p (tree fndecl)
{
  struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
  struct walk_stmt_info wi;
  basic_block bb;
  bool forbidden_p = false;

  /* First check for shared reasons not to copy the code. */
  inline_forbidden_reason = copy_forbidden (fun);
  if (inline_forbidden_reason != NULL)
    return true;

  /* Next, walk the statements of the function looking for
     constructs we can't handle, or that are non-optimal for inlining. */
  hash_set<tree> visited_nodes;
  memset (&wi, 0, sizeof (wi));
  wi.info = (void *) fndecl;
  wi.pset = &visited_nodes;

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple *ret;
      gimple_seq seq = bb_seq (bb);
      ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
      forbidden_p = (ret != NULL);
      if (forbidden_p)
        break;
    }

  return forbidden_p;
}

/* Return false if the function FNDECL cannot be inlined on account of its
   attributes, true otherwise. */
static bool
function_attribute_inlinable_p (const_tree fndecl)
{
  if (targetm.attribute_table)
    {
      const_tree a;

      for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
        {
          const_tree name = TREE_PURPOSE (a);
          int i;

          for (i = 0; targetm.attribute_table[i].name != NULL; i++)
            if (is_attribute_p (targetm.attribute_table[i].name, name))
              return targetm.function_attribute_inlinable_p (fndecl);
        }
    }

  return true;
}

/* Returns nonzero if FN is a function that does not have any
   fundamental inline blocking properties. */

bool
tree_inlinable_function_p (tree fn)
{
  bool inlinable = true;
  bool do_warning;
  tree always_inline;

  /* If we've already decided this function shouldn't be inlined,
     there's no need to check again. */
  if (DECL_UNINLINABLE (fn))
    return false;

  /* We only warn for functions declared `inline' by the user. */
  do_warning = (warn_inline
                && DECL_DECLARED_INLINE_P (fn)
                && !DECL_NO_INLINE_WARNING_P (fn)
                && !DECL_IN_SYSTEM_HEADER (fn));

  always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));

  if (flag_no_inline
      && always_inline == NULL)
    {
      if (do_warning)
        warning (OPT_Winline, "function %q+F can never be inlined because it "
                 "is suppressed using -fno-inline", fn);
      inlinable = false;
    }

  else if (!function_attribute_inlinable_p (fn))
    {
      if (do_warning)
        warning (OPT_Winline, "function %q+F can never be inlined because it "
                 "uses attributes conflicting with inlining", fn);
      inlinable = false;
    }

  else if (inline_forbidden_p (fn))
    {
      /* See if we should warn about uninlinable functions. Previously,
         some of these warnings would be issued while trying to expand
         the function inline, but that would cause multiple warnings
         about functions that would, for example, call alloca. But since
         this is a property of the function, just one warning is enough.
         As a bonus we can now give more details about the reason why a
         function is not inlinable. */
      if (always_inline)
        error (inline_forbidden_reason, fn);
      else if (do_warning)
        warning (OPT_Winline, inline_forbidden_reason, fn);

      inlinable = false;
    }

  /* Squirrel away the result so that we don't have to check again. */
  DECL_UNINLINABLE (fn) = !inlinable;

  return inlinable;
}
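
/* For example (illustrative): with -Winline, a function like

     inline int f (int n) { char *p = __builtin_alloca (n); return g (p); }

   is diagnosed here once, the first time the inliner asks about it,
   rather than at every call site that fails to inline it. */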

/* Estimate the cost of a memory move of type TYPE. Use machine-dependent
   word size, take a possible memcpy call into account, and return a cost
   based on whether we optimize for size or speed according to SPEED_P. */

int
estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
{
  HOST_WIDE_INT size;

  gcc_assert (!VOID_TYPE_P (type));

  if (TREE_CODE (type) == VECTOR_TYPE)
    {
      scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
      machine_mode simd
        = targetm.vectorize.preferred_simd_mode (inner);
      int simd_mode_size = GET_MODE_SIZE (simd);
      return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
              / simd_mode_size);
    }

  size = int_size_in_bytes (type);

  if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
    /* Cost of a memcpy call, 3 arguments and the call. */
    return 4;
  else
    return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
}

/* Returns the cost of operation CODE, according to WEIGHTS. */

static int
estimate_operator_cost (enum tree_code code, eni_weights *weights,
                        tree op1 ATTRIBUTE_UNUSED, tree op2)
{
  switch (code)
    {
    /* These are "free" conversions, or their presumed cost
       is folded into other operations. */
    case RANGE_EXPR:
    CASE_CONVERT:
    case COMPLEX_EXPR:
    case PAREN_EXPR:
    case VIEW_CONVERT_EXPR:
      return 0;

    /* Assign cost of 1 to usual operations.
       ??? We may consider mapping RTL costs to this. */
    case COND_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
    case POINTER_DIFF_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MULT_HIGHPART_EXPR:
    case FMA_EXPR:

    case ADDR_SPACE_CONVERT_EXPR:
    case FIXED_CONVERT_EXPR:
    case FIX_TRUNC_EXPR:

    case NEGATE_EXPR:
    case FLOAT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case ABS_EXPR:

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case BIT_NOT_EXPR:

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_NOT_EXPR:

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:

    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:

    case CONJ_EXPR:

    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:

    case REALIGN_LOAD_EXPR:

    case WIDEN_SUM_EXPR:
    case WIDEN_MULT_EXPR:
    case DOT_PROD_EXPR:
    case SAD_EXPR:
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
    case WIDEN_LSHIFT_EXPR:

    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:

      return 1;
    /* A few special cases of expensive operations. This is useful
       to avoid inlining functions that contain too many of these. */
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
      if (TREE_CODE (op2) != INTEGER_CST)
        return weights->div_mod_cost;
      return 1;

    /* Bit-field insertion needs several shift and mask operations. */
    case BIT_INSERT_EXPR:
      return 3;

    default:
      /* We expect a copy assignment with no operator. */
      gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
      return 0;
    }
}
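
/* For example (illustrative): `a / b' with a non-constant divisor is
   charged weights->div_mod_cost, while `a / 8', whose divisor is an
   INTEGER_CST and is typically expanded to cheaper shift or multiply
   sequences, costs only 1. */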


/* Estimate number of instructions that will be created by expanding
   the statements in the statement sequence STMTS.
   WEIGHTS contains weights attributed to various constructs. */

int
estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
{
  int cost;
  gimple_stmt_iterator gsi;

  cost = 0;
  for (gsi = gsi_start (stmts); !