1/* Scalar Replacement of Aggregates (SRA) converts some structure
2 references into scalar references, exposing them to the scalar
3 optimizers.
4 Copyright (C) 2008-2017 Free Software Foundation, Inc.
5 Contributed by Martin Jambor <mjambor@suse.cz>
6
7This file is part of GCC.
8
9GCC is free software; you can redistribute it and/or modify it under
10the terms of the GNU General Public License as published by the Free
11Software Foundation; either version 3, or (at your option) any later
12version.
13
14GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15WARRANTY; without even the implied warranty of MERCHANTABILITY or
16FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17for more details.
18
19You should have received a copy of the GNU General Public License
20along with GCC; see the file COPYING3. If not see
21<http://www.gnu.org/licenses/>. */
22
/* This file implements Scalar Replacement of Aggregates (SRA).  SRA is run
24 twice, once in the early stages of compilation (early SRA) and once in the
25 late stages (late SRA). The aim of both is to turn references to scalar
26 parts of aggregates into uses of independent scalar variables.
27
28 The two passes are nearly identical, the only difference is that early SRA
29 does not scalarize unions which are used as the result in a GIMPLE_RETURN
30 statement because together with inlining this can lead to weird type
31 conversions.
32
33 Both passes operate in four stages:
34
35 1. The declarations that have properties which make them candidates for
36 scalarization are identified in function find_var_candidates(). The
37 candidates are stored in candidate_bitmap.
38
39 2. The function body is scanned. In the process, declarations which are
     used in a manner that prevents their scalarization are removed from the
41 candidate bitmap. More importantly, for every access into an aggregate,
42 an access structure (struct access) is created by create_access() and
43 stored in a vector associated with the aggregate. Among other
44 information, the aggregate declaration, the offset and size of the access
45 and its type are stored in the structure.
46
47 On a related note, assign_link structures are created for every assign
48 statement between candidate aggregates and attached to the related
49 accesses.
50
51 3. The vectors of accesses are analyzed. They are first sorted according to
52 their offset and size and then scanned for partially overlapping accesses
53 (i.e. those which overlap but one is not entirely within another). Such
54 an access disqualifies the whole aggregate from being scalarized.
55
56 If there is no such inhibiting overlap, a representative access structure
57 is chosen for every unique combination of offset and size. Afterwards,
58 the pass builds a set of trees from these structures, in which children
59 of an access are within their parent (in terms of offset and size).
60
61 Then accesses are propagated whenever possible (i.e. in cases when it
62 does not create a partially overlapping access) across assign_links from
63 the right hand side to the left hand side.
64
65 Then the set of trees for each declaration is traversed again and those
66 accesses which should be replaced by a scalar are identified.
67
68 4. The function is traversed again, and for every reference into an
69 aggregate that has some component which is about to be scalarized,
70 statements are amended and new statements are created as necessary.
71 Finally, if a parameter got scalarized, the scalar replacements are
72 initialized with values from respective parameter aggregates. */
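
/* As an illustration (a hypothetical example, not taken from any testcase),
   consider a function such as

     struct pair { int a; int b; };

     int
     f (struct pair p)
     {
       struct pair q;
       q = p;
       return q.a + q.b;
     }

   Both P and Q are found to be candidates in stage 1; the accesses q.a, q.b
   and the aggregate assignment q = p are recorded in stage 2; the access
   trees and assign links are analyzed in stage 3; and in stage 4 the
   aggregates are replaced by independent scalars named along the lines of
   q$a and q$b (see make_fancy_name below), so that the body effectively
   becomes

     q$a = p$a;
     q$b = p$b;
     return q$a + q$b;

   with the replacements of P initialized from the incoming parameter.  */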
73
74#include "config.h"
75#include "system.h"
76#include "coretypes.h"
77#include "backend.h"
78#include "target.h"
79#include "rtl.h"
80#include "tree.h"
81#include "gimple.h"
82#include "predict.h"
83#include "alloc-pool.h"
84#include "tree-pass.h"
85#include "ssa.h"
86#include "cgraph.h"
87#include "gimple-pretty-print.h"
88#include "alias.h"
89#include "fold-const.h"
90#include "tree-eh.h"
91#include "stor-layout.h"
92#include "gimplify.h"
93#include "gimple-iterator.h"
94#include "gimplify-me.h"
95#include "gimple-walk.h"
96#include "tree-cfg.h"
97#include "tree-dfa.h"
98#include "tree-ssa.h"
99#include "symbol-summary.h"
100#include "ipa-param-manipulation.h"
101#include "ipa-prop.h"
102#include "params.h"
103#include "dbgcnt.h"
104#include "tree-inline.h"
105#include "ipa-fnsummary.h"
106#include "ipa-utils.h"
107#include "builtins.h"
108
109/* Enumeration of all aggregate reductions we can do. */
110enum sra_mode { SRA_MODE_EARLY_IPA, /* early call regularization */
111 SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
112 SRA_MODE_INTRA }; /* late intraprocedural SRA */
113
114/* Global variable describing which aggregate reduction we are performing at
115 the moment. */
116static enum sra_mode sra_mode;
117
118struct assign_link;
119
120/* ACCESS represents each access to an aggregate variable (as a whole or a
121 part). It can also represent a group of accesses that refer to exactly the
122 same fragment of an aggregate (i.e. those that have exactly the same offset
123 and size). Such representatives for a single aggregate, once determined,
124 are linked in a linked list and have the group fields set.
125
126 Moreover, when doing intraprocedural SRA, a tree is built from those
127 representatives (by the means of first_child and next_sibling pointers), in
128 which all items in a subtree are "within" the root, i.e. their offset is
129 greater or equal to offset of the root and offset+size is smaller or equal
130 to offset+size of the root. Children of an access are sorted by offset.
131
   Note that accesses to parts of vector and complex number types are always
   represented by an access to the whole complex number or a vector.  It is a
   duty of the modifying functions to replace them appropriately.  */
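
/* For instance (an illustrative sketch only, assuming a 32-bit int), for

     struct S { struct { int x; int y; } in; int z; } s;

   that is accessed both as a whole and through its scalar members, the group
   representatives could form the following tree (offsets and sizes in bits):

     s         offset 0,  size 96
       s.in.x  offset 0,  size 32
       s.in.y  offset 32, size 32
       s.z     offset 64, size 32

   where the three children hang off the root via first_child and are chained
   to each other through next_sibling.  */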
135
136struct access
137{
  /* Values returned by `get_ref_base_and_extent' for each component reference.
     If EXPR isn't a component reference, just set `BASE = EXPR', `OFFSET = 0',
     `SIZE = TREE_SIZE (TREE_TYPE (expr))'.  */
141 HOST_WIDE_INT offset;
142 HOST_WIDE_INT size;
143 tree base;
144
145 /* Expression. It is context dependent so do not use it to create new
146 expressions to access the original aggregate. See PR 42154 for a
147 testcase. */
148 tree expr;
149 /* Type. */
150 tree type;
151
152 /* The statement this access belongs to. */
153 gimple *stmt;
154
155 /* Next group representative for this aggregate. */
156 struct access *next_grp;
157
158 /* Pointer to the group representative. Pointer to itself if the struct is
159 the representative. */
160 struct access *group_representative;
161
  /* After the access tree has been constructed, this points to the parent of
     the current access, if there is one.  NULL for roots.  */
164 struct access *parent;
165
166 /* If this access has any children (in terms of the definition above), this
167 points to the first one. */
168 struct access *first_child;
169
170 /* In intraprocedural SRA, pointer to the next sibling in the access tree as
171 described above. In IPA-SRA this is a pointer to the next access
172 belonging to the same group (having the same representative). */
173 struct access *next_sibling;
174
175 /* Pointers to the first and last element in the linked list of assign
176 links. */
177 struct assign_link *first_link, *last_link;
178
179 /* Pointer to the next access in the work queue. */
180 struct access *next_queued;
181
182 /* Replacement variable for this access "region." Never to be accessed
183 directly, always only by the means of get_access_replacement() and only
184 when grp_to_be_replaced flag is set. */
185 tree replacement_decl;
186
187 /* Is this access an access to a non-addressable field? */
188 unsigned non_addressable : 1;
189
190 /* Is this access made in reverse storage order? */
191 unsigned reverse : 1;
192
  /* Is this particular access a write access?  */
194 unsigned write : 1;
195
196 /* Is this access currently in the work queue? */
197 unsigned grp_queued : 1;
198
199 /* Does this group contain a write access? This flag is propagated down the
200 access tree. */
201 unsigned grp_write : 1;
202
203 /* Does this group contain a read access? This flag is propagated down the
204 access tree. */
205 unsigned grp_read : 1;
206
207 /* Does this group contain a read access that comes from an assignment
208 statement? This flag is propagated down the access tree. */
209 unsigned grp_assignment_read : 1;
210
211 /* Does this group contain a write access that comes from an assignment
212 statement? This flag is propagated down the access tree. */
213 unsigned grp_assignment_write : 1;
214
215 /* Does this group contain a read access through a scalar type? This flag is
216 not propagated in the access tree in any direction. */
217 unsigned grp_scalar_read : 1;
218
219 /* Does this group contain a write access through a scalar type? This flag
220 is not propagated in the access tree in any direction. */
221 unsigned grp_scalar_write : 1;
222
223 /* Is this access an artificial one created to scalarize some record
224 entirely? */
225 unsigned grp_total_scalarization : 1;
226
227 /* Other passes of the analysis use this bit to make function
228 analyze_access_subtree create scalar replacements for this group if
229 possible. */
230 unsigned grp_hint : 1;
231
232 /* Is the subtree rooted in this access fully covered by scalar
233 replacements? */
234 unsigned grp_covered : 1;
235
236 /* If set to true, this access and all below it in an access tree must not be
237 scalarized. */
238 unsigned grp_unscalarizable_region : 1;
239
  /* Whether data have been written to parts of the aggregate covered by this
     access which are not to be scalarized.  This flag is propagated up in the
     access tree.  */
243 unsigned grp_unscalarized_data : 1;
244
245 /* Does this access and/or group contain a write access through a
246 BIT_FIELD_REF? */
247 unsigned grp_partial_lhs : 1;
248
249 /* Set when a scalar replacement should be created for this variable. */
250 unsigned grp_to_be_replaced : 1;
251
252 /* Set when we want a replacement for the sole purpose of having it in
253 generated debug statements. */
254 unsigned grp_to_be_debug_replaced : 1;
255
256 /* Should TREE_NO_WARNING of a replacement be set? */
257 unsigned grp_no_warning : 1;
258
259 /* Is it possible that the group refers to data which might be (directly or
260 otherwise) modified? */
261 unsigned grp_maybe_modified : 1;
262
263 /* Set when this is a representative of a pointer to scalar (i.e. by
264 reference) parameter which we consider for turning into a plain scalar
265 (i.e. a by value parameter). */
266 unsigned grp_scalar_ptr : 1;
267
268 /* Set when we discover that this pointer is not safe to dereference in the
269 caller. */
270 unsigned grp_not_necessarilly_dereferenced : 1;
271};
272
273typedef struct access *access_p;
274
275
276/* Alloc pool for allocating access structures. */
277static object_allocator<struct access> access_pool ("SRA accesses");
278
279/* A structure linking lhs and rhs accesses from an aggregate assignment. They
280 are used to propagate subaccesses from rhs to lhs as long as they don't
281 conflict with what is already there. */
282struct assign_link
283{
284 struct access *lacc, *racc;
285 struct assign_link *next;
286};
287
288/* Alloc pool for allocating assign link structures. */
289static object_allocator<assign_link> assign_link_pool ("SRA links");
290
291/* Base (tree) -> Vector (vec<access_p> *) map. */
292static hash_map<tree, auto_vec<access_p> > *base_access_vec;
293
294/* Candidate hash table helpers. */
295
296struct uid_decl_hasher : nofree_ptr_hash <tree_node>
297{
298 static inline hashval_t hash (const tree_node *);
299 static inline bool equal (const tree_node *, const tree_node *);
300};
301
302/* Hash a tree in a uid_decl_map. */
303
304inline hashval_t
305uid_decl_hasher::hash (const tree_node *item)
306{
307 return item->decl_minimal.uid;
308}
309
310/* Return true if the DECL_UID in both trees are equal. */
311
312inline bool
313uid_decl_hasher::equal (const tree_node *a, const tree_node *b)
314{
315 return (a->decl_minimal.uid == b->decl_minimal.uid);
316}
317
318/* Set of candidates. */
319static bitmap candidate_bitmap;
320static hash_table<uid_decl_hasher> *candidates;
321
322/* For a candidate UID return the candidates decl. */
323
324static inline tree
325candidate (unsigned uid)
326{
327 tree_node t;
328 t.decl_minimal.uid = uid;
329 return candidates->find_with_hash (&t, static_cast <hashval_t> (uid));
330}
331
/* Bitmap of candidates which we should try to entirely scalarize away and
   those which cannot be (because they are, and need to be, used as a whole).  */
334static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;
335
336/* Bitmap of candidates in the constant pool, which cannot be scalarized
337 because this would produce non-constant expressions (e.g. Ada). */
338static bitmap disqualified_constants;
339
340/* Obstack for creation of fancy names. */
341static struct obstack name_obstack;
342
/* Head of a linked list of accesses that need to have their subaccesses
   propagated to their assignment counterparts.  */
345static struct access *work_queue_head;
346
347/* Number of parameters of the analyzed function when doing early ipa SRA. */
348static int func_param_count;
349
350/* scan_function sets the following to true if it encounters a call to
351 __builtin_apply_args. */
352static bool encountered_apply_args;
353
354/* Set by scan_function when it finds a recursive call. */
355static bool encountered_recursive_call;
356
/* Set by scan_function when it finds a recursive call with fewer actual
   arguments than formal parameters.  */
359static bool encountered_unchangable_recursive_call;
360
361/* This is a table in which for each basic block and parameter there is a
362 distance (offset + size) in that parameter which is dereferenced and
363 accessed in that BB. */
364static HOST_WIDE_INT *bb_dereferences;
/* Bitmap of BBs that can cause the function to "stop" progressing by
   returning, throwing externally, looping infinitely or calling a function
   which might abort, etc.  */
368static bitmap final_bbs;
369
370/* Representative of no accesses at all. */
371static struct access no_accesses_representant;
372
373/* Predicate to test the special value. */
374
375static inline bool
376no_accesses_p (struct access *access)
377{
378 return access == &no_accesses_representant;
379}
380
/* Various statistics gathered during the SRA passes.  */
384
385static struct
386{
387 /* Number of processed aggregates is readily available in
388 analyze_all_variable_accesses and so is not stored here. */
389
390 /* Number of created scalar replacements. */
391 int replacements;
392
393 /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
394 expression. */
395 int exprs;
396
397 /* Number of statements created by generate_subtree_copies. */
398 int subtree_copies;
399
400 /* Number of statements created by load_assign_lhs_subreplacements. */
401 int subreplacements;
402
403 /* Number of times sra_modify_assign has deleted a statement. */
404 int deleted;
405
  /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
     RHS separately due to type conversions or nonexistent matching
     references.  */
409 int separate_lhs_rhs_handling;
410
411 /* Number of parameters that were removed because they were unused. */
412 int deleted_unused_parameters;
413
414 /* Number of scalars passed as parameters by reference that have been
415 converted to be passed by value. */
416 int scalar_by_ref_to_by_val;
417
418 /* Number of aggregate parameters that were replaced by one or more of their
419 components. */
420 int aggregate_params_reduced;
421
  /* Number of components created when splitting aggregate parameters.  */
423 int param_reductions_created;
424} sra_stats;
425
/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */

static void
427dump_access (FILE *f, struct access *access, bool grp)
428{
429 fprintf (f, "access { ");
430 fprintf (f, "base = (%d)'", DECL_UID (access->base));
431 print_generic_expr (f, access->base);
432 fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
433 fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
434 fprintf (f, ", expr = ");
435 print_generic_expr (f, access->expr);
436 fprintf (f, ", type = ");
437 print_generic_expr (f, access->type);
438 fprintf (f, ", non_addressable = %d, reverse = %d",
439 access->non_addressable, access->reverse);
440 if (grp)
441 fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
442 "grp_assignment_write = %d, grp_scalar_read = %d, "
443 "grp_scalar_write = %d, grp_total_scalarization = %d, "
444 "grp_hint = %d, grp_covered = %d, "
445 "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
446 "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
447 "grp_to_be_debug_replaced = %d, grp_maybe_modified = %d, "
448 "grp_not_necessarilly_dereferenced = %d\n",
449 access->grp_read, access->grp_write, access->grp_assignment_read,
450 access->grp_assignment_write, access->grp_scalar_read,
451 access->grp_scalar_write, access->grp_total_scalarization,
452 access->grp_hint, access->grp_covered,
453 access->grp_unscalarizable_region, access->grp_unscalarized_data,
454 access->grp_partial_lhs, access->grp_to_be_replaced,
455 access->grp_to_be_debug_replaced, access->grp_maybe_modified,
456 access->grp_not_necessarilly_dereferenced);
457 else
458 fprintf (f, ", write = %d, grp_total_scalarization = %d, "
459 "grp_partial_lhs = %d\n",
460 access->write, access->grp_total_scalarization,
461 access->grp_partial_lhs);
462}
463
464/* Dump a subtree rooted in ACCESS to file F, indent by LEVEL. */
465
466static void
467dump_access_tree_1 (FILE *f, struct access *access, int level)
468{
469 do
470 {
471 int i;
472
473 for (i = 0; i < level; i++)
474 fputs ("* ", f);
475
476 dump_access (f, access, true);
477
478 if (access->first_child)
479 dump_access_tree_1 (f, access->first_child, level + 1);
480
481 access = access->next_sibling;
482 }
483 while (access);
484}
485
486/* Dump all access trees for a variable, given the pointer to the first root in
487 ACCESS. */
488
489static void
490dump_access_tree (FILE *f, struct access *access)
491{
492 for (; access; access = access->next_grp)
493 dump_access_tree_1 (f, access, 0);
494}
495
496/* Return true iff ACC is non-NULL and has subaccesses. */
497
498static inline bool
499access_has_children_p (struct access *acc)
500{
501 return acc && acc->first_child;
502}
503
504/* Return true iff ACC is (partly) covered by at least one replacement. */
505
506static bool
507access_has_replacements_p (struct access *acc)
508{
509 struct access *child;
510 if (acc->grp_to_be_replaced)
511 return true;
512 for (child = acc->first_child; child; child = child->next_sibling)
513 if (access_has_replacements_p (child))
514 return true;
515 return false;
516}
517
518/* Return a vector of pointers to accesses for the variable given in BASE or
519 NULL if there is none. */
520
521static vec<access_p> *
522get_base_access_vector (tree base)
523{
524 return base_access_vec->get (base);
525}
526
527/* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
528 in ACCESS. Return NULL if it cannot be found. */
529
530static struct access *
531find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
532 HOST_WIDE_INT size)
533{
534 while (access && (access->offset != offset || access->size != size))
535 {
536 struct access *child = access->first_child;
537
538 while (child && (child->offset + child->size <= offset))
539 child = child->next_sibling;
540 access = child;
541 }
542
543 return access;
544}
545
546/* Return the first group representative for DECL or NULL if none exists. */
547
548static struct access *
549get_first_repr_for_decl (tree base)
550{
551 vec<access_p> *access_vec;
552
553 access_vec = get_base_access_vector (base);
554 if (!access_vec)
555 return NULL;
556
557 return (*access_vec)[0];
558}
559
560/* Find an access representative for the variable BASE and given OFFSET and
561 SIZE. Requires that access trees have already been built. Return NULL if
562 it cannot be found. */
563
564static struct access *
565get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
566 HOST_WIDE_INT size)
567{
568 struct access *access;
569
570 access = get_first_repr_for_decl (base);
571 while (access && (access->offset + access->size <= offset))
572 access = access->next_grp;
573 if (!access)
574 return NULL;
575
576 return find_access_in_subtree (access, offset, size);
577}
578
579/* Add LINK to the linked list of assign links of RACC. */
580static void
581add_link_to_rhs (struct access *racc, struct assign_link *link)
582{
583 gcc_assert (link->racc == racc);
584
585 if (!racc->first_link)
586 {
587 gcc_assert (!racc->last_link);
588 racc->first_link = link;
589 }
590 else
591 racc->last_link->next = link;
592
593 racc->last_link = link;
594 link->next = NULL;
595}
596
597/* Move all link structures in their linked list in OLD_RACC to the linked list
598 in NEW_RACC. */
599static void
600relink_to_new_repr (struct access *new_racc, struct access *old_racc)
601{
602 if (!old_racc->first_link)
603 {
604 gcc_assert (!old_racc->last_link);
605 return;
606 }
607
608 if (new_racc->first_link)
609 {
610 gcc_assert (!new_racc->last_link->next);
611 gcc_assert (!old_racc->last_link || !old_racc->last_link->next);
612
613 new_racc->last_link->next = old_racc->first_link;
614 new_racc->last_link = old_racc->last_link;
615 }
616 else
617 {
618 gcc_assert (!new_racc->last_link);
619
620 new_racc->first_link = old_racc->first_link;
621 new_racc->last_link = old_racc->last_link;
622 }
623 old_racc->first_link = old_racc->last_link = NULL;
624}
625
626/* Add ACCESS to the work queue (which is actually a stack). */
627
628static void
629add_access_to_work_queue (struct access *access)
630{
631 if (access->first_link && !access->grp_queued)
632 {
633 gcc_assert (!access->next_queued);
634 access->next_queued = work_queue_head;
635 access->grp_queued = 1;
636 work_queue_head = access;
637 }
638}
639
640/* Pop an access from the work queue, and return it, assuming there is one. */
641
642static struct access *
643pop_access_from_work_queue (void)
644{
645 struct access *access = work_queue_head;
646
647 work_queue_head = access->next_queued;
648 access->next_queued = NULL;
649 access->grp_queued = 0;
650 return access;
651}
652
653
654/* Allocate necessary structures. */
655
656static void
657sra_initialize (void)
658{
659 candidate_bitmap = BITMAP_ALLOC (NULL);
660 candidates = new hash_table<uid_decl_hasher>
661 (vec_safe_length (cfun->local_decls) / 2);
662 should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
663 cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
664 disqualified_constants = BITMAP_ALLOC (NULL);
665 gcc_obstack_init (&name_obstack);
666 base_access_vec = new hash_map<tree, auto_vec<access_p> >;
667 memset (&sra_stats, 0, sizeof (sra_stats));
668 encountered_apply_args = false;
669 encountered_recursive_call = false;
670 encountered_unchangable_recursive_call = false;
671}
672
673/* Deallocate all general structures. */
674
675static void
676sra_deinitialize (void)
677{
678 BITMAP_FREE (candidate_bitmap);
679 delete candidates;
680 candidates = NULL;
681 BITMAP_FREE (should_scalarize_away_bitmap);
682 BITMAP_FREE (cannot_scalarize_away_bitmap);
683 BITMAP_FREE (disqualified_constants);
684 access_pool.release ();
685 assign_link_pool.release ();
686 obstack_free (&name_obstack, NULL);
687
688 delete base_access_vec;
689}
690
691/* Return true if DECL is a VAR_DECL in the constant pool, false otherwise. */
692
693static bool constant_decl_p (tree decl)
694{
695 return VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl);
696}
697
698/* Remove DECL from candidates for SRA and write REASON to the dump file if
699 there is one. */
700
701static void
702disqualify_candidate (tree decl, const char *reason)
703{
704 if (bitmap_clear_bit (candidate_bitmap, DECL_UID (decl)))
705 candidates->remove_elt_with_hash (decl, DECL_UID (decl));
706 if (constant_decl_p (decl))
707 bitmap_set_bit (disqualified_constants, DECL_UID (decl));
708
709 if (dump_file && (dump_flags & TDF_DETAILS))
710 {
711 fprintf (dump_file, "! Disqualifying ");
712 print_generic_expr (dump_file, decl);
713 fprintf (dump_file, " - %s\n", reason);
714 }
715}
716
717/* Return true iff the type contains a field or an element which does not allow
718 scalarization. */
719
720static bool
721type_internals_preclude_sra_p (tree type, const char **msg)
722{
723 tree fld;
724 tree et;
725
726 switch (TREE_CODE (type))
727 {
728 case RECORD_TYPE:
729 case UNION_TYPE:
730 case QUAL_UNION_TYPE:
731 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
732 if (TREE_CODE (fld) == FIELD_DECL)
733 {
734 tree ft = TREE_TYPE (fld);
735
736 if (TREE_THIS_VOLATILE (fld))
737 {
738 *msg = "volatile structure field";
739 return true;
740 }
741 if (!DECL_FIELD_OFFSET (fld))
742 {
743 *msg = "no structure field offset";
744 return true;
745 }
746 if (!DECL_SIZE (fld))
747 {
748 *msg = "zero structure field size";
749 return true;
750 }
751 if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
752 {
753 *msg = "structure field offset not fixed";
754 return true;
755 }
756 if (!tree_fits_uhwi_p (DECL_SIZE (fld)))
757 {
758 *msg = "structure field size not fixed";
759 return true;
760 }
761 if (!tree_fits_shwi_p (bit_position (fld)))
762 {
763 *msg = "structure field size too big";
764 return true;
765 }
766 if (AGGREGATE_TYPE_P (ft)
767 && int_bit_position (fld) % BITS_PER_UNIT != 0)
768 {
769 *msg = "structure field is bit field";
770 return true;
771 }
772
773 if (AGGREGATE_TYPE_P (ft) && type_internals_preclude_sra_p (ft, msg))
774 return true;
775 }
776
777 return false;
778
779 case ARRAY_TYPE:
780 et = TREE_TYPE (type);
781
782 if (TYPE_VOLATILE (et))
783 {
784 *msg = "element type is volatile";
785 return true;
786 }
787
788 if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
789 return true;
790
791 return false;
792
793 default:
794 return false;
795 }
796}
797
798/* If T is an SSA_NAME, return NULL if it is not a default def or return its
799 base variable if it is. Return T if it is not an SSA_NAME. */
800
801static tree
802get_ssa_base_param (tree t)
803{
804 if (TREE_CODE (t) == SSA_NAME)
805 {
806 if (SSA_NAME_IS_DEFAULT_DEF (t))
807 return SSA_NAME_VAR (t);
808 else
809 return NULL_TREE;
810 }
811 return t;
812}
813
/* Mark a dereference of BASE of distance DIST in the basic block that STMT
   belongs to, unless the BB has already been marked as potentially
   final.  */
817
818static void
819mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple *stmt)
820{
821 basic_block bb = gimple_bb (stmt);
822 int idx, parm_index = 0;
823 tree parm;
824
825 if (bitmap_bit_p (final_bbs, bb->index))
826 return;
827
828 for (parm = DECL_ARGUMENTS (current_function_decl);
829 parm && parm != base;
830 parm = DECL_CHAIN (parm))
831 parm_index++;
832
833 gcc_assert (parm_index < func_param_count);
834
835 idx = bb->index * func_param_count + parm_index;
836 if (bb_dereferences[idx] < dist)
837 bb_dereferences[idx] = dist;
838}
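
/* For example (illustrative only), with func_param_count == 3, the largest
   dereferenced distance of the second parameter (parm_index 1) seen in basic
   block number 7 ends up in bb_dereferences[7 * 3 + 1].  */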
839
840/* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
841 the three fields. Also add it to the vector of accesses corresponding to
842 the base. Finally, return the new access. */
843
844static struct access *
845create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
846{
847 struct access *access = access_pool.allocate ();
848
849 memset (access, 0, sizeof (struct access));
850 access->base = base;
851 access->offset = offset;
852 access->size = size;
853
854 base_access_vec->get_or_insert (base).safe_push (access);
855
856 return access;
857}
858
859static bool maybe_add_sra_candidate (tree);
860
861/* Create and insert access for EXPR. Return created access, or NULL if it is
862 not possible. Also scan for uses of constant pool as we go along and add
863 to candidates. */
864
865static struct access *
866create_access (tree expr, gimple *stmt, bool write)
867{
868 struct access *access;
869 HOST_WIDE_INT offset, size, max_size;
870 tree base = expr;
871 bool reverse, ptr, unscalarizable_region = false;
872
873 base = get_ref_base_and_extent (expr, &offset, &size, &max_size, &reverse);
874
875 if (sra_mode == SRA_MODE_EARLY_IPA
876 && TREE_CODE (base) == MEM_REF)
877 {
878 base = get_ssa_base_param (TREE_OPERAND (base, 0));
879 if (!base)
880 return NULL;
881 ptr = true;
882 }
883 else
884 ptr = false;
885
886 /* For constant-pool entries, check we can substitute the constant value. */
887 if (constant_decl_p (base)
888 && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA))
889 {
890 gcc_assert (!bitmap_bit_p (disqualified_constants, DECL_UID (base)));
891 if (expr != base
892 && !is_gimple_reg_type (TREE_TYPE (expr))
893 && dump_file && (dump_flags & TDF_DETAILS))
894 {
895 /* This occurs in Ada with accesses to ARRAY_RANGE_REFs,
896 and elements of multidimensional arrays (which are
897 multi-element arrays in their own right). */
898 fprintf (dump_file, "Allowing non-reg-type load of part"
899 " of constant-pool entry: ");
900 print_generic_expr (dump_file, expr);
901 }
902 maybe_add_sra_candidate (base);
903 }
904
905 if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
906 return NULL;
907
908 if (sra_mode == SRA_MODE_EARLY_IPA)
909 {
910 if (size < 0 || size != max_size)
911 {
912 disqualify_candidate (base, "Encountered a variable sized access.");
913 return NULL;
914 }
915 if (TREE_CODE (expr) == COMPONENT_REF
916 && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
917 {
918 disqualify_candidate (base, "Encountered a bit-field access.");
919 return NULL;
920 }
921 gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);
922
923 if (ptr)
924 mark_parm_dereference (base, offset + size, stmt);
925 }
926 else
927 {
928 if (size != max_size)
929 {
930 size = max_size;
931 unscalarizable_region = true;
932 }
933 if (size < 0)
934 {
935 disqualify_candidate (base, "Encountered an unconstrained access.");
936 return NULL;
937 }
938 }
939
940 access = create_access_1 (base, offset, size);
941 access->expr = expr;
942 access->type = TREE_TYPE (expr);
943 access->write = write;
944 access->grp_unscalarizable_region = unscalarizable_region;
945 access->stmt = stmt;
946 access->reverse = reverse;
947
948 if (TREE_CODE (expr) == COMPONENT_REF
949 && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
950 access->non_addressable = 1;
951
952 return access;
953}
954
955
956/* Return true iff TYPE is scalarizable - i.e. a RECORD_TYPE or fixed-length
957 ARRAY_TYPE with fields that are either of gimple register types (excluding
958 bit-fields) or (recursively) scalarizable types. CONST_DECL must be true if
959 we are considering a decl from constant pool. If it is false, char arrays
960 will be refused. */
961
962static bool
963scalarizable_type_p (tree type, bool const_decl)
964{
965 gcc_assert (!is_gimple_reg_type (type));
966 if (type_contains_placeholder_p (type))
967 return false;
968
969 switch (TREE_CODE (type))
970 {
971 case RECORD_TYPE:
972 for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
973 if (TREE_CODE (fld) == FIELD_DECL)
974 {
975 tree ft = TREE_TYPE (fld);
976
977 if (DECL_BIT_FIELD (fld))
978 return false;
979
980 if (!is_gimple_reg_type (ft)
981 && !scalarizable_type_p (ft, const_decl))
982 return false;
983 }
984
985 return true;
986
987 case ARRAY_TYPE:
988 {
989 HOST_WIDE_INT min_elem_size;
990 if (const_decl)
991 min_elem_size = 0;
992 else
993 min_elem_size = BITS_PER_UNIT;
994
995 if (TYPE_DOMAIN (type) == NULL_TREE
996 || !tree_fits_shwi_p (TYPE_SIZE (type))
997 || !tree_fits_shwi_p (TYPE_SIZE (TREE_TYPE (type)))
998 || (tree_to_shwi (TYPE_SIZE (TREE_TYPE (type))) <= min_elem_size)
999 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
1000 return false;
1001 if (tree_to_shwi (TYPE_SIZE (type)) == 0
1002 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE)
1003 /* Zero-element array, should not prevent scalarization. */
1004 ;
1005 else if ((tree_to_shwi (TYPE_SIZE (type)) <= 0)
1006 || !tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
1007 /* Variable-length array, do not allow scalarization. */
1008 return false;
1009
1010 tree elem = TREE_TYPE (type);
1011 if (!is_gimple_reg_type (elem)
1012 && !scalarizable_type_p (elem, const_decl))
1013 return false;
1014 return true;
1015 }
1016 default:
1017 return false;
1018 }
1019}
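
/* A few illustrative examples (hypothetical types, assuming a 32-bit int and
   an 8-bit char): 'struct ok { int i; float f[4]; }' is scalarizable,
   'struct bf { int i : 3; }' is not because of the bit-field, and
   'char buf[16]' is accepted only when CONST_DECL is true because its element
   size does not exceed BITS_PER_UNIT.  */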
1020
1021static void scalarize_elem (tree, HOST_WIDE_INT, HOST_WIDE_INT, bool, tree, tree);
1022
1023/* Create total_scalarization accesses for all scalar fields of a member
1024 of type DECL_TYPE conforming to scalarizable_type_p. BASE
1025 must be the top-most VAR_DECL representing the variable; within that,
1026 OFFSET locates the member and REF must be the memory reference expression for
1027 the member. */
1028
1029static void
1030completely_scalarize (tree base, tree decl_type, HOST_WIDE_INT offset, tree ref)
1031{
1032 switch (TREE_CODE (decl_type))
1033 {
1034 case RECORD_TYPE:
1035 for (tree fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
1036 if (TREE_CODE (fld) == FIELD_DECL)
1037 {
1038 HOST_WIDE_INT pos = offset + int_bit_position (fld);
1039 tree ft = TREE_TYPE (fld);
1040 tree nref = build3 (COMPONENT_REF, ft, ref, fld, NULL_TREE);
1041
1042 scalarize_elem (base, pos, tree_to_uhwi (DECL_SIZE (fld)),
1043 TYPE_REVERSE_STORAGE_ORDER (decl_type),
1044 nref, ft);
1045 }
1046 break;
1047 case ARRAY_TYPE:
1048 {
1049 tree elemtype = TREE_TYPE (decl_type);
1050 tree elem_size = TYPE_SIZE (elemtype);
1051 gcc_assert (elem_size && tree_fits_shwi_p (elem_size));
1052 HOST_WIDE_INT el_size = tree_to_shwi (elem_size);
1053 gcc_assert (el_size > 0);
1054
1055 tree minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (decl_type));
1056 gcc_assert (TREE_CODE (minidx) == INTEGER_CST);
1057 tree maxidx = TYPE_MAX_VALUE (TYPE_DOMAIN (decl_type));
1058 /* Skip (some) zero-length arrays; others have MAXIDX == MINIDX - 1. */
1059 if (maxidx)
1060 {
1061 gcc_assert (TREE_CODE (maxidx) == INTEGER_CST);
1062 tree domain = TYPE_DOMAIN (decl_type);
1063 /* MINIDX and MAXIDX are inclusive, and must be interpreted in
1064 DOMAIN (e.g. signed int, whereas min/max may be size_int). */
1065 offset_int idx = wi::to_offset (minidx);
1066 offset_int max = wi::to_offset (maxidx);
1067 if (!TYPE_UNSIGNED (domain))
1068 {
1069 idx = wi::sext (idx, TYPE_PRECISION (domain));
1070 max = wi::sext (max, TYPE_PRECISION (domain));
1071 }
1072 for (int el_off = offset; idx <= max; ++idx)
1073 {
1074 tree nref = build4 (ARRAY_REF, elemtype,
1075 ref,
1076 wide_int_to_tree (domain, idx),
1077 NULL_TREE, NULL_TREE);
1078 scalarize_elem (base, el_off, el_size,
1079 TYPE_REVERSE_STORAGE_ORDER (decl_type),
1080 nref, elemtype);
1081 el_off += el_size;
1082 }
1083 }
1084 }
1085 break;
1086 default:
1087 gcc_unreachable ();
1088 }
1089}
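
/* As an example (illustrative only, assuming a 32-bit int and a 16-bit
   short), completely scalarizing

     struct { int i; short s[2]; } v;

   creates the total_scalarization accesses (base V, offset 0, size 32,
   expr v.i), (V, 32, 16, v.s[0]) and (V, 48, 16, v.s[1]), i.e. one access
   for every scalar leaf, with offsets and sizes in bits.  */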
1090
1091/* Create total_scalarization accesses for a member of type TYPE, which must
1092 satisfy either is_gimple_reg_type or scalarizable_type_p. BASE must be the
1093 top-most VAR_DECL representing the variable; within that, POS and SIZE locate
   the member, REVERSE gives its storage order, and REF must be the reference
1095 expression for it. */
1096
1097static void
1098scalarize_elem (tree base, HOST_WIDE_INT pos, HOST_WIDE_INT size, bool reverse,
1099 tree ref, tree type)
1100{
1101 if (is_gimple_reg_type (type))
1102 {
1103 struct access *access = create_access_1 (base, pos, size);
1104 access->expr = ref;
1105 access->type = type;
1106 access->grp_total_scalarization = 1;
1107 access->reverse = reverse;
1108 /* Accesses for intraprocedural SRA can have their stmt NULL. */
1109 }
1110 else
1111 completely_scalarize (base, type, pos, ref);
1112}
1113
1114/* Create a total_scalarization access for VAR as a whole. VAR must be of a
1115 RECORD_TYPE or ARRAY_TYPE conforming to scalarizable_type_p. */
1116
1117static void
1118create_total_scalarization_access (tree var)
1119{
1120 HOST_WIDE_INT size = tree_to_uhwi (DECL_SIZE (var));
1121 struct access *access;
1122
1123 access = create_access_1 (var, 0, size);
1124 access->expr = var;
1125 access->type = TREE_TYPE (var);
1126 access->grp_total_scalarization = 1;
1127}
1128
/* Return true if REF has a VIEW_CONVERT_EXPR somewhere in it.  */
1130
1131static inline bool
1132contains_view_convert_expr_p (const_tree ref)
1133{
1134 while (handled_component_p (ref))
1135 {
1136 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
1137 return true;
1138 ref = TREE_OPERAND (ref, 0);
1139 }
1140
1141 return false;
1142}
1143
1144/* Return true if REF contains a VIEW_CONVERT_EXPR or a MEM_REF that performs
1145 type conversion or a COMPONENT_REF with a bit-field field declaration. */
1146
1147static bool
1148contains_vce_or_bfcref_p (const_tree ref)
1149{
1150 while (handled_component_p (ref))
1151 {
1152 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
1153 || (TREE_CODE (ref) == COMPONENT_REF
1154 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
1155 return true;
1156 ref = TREE_OPERAND (ref, 0);
1157 }
1158
1159 if (TREE_CODE (ref) != MEM_REF
1160 || TREE_CODE (TREE_OPERAND (ref, 0)) != ADDR_EXPR)
1161 return false;
1162
1163 tree mem = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
1164 if (TYPE_MAIN_VARIANT (TREE_TYPE (ref))
1165 != TYPE_MAIN_VARIANT (TREE_TYPE (mem)))
1166 return true;
1167
1168 return false;
1169}
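
/* Examples of references the above predicate is meant to catch (hypothetical,
   for illustration only): VIEW_CONVERT_EXPR<int>(s.f), a COMPONENT_REF of a
   bit-field such as s.bits, or a MEM_REF based on &s whose type is not the
   type of S, i.e. an access to S through a differently-typed view.  */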
1170
1171/* Search the given tree for a declaration by skipping handled components and
1172 exclude it from the candidates. */
1173
1174static void
1175disqualify_base_of_expr (tree t, const char *reason)
1176{
1177 t = get_base_address (t);
1178 if (sra_mode == SRA_MODE_EARLY_IPA
1179 && TREE_CODE (t) == MEM_REF)
1180 t = get_ssa_base_param (TREE_OPERAND (t, 0));
1181
1182 if (t && DECL_P (t))
1183 disqualify_candidate (t, reason);
1184}
1185
1186/* Scan expression EXPR and create access structures for all accesses to
1187 candidates for scalarization. Return the created access or NULL if none is
1188 created. */
1189
1190static struct access *
1191build_access_from_expr_1 (tree expr, gimple *stmt, bool write)
1192{
1193 struct access *ret = NULL;
1194 bool partial_ref;
1195
1196 if (TREE_CODE (expr) == BIT_FIELD_REF
1197 || TREE_CODE (expr) == IMAGPART_EXPR
1198 || TREE_CODE (expr) == REALPART_EXPR)
1199 {
1200 expr = TREE_OPERAND (expr, 0);
1201 partial_ref = true;
1202 }
1203 else
1204 partial_ref = false;
1205
1206 if (storage_order_barrier_p (expr))
1207 {
1208 disqualify_base_of_expr (expr, "storage order barrier.");
1209 return NULL;
1210 }
1211
1212 /* We need to dive through V_C_Es in order to get the size of its parameter
1213 and not the result type. Ada produces such statements. We are also
1214 capable of handling the topmost V_C_E but not any of those buried in other
1215 handled components. */
1216 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
1217 expr = TREE_OPERAND (expr, 0);
1218
1219 if (contains_view_convert_expr_p (expr))
1220 {
1221 disqualify_base_of_expr (expr, "V_C_E under a different handled "
1222 "component.");
1223 return NULL;
1224 }
1225 if (TREE_THIS_VOLATILE (expr))
1226 {
1227 disqualify_base_of_expr (expr, "part of a volatile reference.");
1228 return NULL;
1229 }
1230
1231 switch (TREE_CODE (expr))
1232 {
1233 case MEM_REF:
1234 if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
1235 && sra_mode != SRA_MODE_EARLY_IPA)
1236 return NULL;
1237 /* fall through */
1238 case VAR_DECL:
1239 case PARM_DECL:
1240 case RESULT_DECL:
1241 case COMPONENT_REF:
1242 case ARRAY_REF:
1243 case ARRAY_RANGE_REF:
1244 ret = create_access (expr, stmt, write);
1245 break;
1246
1247 default:
1248 break;
1249 }
1250
1251 if (write && partial_ref && ret)
1252 ret->grp_partial_lhs = 1;
1253
1254 return ret;
1255}
1256
1257/* Scan expression EXPR and create access structures for all accesses to
1258 candidates for scalarization. Return true if any access has been inserted.
1259 STMT must be the statement from which the expression is taken, WRITE must be
1260 true if the expression is a store and false otherwise. */
1261
1262static bool
1263build_access_from_expr (tree expr, gimple *stmt, bool write)
1264{
1265 struct access *access;
1266
1267 access = build_access_from_expr_1 (expr, stmt, write);
1268 if (access)
1269 {
      /* This means the aggregate is accessed as a whole in a way other than an
	 assign statement and thus cannot be removed even if we had a scalar
	 replacement for everything.  */
1273 if (cannot_scalarize_away_bitmap)
1274 bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
1275 return true;
1276 }
1277 return false;
1278}
1279
1280/* Return the single non-EH successor edge of BB or NULL if there is none or
1281 more than one. */
1282
1283static edge
1284single_non_eh_succ (basic_block bb)
1285{
1286 edge e, res = NULL;
1287 edge_iterator ei;
1288
1289 FOR_EACH_EDGE (e, ei, bb->succs)
1290 if (!(e->flags & EDGE_EH))
1291 {
1292 if (res)
1293 return NULL;
1294 res = e;
1295 }
1296
1297 return res;
1298}
1299
1300/* Disqualify LHS and RHS for scalarization if STMT has to terminate its BB and
1301 there is no alternative spot where to put statements SRA might need to
1302 generate after it. The spot we are looking for is an edge leading to a
1303 single non-EH successor, if it exists and is indeed single. RHS may be
1304 NULL, in that case ignore it. */
1305
1306static bool
1307disqualify_if_bad_bb_terminating_stmt (gimple *stmt, tree lhs, tree rhs)
1308{
1309 if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1310 && stmt_ends_bb_p (stmt))
1311 {
1312 if (single_non_eh_succ (gimple_bb (stmt)))
1313 return false;
1314
1315 disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
1316 if (rhs)
1317 disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
1318 return true;
1319 }
1320 return false;
1321}
1322
1323/* Return true if the nature of BASE is such that it contains data even if
1324 there is no write to it in the function. */
1325
1326static bool
1327comes_initialized_p (tree base)
1328{
1329 return TREE_CODE (base) == PARM_DECL || constant_decl_p (base);
1330}
1331
1332/* Scan expressions occurring in STMT, create access structures for all accesses
1333 to candidates for scalarization and remove those candidates which occur in
1334 statements or expressions that prevent them from being split apart. Return
1335 true if any access has been inserted. */
1336
1337static bool
1338build_accesses_from_assign (gimple *stmt)
1339{
1340 tree lhs, rhs;
1341 struct access *lacc, *racc;
1342
1343 if (!gimple_assign_single_p (stmt)
1344 /* Scope clobbers don't influence scalarization. */
1345 || gimple_clobber_p (stmt))
1346 return false;
1347
1348 lhs = gimple_assign_lhs (stmt);
1349 rhs = gimple_assign_rhs1 (stmt);
1350
1351 if (disqualify_if_bad_bb_terminating_stmt (stmt, lhs, rhs))
1352 return false;
1353
1354 racc = build_access_from_expr_1 (rhs, stmt, false);
1355 lacc = build_access_from_expr_1 (lhs, stmt, true);
1356
1357 if (lacc)
1358 {
1359 lacc->grp_assignment_write = 1;
1360 if (storage_order_barrier_p (rhs))
1361 lacc->grp_unscalarizable_region = 1;
1362 }
1363
1364 if (racc)
1365 {
1366 racc->grp_assignment_read = 1;
1367 if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
1368 && !is_gimple_reg_type (racc->type))
1369 {
1370 if (contains_vce_or_bfcref_p (rhs))
1371 bitmap_set_bit (cannot_scalarize_away_bitmap,
1372 DECL_UID (racc->base));
1373 else
1374 bitmap_set_bit (should_scalarize_away_bitmap,
1375 DECL_UID (racc->base));
1376 }
1377 if (storage_order_barrier_p (lhs))
1378 racc->grp_unscalarizable_region = 1;
1379 }
1380
1381 if (lacc && racc
1382 && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1383 && !lacc->grp_unscalarizable_region
1384 && !racc->grp_unscalarizable_region
1385 && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
1386 && lacc->size == racc->size
1387 && useless_type_conversion_p (lacc->type, racc->type))
1388 {
1389 struct assign_link *link;
1390
1391 link = assign_link_pool.allocate ();
1392 memset (link, 0, sizeof (struct assign_link));
1393
1394 link->lacc = lacc;
1395 link->racc = racc;
1396 add_link_to_rhs (racc, link);
1397 add_access_to_work_queue (racc);
1398
1399 /* Let's delay marking the areas as written until propagation of accesses
1400 across link, unless the nature of rhs tells us that its data comes
1401 from elsewhere. */
1402 if (!comes_initialized_p (racc->base))
1403 lacc->write = false;
1404 }
1405
1406 return lacc || racc;
1407}
1408
1409/* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
   GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */
1411
1412static bool
1413asm_visit_addr (gimple *, tree op, tree, void *)
1414{
1415 op = get_base_address (op);
1416 if (op
1417 && DECL_P (op))
1418 disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");
1419
1420 return false;
1421}
1422
1423/* Return true iff callsite CALL has at least as many actual arguments as there
1424 are formal parameters of the function currently processed by IPA-SRA and
1425 that their types match. */
1426
1427static inline bool
1428callsite_arguments_match_p (gimple *call)
1429{
1430 if (gimple_call_num_args (call) < (unsigned) func_param_count)
1431 return false;
1432
1433 tree parm;
1434 int i;
1435 for (parm = DECL_ARGUMENTS (current_function_decl), i = 0;
1436 parm;
1437 parm = DECL_CHAIN (parm), i++)
1438 {
1439 tree arg = gimple_call_arg (call, i);
1440 if (!useless_type_conversion_p (TREE_TYPE (parm), TREE_TYPE (arg)))
1441 return false;
1442 }
1443 return true;
1444}
1445
1446/* Scan function and look for interesting expressions and create access
1447 structures for them. Return true iff any access is created. */
1448
1449static bool
1450scan_function (void)
1451{
1452 basic_block bb;
1453 bool ret = false;
1454
1455 FOR_EACH_BB_FN (bb, cfun)
1456 {
1457 gimple_stmt_iterator gsi;
1458 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1459 {
1460 gimple *stmt = gsi_stmt (gsi);
1461 tree t;
1462 unsigned i;
1463
1464 if (final_bbs && stmt_can_throw_external (stmt))
1465 bitmap_set_bit (final_bbs, bb->index);
1466 switch (gimple_code (stmt))
1467 {
1468 case GIMPLE_RETURN:
1469 t = gimple_return_retval (as_a <greturn *> (stmt));
1470 if (t != NULL_TREE)
1471 ret |= build_access_from_expr (t, stmt, false);
1472 if (final_bbs)
1473 bitmap_set_bit (final_bbs, bb->index);
1474 break;
1475
1476 case GIMPLE_ASSIGN:
1477 ret |= build_accesses_from_assign (stmt);
1478 break;
1479
1480 case GIMPLE_CALL:
1481 for (i = 0; i < gimple_call_num_args (stmt); i++)
1482 ret |= build_access_from_expr (gimple_call_arg (stmt, i),
1483 stmt, false);
1484
1485 if (sra_mode == SRA_MODE_EARLY_IPA)
1486 {
1487 tree dest = gimple_call_fndecl (stmt);
1488 int flags = gimple_call_flags (stmt);
1489
1490 if (dest)
1491 {
1492 if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
1493 && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
1494 encountered_apply_args = true;
1495 if (recursive_call_p (current_function_decl, dest))
1496 {
1497 encountered_recursive_call = true;
1498 if (!callsite_arguments_match_p (stmt))
1499 encountered_unchangable_recursive_call = true;
1500 }
1501 }
1502
1503 if (final_bbs
1504 && (flags & (ECF_CONST | ECF_PURE)) == 0)
1505 bitmap_set_bit (final_bbs, bb->index);
1506 }
1507
1508 t = gimple_call_lhs (stmt);
1509 if (t && !disqualify_if_bad_bb_terminating_stmt (stmt, t, NULL))
1510 ret |= build_access_from_expr (t, stmt, true);
1511 break;
1512
1513 case GIMPLE_ASM:
1514 {
1515 gasm *asm_stmt = as_a <gasm *> (stmt);
1516 walk_stmt_load_store_addr_ops (asm_stmt, NULL, NULL, NULL,
1517 asm_visit_addr);
1518 if (final_bbs)
1519 bitmap_set_bit (final_bbs, bb->index);
1520
1521 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
1522 {
1523 t = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
1524 ret |= build_access_from_expr (t, asm_stmt, false);
1525 }
1526 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
1527 {
1528 t = TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
1529 ret |= build_access_from_expr (t, asm_stmt, true);
1530 }
1531 }
1532 break;
1533
1534 default:
1535 break;
1536 }
1537 }
1538 }
1539
1540 return ret;
1541}
1542
1543/* Helper of QSORT function. There are pointers to accesses in the array. An
1544 access is considered smaller than another if it has smaller offset or if the
   offsets are the same but its size is bigger.  */
1546
1547static int
1548compare_access_positions (const void *a, const void *b)
1549{
1550 const access_p *fp1 = (const access_p *) a;
1551 const access_p *fp2 = (const access_p *) b;
1552 const access_p f1 = *fp1;
1553 const access_p f2 = *fp2;
1554
1555 if (f1->offset != f2->offset)
1556 return f1->offset < f2->offset ? -1 : 1;
1557
1558 if (f1->size == f2->size)
1559 {
1560 if (f1->type == f2->type)
1561 return 0;
1562 /* Put any non-aggregate type before any aggregate type. */
1563 else if (!is_gimple_reg_type (f1->type)
1564 && is_gimple_reg_type (f2->type))
1565 return 1;
1566 else if (is_gimple_reg_type (f1->type)
1567 && !is_gimple_reg_type (f2->type))
1568 return -1;
1569 /* Put any complex or vector type before any other scalar type. */
1570 else if (TREE_CODE (f1->type) != COMPLEX_TYPE
1571 && TREE_CODE (f1->type) != VECTOR_TYPE
1572 && (TREE_CODE (f2->type) == COMPLEX_TYPE
1573 || TREE_CODE (f2->type) == VECTOR_TYPE))
1574 return 1;
1575 else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
1576 || TREE_CODE (f1->type) == VECTOR_TYPE)
1577 && TREE_CODE (f2->type) != COMPLEX_TYPE
1578 && TREE_CODE (f2->type) != VECTOR_TYPE)
1579 return -1;
1580 /* Put any integral type before any non-integral type. When splicing, we
1581 make sure that those with insufficient precision and occupying the
1582 same space are not scalarized. */
1583 else if (INTEGRAL_TYPE_P (f1->type)
1584 && !INTEGRAL_TYPE_P (f2->type))
1585 return -1;
1586 else if (!INTEGRAL_TYPE_P (f1->type)
1587 && INTEGRAL_TYPE_P (f2->type))
1588 return 1;
1589 /* Put the integral type with the bigger precision first. */
1590 else if (INTEGRAL_TYPE_P (f1->type)
1591 && INTEGRAL_TYPE_P (f2->type)
1592 && (TYPE_PRECISION (f2->type) != TYPE_PRECISION (f1->type)))
1593 return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
1594 /* Stabilize the sort. */
1595 return TYPE_UID (f1->type) - TYPE_UID (f2->type);
1596 }
1597
1598 /* We want the bigger accesses first, thus the opposite operator in the next
1599 line: */
1600 return f1->size > f2->size ? -1 : 1;
1601}
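
/* To illustrate the resulting order (a made-up example): accesses with
   (offset, size) pairs (0, 64), (32, 32) and (0, 32) sort as

     (0, 64), (0, 32), (32, 32)

   i.e. by increasing offset and, for equal offsets, by decreasing size, so
   that every access is preceded by all accesses that enclose it.  */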
1602
1603
1604/* Append a name of the declaration to the name obstack. A helper function for
1605 make_fancy_name. */
1606
1607static void
1608make_fancy_decl_name (tree decl)
1609{
1610 char buffer[32];
1611
1612 tree name = DECL_NAME (decl);
1613 if (name)
1614 obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
1615 IDENTIFIER_LENGTH (name));
1616 else
1617 {
1618 sprintf (buffer, "D%u", DECL_UID (decl));
1619 obstack_grow (&name_obstack, buffer, strlen (buffer));
1620 }
1621}
1622
1623/* Helper for make_fancy_name. */
1624
1625static void
1626make_fancy_name_1 (tree expr)
1627{
1628 char buffer[32];
1629 tree index;
1630
1631 if (DECL_P (expr))
1632 {
1633 make_fancy_decl_name (expr);
1634 return;
1635 }
1636
1637 switch (TREE_CODE (expr))
1638 {
1639 case COMPONENT_REF:
1640 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1641 obstack_1grow (&name_obstack, '$');
1642 make_fancy_decl_name (TREE_OPERAND (expr, 1));
1643 break;
1644
1645 case ARRAY_REF:
1646 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1647 obstack_1grow (&name_obstack, '$');
1648 /* Arrays with only one element may not have a constant as their
1649 index. */
1650 index = TREE_OPERAND (expr, 1);
1651 if (TREE_CODE (index) != INTEGER_CST)
1652 break;
1653 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
1654 obstack_grow (&name_obstack, buffer, strlen (buffer));
1655 break;
1656
1657 case ADDR_EXPR:
1658 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1659 break;
1660
1661 case MEM_REF:
1662 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1663 if (!integer_zerop (TREE_OPERAND (expr, 1)))
1664 {
1665 obstack_1grow (&name_obstack, '$');
1666 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
1667 TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
1668 obstack_grow (&name_obstack, buffer, strlen (buffer));
1669 }
1670 break;
1671
1672 case BIT_FIELD_REF:
1673 case REALPART_EXPR:
1674 case IMAGPART_EXPR:
1675 gcc_unreachable (); /* we treat these as scalars. */
1676 break;
1677 default:
1678 break;
1679 }
1680}
1681
/* Create a human readable name for a replacement variable based on the access
   expression EXPR.  */
1683
1684static char *
1685make_fancy_name (tree expr)
1686{
1687 make_fancy_name_1 (expr);
1688 obstack_1grow (&name_obstack, '\0');
1689 return XOBFINISH (&name_obstack, char *);
1690}
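
/* For example (illustrative only), for an access expression such as
   rec.arr[3].fld the functions above build the name "rec$arr$3$fld", which
   is then used as the basis for the name of the scalar replacement.  */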
1691
1692/* Construct a MEM_REF that would reference a part of aggregate BASE of type
1693 EXP_TYPE at the given OFFSET and with storage order REVERSE. If BASE is
1694 something for which get_addr_base_and_unit_offset returns NULL, gsi must
1695 be non-NULL and is used to insert new statements either before or below
1696 the current one as specified by INSERT_AFTER. This function is not capable
1697 of handling bitfields. */
1698
1699tree
1700build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
1701 bool reverse, tree exp_type, gimple_stmt_iterator *gsi,
1702 bool insert_after)
1703{
1704 tree prev_base = base;
1705 tree off;
1706 tree mem_ref;
1707 HOST_WIDE_INT base_offset;
1708 unsigned HOST_WIDE_INT misalign;
1709 unsigned int align;
1710
1711 /* Preserve address-space information. */
1712 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1713 if (as != TYPE_ADDR_SPACE (exp_type))
1714 exp_type = build_qualified_type (exp_type,
1715 TYPE_QUALS (exp_type)
1716 | ENCODE_QUAL_ADDR_SPACE (as));
1717
1718 gcc_checking_assert (offset % BITS_PER_UNIT == 0);
1719 get_object_alignment_1 (base, &align, &misalign);
1720 base = get_addr_base_and_unit_offset (base, &base_offset);
1721
1722 /* get_addr_base_and_unit_offset returns NULL for references with a variable
1723 offset such as array[var_index]. */
1724 if (!base)
1725 {
1726 gassign *stmt;
1727 tree tmp, addr;
1728
1729 gcc_checking_assert (gsi);
1730 tmp = make_ssa_name (build_pointer_type (TREE_TYPE (prev_base)));
1731 addr = build_fold_addr_expr (unshare_expr (prev_base));
1732 STRIP_USELESS_TYPE_CONVERSION (addr);
1733 stmt = gimple_build_assign (tmp, addr);
1734 gimple_set_location (stmt, loc);
1735 if (insert_after)
1736 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
1737 else
1738 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1739
1740 off = build_int_cst (reference_alias_ptr_type (prev_base),
1741 offset / BITS_PER_UNIT);
1742 base = tmp;
1743 }
1744 else if (TREE_CODE (base) == MEM_REF)
1745 {
1746 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1747 base_offset + offset / BITS_PER_UNIT);
1748 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1749 base = unshare_expr (TREE_OPERAND (base, 0));
1750 }
1751 else
1752 {
1753 off = build_int_cst (reference_alias_ptr_type (prev_base),
1754 base_offset + offset / BITS_PER_UNIT);
1755 base = build_fold_addr_expr (unshare_expr (base));
1756 }
1757
1758 misalign = (misalign + offset) & (align - 1);
1759 if (misalign != 0)
1760 align = least_bit_hwi (misalign);
1761 if (align != TYPE_ALIGN (exp_type))
1762 exp_type = build_aligned_type (exp_type, align);
1763
1764 mem_ref = fold_build2_loc (loc, MEM_REF, exp_type, base, off);
1765 REF_REVERSE_STORAGE_ORDER (mem_ref) = reverse;
1766 if (TREE_THIS_VOLATILE (prev_base))
1767 TREE_THIS_VOLATILE (mem_ref) = 1;
1768 if (TREE_SIDE_EFFECTS (prev_base))
1769 TREE_SIDE_EFFECTS (mem_ref) = 1;
1770 return mem_ref;
1771}
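
/* For instance (a sketch, assuming 8-bit units), a request for bit offset 32
   within a plain VAR_DECL S with EXP_TYPE int yields a MEM_REF of type int
   whose first operand is the address of S and whose second operand encodes a
   4 byte offset; when BASE is itself a MEM_REF, the new constant offset is
   instead folded into the one already present.  */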
1772
1773/* Construct a memory reference to a part of an aggregate BASE at the given
1774 OFFSET and of the same type as MODEL. In case this is a reference to a
1775 bit-field, the function will replicate the last component_ref of model's
1776 expr to access it. GSI and INSERT_AFTER have the same meaning as in
1777 build_ref_for_offset. */
1778
1779static tree
1780build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1781 struct access *model, gimple_stmt_iterator *gsi,
1782 bool insert_after)
1783{
1784 if (TREE_CODE (model->expr) == COMPONENT_REF
1785 && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
1786 {
1787 /* This access represents a bit-field. */
1788 tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);
1789
1790 offset -= int_bit_position (fld);
1791 exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
1792 t = build_ref_for_offset (loc, base, offset, model->reverse, exp_type,
1793 gsi, insert_after);
1794 /* The flag will be set on the record type. */
1795 REF_REVERSE_STORAGE_ORDER (t) = 0;
1796 return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
1797 NULL_TREE);
1798 }
1799 else
1800 return
1801 build_ref_for_offset (loc, base, offset, model->reverse, model->type,
1802 gsi, insert_after);
1803}
1804
/* Attempt to build a memory reference that we could put into a gimple
   debug_bind statement. Similar to build_ref_for_model but punts if it has to
   create statements and returns NULL instead. This function also ignores
   alignment issues and so its results should never end up in non-debug
   statements. */
1810
1811static tree
1812build_debug_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1813 struct access *model)
1814{
1815 HOST_WIDE_INT base_offset;
1816 tree off;
1817
1818 if (TREE_CODE (model->expr) == COMPONENT_REF
1819 && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
1820 return NULL_TREE;
1821
1822 base = get_addr_base_and_unit_offset (base, &base_offset);
1823 if (!base)
1824 return NULL_TREE;
1825 if (TREE_CODE (base) == MEM_REF)
1826 {
1827 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1828 base_offset + offset / BITS_PER_UNIT);
1829 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1830 base = unshare_expr (TREE_OPERAND (base, 0));
1831 }
1832 else
1833 {
1834 off = build_int_cst (reference_alias_ptr_type (base),
1835 base_offset + offset / BITS_PER_UNIT);
1836 base = build_fold_addr_expr (unshare_expr (base));
1837 }
1838
1839 return fold_build2_loc (loc, MEM_REF, model->type, base, off);
1840}
1841
1842/* Construct a memory reference consisting of component_refs and array_refs to
1843 a part of an aggregate *RES (which is of type TYPE). The requested part
   should have type EXP_TYPE and be at the given OFFSET. This function might
   not succeed; it returns true when it does, and only then does *RES point to
   something meaningful. This function should be used only to build expressions
   that we might need to present to the user (e.g. in warnings). In all other
   situations, build_ref_for_model or build_ref_for_offset should be used
   instead. */
1849
1850static bool
1851build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
1852 tree exp_type)
1853{
1854 while (1)
1855 {
1856 tree fld;
1857 tree tr_size, index, minidx;
1858 HOST_WIDE_INT el_size;
1859
1860 if (offset == 0 && exp_type
1861 && types_compatible_p (exp_type, type))
1862 return true;
1863
1864 switch (TREE_CODE (type))
1865 {
1866 case UNION_TYPE:
1867 case QUAL_UNION_TYPE:
1868 case RECORD_TYPE:
1869 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1870 {
1871 HOST_WIDE_INT pos, size;
1872 tree tr_pos, expr, *expr_ptr;
1873
1874 if (TREE_CODE (fld) != FIELD_DECL)
1875 continue;
1876
1877 tr_pos = bit_position (fld);
1878 if (!tr_pos || !tree_fits_uhwi_p (tr_pos))
1879 continue;
1880 pos = tree_to_uhwi (tr_pos);
1881 gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
1882 tr_size = DECL_SIZE (fld);
1883 if (!tr_size || !tree_fits_uhwi_p (tr_size))
1884 continue;
1885 size = tree_to_uhwi (tr_size);
1886 if (size == 0)
1887 {
1888 if (pos != offset)
1889 continue;
1890 }
1891 else if (pos > offset || (pos + size) <= offset)
1892 continue;
1893
1894 expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
1895 NULL_TREE);
1896 expr_ptr = &expr;
1897 if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
1898 offset - pos, exp_type))
1899 {
1900 *res = expr;
1901 return true;
1902 }
1903 }
1904 return false;
1905
1906 case ARRAY_TYPE:
1907 tr_size = TYPE_SIZE (TREE_TYPE (type));
1908 if (!tr_size || !tree_fits_uhwi_p (tr_size))
1909 return false;
1910 el_size = tree_to_uhwi (tr_size);
1911
1912 minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
1913 if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
1914 return false;
1915 index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
1916 if (!integer_zerop (minidx))
1917 index = int_const_binop (PLUS_EXPR, index, minidx);
1918 *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
1919 NULL_TREE, NULL_TREE);
1920 offset = offset % el_size;
1921 type = TREE_TYPE (type);
1922 break;
1923
1924 default:
1925 if (offset != 0)
1926 return false;
1927
1928 if (exp_type)
1929 return false;
1930 else
1931 return true;
1932 }
1933 }
1934}
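
/* For illustration (a hypothetical example): with

     struct S { int a; struct { short b; short c; } in[4]; } s;

   calling the function with *RES == s, TYPE being struct S, OFFSET == 80 and
   EXP_TYPE being short descends into the field `in', picks array element 1
   and then the field `c', so that *RES ends up as the expression s.in[1].c
   and true is returned. */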
1935
/* Return true iff TYPE is a stdarg va_list type. */
1937
1938static inline bool
1939is_va_list_type (tree type)
1940{
1941 return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
1942}
1943
/* Print a message to the dump file explaining why a variable was rejected. */
1945
1946static void
1947reject (tree var, const char *msg)
1948{
1949 if (dump_file && (dump_flags & TDF_DETAILS))
1950 {
1951 fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
1952 print_generic_expr (dump_file, var);
1953 fprintf (dump_file, "\n");
1954 }
1955}
1956
1957/* Return true if VAR is a candidate for SRA. */
1958
1959static bool
1960maybe_add_sra_candidate (tree var)
1961{
1962 tree type = TREE_TYPE (var);
1963 const char *msg;
1964 tree_node **slot;
1965
1966 if (!AGGREGATE_TYPE_P (type))
1967 {
1968 reject (var, "not aggregate");
1969 return false;
1970 }
1971 /* Allow constant-pool entries (that "need to live in memory")
1972 unless we are doing IPA SRA. */
1973 if (needs_to_live_in_memory (var)
1974 && (sra_mode == SRA_MODE_EARLY_IPA || !constant_decl_p (var)))
1975 {
1976 reject (var, "needs to live in memory");
1977 return false;
1978 }
1979 if (TREE_THIS_VOLATILE (var))
1980 {
1981 reject (var, "is volatile");
1982 return false;
1983 }
1984 if (!COMPLETE_TYPE_P (type))
1985 {
1986 reject (var, "has incomplete type");
1987 return false;
1988 }
1989 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
1990 {
1991 reject (var, "type size not fixed");
1992 return false;
1993 }
1994 if (tree_to_uhwi (TYPE_SIZE (type)) == 0)
1995 {
1996 reject (var, "type size is zero");
1997 return false;
1998 }
1999 if (type_internals_preclude_sra_p (type, &msg))
2000 {
2001 reject (var, msg);
2002 return false;
2003 }
2004 if (/* Fix for PR 41089. tree-stdarg.c needs to have va_lists intact but
2005 we also want to schedule it rather late. Thus we ignore it in
2006 the early pass. */
2007 (sra_mode == SRA_MODE_EARLY_INTRA
2008 && is_va_list_type (type)))
2009 {
2010 reject (var, "is va_list");
2011 return false;
2012 }
2013
2014 bitmap_set_bit (candidate_bitmap, DECL_UID (var));
2015 slot = candidates->find_slot_with_hash (var, DECL_UID (var), INSERT);
2016 *slot = var;
2017
2018 if (dump_file && (dump_flags & TDF_DETAILS))
2019 {
2020 fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
2021 print_generic_expr (dump_file, var);
2022 fprintf (dump_file, "\n");
2023 }
2024
2025 return true;
2026}
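
/* For example (an illustrative, hypothetical situation): a local

     struct pair { int x; int y; } p;

   that is only ever accessed as p.x and p.y is typically accepted as a
   candidate here, whereas the same variable would be rejected with "needs to
   live in memory" if its address were taken, or with "is volatile" if it were
   declared volatile. */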
2027
/* The very first phase of intraprocedural SRA. It marks in candidate_bitmap
   those parameters and local variables whose type is suitable for
   scalarization. */
2030
2031static bool
2032find_var_candidates (void)
2033{
2034 tree var, parm;
2035 unsigned int i;
2036 bool ret = false;
2037
2038 for (parm = DECL_ARGUMENTS (current_function_decl);
2039 parm;
2040 parm = DECL_CHAIN (parm))
2041 ret |= maybe_add_sra_candidate (parm);
2042
2043 FOR_EACH_LOCAL_DECL (cfun, i, var)
2044 {
2045 if (!VAR_P (var))
2046 continue;
2047
2048 ret |= maybe_add_sra_candidate (var);
2049 }
2050
2051 return ret;
2052}
2053
2054/* Sort all accesses for the given variable, check for partial overlaps and
2055 return NULL if there are any. If there are none, pick a representative for
2056 each combination of offset and size and create a linked list out of them.
2057 Return the pointer to the first representative and make sure it is the first
2058 one in the vector of accesses. */
2059
2060static struct access *
2061sort_and_splice_var_accesses (tree var)
2062{
2063 int i, j, access_count;
2064 struct access *res, **prev_acc_ptr = &res;
2065 vec<access_p> *access_vec;
2066 bool first = true;
2067 HOST_WIDE_INT low = -1, high = 0;
2068
2069 access_vec = get_base_access_vector (var);
2070 if (!access_vec)
2071 return NULL;
2072 access_count = access_vec->length ();
2073
2074 /* Sort by <OFFSET, SIZE>. */
2075 access_vec->qsort (compare_access_positions);
2076
2077 i = 0;
2078 while (i < access_count)
2079 {
2080 struct access *access = (*access_vec)[i];
2081 bool grp_write = access->write;
2082 bool grp_read = !access->write;
2083 bool grp_scalar_write = access->write
2084 && is_gimple_reg_type (access->type);
2085 bool grp_scalar_read = !access->write
2086 && is_gimple_reg_type (access->type);
2087 bool grp_assignment_read = access->grp_assignment_read;
2088 bool grp_assignment_write = access->grp_assignment_write;
2089 bool multiple_scalar_reads = false;
2090 bool total_scalarization = access->grp_total_scalarization;
2091 bool grp_partial_lhs = access->grp_partial_lhs;
2092 bool first_scalar = is_gimple_reg_type (access->type);
2093 bool unscalarizable_region = access->grp_unscalarizable_region;
2094 bool bf_non_full_precision
2095 = (INTEGRAL_TYPE_P (access->type)
2096 && TYPE_PRECISION (access->type) != access->size
2097 && TREE_CODE (access->expr) == COMPONENT_REF
2098 && DECL_BIT_FIELD (TREE_OPERAND (access->expr, 1)));
2099
2100 if (first || access->offset >= high)
2101 {
2102 first = false;
2103 low = access->offset;
2104 high = access->offset + access->size;
2105 }
2106 else if (access->offset > low && access->offset + access->size > high)
2107 return NULL;
2108 else
2109 gcc_assert (access->offset >= low
2110 && access->offset + access->size <= high);
2111
2112 j = i + 1;
2113 while (j < access_count)
2114 {
2115 struct access *ac2 = (*access_vec)[j];
2116 if (ac2->offset != access->offset || ac2->size != access->size)
2117 break;
2118 if (ac2->write)
2119 {
2120 grp_write = true;
2121 grp_scalar_write = (grp_scalar_write
2122 || is_gimple_reg_type (ac2->type));
2123 }
2124 else
2125 {
2126 grp_read = true;
2127 if (is_gimple_reg_type (ac2->type))
2128 {
2129 if (grp_scalar_read)
2130 multiple_scalar_reads = true;
2131 else
2132 grp_scalar_read = true;
2133 }
2134 }
2135 grp_assignment_read |= ac2->grp_assignment_read;
2136 grp_assignment_write |= ac2->grp_assignment_write;
2137 grp_partial_lhs |= ac2->grp_partial_lhs;
2138 unscalarizable_region |= ac2->grp_unscalarizable_region;
2139 total_scalarization |= ac2->grp_total_scalarization;
2140 relink_to_new_repr (access, ac2);
2141
2142 /* If there are both aggregate-type and scalar-type accesses with
2143 this combination of size and offset, the comparison function
2144 should have put the scalars first. */
2145 gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
2146 /* It also prefers integral types to non-integral. However, when the
2147 precision of the selected type does not span the entire area and
             should also be used for a non-integer (e.g. a float), we must not
2149 let that happen. Normally analyze_access_subtree expands the type
2150 to cover the entire area but for bit-fields it doesn't. */
2151 if (bf_non_full_precision && !INTEGRAL_TYPE_P (ac2->type))
2152 {
2153 if (dump_file && (dump_flags & TDF_DETAILS))
2154 {
2155 fprintf (dump_file, "Cannot scalarize the following access "
2156 "because insufficient precision integer type was "
2157 "selected.\n ");
2158 dump_access (dump_file, access, false);
2159 }
2160 unscalarizable_region = true;
2161 }
2162 ac2->group_representative = access;
2163 j++;
2164 }
2165
2166 i = j;
2167
2168 access->group_representative = access;
2169 access->grp_write = grp_write;
2170 access->grp_read = grp_read;
2171 access->grp_scalar_read = grp_scalar_read;
2172 access->grp_scalar_write = grp_scalar_write;
2173 access->grp_assignment_read = grp_assignment_read;
2174 access->grp_assignment_write = grp_assignment_write;
2175 access->grp_hint = total_scalarization
2176 || (multiple_scalar_reads && !constant_decl_p (var));
2177 access->grp_total_scalarization = total_scalarization;
2178 access->grp_partial_lhs = grp_partial_lhs;
2179 access->grp_unscalarizable_region = unscalarizable_region;
2180
2181 *prev_acc_ptr = access;
2182 prev_acc_ptr = &access->next_grp;
2183 }
2184
2185 gcc_assert (res == (*access_vec)[0]);
2186 return res;
2187}
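
/* A sketch of the kind of conflict this function looks for (a contrived,
   hypothetical example): if one access of a union covers bits [0, 64) and
   another covers bits [32, 96), neither is contained in the other, so the
   whole variable is disqualified by returning NULL. Accesses covering
   [0, 64) and [32, 64), on the other hand, nest properly and merely end up
   in a parent/child relationship later on. */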
2188
/* Create a variable for the given ACCESS, deriving its type, name and a few
   other properties from it. Return the variable declaration; callers are
   expected to store it in ACCESS->replacement_decl. */
2192
2193static tree
2194create_access_replacement (struct access *access)
2195{
2196 tree repl;
2197
2198 if (access->grp_to_be_debug_replaced)
2199 {
2200 repl = create_tmp_var_raw (access->type);
2201 DECL_CONTEXT (repl) = current_function_decl;
2202 }
2203 else
2204 /* Drop any special alignment on the type if it's not on the main
2205 variant. This avoids issues with weirdo ABIs like AAPCS. */
2206 repl = create_tmp_var (build_qualified_type
2207 (TYPE_MAIN_VARIANT (access->type),
2208 TYPE_QUALS (access->type)), "SR");
2209 if (TREE_CODE (access->type) == COMPLEX_TYPE
2210 || TREE_CODE (access->type) == VECTOR_TYPE)
2211 {
2212 if (!access->grp_partial_lhs)
2213 DECL_GIMPLE_REG_P (repl) = 1;
2214 }
2215 else if (access->grp_partial_lhs
2216 && is_gimple_reg_type (access->type))
2217 TREE_ADDRESSABLE (repl) = 1;
2218
2219 DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
2220 DECL_ARTIFICIAL (repl) = 1;
2221 DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
2222
2223 if (DECL_NAME (access->base)
2224 && !DECL_IGNORED_P (access->base)
2225 && !DECL_ARTIFICIAL (access->base))
2226 {
2227 char *pretty_name = make_fancy_name (access->expr);
2228 tree debug_expr = unshare_expr_without_location (access->expr), d;
2229 bool fail = false;
2230
2231 DECL_NAME (repl) = get_identifier (pretty_name);
2232 DECL_NAMELESS (repl) = 1;
2233 obstack_free (&name_obstack, pretty_name);
2234
2235 /* Get rid of any SSA_NAMEs embedded in debug_expr,
2236 as DECL_DEBUG_EXPR isn't considered when looking for still
	 used SSA_NAMEs and thus they could be freed. All debug info
	 generation cares about is whether something is constant or variable
2239 and that get_ref_base_and_extent works properly on the
2240 expression. It cannot handle accesses at a non-constant offset
2241 though, so just give up in those cases. */
2242 for (d = debug_expr;
2243 !fail && (handled_component_p (d) || TREE_CODE (d) == MEM_REF);
2244 d = TREE_OPERAND (d, 0))
2245 switch (TREE_CODE (d))
2246 {
2247 case ARRAY_REF:
2248 case ARRAY_RANGE_REF:
2249 if (TREE_OPERAND (d, 1)
2250 && TREE_CODE (TREE_OPERAND (d, 1)) != INTEGER_CST)
2251 fail = true;
2252 if (TREE_OPERAND (d, 3)
2253 && TREE_CODE (TREE_OPERAND (d, 3)) != INTEGER_CST)
2254 fail = true;
2255 /* FALLTHRU */
2256 case COMPONENT_REF:
2257 if (TREE_OPERAND (d, 2)
2258 && TREE_CODE (TREE_OPERAND (d, 2)) != INTEGER_CST)
2259 fail = true;
2260 break;
2261 case MEM_REF:
2262 if (TREE_CODE (TREE_OPERAND (d, 0)) != ADDR_EXPR)
2263 fail = true;
2264 else
2265 d = TREE_OPERAND (d, 0);
2266 break;
2267 default:
2268 break;
2269 }
2270 if (!fail)
2271 {
2272 SET_DECL_DEBUG_EXPR (repl, debug_expr);
2273 DECL_HAS_DEBUG_EXPR_P (repl) = 1;
2274 }
2275 if (access->grp_no_warning)
2276 TREE_NO_WARNING (repl) = 1;
2277 else
2278 TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
2279 }
2280 else
2281 TREE_NO_WARNING (repl) = 1;
2282
2283 if (dump_file)
2284 {
2285 if (access->grp_to_be_debug_replaced)
2286 {
2287 fprintf (dump_file, "Created a debug-only replacement for ");
2288 print_generic_expr (dump_file, access->base);
2289 fprintf (dump_file, " offset: %u, size: %u\n",
2290 (unsigned) access->offset, (unsigned) access->size);
2291 }
2292 else
2293 {
2294 fprintf (dump_file, "Created a replacement for ");
2295 print_generic_expr (dump_file, access->base);
2296 fprintf (dump_file, " offset: %u, size: %u: ",
2297 (unsigned) access->offset, (unsigned) access->size);
2298 print_generic_expr (dump_file, repl);
2299 fprintf (dump_file, "\n");
2300 }
2301 }
2302 sra_stats.replacements++;
2303
2304 return repl;
2305}
2306
2307/* Return ACCESS scalar replacement, which must exist. */
2308
2309static inline tree
2310get_access_replacement (struct access *access)
2311{
2312 gcc_checking_assert (access->replacement_decl);
2313 return access->replacement_decl;
2314}
2315
2316
2317/* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
   linked list along the way. Stop when *ACCESS is NULL or the access it points
   to is not "within" the root. Return false iff some accesses partially
2320 overlap. */
2321
2322static bool
2323build_access_subtree (struct access **access)
2324{
2325 struct access *root = *access, *last_child = NULL;
2326 HOST_WIDE_INT limit = root->offset + root->size;
2327
2328 *access = (*access)->next_grp;
2329 while (*access && (*access)->offset + (*access)->size <= limit)
2330 {
2331 if (!last_child)
2332 root->first_child = *access;
2333 else
2334 last_child->next_sibling = *access;
2335 last_child = *access;
2336 (*access)->parent = root;
2337 (*access)->grp_write |= root->grp_write;
2338
2339 if (!build_access_subtree (access))
2340 return false;
2341 }
2342
2343 if (*access && (*access)->offset < limit)
2344 return false;
2345
2346 return true;
2347}
2348
/* Build a tree of access representatives; ACCESS is the pointer to the first
   one, the others are linked in a list by the next_grp field. Return false iff
2351 some accesses partially overlap. */
2352
2353static bool
2354build_access_trees (struct access *access)
2355{
2356 while (access)
2357 {
2358 struct access *root = access;
2359
2360 if (!build_access_subtree (&access))
2361 return false;
2362 root->next_grp = access;
2363 }
2364 return true;
2365}
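
/* As a hypothetical illustration, for

     struct { struct { int a; int b; } in; int c; } s;

   with representatives for s (bits 0-95), s.in (bits 0-63), s.in.b (bits
   32-63) and s.c (bits 64-95), the resulting tree has the access for s as
   the root, s.in and s.c as its children, and s.in.b as the only child of
   s.in. */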
2366
/* Return true if EXPR contains some ARRAY_REFs into an array with a variable
   lower bound. */
2369
2370static bool
2371expr_with_var_bounded_array_refs_p (tree expr)
2372{
2373 while (handled_component_p (expr))
2374 {
2375 if (TREE_CODE (expr) == ARRAY_REF
2376 && !tree_fits_shwi_p (array_ref_low_bound (expr)))
2377 return true;
2378 expr = TREE_OPERAND (expr, 0);
2379 }
2380 return false;
2381}
2382
/* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
   doing so seems beneficial and ALLOW_REPLACEMENTS allows it. Also set all
   sorts of access flags appropriately along the way, notably propagate the
   grp_read, grp_assignment_read, grp_write, grp_assignment_write and
   grp_total_scalarization flags from PARENT down to ROOT.
2388
2389 Creating a replacement for a scalar access is considered beneficial if its
2390 grp_hint is set (this means we are either attempting total scalarization or
2391 there is more than one direct read access) or according to the following
2392 table:
2393
2394 Access written to through a scalar type (once or more times)
2395 |
2396 | Written to in an assignment statement
2397 | |
2398 | | Access read as scalar _once_
2399 | | |
2400 | | | Read in an assignment statement
2401 | | | |
2402 | | | | Scalarize Comment
2403-----------------------------------------------------------------------------
2404 0 0 0 0 No access for the scalar
2405 0 0 0 1 No access for the scalar
2406 0 0 1 0 No Single read - won't help
2407 0 0 1 1 No The same case
2408 0 1 0 0 No access for the scalar
2409 0 1 0 1 No access for the scalar
2410 0 1 1 0 Yes s = *g; return s.i;
2411 0 1 1 1 Yes The same case as above
2412 1 0 0 0 No Won't help
2413 1 0 0 1 Yes s.i = 1; *g = s;
2414 1 0 1 0 Yes s.i = 5; g = s.i;
2415 1 0 1 1 Yes The same case as above
2416 1 1 0 0 No Won't help.
2417 1 1 0 1 Yes s.i = 1; *g = s;
2418 1 1 1 0 Yes s = *g; return s.i;
2419 1 1 1 1 Yes Any of the above yeses */
2420
2421static bool
2422analyze_access_subtree (struct access *root, struct access *parent,
2423 bool allow_replacements)
2424{
2425 struct access *child;
2426 HOST_WIDE_INT limit = root->offset + root->size;
2427 HOST_WIDE_INT covered_to = root->offset;
2428 bool scalar = is_gimple_reg_type (root->type);
2429 bool hole = false, sth_created = false;
2430
2431 if (parent)
2432 {
2433 if (parent->grp_read)
2434 root->grp_read = 1;
2435 if (parent->grp_assignment_read)
2436 root->grp_assignment_read = 1;
2437 if (parent->grp_write)
2438 root->grp_write = 1;
2439 if (parent->grp_assignment_write)
2440 root->grp_assignment_write = 1;
2441 if (parent->grp_total_scalarization)
2442 root->grp_total_scalarization = 1;
2443 }
2444
2445 if (root->grp_unscalarizable_region)
2446 allow_replacements = false;
2447
2448 if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
2449 allow_replacements = false;
2450
2451 for (child = root->first_child; child; child = child->next_sibling)
2452 {
2453 hole |= covered_to < child->offset;
2454 sth_created |= analyze_access_subtree (child, root,
2455 allow_replacements && !scalar);
2456
2457 root->grp_unscalarized_data |= child->grp_unscalarized_data;
2458 root->grp_total_scalarization &= child->grp_total_scalarization;
2459 if (child->grp_covered)
2460 covered_to += child->size;
2461 else
2462 hole = true;
2463 }
2464
2465 if (allow_replacements && scalar && !root->first_child
2466 && (root->grp_hint
2467 || ((root->grp_scalar_read || root->grp_assignment_read)
2468 && (root->grp_scalar_write || root->grp_assignment_write))))
2469 {
2470 /* Always create access replacements that cover the whole access.
2471 For integral types this means the precision has to match.
2472 Avoid assumptions based on the integral type kind, too. */
2473 if (INTEGRAL_TYPE_P (root->type)
2474 && (TREE_CODE (root->type) != INTEGER_TYPE
2475 || TYPE_PRECISION (root->type) != root->size)
2476 /* But leave bitfield accesses alone. */
2477 && (TREE_CODE (root->expr) != COMPONENT_REF
2478 || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
2479 {
2480 tree rt = root->type;
2481 gcc_assert ((root->offset % BITS_PER_UNIT) == 0
2482 && (root->size % BITS_PER_UNIT) == 0);
2483 root->type = build_nonstandard_integer_type (root->size,
2484 TYPE_UNSIGNED (rt));
2485 root->expr = build_ref_for_offset (UNKNOWN_LOCATION, root->base,
2486 root->offset, root->reverse,
2487 root->type, NULL, false);
2488
2489 if (dump_file && (dump_flags & TDF_DETAILS))
2490 {
2491 fprintf (dump_file, "Changing the type of a replacement for ");
2492 print_generic_expr (dump_file, root->base);
2493 fprintf (dump_file, " offset: %u, size: %u ",
2494 (unsigned) root->offset, (unsigned) root->size);
2495 fprintf (dump_file, " to an integer.\n");
2496 }
2497 }
2498
2499 root->grp_to_be_replaced = 1;
2500 root->replacement_decl = create_access_replacement (root);
2501 sth_created = true;
2502 hole = false;
2503 }
2504 else
2505 {
2506 if (allow_replacements
2507 && scalar && !root->first_child
2508 && (root->grp_scalar_write || root->grp_assignment_write)
2509 && !bitmap_bit_p (cannot_scalarize_away_bitmap,
2510 DECL_UID (root->base)))
2511 {
2512 gcc_checking_assert (!root->grp_scalar_read
2513 && !root->grp_assignment_read);
2514 sth_created = true;
2515 if (MAY_HAVE_DEBUG_BIND_STMTS)
2516 {
2517 root->grp_to_be_debug_replaced = 1;
2518 root->replacement_decl = create_access_replacement (root);
2519 }
2520 }
2521
2522 if (covered_to < limit)
2523 hole = true;
2524 if (scalar || !allow_replacements)
2525 root->grp_total_scalarization = 0;
2526 }
2527
2528 if (!hole || root->grp_total_scalarization)
2529 root->grp_covered = 1;
2530 else if (root->grp_write || comes_initialized_p (root->base))
2531 root->grp_unscalarized_data = 1; /* not covered and written to */
2532 return sth_created;
2533}
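
/* To tie this to the table above: in a hypothetical function containing

     s = *g;
     return s.i;

   the access for s.i is written to in an assignment statement and read as a
   scalar exactly once, which is the `0 1 1 0' row of the table, so a scalar
   replacement is created for it. */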
2534
/* Analyze all access trees linked by next_grp by means of
   analyze_access_subtree. */
2537static bool
2538analyze_access_trees (struct access *access)
2539{
2540 bool ret = false;
2541
2542 while (access)
2543 {
2544 if (analyze_access_subtree (access, NULL, true))
2545 ret = true;
2546 access = access->next_grp;
2547 }
2548
2549 return ret;
2550}
2551
/* Return true iff a potential new child of LACC at offset NORM_OFFSET and with
   size SIZE would conflict with an already existing one. If exactly such a
   child already exists in LACC, store a pointer to it in EXACT_MATCH. */
2555
2556static bool
2557child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
2558 HOST_WIDE_INT size, struct access **exact_match)
2559{
2560 struct access *child;
2561
2562 for (child = lacc->first_child; child; child = child->next_sibling)
2563 {
2564 if (child->offset == norm_offset && child->size == size)
2565 {
2566 *exact_match = child;
2567 return true;
2568 }
2569
2570 if (child->offset < norm_offset + size
2571 && child->offset + child->size > norm_offset)
2572 return true;
2573 }
2574
2575 return false;
2576}
2577
/* Create a new child access of PARENT, with all properties just like MODEL
   except for its offset, which is taken from NEW_OFFSET, its grp_write flag,
   which is set according to SET_GRP_WRITE, and its grp_read flag, which is
   cleared. Return the new access or NULL if it cannot be created. Note that
   this access is created long after all splicing and sorting, it is not
   located in any access vector and is automatically a representative of its
   group. */
2584
2585static struct access *
2586create_artificial_child_access (struct access *parent, struct access *model,
2587 HOST_WIDE_INT new_offset,
2588 bool set_grp_write)
2589{
2590 struct access **child;
2591 tree expr = parent->base;
2592
2593 gcc_assert (!model->grp_unscalarizable_region);
2594
2595 struct access *access = access_pool.allocate ();
2596 memset (access, 0, sizeof (struct access));
2597 if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2598 model->type))
2599 {
2600 access->grp_no_warning = true;
2601 expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2602 new_offset, model, NULL, false);
2603 }
2604
2605 access->base = parent->base;
2606 access->expr = expr;
2607 access->offset = new_offset;
2608 access->size = model->size;
2609 access->type = model->type;
2610 access->grp_write = set_grp_write;
2611 access->grp_read = false;
2612 access->reverse = model->reverse;
2613
2614 child = &parent->first_child;
2615 while (*child && (*child)->offset < new_offset)
2616 child = &(*child)->next_sibling;
2617
2618 access->next_sibling = *child;
2619 *child = access;
2620
2621 return access;
2622}
2623
2624
2625/* Beginning with ACCESS, traverse its whole access subtree and mark all
2626 sub-trees as written to. If any of them has not been marked so previously
2627 and has assignment links leading from it, re-enqueue it. */
2628
2629static void
2630subtree_mark_written_and_enqueue (struct access *access)
2631{
2632 if (access->grp_write)
2633 return;
2634 access->grp_write = true;
2635 add_access_to_work_queue (access);
2636
2637 struct access *child;
2638 for (child = access->first_child; child; child = child->next_sibling)
2639 subtree_mark_written_and_enqueue (child);
2640}
2641
2642/* Propagate subaccesses and grp_write flags of RACC across an assignment link
2643 to LACC. Enqueue sub-accesses as necessary so that the write flag is
2644 propagated transitively. Return true if anything changed. Additionally, if
2645 RACC is a scalar access but LACC is not, change the type of the latter, if
2646 possible. */
2647
2648static bool
2649propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
2650{
2651 struct access *rchild;
2652 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2653 bool ret = false;
2654
  /* If the LHS is still not marked as being written to, we only need to do so
2656 if the RHS at this level actually was. */
2657 if (!lacc->grp_write)
2658 {
2659 gcc_checking_assert (!comes_initialized_p (racc->base));
2660 if (racc->grp_write)
2661 {
2662 subtree_mark_written_and_enqueue (lacc);
2663 ret = true;
2664 }
2665 }
2666
2667 if (is_gimple_reg_type (lacc->type)
2668 || lacc->grp_unscalarizable_region
2669 || racc->grp_unscalarizable_region)
2670 {
2671 if (!lacc->grp_write)
2672 {
2673 ret = true;
2674 subtree_mark_written_and_enqueue (lacc);
2675 }
2676 return ret;
2677 }
2678
2679 if (is_gimple_reg_type (racc->type))
2680 {
2681 if (!lacc->grp_write)
2682 {
2683 ret = true;
2684 subtree_mark_written_and_enqueue (lacc);
2685 }
2686 if (!lacc->first_child && !racc->first_child)
2687 {
2688 tree t = lacc->base;
2689
2690 lacc->type = racc->type;
2691 if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
2692 lacc->offset, racc->type))
2693 lacc->expr = t;
2694 else
2695 {
2696 lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2697 lacc->base, lacc->offset,
2698 racc, NULL, false);
2699 lacc->grp_no_warning = true;
2700 }
2701 }
2702 return ret;
2703 }
2704
2705 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2706 {
2707 struct access *new_acc = NULL;
2708 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2709
2710 if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2711 &new_acc))
2712 {
2713 if (new_acc)
2714 {
2715 if (!new_acc->grp_write && rchild->grp_write)
2716 {
2717 gcc_assert (!lacc->grp_write);
2718 subtree_mark_written_and_enqueue (new_acc);
2719 ret = true;
2720 }
2721
2722 rchild->grp_hint = 1;
2723 new_acc->grp_hint |= new_acc->grp_read;
2724 if (rchild->first_child)
2725 ret |= propagate_subaccesses_across_link (new_acc, rchild);
2726 }
2727 else
2728 {
2729 if (!lacc->grp_write)
2730 {
2731 ret = true;
2732 subtree_mark_written_and_enqueue (lacc);
2733 }
2734 }
2735 continue;
2736 }
2737
2738 if (rchild->grp_unscalarizable_region)
2739 {
2740 if (rchild->grp_write && !lacc->grp_write)
2741 {
2742 ret = true;
2743 subtree_mark_written_and_enqueue (lacc);
2744 }
2745 continue;
2746 }
2747
2748 rchild->grp_hint = 1;
2749 new_acc = create_artificial_child_access (lacc, rchild, norm_offset,
2750 lacc->grp_write
2751 || rchild->grp_write);
2752 gcc_checking_assert (new_acc);
2753 if (racc->first_child)
2754 propagate_subaccesses_across_link (new_acc, rchild);
2755
2756 add_access_to_work_queue (lacc);
2757 ret = true;
2758 }
2759
2760 return ret;
2761}
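
/* A hypothetical example of what this propagation achieves: given

     d = s;
     ... = s.x;

   the access s.x on the right-hand side of the assignment link gets an
   artificial counterpart d.x created under d (unless that would conflict with
   d's existing accesses), so that the aggregate copy can later be performed
   as copies of the individual scalar replacements. */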
2762
2763/* Propagate all subaccesses across assignment links. */
2764
2765static void
2766propagate_all_subaccesses (void)
2767{
2768 while (work_queue_head)
2769 {
2770 struct access *racc = pop_access_from_work_queue ();
2771 struct assign_link *link;
2772
2773 if (racc->group_representative)
	racc = racc->group_representative;
2775 gcc_assert (racc->first_link);
2776
2777 for (link = racc->first_link; link; link = link->next)
2778 {
2779 struct access *lacc = link->lacc;
2780
2781 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2782 continue;
2783 lacc = lacc->group_representative;
2784
2785 bool reque_parents = false;
2786 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (racc->base)))
2787 {
2788 if (!lacc->grp_write)
2789 {
2790 subtree_mark_written_and_enqueue (lacc);
2791 reque_parents = true;
2792 }
2793 }
2794 else if (propagate_subaccesses_across_link (lacc, racc))
2795 reque_parents = true;
2796
2797 if (reque_parents)
2798 do
2799 {
2800 add_access_to_work_queue (lacc);
2801 lacc = lacc->parent;
2802 }
2803 while (lacc);
2804 }
2805 }
2806}
2807
2808/* Go through all accesses collected throughout the (intraprocedural) analysis
2809 stage, exclude overlapping ones, identify representatives and build trees
2810 out of them, making decisions about scalarization on the way. Return true
2811 iff there are any to-be-scalarized variables after this stage. */
2812
2813static bool
2814analyze_all_variable_accesses (void)
2815{
2816 int res = 0;
2817 bitmap tmp = BITMAP_ALLOC (NULL);
2818 bitmap_iterator bi;
2819 unsigned i;
2820 bool optimize_speed_p = !optimize_function_for_size_p (cfun);
2821
2822 enum compiler_param param = optimize_speed_p
2823 ? PARAM_SRA_MAX_SCALARIZATION_SIZE_SPEED
2824 : PARAM_SRA_MAX_SCALARIZATION_SIZE_SIZE;
2825
2826 /* If the user didn't set PARAM_SRA_MAX_SCALARIZATION_SIZE_<...>,
2827 fall back to a target default. */
2828 unsigned HOST_WIDE_INT max_scalarization_size
2829 = global_options_set.x_param_values[param]
2830 ? PARAM_VALUE (param)
2831 : get_move_ratio (optimize_speed_p) * UNITS_PER_WORD;
2832
2833 max_scalarization_size *= BITS_PER_UNIT;
2834
2835 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2836 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2837 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2838 {
2839 tree var = candidate (i);
2840
2841 if (VAR_P (var) && scalarizable_type_p (TREE_TYPE (var),
2842 constant_decl_p (var)))
2843 {
2844 if (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var)))
2845 <= max_scalarization_size)
2846 {
2847 create_total_scalarization_access (var);
2848 completely_scalarize (var, TREE_TYPE (var), 0, var);
2849 statistics_counter_event (cfun,
2850 "Totally-scalarized aggregates", 1);
2851 if (dump_file && (dump_flags & TDF_DETAILS))
2852 {
2853 fprintf (dump_file, "Will attempt to totally scalarize ");
2854 print_generic_expr (dump_file, var);
2855 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2856 }
2857 }
2858 else if (dump_file && (dump_flags & TDF_DETAILS))
2859 {
2860 fprintf (dump_file, "Too big to totally scalarize: ");
2861 print_generic_expr (dump_file, var);
2862 fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
2863 }
2864 }
2865 }
2866
2867 bitmap_copy (tmp, candidate_bitmap);
2868 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2869 {
2870 tree var = candidate (i);
2871 struct access *access;
2872
2873 access = sort_and_splice_var_accesses (var);
2874 if (!access || !build_access_trees (access))
2875 disqualify_candidate (var,
2876 "No or inhibitingly overlapping accesses.");
2877 }
2878
2879 propagate_all_subaccesses ();
2880
2881 bitmap_copy (tmp, candidate_bitmap);
2882 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2883 {
2884 tree var = candidate (i);
2885 struct access *access = get_first_repr_for_decl (var);
2886
2887 if (analyze_access_trees (access))
2888 {
2889 res++;
2890 if (dump_file && (dump_flags & TDF_DETAILS))
2891 {
2892 fprintf (dump_file, "\nAccess trees for ");
2893 print_generic_expr (dump_file, var);
2894 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2895 dump_access_tree (dump_file, access);
2896 fprintf (dump_file, "\n");
2897 }
2898 }
2899 else
2900 disqualify_candidate (var, "No scalar replacements to be created.");
2901 }
2902
2903 BITMAP_FREE (tmp);
2904
2905 if (res)
2906 {
2907 statistics_counter_event (cfun, "Scalarized aggregates", res);
2908 return true;
2909 }
2910 else
2911 return false;
2912}
2913
2914/* Generate statements copying scalar replacements of accesses within a subtree
2915 into or out of AGG. ACCESS, all its children, siblings and their children
2916 are to be processed. AGG is an aggregate type expression (can be a
2917 declaration but does not have to be, it can for example also be a mem_ref or
2918 a series of handled components). TOP_OFFSET is the offset of the processed
2919 subtree which has to be subtracted from offsets of individual accesses to
   get corresponding offsets for AGG. If CHUNK_SIZE is non-zero, copy only
   replacements in the interval <START_OFFSET, START_OFFSET + CHUNK_SIZE>,
   otherwise copy all. GSI is a statement iterator used to place the new
   statements. WRITE should be true when the statements should write from AGG
   to the replacements and false if vice versa. If INSERT_AFTER is true, new
   statements will be added after the current statement in GSI, otherwise they
   will be added before it. */
2927
2928static void
2929generate_subtree_copies (struct access *access, tree agg,
2930 HOST_WIDE_INT top_offset,
2931 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2932 gimple_stmt_iterator *gsi, bool write,
2933 bool insert_after, location_t loc)
2934{
2935 /* Never write anything into constant pool decls. See PR70602. */
2936 if (!write && constant_decl_p (agg))
2937 return;
2938 do
2939 {
2940 if (chunk_size && access->offset >= start_offset + chunk_size)
2941 return;
2942
2943 if (access->grp_to_be_replaced
2944 && (chunk_size == 0
2945 || access->offset + access->size > start_offset))
2946 {
2947 tree expr, repl = get_access_replacement (access);
2948 gassign *stmt;
2949
2950 expr = build_ref_for_model (loc, agg, access->offset - top_offset,
2951 access, gsi, insert_after);
2952
2953 if (write)
2954 {
2955 if (access->grp_partial_lhs)
2956 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2957 !insert_after,
2958 insert_after ? GSI_NEW_STMT
2959 : GSI_SAME_STMT);
2960 stmt = gimple_build_assign (repl, expr);
2961 }
2962 else
2963 {
2964 TREE_NO_WARNING (repl) = 1;
2965 if (access->grp_partial_lhs)
2966 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2967 !insert_after,
2968 insert_after ? GSI_NEW_STMT
2969 : GSI_SAME_STMT);
2970 stmt = gimple_build_assign (expr, repl);
2971 }
2972 gimple_set_location (stmt, loc);
2973
2974 if (insert_after)
2975 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2976 else
2977 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2978 update_stmt (stmt);
2979 sra_stats.subtree_copies++;
2980 }
2981 else if (write
2982 && access->grp_to_be_debug_replaced
2983 && (chunk_size == 0
2984 || access->offset + access->size > start_offset))
2985 {
2986 gdebug *ds;
2987 tree drhs = build_debug_ref_for_model (loc, agg,
2988 access->offset - top_offset,
2989 access);
2990 ds = gimple_build_debug_bind (get_access_replacement (access),
2991 drhs, gsi_stmt (*gsi));
2992 if (insert_after)
2993 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2994 else
2995 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2996 }
2997
2998 if (access->first_child)
2999 generate_subtree_copies (access->first_child, agg, top_offset,
3000 start_offset, chunk_size, gsi,
3001 write, insert_after, loc);
3002
3003 access = access->next_sibling;
3004 }
3005 while (access);
3006}
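
/* A sketch of the generated code (hypothetical variable and replacement
   names): if s.i and s.f have the scalar replacements SR.1 and SR.2, a call
   with AGG == s, WRITE == true and INSERT_AFTER == true placed at

     s = *g_5(D);

   appends something along the lines of

     SR.1 = s.i;
     SR.2 = s.f;

   after that statement, whereas WRITE == false would emit the assignments in
   the opposite direction, storing the replacements back into the
   aggregate. */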
3007
3008/* Assign zero to all scalar replacements in an access subtree. ACCESS is the
3009 root of the subtree to be processed. GSI is the statement iterator used
3010 for inserting statements which are added after the current statement if
3011 INSERT_AFTER is true or before it otherwise. */
3012
3013static void
3014init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
3015 bool insert_after, location_t loc)
3016
3017{
3018 struct access *child;
3019
3020 if (access->grp_to_be_replaced)
3021 {
3022 gassign *stmt;
3023
3024 stmt = gimple_build_assign (get_access_replacement (access),
3025 build_zero_cst (access->type));
3026 if (insert_after)
3027 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
3028 else
3029 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3030 update_stmt (stmt);
3031 gimple_set_location (stmt, loc);
3032 }
3033 else if (access->grp_to_be_debug_replaced)
3034 {
3035 gdebug *ds
3036 = gimple_build_debug_bind (get_access_replacement (access),
3037 build_zero_cst (access->type),
3038 gsi_stmt (*gsi));
3039 if (insert_after)
3040 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
3041 else
3042 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3043 }
3044
3045 for (child = access->first_child; child; child = child->next_sibling)
3046 init_subtree_with_zero (child, gsi, insert_after, loc);
3047}
3048
3049/* Clobber all scalar replacements in an access subtree. ACCESS is the
3050 root of the subtree to be processed. GSI is the statement iterator used
3051 for inserting statements which are added after the current statement if
3052 INSERT_AFTER is true or before it otherwise. */
3053
3054static void
3055clobber_subtree (struct access *access, gimple_stmt_iterator *gsi,
3056 bool insert_after, location_t loc)
3057
3058{
3059 struct access *child;
3060
3061 if (access->grp_to_be_replaced)
3062 {
3063 tree rep = get_access_replacement (access);
3064 tree clobber = build_constructor (access->type, NULL);
3065 TREE_THIS_VOLATILE (clobber) = 1;
3066 gimple *stmt = gimple_build_assign (rep, clobber);
3067
3068 if (insert_after)
3069 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
3070 else
3071 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3072 update_stmt (stmt);
3073 gimple_set_location (stmt, loc);
3074 }
3075
3076 for (child = access->first_child; child; child = child->next_sibling)
3077 clobber_subtree (child, gsi, insert_after, loc);
3078}
3079
3080/* Search for an access representative for the given expression EXPR and
3081 return it or NULL if it cannot be found. */
3082
3083static struct access *
3084get_access_for_expr (tree expr)
3085{
3086 HOST_WIDE_INT offset, size, max_size;
3087 tree base;
3088 bool reverse;
3089
3090 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
3091 a different size than the size of its argument and we need the latter
3092 one. */
3093 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3094 expr = TREE_OPERAND (expr, 0);
3095
3096 base = get_ref_base_and_extent (expr, &offset, &size, &max_size, &reverse);
3097 if (max_size == -1 || !DECL_P (base))
3098 return NULL;
3099
3100 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
3101 return NULL;
3102
3103 return get_var_base_offset_size_access (base, offset, max_size);
3104}
3105
3106/* Replace the expression EXPR with a scalar replacement if there is one and
3107 generate other statements to do type conversion or subtree copying if
3108 necessary. GSI is used to place newly created statements, WRITE is true if
   the expression is being written to (it is on the LHS of a statement or is an
   output in an assembly statement). */
3111
3112static bool
3113sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
3114{
3115 location_t loc;
3116 struct access *access;
3117 tree type, bfr, orig_expr;
3118
3119 if (TREE_CODE (*expr) == BIT_FIELD_REF)
3120 {
3121 bfr = *expr;
3122 expr = &TREE_OPERAND (*expr, 0);
3123 }
3124 else
3125 bfr = NULL_TREE;
3126
3127 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
3128 expr = &TREE_OPERAND (*expr, 0);
3129 access = get_access_for_expr (*expr);
3130 if (!access)
3131 return false;
3132 type = TREE_TYPE (*expr);
3133 orig_expr = *expr;
3134
3135 loc = gimple_location (gsi_stmt (*gsi));
3136 gimple_stmt_iterator alt_gsi = gsi_none ();
3137 if (write && stmt_ends_bb_p (gsi_stmt (*gsi)))
3138 {
3139 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
3140 gsi = &alt_gsi;
3141 }
3142
3143 if (access->grp_to_be_replaced)
3144 {
3145 tree repl = get_access_replacement (access);
3146 /* If we replace a non-register typed access simply use the original
3147 access expression to extract the scalar component afterwards.
3148 This happens if scalarizing a function return value or parameter
3149 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
3150 gcc.c-torture/compile/20011217-1.c.
3151
3152 We also want to use this when accessing a complex or vector which can
3153 be accessed as a different type too, potentially creating a need for
3154 type conversion (see PR42196) and when scalarized unions are involved
3155 in assembler statements (see PR42398). */
3156 if (!useless_type_conversion_p (type, access->type))
3157 {
3158 tree ref;
3159
3160 ref = build_ref_for_model (loc, orig_expr, 0, access, gsi, false);
3161
3162 if (write)
3163 {
3164 gassign *stmt;
3165
3166 if (access->grp_partial_lhs)
3167 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
3168 false, GSI_NEW_STMT);
3169 stmt = gimple_build_assign (repl, ref);
3170 gimple_set_location (stmt, loc);
3171 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
3172 }
3173 else
3174 {
3175 gassign *stmt;
3176
3177 if (access->grp_partial_lhs)
3178 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
3179 true, GSI_SAME_STMT);
3180 stmt = gimple_build_assign (ref, repl);
3181 gimple_set_location (stmt, loc);
3182 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3183 }
3184 }
3185 else
3186 *expr = repl;
3187 sra_stats.exprs++;
3188 }
3189 else if (write && access->grp_to_be_debug_replaced)
3190 {
3191 gdebug *ds = gimple_build_debug_bind (get_access_replacement (access),
3192 NULL_TREE,
3193 gsi_stmt (*gsi));
3194 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
3195 }
3196
3197 if (access->first_child)
3198 {
3199 HOST_WIDE_INT start_offset, chunk_size;
3200 if (bfr
3201 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 1))
3202 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 2)))
3203 {
3204 chunk_size = tree_to_uhwi (TREE_OPERAND (bfr, 1));
3205 start_offset = access->offset
3206 + tree_to_uhwi (TREE_OPERAND (bfr, 2));
3207 }
3208 else
3209 start_offset = chunk_size = 0;
3210
3211 generate_subtree_copies (access->first_child, orig_expr, access->offset,
3212 start_offset, chunk_size, gsi, write, write,
3213 loc);
3214 }
3215 return true;
3216}
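
/* In the common case this simply substitutes the replacement variable for the
   original reference; e.g. a hypothetical load

     x = s.i;

   where s.i has the replacement SR.3 becomes

     x = SR.3;

   Extra statements are only generated when the type of the expression differs
   from the type of the replacement, e.g. for the union and vector cases
   described above. */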
3217
3218/* Where scalar replacements of the RHS have been written to when a replacement
   of the LHS of an assignment cannot be directly loaded from a replacement of
3220 the RHS. */
3221enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
3222 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
3223 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
3224
3225struct subreplacement_assignment_data
3226{
3227 /* Offset of the access representing the lhs of the assignment. */
3228 HOST_WIDE_INT left_offset;
3229
3230 /* LHS and RHS of the original assignment. */
3231 tree assignment_lhs, assignment_rhs;
3232
3233 /* Access representing the rhs of the whole assignment. */
3234 struct access *top_racc;
3235
3236 /* Stmt iterator used for statement insertions after the original assignment.
3237 It points to the main GSI used to traverse a BB during function body
3238 modification. */
3239 gimple_stmt_iterator *new_gsi;
3240
3241 /* Stmt iterator used for statement insertions before the original
3242 assignment. Keeps on pointing to the original statement. */
3243 gimple_stmt_iterator old_gsi;
3244
3245 /* Location of the assignment. */
3246 location_t loc;
3247
3248 /* Keeps the information whether we have needed to refresh replacements of
3249 the LHS and from which side of the assignments this takes place. */
3250 enum unscalarized_data_handling refreshed;
3251};
3252
/* Store all replacements in the access tree rooted in SAD->top_racc either to
   their base aggregate if there are unscalarized data or directly to the LHS
   of the original assignment otherwise. */
3256
3257static void
3258handle_unscalarized_data_in_subtree (struct subreplacement_assignment_data *sad)
3259{
3260 tree src;
3261 if (sad->top_racc->grp_unscalarized_data)
3262 {
3263 src = sad->assignment_rhs;
3264 sad->refreshed = SRA_UDH_RIGHT;
3265 }
3266 else
3267 {
3268 src = sad->assignment_lhs;
3269 sad->refreshed = SRA_UDH_LEFT;
3270 }
3271 generate_subtree_copies (sad->top_racc->first_child, src,
3272 sad->top_racc->offset, 0, 0,
3273 &sad->old_gsi, false, false, sad->loc);
3274}
3275
3276/* Try to generate statements to load all sub-replacements in an access subtree
3277 formed by children of LACC from scalar replacements in the SAD->top_racc
3278 subtree. If that is not possible, refresh the SAD->top_racc base aggregate
3279 and load the accesses from it. */
3280
3281static void
3282load_assign_lhs_subreplacements (struct access *lacc,
3283 struct subreplacement_assignment_data *sad)
3284{
3285 for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
3286 {
3287 HOST_WIDE_INT offset;
3288 offset = lacc->offset - sad->left_offset + sad->top_racc->offset;
3289
3290 if (lacc->grp_to_be_replaced)
3291 {
3292 struct access *racc;
3293 gassign *stmt;
3294 tree rhs;
3295
3296 racc = find_access_in_subtree (sad->top_racc, offset, lacc->size);
3297 if (racc && racc->grp_to_be_replaced)
3298 {
3299 rhs = get_access_replacement (racc);
3300 if (!useless_type_conversion_p (lacc->type, racc->type))
3301 rhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
3302 lacc->type, rhs);
3303
3304 if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
3305 rhs = force_gimple_operand_gsi (&sad->old_gsi, rhs, true,
3306 NULL_TREE, true, GSI_SAME_STMT);
3307 }
3308 else
3309 {
3310 /* No suitable access on the right hand side, need to load from
3311 the aggregate. See if we have to update it first... */
3312 if (sad->refreshed == SRA_UDH_NONE)
3313 handle_unscalarized_data_in_subtree (sad);
3314
3315 if (sad->refreshed == SRA_UDH_LEFT)
3316 rhs = build_ref_for_model (sad->loc, sad->assignment_lhs,
3317 lacc->offset - sad->left_offset,
3318 lacc, sad->new_gsi, true);
3319 else
3320 rhs = build_ref_for_model (sad->loc, sad->assignment_rhs,
3321 lacc->offset - sad->left_offset,
3322 lacc, sad->new_gsi, true);
3323 if (lacc->grp_partial_lhs)
3324 rhs = force_gimple_operand_gsi (sad->new_gsi,
3325 rhs, true, NULL_TREE,
3326 false, GSI_NEW_STMT);
3327 }
3328
3329 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
3330 gsi_insert_after (sad->new_gsi, stmt, GSI_NEW_STMT);
3331 gimple_set_location (stmt, sad->loc);
3332 update_stmt (stmt);
3333 sra_stats.subreplacements++;
3334 }
3335 else
3336 {
3337 if (sad->refreshed == SRA_UDH_NONE
3338 && lacc->grp_read && !lacc->grp_covered)
3339 handle_unscalarized_data_in_subtree (sad);
3340
3341 if (lacc && lacc->grp_to_be_debug_replaced)
3342 {
3343 gdebug *ds;
3344 tree drhs;
3345 struct access *racc = find_access_in_subtree (sad->top_racc,
3346 offset,
3347 lacc->size);
3348
3349 if (racc && racc->grp_to_be_replaced)
3350 {
3351 if (racc->grp_write || constant_decl_p (racc->base))
3352 drhs = get_access_replacement (racc);
3353 else
3354 drhs = NULL;
3355 }
3356 else if (sad->refreshed == SRA_UDH_LEFT)
3357 drhs = build_debug_ref_for_model (sad->loc, lacc->base,
3358 lacc->offset, lacc);
3359 else if (sad->refreshed == SRA_UDH_RIGHT)
3360 drhs = build_debug_ref_for_model (sad->loc, sad->top_racc->base,
3361 offset, lacc);
3362 else
3363 drhs = NULL_TREE;
3364 if (drhs
3365 && !useless_type_conversion_p (lacc->type, TREE_TYPE (drhs)))
3366 drhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
3367 lacc->type, drhs);
3368 ds = gimple_build_debug_bind (get_access_replacement (lacc),
3369 drhs, gsi_stmt (sad->old_gsi));
3370 gsi_insert_after (sad->new_gsi, ds, GSI_NEW_STMT);
3371 }
3372 }
3373
3374 if (lacc->first_child)
3375 load_assign_lhs_subreplacements (lacc, sad);
3376 }
3377}
3378
3379/* Result code for SRA assignment modification. */
3380enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
3381 SRA_AM_MODIFIED, /* stmt changed but not
3382 removed */
3383 SRA_AM_REMOVED }; /* stmt eliminated */
3384
3385/* Modify assignments with a CONSTRUCTOR on their RHS. STMT contains a pointer
3386 to the assignment and GSI is the statement iterator pointing at it. Returns
3387 the same values as sra_modify_assign. */
3388
3389static enum assignment_mod_result
3390sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
3391{
3392 tree lhs = gimple_assign_lhs (stmt);
3393 struct access *acc = get_access_for_expr (lhs);
3394 if (!acc)
3395 return SRA_AM_NONE;
3396 location_t loc = gimple_location (stmt);
3397
3398 if (gimple_clobber_p (stmt))
3399 {
3400 /* Clobber the replacement variable. */
3401 clobber_subtree (acc, gsi, !acc->grp_covered, loc);
3402 /* Remove clobbers of fully scalarized variables, they are dead. */
3403 if (acc->grp_covered)
3404 {
3405 unlink_stmt_vdef (stmt);
3406 gsi_remove (gsi, true);
3407 release_defs (stmt);
3408 return SRA_AM_REMOVED;
3409 }
3410 else
3411 return SRA_AM_MODIFIED;
3412 }
3413
3414 if (CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt)) > 0)
3415 {
3416 /* I have never seen this code path trigger but if it can happen the
3417 following should handle it gracefully. */
3418 if (access_has_children_p (acc))
3419 generate_subtree_copies (acc->first_child, lhs, acc->offset, 0, 0, gsi,
3420 true, true, loc);
3421 return SRA_AM_MODIFIED;
3422 }
3423
3424 if (acc->grp_covered)
3425 {
3426 init_subtree_with_zero (acc, gsi, false, loc);
3427 unlink_stmt_vdef (stmt);
3428 gsi_remove (gsi, true);
3429 release_defs (stmt);
3430 return SRA_AM_REMOVED;
3431 }
3432 else
3433 {
3434 init_subtree_with_zero (acc, gsi, true, loc);
3435 return SRA_AM_MODIFIED;
3436 }
3437}
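
/* For instance (hypothetical code), an empty-constructor assignment

     s = {};

   to a fully covered, scalarized s is turned into zero-assignments to all of
   its scalar replacements and the original statement is removed
   (SRA_AM_REMOVED), while a clobber of such an s has its replacements
   clobbered and the clobber statement itself deleted as dead. */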
3438
3439/* Create and return a new suitable default definition SSA_NAME for RACC which
3440 is an access describing an uninitialized part of an aggregate that is being
3441 loaded. */
3442
3443static tree
3444get_repl_default_def_ssa_name (struct access *racc)
3445{
3446 gcc_checking_assert (!racc->grp_to_be_replaced
3447 && !racc->grp_to_be_debug_replaced);
3448 if (!racc->replacement_decl)
3449 racc->replacement_decl = create_access_replacement (racc);
3450 return get_or_create_ssa_default_def (cfun, racc->replacement_decl);
3451}
3452
3453/* Examine both sides of the assignment statement pointed to by STMT, replace
   them with a scalar replacement if there is one and generate copying of
3455 replacements if scalarized aggregates have been used in the assignment. GSI
3456 is used to hold generated statements for type conversions and subtree
3457 copying. */
3458
3459static enum assignment_mod_result
3460sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
3461{
3462 struct access *lacc, *racc;
3463 tree lhs, rhs;
3464 bool modify_this_stmt = false;
3465 bool force_gimple_rhs = false;
3466 location_t loc;
3467 gimple_stmt_iterator orig_gsi = *gsi;
3468
3469 if (!gimple_assign_single_p (stmt))
3470 return SRA_AM_NONE;
3471 lhs = gimple_assign_lhs (stmt);
3472 rhs = gimple_assign_rhs1 (stmt);
3473
3474 if (TREE_CODE (rhs) == CONSTRUCTOR)
3475 return sra_modify_constructor_assign (stmt, gsi);
3476
3477 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
3478 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
3479 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
3480 {
3481 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (stmt),
3482 gsi, false);
3483 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (stmt),
3484 gsi, true);
3485 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3486 }
3487
3488 lacc = get_access_for_expr (lhs);
3489 racc = get_access_for_expr (rhs);
3490 if (!lacc && !racc)
3491 return SRA_AM_NONE;
3492 /* Avoid modifying initializations of constant-pool replacements. */
3493 if (racc && (racc->replacement_decl == lhs))
3494 return SRA_AM_NONE;
3495
3496 loc = gimple_location (stmt);
3497 if (lacc && lacc->grp_to_be_replaced)
3498 {
3499 lhs = get_access_replacement (lacc);
3500 gimple_assign_set_lhs (stmt, lhs);
3501 modify_this_stmt = true;
3502 if (lacc->grp_partial_lhs)
3503 force_gimple_rhs = true;
3504 sra_stats.exprs++;
3505 }
3506
3507 if (racc && racc->grp_to_be_replaced)
3508 {
3509 rhs = get_access_replacement (racc);
3510 modify_this_stmt = true;
3511 if (racc->grp_partial_lhs)
3512 force_gimple_rhs = true;
3513 sra_stats.exprs++;
3514 }
3515 else if (racc
3516 && !racc->grp_unscalarized_data
3517 && !racc->grp_unscalarizable_region
3518 && TREE_CODE (lhs) == SSA_NAME
3519 && !access_has_replacements_p (racc))
3520 {
3521 rhs = get_repl_default_def_ssa_name (racc);
3522 modify_this_stmt = true;
3523 sra_stats.exprs++;
3524 }
3525
3526 if (modify_this_stmt)
3527 {
3528 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3529 {
3530 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
3531 ??? This should move to fold_stmt which we simply should
3532 call after building a VIEW_CONVERT_EXPR here. */
3533 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
3534 && !contains_bitfld_component_ref_p (lhs))
3535 {
3536 lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
3537 gimple_assign_set_lhs (stmt, lhs);
3538 }
3539 else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
3540 && !contains_vce_or_bfcref_p (rhs))
3541 rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
3542
3543 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3544 {
3545 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
3546 rhs);
3547 if (is_gimple_reg_type (TREE_TYPE (lhs))
3548 && TREE_CODE (lhs) != SSA_NAME)
3549 force_gimple_rhs = true;
3550 }
3551 }
3552 }
3553
3554 if (lacc && lacc->grp_to_be_debug_replaced)
3555 {
3556 tree dlhs = get_access_replacement (lacc);
3557 tree drhs = unshare_expr (rhs);
3558 if (!useless_type_conversion_p (TREE_TYPE (dlhs), TREE_TYPE (drhs)))
3559 {
3560 if (AGGREGATE_TYPE_P (TREE_TYPE (drhs))
3561 && !contains_vce_or_bfcref_p (drhs))
3562 drhs = build_debug_ref_for_model (loc, drhs, 0, lacc);
3563 if (drhs
3564 && !useless_type_conversion_p (TREE_TYPE (dlhs),
3565 TREE_TYPE (drhs)))
3566 drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
3567 TREE_TYPE (dlhs), drhs);
3568 }
3569 gdebug *ds = gimple_build_debug_bind (dlhs, drhs, stmt);
3570 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3571 }
3572
3573 /* From this point on, the function deals with assignments in between
3574 aggregates when at least one has scalar reductions of some of its
   components. There are three possible scenarios: 1) both the LHS and RHS have
   to-be-scalarized components, 2) only the RHS does, or 3) only the LHS does.
3577
3578 In the first case, we would like to load the LHS components from RHS
3579 components whenever possible. If that is not possible, we would like to
3580 read it directly from the RHS (after updating it by storing in it its own
     components).  If there is some necessary unscalarized data in the LHS, it
     will be loaded by the original assignment too.  If neither of these cases
     happens, the original statement can be removed.  Most of this is done
3584 by load_assign_lhs_subreplacements.
3585
3586 In the second case, we would like to store all RHS scalarized components
3587 directly into LHS and if they cover the aggregate completely, remove the
3588 statement too. In the third case, we want the LHS components to be loaded
3589 directly from the RHS (DSE will remove the original statement if it
3590 becomes redundant).
3591
3592 This is a bit complex but manageable when types match and when unions do
3593 not cause confusion in a way that we cannot really load a component of LHS
3594 from the RHS or vice versa (the access representing this level can have
3595 subaccesses that are accessible only through a different union field at a
3596 higher level - different from the one used in the examined expression).
3597 Unions are fun.
3598
     Therefore, I specially handle a fourth case, happening when there is a
     specific type cast or it is impossible to locate a scalarized subaccess on
     the other side of the expression.  If that happens, I simply "refresh" the
     RHS by storing in it its scalarized components, leave the original statement
     there to do the copying and then load the scalar replacements of the LHS.
3604 This is what the first branch does. */
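
  /* A purely illustrative sketch of case 2) above (hypothetical names, not
     taken from any real dump): given

	struct S { int a; int b; } g, l;
	...
	g = l;

     where only "l" has scalar replacements, say SR_l_a and SR_l_b, the copy
     is conceptually rewritten as

	g.a = SR_l_a;
	g.b = SR_l_b;

     and, because the replacements cover the whole source, the original
     aggregate assignment can then be removed.  */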
3605
3606 if (modify_this_stmt
3607 || gimple_has_volatile_ops (stmt)
3608 || contains_vce_or_bfcref_p (rhs)
3609 || contains_vce_or_bfcref_p (lhs)
3610 || stmt_ends_bb_p (stmt))
3611 {
      /* No need to copy into a constant-pool; it comes pre-initialized.  */
3613 if (access_has_children_p (racc) && !constant_decl_p (racc->base))
3614 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
3615 gsi, false, false, loc);
3616 if (access_has_children_p (lacc))
3617 {
3618 gimple_stmt_iterator alt_gsi = gsi_none ();
3619 if (stmt_ends_bb_p (stmt))
3620 {
3621 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
3622 gsi = &alt_gsi;
3623 }
3624 generate_subtree_copies (lacc->first_child, lhs, lacc->offset, 0, 0,
3625 gsi, true, true, loc);
3626 }
3627 sra_stats.separate_lhs_rhs_handling++;
3628
3629 /* This gimplification must be done after generate_subtree_copies,
3630 lest we insert the subtree copies in the middle of the gimplified
3631 sequence. */
3632 if (force_gimple_rhs)
3633 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
3634 true, GSI_SAME_STMT);
3635 if (gimple_assign_rhs1 (stmt) != rhs)
3636 {
3637 modify_this_stmt = true;
3638 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
3639 gcc_assert (stmt == gsi_stmt (orig_gsi));
3640 }
3641
3642 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3643 }
3644 else
3645 {
3646 if (access_has_children_p (lacc)
3647 && access_has_children_p (racc)
3648 /* When an access represents an unscalarizable region, it usually
3649 represents accesses with variable offset and thus must not be used
3650 to generate new memory accesses. */
3651 && !lacc->grp_unscalarizable_region
3652 && !racc->grp_unscalarizable_region)
3653 {
3654 struct subreplacement_assignment_data sad;
3655
3656 sad.left_offset = lacc->offset;
3657 sad.assignment_lhs = lhs;
3658 sad.assignment_rhs = rhs;
3659 sad.top_racc = racc;
3660 sad.old_gsi = *gsi;
3661 sad.new_gsi = gsi;
3662 sad.loc = gimple_location (stmt);
3663 sad.refreshed = SRA_UDH_NONE;
3664
3665 if (lacc->grp_read && !lacc->grp_covered)
3666 handle_unscalarized_data_in_subtree (&sad);
3667
3668 load_assign_lhs_subreplacements (lacc, &sad);
3669 if (sad.refreshed != SRA_UDH_RIGHT)
3670 {
3671 gsi_next (gsi);
3672 unlink_stmt_vdef (stmt);
3673 gsi_remove (&sad.old_gsi, true);
3674 release_defs (stmt);
3675 sra_stats.deleted++;
3676 return SRA_AM_REMOVED;
3677 }
3678 }
3679 else
3680 {
3681 if (access_has_children_p (racc)
3682 && !racc->grp_unscalarized_data
3683 && TREE_CODE (lhs) != SSA_NAME)
3684 {
3685 if (dump_file)
3686 {
3687 fprintf (dump_file, "Removing load: ");
3688 print_gimple_stmt (dump_file, stmt, 0);
3689 }
3690 generate_subtree_copies (racc->first_child, lhs,
3691 racc->offset, 0, 0, gsi,
3692 false, false, loc);
3693 gcc_assert (stmt == gsi_stmt (*gsi));
3694 unlink_stmt_vdef (stmt);
3695 gsi_remove (gsi, true);
3696 release_defs (stmt);
3697 sra_stats.deleted++;
3698 return SRA_AM_REMOVED;
3699 }
3700 /* Restore the aggregate RHS from its components so the
3701 prevailing aggregate copy does the right thing. */
3702 if (access_has_children_p (racc))
3703 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
3704 gsi, false, false, loc);
	  /* Re-load the components of the aggregate copy destination,
	     but load them from the RHS aggregate to expose more
	     optimization opportunities.  */
3708 if (access_has_children_p (lacc))
3709 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
3710 0, 0, gsi, true, true, loc);
3711 }
3712
3713 return SRA_AM_NONE;
3714 }
3715}
3716
/* Set any scalar replacements of values in the constant pool to the initial
   value of the constant.  (Constant-pool decls like *.LC0 have effectively
   been initialized before the program starts; we must do the same for their
   replacements.)  Thus, we output statements like 'SR.1 = *.LC0[0];' into
   the function's entry block.  */
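
/* As a hedged illustration only (whether a constant-pool entry is used at all
   depends on the target and on how the initializer is expanded): a local
   aggregate initialization such as

     int a[3] = { 1, 2, 3 };

   may be emitted as a copy from a pool decl, roughly 'a = *.LC0;', and once
   accesses based on *.LC0 get scalar replacements, the function below emits
   the corresponding 'SR.N = *.LC0[...];' initializers on the edge leaving the
   entry block.  */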
3722
3723static void
3724initialize_constant_pool_replacements (void)
3725{
3726 gimple_seq seq = NULL;
3727 gimple_stmt_iterator gsi = gsi_start (seq);
3728 bitmap_iterator bi;
3729 unsigned i;
3730
3731 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
3732 {
3733 tree var = candidate (i);
3734 if (!constant_decl_p (var))
3735 continue;
3736 vec<access_p> *access_vec = get_base_access_vector (var);
3737 if (!access_vec)
3738 continue;
3739 for (unsigned i = 0; i < access_vec->length (); i++)
3740 {
3741 struct access *access = (*access_vec)[i];
3742 if (!access->replacement_decl)
3743 continue;
3744 gassign *stmt
3745 = gimple_build_assign (get_access_replacement (access),
3746 unshare_expr (access->expr));
3747 if (dump_file && (dump_flags & TDF_DETAILS))
3748 {
3749 fprintf (dump_file, "Generating constant initializer: ");
3750 print_gimple_stmt (dump_file, stmt, 0);
3751 fprintf (dump_file, "\n");
3752 }
3753 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
3754 update_stmt (stmt);
3755 }
3756 }
3757
3758 seq = gsi_seq (gsi);
3759 if (seq)
3760 gsi_insert_seq_on_edge_immediate (
3761 single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
3762}
3763
/* Traverse the function body and perform all modifications as decided in
   analyze_all_variable_accesses.  Return true iff the CFG has been
   changed.  */
3767
3768static bool
3769sra_modify_function_body (void)
3770{
3771 bool cfg_changed = false;
3772 basic_block bb;
3773
3774 initialize_constant_pool_replacements ();
3775
3776 FOR_EACH_BB_FN (bb, cfun)
3777 {
3778 gimple_stmt_iterator gsi = gsi_start_bb (bb);
3779 while (!gsi_end_p (gsi))
3780 {
3781 gimple *stmt = gsi_stmt (gsi);
3782 enum assignment_mod_result assign_result;
3783 bool modified = false, deleted = false;
3784 tree *t;
3785 unsigned i;
3786
3787 switch (gimple_code (stmt))
3788 {
3789 case GIMPLE_RETURN:
3790 t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
3791 if (*t != NULL_TREE)
3792 modified |= sra_modify_expr (t, &gsi, false);
3793 break;
3794
3795 case GIMPLE_ASSIGN:
3796 assign_result = sra_modify_assign (stmt, &gsi);
3797 modified |= assign_result == SRA_AM_MODIFIED;
3798 deleted = assign_result == SRA_AM_REMOVED;
3799 break;
3800
3801 case GIMPLE_CALL:
3802 /* Operands must be processed before the lhs. */
3803 for (i = 0; i < gimple_call_num_args (stmt); i++)
3804 {
3805 t = gimple_call_arg_ptr (stmt, i);
3806 modified |= sra_modify_expr (t, &gsi, false);
3807 }
3808
3809 if (gimple_call_lhs (stmt))
3810 {
3811 t = gimple_call_lhs_ptr (stmt);
3812 modified |= sra_modify_expr (t, &gsi, true);
3813 }
3814 break;
3815
3816 case GIMPLE_ASM:
3817 {
3818 gasm *asm_stmt = as_a <gasm *> (stmt);
3819 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
3820 {
3821 t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
3822 modified |= sra_modify_expr (t, &gsi, false);
3823 }
3824 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
3825 {
3826 t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
3827 modified |= sra_modify_expr (t, &gsi, true);
3828 }
3829 }
3830 break;
3831
3832 default:
3833 break;
3834 }
3835
3836 if (modified)
3837 {
3838 update_stmt (stmt);
3839 if (maybe_clean_eh_stmt (stmt)
3840 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
3841 cfg_changed = true;
3842 }
3843 if (!deleted)
3844 gsi_next (&gsi);
3845 }
3846 }
3847
3848 gsi_commit_edge_inserts ();
3849 return cfg_changed;
3850}
3851
3852/* Generate statements initializing scalar replacements of parts of function
3853 parameters. */
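
/* For illustration only (hypothetical replacement names): if a by-value
   parameter "p" of type 'struct S { int a; int b; }' had both of its fields
   scalarized, the function below emits statements along the lines of

     SR_p_a = p.a;
     SR_p_b = p.b;

   on the single edge leaving the entry block.  */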
3854
3855static void
3856initialize_parameter_reductions (void)
3857{
3858 gimple_stmt_iterator gsi;
3859 gimple_seq seq = NULL;
3860 tree parm;
3861
3862 gsi = gsi_start (seq);
3863 for (parm = DECL_ARGUMENTS (current_function_decl);
3864 parm;
3865 parm = DECL_CHAIN (parm))
3866 {
3867 vec<access_p> *access_vec;
3868 struct access *access;
3869
3870 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3871 continue;
3872 access_vec = get_base_access_vector (parm);
3873 if (!access_vec)
3874 continue;
3875
3876 for (access = (*access_vec)[0];
3877 access;
3878 access = access->next_grp)
3879 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
3880 EXPR_LOCATION (parm));
3881 }
3882
3883 seq = gsi_seq (gsi);
3884 if (seq)
3885 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
3886}
3887
3888/* The "main" function of intraprocedural SRA passes. Runs the analysis and if
3889 it reveals there are components of some aggregates to be scalarized, it runs
3890 the required transformations. */
3891static unsigned int
3892perform_intra_sra (void)
3893{
3894 int ret = 0;
3895 sra_initialize ();
3896
3897 if (!find_var_candidates ())
3898 goto out;
3899
3900 if (!scan_function ())
3901 goto out;
3902
3903 if (!analyze_all_variable_accesses ())
3904 goto out;
3905
3906 if (sra_modify_function_body ())
3907 ret = TODO_update_ssa | TODO_cleanup_cfg;
3908 else
3909 ret = TODO_update_ssa;
3910 initialize_parameter_reductions ();
3911
3912 statistics_counter_event (cfun, "Scalar replacements created",
3913 sra_stats.replacements);
3914 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
3915 statistics_counter_event (cfun, "Subtree copy stmts",
3916 sra_stats.subtree_copies);
3917 statistics_counter_event (cfun, "Subreplacement stmts",
3918 sra_stats.subreplacements);
3919 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
3920 statistics_counter_event (cfun, "Separate LHS and RHS handling",
3921 sra_stats.separate_lhs_rhs_handling);
3922
3923 out:
3924 sra_deinitialize ();
3925 return ret;
3926}
3927
3928/* Perform early intraprocedural SRA. */
3929static unsigned int
3930early_intra_sra (void)
3931{
3932 sra_mode = SRA_MODE_EARLY_INTRA;
3933 return perform_intra_sra ();
3934}
3935
3936/* Perform "late" intraprocedural SRA. */
3937static unsigned int
3938late_intra_sra (void)
3939{
3940 sra_mode = SRA_MODE_INTRA;
3941 return perform_intra_sra ();
3942}
3943
3944
3945static bool
3946gate_intra_sra (void)
3947{
3948 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
3949}
3950
3951
3952namespace {
3953
3954const pass_data pass_data_sra_early =
3955{
3956 GIMPLE_PASS, /* type */
3957 "esra", /* name */
3958 OPTGROUP_NONE, /* optinfo_flags */
3959 TV_TREE_SRA, /* tv_id */
3960 ( PROP_cfg | PROP_ssa ), /* properties_required */
3961 0, /* properties_provided */
3962 0, /* properties_destroyed */
3963 0, /* todo_flags_start */
3964 TODO_update_ssa, /* todo_flags_finish */
3965};
3966
3967class pass_sra_early : public gimple_opt_pass
3968{
3969public:
3970 pass_sra_early (gcc::context *ctxt)
3971 : gimple_opt_pass (pass_data_sra_early, ctxt)
3972 {}
3973
3974 /* opt_pass methods: */
3975 virtual bool gate (function *) { return gate_intra_sra (); }
3976 virtual unsigned int execute (function *) { return early_intra_sra (); }
3977
3978}; // class pass_sra_early
3979
3980} // anon namespace
3981
3982gimple_opt_pass *
3983make_pass_sra_early (gcc::context *ctxt)
3984{
3985 return new pass_sra_early (ctxt);
3986}
3987
3988namespace {
3989
3990const pass_data pass_data_sra =
3991{
3992 GIMPLE_PASS, /* type */
3993 "sra", /* name */
3994 OPTGROUP_NONE, /* optinfo_flags */
3995 TV_TREE_SRA, /* tv_id */
3996 ( PROP_cfg | PROP_ssa ), /* properties_required */
3997 0, /* properties_provided */
3998 0, /* properties_destroyed */
3999 TODO_update_address_taken, /* todo_flags_start */
4000 TODO_update_ssa, /* todo_flags_finish */
4001};
4002
4003class pass_sra : public gimple_opt_pass
4004{
4005public:
4006 pass_sra (gcc::context *ctxt)
4007 : gimple_opt_pass (pass_data_sra, ctxt)
4008 {}
4009
4010 /* opt_pass methods: */
4011 virtual bool gate (function *) { return gate_intra_sra (); }
4012 virtual unsigned int execute (function *) { return late_intra_sra (); }
4013
4014}; // class pass_sra
4015
4016} // anon namespace
4017
4018gimple_opt_pass *
4019make_pass_sra (gcc::context *ctxt)
4020{
4021 return new pass_sra (ctxt);
4022}
4023
4024
4025/* Return true iff PARM (which must be a parm_decl) is an unused scalar
4026 parameter. */
4027
4028static bool
4029is_unused_scalar_param (tree parm)
4030{
4031 tree name;
4032 return (is_gimple_reg (parm)
4033 && (!(name = ssa_default_def (cfun, parm))
4034 || has_zero_uses (name)));
4035}
4036
4037/* Scan immediate uses of a default definition SSA name of a parameter PARM and
4038 examine whether there are any direct or otherwise infeasible ones. If so,
4039 return true, otherwise return false. PARM must be a gimple register with a
4040 non-NULL default definition. */
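
/* Purely illustrative examples: for 'void foo (struct S *p)', uses such as
   'x = p->a' or 'p->a = x' merely dereference the default definition of "p"
   and are acceptable, whereas uses of the pointer value itself, e.g. 'q = p'
   or 'bar (p)', count as direct uses and will make find_param_candidates
   reject the parameter.  */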
4041
4042static bool
4043ptr_parm_has_direct_uses (tree parm)
4044{
4045 imm_use_iterator ui;
4046 gimple *stmt;
4047 tree name = ssa_default_def (cfun, parm);
4048 bool ret = false;
4049
4050 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
4051 {
4052 int uses_ok = 0;
4053 use_operand_p use_p;
4054
4055 if (is_gimple_debug (stmt))
4056 continue;
4057
4058 /* Valid uses include dereferences on the lhs and the rhs. */
4059 if (gimple_has_lhs (stmt))
4060 {
4061 tree lhs = gimple_get_lhs (stmt);
4062 while (handled_component_p (lhs))
4063 lhs = TREE_OPERAND (lhs, 0);
4064 if (TREE_CODE (lhs) == MEM_REF
4065 && TREE_OPERAND (lhs, 0) == name
4066 && integer_zerop (TREE_OPERAND (lhs, 1))
4067 && types_compatible_p (TREE_TYPE (lhs),
4068 TREE_TYPE (TREE_TYPE (name)))
4069 && !TREE_THIS_VOLATILE (lhs))
4070 uses_ok++;
4071 }
4072 if (gimple_assign_single_p (stmt))
4073 {
4074 tree rhs = gimple_assign_rhs1 (stmt);
4075 while (handled_component_p (rhs))
4076 rhs = TREE_OPERAND (rhs, 0);
4077 if (TREE_CODE (rhs) == MEM_REF
4078 && TREE_OPERAND (rhs, 0) == name
4079 && integer_zerop (TREE_OPERAND (rhs, 1))
4080 && types_compatible_p (TREE_TYPE (rhs),
4081 TREE_TYPE (TREE_TYPE (name)))
4082 && !TREE_THIS_VOLATILE (rhs))
4083 uses_ok++;
4084 }
4085 else if (is_gimple_call (stmt))
4086 {
4087 unsigned i;
4088 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4089 {
4090 tree arg = gimple_call_arg (stmt, i);
4091 while (handled_component_p (arg))
4092 arg = TREE_OPERAND (arg, 0);
4093 if (TREE_CODE (arg) == MEM_REF
4094 && TREE_OPERAND (arg, 0) == name
4095 && integer_zerop (TREE_OPERAND (arg, 1))
4096 && types_compatible_p (TREE_TYPE (arg),
4097 TREE_TYPE (TREE_TYPE (name)))
4098 && !TREE_THIS_VOLATILE (arg))
4099 uses_ok++;
4100 }
4101 }
4102
      /* If the number of valid uses does not match the number of
	 uses in this stmt, there is an unhandled use.  */
4105 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
4106 --uses_ok;
4107
4108 if (uses_ok != 0)
4109 ret = true;
4110
4111 if (ret)
4112 BREAK_FROM_IMM_USE_STMT (ui);
4113 }
4114
4115 return ret;
4116}
4117
/* Identify candidates for reduction for IPA-SRA based on their type and mark
   them in candidate_bitmap.  Note that these do not necessarily include
   parameters which are unused and thus can be removed.  Return true iff any
   such candidate has been found.  */
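
/* Illustrative only: in 'void f (struct S s, struct S *p, va_list ap)', both
   "s" (an aggregate passed by value) and "p" (provided the pointer itself has
   no direct uses) may pass the tests below and be recorded as candidates,
   while "ap" is skipped because va_list parameters are never considered.  */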
4122
4123static bool
4124find_param_candidates (void)
4125{
4126 tree parm;
4127 int count = 0;
4128 bool ret = false;
4129 const char *msg;
4130
4131 for (parm = DECL_ARGUMENTS (current_function_decl);
4132 parm;
4133 parm = DECL_CHAIN (parm))
4134 {
4135 tree type = TREE_TYPE (parm);
4136 tree_node **slot;
4137
4138 count++;
4139
4140 if (TREE_THIS_VOLATILE (parm)
4141 || TREE_ADDRESSABLE (parm)
4142 || (!is_gimple_reg_type (type) && is_va_list_type (type)))
4143 continue;
4144
4145 if (is_unused_scalar_param (parm))
4146 {
4147 ret = true;
4148 continue;
4149 }
4150
4151 if (POINTER_TYPE_P (type))
4152 {
4153 type = TREE_TYPE (type);
4154
4155 if (TREE_CODE (type) == FUNCTION_TYPE
4156 || TYPE_VOLATILE (type)
4157 || (TREE_CODE (type) == ARRAY_TYPE
4158 && TYPE_NONALIASED_COMPONENT (type))
4159 || !is_gimple_reg (parm)
4160 || is_va_list_type (type)
4161 || ptr_parm_has_direct_uses (parm))
4162 continue;
4163 }
4164 else if (!AGGREGATE_TYPE_P (type))
4165 continue;
4166
4167 if (!COMPLETE_TYPE_P (type)
4168 || !tree_fits_uhwi_p (TYPE_SIZE (type))
4169 || tree_to_uhwi (TYPE_SIZE (type)) == 0
4170 || (AGGREGATE_TYPE_P (type)
4171 && type_internals_preclude_sra_p (type, &msg)))
4172 continue;
4173
4174 bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
4175 slot = candidates->find_slot_with_hash (parm, DECL_UID (parm), INSERT);
4176 *slot = parm;
4177
4178 ret = true;
4179 if (dump_file && (dump_flags & TDF_DETAILS))
4180 {
4181 fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
4182 print_generic_expr (dump_file, parm);
4183 fprintf (dump_file, "\n");
4184 }
4185 }
4186
4187 func_param_count = count;
4188 return ret;
4189}
4190
4191/* Callback of walk_aliased_vdefs, marks the access passed as DATA as
4192 maybe_modified. */
4193
4194static bool
4195mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
4196 void *data)
4197{
4198 struct access *repr = (struct access *) data;
4199
4200 repr->grp_maybe_modified = 1;
4201 return true;
4202}
4203
/* Analyze which representatives (in linked lists accessible from
   REPRESENTATIVES) can be modified by side effects of statements in the
   current function.  */
4207
4208static void
4209analyze_modified_params (vec<access_p> representatives)
4210{
4211 int i;
4212
4213 for (i = 0; i < func_param_count; i++)
4214 {
4215 struct access *repr;
4216
4217 for (repr = representatives[i];
4218 repr;
4219 repr = repr->next_grp)
4220 {
4221 struct access *access;
4222 bitmap visited;
4223 ao_ref ar;
4224
4225 if (no_accesses_p (repr))
4226 continue;
4227 if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
4228 || repr->grp_maybe_modified)
4229 continue;
4230
4231 ao_ref_init (&ar, repr->expr);
4232 visited = BITMAP_ALLOC (NULL);
4233 for (access = repr; access; access = access->next_sibling)
4234 {
	      /* All accesses are read ones; otherwise grp_maybe_modified
		 would be trivially set.  */
4237 walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
4238 mark_maybe_modified, repr, &visited);
4239 if (repr->grp_maybe_modified)
4240 break;
4241 }
4242 BITMAP_FREE (visited);
4243 }
4244 }
4245}
4246
/* Propagate distances in bb_dereferences against the direction of the control
   flow edges, in each step storing the maximum of the current value and the
   minimum of all successors.  These steps are repeated until the table
   stabilizes.  Note that BBs which might terminate the function (according to
   the final_bbs bitmap) are never updated in this way.  */
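
/* Expressed as a dataflow step (a restatement of the loop below, not extra
   behavior): for every parameter I and every block BB that is not in
   final_bbs and has at least one successor,

     dist (BB, I) = MAX (dist (BB, I), MIN over successors S of dist (S, I))

   where dist (X, I) stands for bb_dereferences[X->index * func_param_count
   + I], and a block is put back on the work queue whenever the value of one
   of its successors increases.  */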
4252
4253static void
4254propagate_dereference_distances (void)
4255{
4256 basic_block bb;
4257
4258 auto_vec<basic_block> queue (last_basic_block_for_fn (cfun));
4259 queue.quick_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
4260 FOR_EACH_BB_FN (bb, cfun)
4261 {
4262 queue.quick_push (bb);
4263 bb->aux = bb;
4264 }
4265
4266 while (!queue.is_empty ())
4267 {
4268 edge_iterator ei;
4269 edge e;
4270 bool change = false;
4271 int i;
4272
4273 bb = queue.pop ();
4274 bb->aux = NULL;
4275
4276 if (bitmap_bit_p (final_bbs, bb->index))
4277 continue;
4278
4279 for (i = 0; i < func_param_count; i++)
4280 {
4281 int idx = bb->index * func_param_count + i;
4282 bool first = true;
4283 HOST_WIDE_INT inh = 0;
4284
4285 FOR_EACH_EDGE (e, ei, bb->succs)
4286 {
4287 int succ_idx = e->dest->index * func_param_count + i;
4288
4289 if (e->src == EXIT_BLOCK_PTR_FOR_FN (cfun))
4290 continue;
4291
4292 if (first)
4293 {
4294 first = false;
4295 inh = bb_dereferences [succ_idx];
4296 }
4297 else if (bb_dereferences [succ_idx] < inh)
4298 inh = bb_dereferences [succ_idx];
4299 }
4300
4301 if (!first && bb_dereferences[idx] < inh)
4302 {
4303 bb_dereferences[idx] = inh;
4304 change = true;
4305 }
4306 }
4307
4308 if (change && !bitmap_bit_p (final_bbs, bb->index))
4309 FOR_EACH_EDGE (e, ei, bb->preds)
4310 {
4311 if (e->src->aux)
4312 continue;
4313
4314 e->src->aux = e->src;
4315 queue.quick_push (e->src);
4316 }
4317 }
4318}
4319
4320/* Dump a dereferences TABLE with heading STR to file F. */
4321
4322static void
4323dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
4324{
4325 basic_block bb;
4326
  fprintf (f, "%s", str);
4328 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
4329 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
4330 {
4331 fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
4332 if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
4333 {
4334 int i;
4335 for (i = 0; i < func_param_count; i++)
4336 {
4337 int idx = bb->index * func_param_count + i;
4338 fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
4339 }
4340 }
4341 fprintf (f, "\n");
4342 }
  fprintf (f, "\n");
4344}
4345
/* Determine which (parts of) parameters passed by reference and not assigned
   to are not certainly dereferenced in this function, so that the dereference
   cannot be safely moved to the caller without potentially introducing a
   segfault.  Mark such REPRESENTATIVES as grp_not_necessarilly_dereferenced.

   Rather than a simple boolean, the maximum dereferenced "distance", i.e. the
   offset + size of the accessed part, is calculated for each pointer
   parameter in order to handle cases when only a fraction of the whole
   aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
   an example).

   The maximum dereference distances for each pointer parameter and BB are
   already stored in bb_dereferences.  This routine simply propagates these
   values upwards by propagate_dereference_distances and then compares the
   distances of individual parameters in the ENTRY BB to the equivalent
   distances of each representative of a (fraction of a) parameter.  */
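
/* A hedged illustration of why a distance rather than a boolean is kept
   (inspired by, but not copied from, the testcase mentioned above): if a
   caller may allocate only the first few bytes of the pointed-to aggregate,
   a representative access confined to those bytes can still be dereferenced
   in the caller as long as every path through this function dereferences at
   least that many bytes, whereas a representative reaching beyond the
   guaranteed distance must be marked grp_not_necessarilly_dereferenced.  */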
4363
4364static void
4365