/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
   tree representation into the GIMPLE form.
   Copyright (C) 2002-2024 Free Software Foundation, Inc.
   Major work done by Sebastian Pop <s.pop@laposte.net>,
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "tm_p.h"
#include "gimple.h"
#include "gimple-predict.h"
#include "tree-pass.h"		/* FIXME: only for PROP_gimple_any */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "diagnostic.h"		/* For errorcount.  */
#include "alias.h"
#include "fold-const.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-cfg.h"
#include "tree-ssa.h"
#include "tree-hash-traits.h"
#include "omp-general.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "gomp-constants.h"
#include "splay-tree.h"
#include "gimple-walk.h"
#include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name */
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "dbgcnt.h"
#include "omp-offload.h"
#include "context.h"
#include "tree-nested.h"

/* Identifier for a basic condition, mapping it to other basic conditions of
   its Boolean expression.  Basic conditions given the same uid (in the same
   function) are parts of the same ANDIF/ORIF expression.  Used for condition
   coverage.  */
static unsigned nextuid = 1;
/* Get a fresh identifier for a new condition expression.  This is used for
   condition coverage.  */
static unsigned
next_cond_uid ()
{
  return nextuid++;
}
/* Reset the condition uid to the value it should have when compiling a new
   function.  0 is already the default/untouched value, so start at non-zero.
   A valid and set id should always be > 0.  This is used for condition
   coverage.  */
static void
reset_cond_uid ()
{
  nextuid = 1;
}
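
/* Illustrative sketch (not part of the pass itself): for a condition such
   as 'if (a && b || c)' the basic conditions a, b and c all receive the
   same uid from next_cond_uid (), while the conditions of a later,
   unrelated Boolean expression receive a fresh one.  */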

/* Hash set of poisoned variables in a bind expr.  */
static hash_set<tree> *asan_poisoned_variables = NULL;

enum gimplify_omp_var_data
{
  GOVD_SEEN = 0x000001,
  GOVD_EXPLICIT = 0x000002,
  GOVD_SHARED = 0x000004,
  GOVD_PRIVATE = 0x000008,
  GOVD_FIRSTPRIVATE = 0x000010,
  GOVD_LASTPRIVATE = 0x000020,
  GOVD_REDUCTION = 0x000040,
  GOVD_LOCAL = 0x000080,
  GOVD_MAP = 0x000100,
  GOVD_DEBUG_PRIVATE = 0x000200,
  GOVD_PRIVATE_OUTER_REF = 0x000400,
  GOVD_LINEAR = 0x000800,
  GOVD_ALIGNED = 0x001000,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 0x002000,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 0x004000,

  GOVD_MAP_0LEN_ARRAY = 0x008000,

  /* Flag for GOVD_MAP, if it is an always,to or always,tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 0x010000,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 0x020000,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 0x040000,

  /* Flag for GOVD_MAP: must be present already.  */
  GOVD_MAP_FORCE_PRESENT = 0x080000,

  /* Flag for GOVD_MAP: only allocate.  */
  GOVD_MAP_ALLOC_ONLY = 0x100000,

  /* Flag for GOVD_MAP: only copy back.  */
  GOVD_MAP_FROM_ONLY = 0x200000,

  GOVD_NONTEMPORAL = 0x400000,

  /* Flag for GOVD_LASTPRIVATE: conditional modifier.  */
  GOVD_LASTPRIVATE_CONDITIONAL = 0x800000,

  GOVD_CONDTEMP = 0x1000000,

  /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause.  */
  GOVD_REDUCTION_INSCAN = 0x2000000,

  /* Flag for GOVD_FIRSTPRIVATE: OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT.  */
  GOVD_FIRSTPRIVATE_IMPLICIT = 0x4000000,

  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};


enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_TASKGROUP = 0x01,
  ORT_SIMD = 0x04,

  ORT_PARALLEL = 0x08,
  ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,

  ORT_TASK = 0x10,
  ORT_UNTIED_TASK = ORT_TASK | 1,
  ORT_TASKLOOP = ORT_TASK | 2,
  ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,

  ORT_TEAMS = 0x20,
  ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
  ORT_HOST_TEAMS = ORT_TEAMS | 2,
  ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,

  /* Data region.  */
  ORT_TARGET_DATA = 0x40,

  /* Data region with offloading.  */
  ORT_TARGET = 0x80,
  ORT_COMBINED_TARGET = ORT_TARGET | 1,
  ORT_IMPLICIT_TARGET = ORT_TARGET | 2,

  /* OpenACC variants.  */
  ORT_ACC = 0x100,				/* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,	/* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,	/* Parallel construct.  */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 2,	/* Kernels construct.  */
  ORT_ACC_SERIAL = ORT_ACC | ORT_TARGET | 4,	/* Serial construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x200
};

/* Gimplify hashtable helper.  */

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};

struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;

  vec<gbind *> bind_expr_stack;
  tree temps;
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  hash_set<tree> *live_switch_vars;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  int conditions;
  unsigned into_ssa : 1;
  unsigned allow_rhs_cond_expr : 1;
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
  unsigned in_switch_expr : 1;
};

enum gimplify_defaultmap_kind
{
  GDMK_SCALAR,
  GDMK_SCALAR_TARGET, /* w/ Fortran's target attr, implicit mapping, only.  */
  GDMK_AGGREGATE,
  GDMK_ALLOCATABLE,
  GDMK_POINTER
};

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  splay_tree variables;
  hash_set<tree> *privatized_types;
  tree clauses;
  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  enum tree_code code;
  bool combined_loop;
  bool distribute;
  bool target_firstprivatize_array_bases;
  bool add_safelen1;
  bool order_concurrent;
  bool has_depend;
  bool in_for_exprs;
  int defaultmap[5];
};

static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;
static bool in_omp_construct;

/* Forward declarations.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
static hash_map<tree, tree> *oacc_declare_returns;
static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
					   bool (*) (tree), fallback_t, bool);
static void prepare_gimple_addressable (tree *, gimple_seq *);

/* Shorter alias name for the above function for use in gimplify.cc
   only.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}

/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_seq, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

static void
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
}


/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
   and popping gimplify contexts.  */

static struct gimplify_ctx *ctx_pool = NULL;

/* Return a gimplify context struct from the pool.  */

static inline struct gimplify_ctx *
ctx_alloc (void)
{
  struct gimplify_ctx * c = ctx_pool;

  if (c)
    ctx_pool = c->prev_context;
  else
    c = XNEW (struct gimplify_ctx);

  memset (c, '\0', sizeof (*c));
  return c;
}

/* Put gimplify context C back into the pool.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}

/* Free allocated ctx stack memory.  */

void
free_gimplify_stack (void)
{
  struct gimplify_ctx *c;

  while ((c = ctx_pool))
    {
      ctx_pool = c->prev_context;
      free (c);
    }
}


/* Set up a context for the gimplifier.  */

void
push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
{
  struct gimplify_ctx *c = ctx_alloc ();

  c->prev_context = gimplify_ctxp;
  gimplify_ctxp = c;
  gimplify_ctxp->into_ssa = in_ssa;
  gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
}

/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple *body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}
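
/* A minimal usage sketch, not copied from any particular caller (the real
   sequence lives in places like gimplify_body): temporaries created while
   the context is live end up declared in the GIMPLE_BIND handed to
   pop_gimplify_context.  'body_tree' is a placeholder for the GENERIC
   body being lowered.

     push_gimplify_context ();
     gimple_seq seq = NULL;
     gimplify_stmt (&body_tree, &seq);
     gbind *bind = gimple_build_bind (NULL_TREE, seq, NULL_TREE);
     pop_gimplify_context (bind);  */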

/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}

/* Pop the first element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}

/* Return the first element of the stack of bindings.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}

/* Return the stack of bindings created during gimplification.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}

/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}

/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}

/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* Create a new omp construct that deals with variable remapping.  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  c->outer_context = gimplify_omp_ctxp;
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = new hash_set<tree>;
  c->location = input_location;
  c->region_type = region_type;
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
  c->defaultmap[GDMK_SCALAR] = GOVD_MAP;
  c->defaultmap[GDMK_SCALAR_TARGET] = GOVD_MAP;
  c->defaultmap[GDMK_AGGREGATE] = GOVD_MAP;
  c->defaultmap[GDMK_ALLOCATABLE] = GOVD_MAP;
  c->defaultmap[GDMK_POINTER] = GOVD_MAP;

  return c;
}

/* Destroy an omp construct that deals with variable remapping.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  c->loop_iter_var.release ();
  XDELETE (c);
}

static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);

/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}

/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple *
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    return gimple_seq_first_stmt (*seq_p);
}

/* Returns true iff T is a valid RHS for an assignment to an un-renamed
   LHS, or for a call argument.  */

static bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return is_gimple_val (t) || is_gimple_lvalue (t);
}
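
/* For example (illustrative only): for 'int i', a register type, the RHS
   must be a GIMPLE value such as 'i' or '42'; for 'struct S s' an lvalue
   such as 's' or '*p' is also acceptable, because aggregate assignments
   are memory-to-memory copies.  */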

/* Return true if T is a CALL_EXPR or an expression that can be
   assigned to a temporary.  Note that this predicate should only be
   used during gimplification.  See the rationale for this in
   gimplify_modify_expr.  */

static bool
is_gimple_reg_rhs_or_call (tree t)
{
  return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
	  || TREE_CODE (t) == CALL_EXPR);
}

/* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
   this predicate should only be used during gimplification.  See the
   rationale for this in gimplify_modify_expr.  */

static bool
is_gimple_mem_rhs_or_call (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return (is_gimple_val (t)
	    || is_gimple_lvalue (t)
	    || TREE_CLOBBER_P (t)
	    || TREE_CODE (t) == CALL_EXPR);
}

/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  /* Drop all qualifiers and address-space information from the value type.  */
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
  tree var = create_tmp_var (type, get_name (val));
  return var;
}

/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary.  If NOT_GIMPLE_REG, mark it as such.  */

static tree
lookup_tmp_var (tree val, bool is_formal, bool not_gimple_reg)
{
  tree ret;

  /* We cannot mark a formal temporary with DECL_NOT_GIMPLE_REG_P.  */
  gcc_assert (!is_formal || !not_gimple_reg);

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    {
      ret = create_tmp_from_val (val);
      DECL_NOT_GIMPLE_REG_P (ret) = not_gimple_reg;
    }
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      elt.val = val;
      if (!gimplify_ctxp->temp_htab)
	gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = elt_p;
	}
      else
	{
	  elt_p = *slot;
	  ret = elt_p->temp;
	}
    }

  return ret;
}
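
/* Behavior sketch (hypothetical calls): when optimizing, two formal
   requests for the same tree VAL hit the same hash table slot, so

     tree t1 = lookup_tmp_var (val, true, false);
     tree t2 = lookup_tmp_var (val, true, false);

   yield t1 == t2, whereas at -O0, or when VAL has side effects, each
   call creates a fresh temporary.  */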

/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal, bool allow_ssa, bool not_gimple_reg)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (allow_ssa
      && gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    {
      t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
      if (! gimple_in_ssa_p (cfun))
	{
	  const char *name = get_name (val);
	  if (name)
	    SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
	}
    }
  else
    t = lookup_tmp_var (val, is_formal, not_gimple_reg);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  /* If we failed to gimplify VAL then we can end up with the temporary
     SSA name not having a definition.  In this case return a decl.  */
  if (TREE_CODE (t) == SSA_NAME && ! SSA_NAME_DEF_STMT (t))
    return lookup_tmp_var (val, is_formal, not_gimple_reg);

  return t;
}

/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true, true, false);
}

/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p,
			 gimple_seq *post_p /* = NULL */,
			 bool allow_ssa /* = true */)
{
  return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa, false);
}
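
/* As an illustration (temporary names are hypothetical): calling
   get_initialized_tmp_var on the expression 'foo (x) + 1' appends
   roughly

     _1 = foo (x);
     D.1234 = _1 + 1;

   to *PRE_P and returns D.1234, or an SSA name when gimplifying
   directly into SSA form.  */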

/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  */

void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}

/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (VAR_P (var));

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  int flag = GOVD_LOCAL | GOVD_SEEN;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_TASKGROUP
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    {
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (tmp)
		  && !TREE_STATIC (tmp))
		{
		  if (TREE_CODE (DECL_SIZE_UNIT (tmp)) != INTEGER_CST)
		    ctx->add_safelen1 = true;
		  else if (ctx->in_for_exprs)
		    flag = GOVD_PRIVATE;
		  else
		    flag = GOVD_PRIVATE | GOVD_SEEN;
		  break;
		}
	      ctx = ctx->outer_context;
	    }
	  if (ctx)
	    omp_add_variable (ctx, tmp, flag);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}



/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
   nodes that are referenced more than once in GENERIC functions.  This is
   necessary because gimplification (translation into GIMPLE) is performed
   by modifying tree nodes in-place, so gimplification of a shared node in a
   first context could generate an invalid GIMPLE form in a second context.

   This is achieved with a simple mark/copy/unmark algorithm that walks the
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
   time it encounters them, duplicates them if they already have TREE_VISITED
   set, and finally removes the TREE_VISITED marks it has set.

   The algorithm works only at the function level, i.e. it generates a GENERIC
   representation of a function with no nodes shared within the function when
   passed a GENERIC function (except for nodes that are allowed to be shared).

   At the global level, it is also necessary to unshare tree nodes that are
   referenced in more than one function, for the same aforementioned reason.
   This requires some cooperation from the front-end.  There are 2 strategies:

   1. Manual unsharing.  The front-end needs to call unshare_expr on every
      expression that might end up being shared across functions.

   2. Deep unsharing.  This is an extension of regular unsharing.  Instead
      of calling unshare_expr on expressions that might be shared across
      functions, the front-end pre-marks them with TREE_VISITED.  This will
      ensure that they are unshared on the first reference within functions
      when the regular unsharing algorithm runs.  The counterpart is that
      this algorithm must look deeper than for manual unsharing, which is
      specified by LANG_HOOKS_DEEP_UNSHARING.

   If there are only few specific cases of node sharing across functions, it is
   probably easier for a front-end to unshare the expressions manually.  On the
   contrary, if the expressions generated at the global level are as widespread
   as expressions generated within functions, deep unsharing is very likely the
   way to go.  */
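
/* A small example of what the mark/copy/unmark walk achieves
   (illustrative): if the GENERIC tree for 'x + y' is referenced from two
   statements, the first walk over it sets TREE_VISITED; when the walk
   reaches the second reference it finds the mark and substitutes a fresh
   copy, so gimplifying one statement in place can no longer corrupt the
   other.  */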

/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !((hash_set<tree> *)data)->add (t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}

/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}

/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  */

void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}

/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  hash_set<tree> *visited
    = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;

  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  delete visited;

  if (cgn)
    for (cgn = first_nested_function (cgn); cgn;
	 cgn = next_nested_function (cgn))
      unshare_body (cgn->decl);
}

/* Callback for walk_tree to unmark the visited trees rooted at *TP.
   Subtrees are walked until the first unvisited node is encountered.  */

static tree
unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  /* If this node has been visited, unmark it and keep looking.  */
  if (TREE_VISITED (t))
    TREE_VISITED (t) = 0;

  /* Otherwise, don't look any deeper.  */
  else
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Unmark the visited trees rooted at *TP.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}

/* Likewise, but mark all trees as not visited.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  if (cgn)
    for (cgn = first_nested_function (cgn);
	 cgn; cgn = next_nested_function (cgn))
      unvisit_body (cgn->decl);
}

/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}

/* Worker for unshare_expr_without_location.  */

static tree
prune_expr_location (tree *tp, int *walk_subtrees, void *)
{
  if (EXPR_P (*tp))
    SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
  else
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}

/* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
   one, OR_ELSE otherwise.  The location of a STATEMENT_LIST
   comprising at least one DEBUG_BEGIN_STMT followed by exactly one
   EXPR is the location of the EXPR.  */

static location_t
rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION)
{
  if (!expr)
    return or_else;

  if (EXPR_HAS_LOCATION (expr))
    return EXPR_LOCATION (expr);

  if (TREE_CODE (expr) != STATEMENT_LIST)
    return or_else;

  tree_stmt_iterator i = tsi_start (expr);

  bool found = false;
  while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
    {
      found = true;
      tsi_next (&i);
    }

  if (!found || !tsi_one_before_end_p (i))
    return or_else;

  return rexpr_location (tsi_stmt (i), or_else);
}
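
/* For instance (sketch): given a STATEMENT_LIST of the form

     DEBUG_BEGIN_STMT
     DEBUG_BEGIN_STMT
     x = 1;

   rexpr_location returns the location of 'x = 1', while a list holding
   two or more non-debug statements yields OR_ELSE.  */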

/* Return TRUE iff EXPR (maybe recursively) has a location; see
   rexpr_location for the potential recursion.  */

static inline bool
rexpr_has_location (tree expr)
{
  return rexpr_location (expr) != UNKNOWN_LOCATION;
}


/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
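
/* An illustrative example ('retval' is the temporary created above): for
   a GNU statement expression used as an initializer,

     int j = ({ int i = f (); i + 1; });

   the BIND_EXPR wrapper has type int; voidify_wrapper_expr rewrites the
   value computation into 'retval = i + 1', gives the wrapper (and every
   container on the way down) void type, and returns 'retval'.  */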

/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
   a temporary through which they communicate.  */

static void
build_stack_save_restore (gcall **save, gcall **restore)
{
  tree tmp_var;

  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  gimple_call_set_lhs (*save, tmp_var);

  *restore
    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
			 1, tmp_var);
}
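
/* The generated pair corresponds to GIMPLE of roughly this shape (the
   temporary's name is whatever create_tmp_var produces; shown here only
   as an illustration):

     saved_stack.1 = __builtin_stack_save ();
     ...
     __builtin_stack_restore (saved_stack.1);  */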

/* Generate an IFN_ASAN_MARK call that poisons the shadow memory of the
   variable DECL.  */

static tree
build_asan_poison_call_expr (tree decl)
{
  /* Do not poison variables that have size equal to zero.  */
  tree unit_size = DECL_SIZE_UNIT (decl);
  if (zerop (unit_size))
    return NULL_TREE;

  tree base = build_fold_addr_expr (decl);

  return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
				       void_type_node, 3,
				       build_int_cst (integer_type_node,
						      ASAN_MARK_POISON),
				       base, unit_size);
}

/* Generate an IFN_ASAN_MARK call that poisons or unpoisons, depending on
   the POISON flag, the shadow memory of variable DECL.  The call is
   inserted at the location identified by the iterator IT; the BEFORE
   flag selects whether the statement goes before or after that point.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
		      bool before)
{
  tree unit_size = DECL_SIZE_UNIT (decl);
  tree base = build_fold_addr_expr (decl);

  /* Do not poison variables that have size equal to zero.  */
  if (zerop (unit_size))
    return;

  /* It's necessary to have all stack variables aligned to ASAN granularity
     bytes.  */
  gcc_assert (!hwasan_sanitize_p () || hwasan_sanitize_stack_p ());
  unsigned shadow_granularity
    = hwasan_sanitize_p () ? HWASAN_TAG_GRANULE_SIZE : ASAN_SHADOW_GRANULARITY;
  if (DECL_ALIGN_UNIT (decl) <= shadow_granularity)
    SET_DECL_ALIGN (decl, BITS_PER_UNIT * shadow_granularity);

  HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;

  gimple *g
    = gimple_build_call_internal (IFN_ASAN_MARK, 3,
				  build_int_cst (integer_type_node, flags),
				  base, unit_size);

  if (before)
    gsi_insert_before (it, g, GSI_NEW_STMT);
  else
    gsi_insert_after (it, g, GSI_NEW_STMT);
}
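
/* For a 4-byte variable 'x', the emitted internal call is printed in
   GIMPLE dumps roughly as

     ASAN_MARK (POISON, &x, 4);

   with UNPOISON instead when POISON is false (an illustration, not
   output copied from a dump).  */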

/* Generate an IFN_ASAN_MARK internal call that either poisons or
   unpoisons a DECL, depending on the POISON flag.  The created statement
   is appended to the SEQ_P gimple sequence.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
{
  gimple_stmt_iterator it = gsi_last (*seq_p);
  bool before = false;

  if (gsi_end_p (it))
    before = true;

  asan_poison_variable (decl, poison, &it, before);
}

/* Sort pair of VAR_DECLs A and B by DECL_UID.  */

static int
sort_by_decl_uid (const void *a, const void *b)
{
  const tree *t1 = (const tree *)a;
  const tree *t2 = (const tree *)b;

  int uid1 = DECL_UID (*t1);
  int uid2 = DECL_UID (*t2);

  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  else
    return 0;
}

/* Generate an IFN_ASAN_MARK internal call for each variable in VARIABLES,
   poisoning or unpoisoning depending on the POISON flag.  The created
   statements are appended to the SEQ_P gimple sequence.  */

static void
asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
{
  unsigned c = variables->elements ();
  if (c == 0)
    return;

  auto_vec<tree> sorted_variables (c);

  for (hash_set<tree>::iterator it = variables->begin ();
       it != variables->end (); ++it)
    sorted_variables.safe_push (*it);

  sorted_variables.qsort (sort_by_decl_uid);

  unsigned i;
  tree var;
  FOR_EACH_VEC_ELT (sorted_variables, i, var)
    {
      asan_poison_variable (var, poison, seq_p);

      /* Add the use_after_scope_memory attribute to the variable in order
	 to prevent it from being rewritten into SSA form.  */
      if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
			     DECL_ATTRIBUTES (var)))
	DECL_ATTRIBUTES (var)
	  = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
		       integer_one_node,
		       DECL_ATTRIBUTES (var));
    }
}

/* Gimplify a BIND_EXPR.  Just voidify and recurse.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_keep_stack = gimplify_ctxp->keep_stack;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;
  tree ret_clauses = NULL;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  tree attr;

	  if (flag_openmp
	      && !is_global_var (t)
	      && DECL_CONTEXT (t) == current_function_decl
	      && TREE_USED (t)
	      && (attr = lookup_attribute ("omp allocate", DECL_ATTRIBUTES (t)))
		 != NULL_TREE)
	    {
	      gcc_assert (!DECL_HAS_VALUE_EXPR_P (t));
	      tree alloc = TREE_PURPOSE (TREE_VALUE (attr));
	      tree align = TREE_VALUE (TREE_VALUE (attr));
	      /* Allocate directives that appear in a target region must
		 specify an allocator clause unless a requires directive with
		 the dynamic_allocators clause is present in the same
		 compilation unit.  */
	      bool missing_dyn_alloc = false;
	      if (alloc == NULL_TREE
		  && ((omp_requires_mask & OMP_REQUIRES_DYNAMIC_ALLOCATORS)
		      == 0))
		{
		  /* This comes too early for omp_discover_declare_target...,
		     but should at least catch the most common cases.  */
		  missing_dyn_alloc
		    = cgraph_node::get (current_function_decl)->offloadable;
		  for (struct gimplify_omp_ctx *ctx2 = ctx;
		       ctx2 && !missing_dyn_alloc; ctx2 = ctx2->outer_context)
		    if (ctx2->code == OMP_TARGET)
		      missing_dyn_alloc = true;
		}
	      if (missing_dyn_alloc)
		error_at (DECL_SOURCE_LOCATION (t),
			  "%<allocate%> directive for %qD inside a target "
			  "region must specify an %<allocator%> clause", t);
	      /* Skip for omp_default_mem_alloc (= 1),
		 unless align is present.  */
	      else if (!errorcount
		       && (align != NULL_TREE
			   || alloc == NULL_TREE
			   || !integer_onep (alloc)))
		{
		  /* Fortran might already use a pointer type internally;
		     use that pointer except for type(C_ptr) and type(C_funptr);
		     note that normal proc pointers are rejected.  */
		  tree type = TREE_TYPE (t);
		  tree tmp, v;
		  if (lang_GNU_Fortran ()
		      && POINTER_TYPE_P (type)
		      && TREE_TYPE (type) != void_type_node
		      && TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE)
		    {
		      type = TREE_TYPE (type);
		      v = t;
		    }
		  else
		    {
		      tmp = build_pointer_type (type);
		      v = create_tmp_var (tmp, get_name (t));
		      DECL_IGNORED_P (v) = 0;
		      DECL_ATTRIBUTES (v)
			= tree_cons (get_identifier ("omp allocate var"),
				     build_tree_list (NULL_TREE, t),
				     remove_attribute ("omp allocate",
						       DECL_ATTRIBUTES (t)));
		      tmp = build_fold_indirect_ref (v);
		      TREE_THIS_NOTRAP (tmp) = 1;
		      SET_DECL_VALUE_EXPR (t, tmp);
		      DECL_HAS_VALUE_EXPR_P (t) = 1;
		    }
		  tree sz = TYPE_SIZE_UNIT (type);
		  /* The size to use in Fortran might not match TYPE_SIZE_UNIT;
		     hence, for some decls, a size variable is saved in the
		     attributes; use it, if available.  */
		  if (TREE_CHAIN (TREE_VALUE (attr))
		      && TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr)))
		      && TREE_PURPOSE (
			   TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr)))))
		    {
		      sz = TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr)));
		      sz = TREE_PURPOSE (sz);
		    }
		  if (alloc == NULL_TREE)
		    alloc = build_zero_cst (ptr_type_node);
		  if (align == NULL_TREE)
		    align = build_int_cst (size_type_node, DECL_ALIGN_UNIT (t));
		  else
		    align = build_int_cst (size_type_node,
					   MAX (tree_to_uhwi (align),
						DECL_ALIGN_UNIT (t)));
		  location_t loc = DECL_SOURCE_LOCATION (t);
		  tmp = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
		  tmp = build_call_expr_loc (loc, tmp, 3, align, sz, alloc);
		  tmp = fold_build2_loc (loc, MODIFY_EXPR, TREE_TYPE (v), v,
					 fold_convert (TREE_TYPE (v), tmp));
		  gcc_assert (BIND_EXPR_BODY (bind_expr) != NULL_TREE);
		  /* Ensure that either TREE_CHAIN (TREE_VALUE (attr)) is set
		     and GOMP_FREE added here or that DECL_HAS_VALUE_EXPR_P (t)
		     is set, used in a condition much further below.  */
		  gcc_assert (DECL_HAS_VALUE_EXPR_P (t)
			      || TREE_CHAIN (TREE_VALUE (attr)));
		  if (TREE_CHAIN (TREE_VALUE (attr)))
		    {
		      /* Fortran is special as it does not properly nest
			 declarations in blocks.  And as there is no
			 initializer, there is also no expression to look for.
			 Hence, the FE makes the statement list of the
			 try-finally block available.  We can put the GOMP_alloc
			 at the top, unless an allocator or size expression
			 requires it to be put afterward; note that the size is
			 always later in the generated code; for strings, no
			 size expr but still an expr might be available.
			 As LTO does not handle a statement list, 'sl' has
			 to be removed; done so by removing the attribute.  */
		      DECL_ATTRIBUTES (t)
			= remove_attribute ("omp allocate",
					    DECL_ATTRIBUTES (t));
		      tree sl = TREE_PURPOSE (TREE_CHAIN (TREE_VALUE (attr)));
		      tree_stmt_iterator e = tsi_start (sl);
		      tree needle = NULL_TREE;
		      if (TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr))))
			{
			  needle = TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr)));
			  needle = (TREE_VALUE (needle) ? TREE_VALUE (needle)
							: sz);
			}
		      else if (TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr))))
			needle = sz;
		      else if (DECL_P (alloc) && DECL_ARTIFICIAL (alloc))
			needle = alloc;

		      if (needle != NULL_TREE)
			{
			  while (!tsi_end_p (e))
			    {
			      if (*e == needle
				  || (TREE_CODE (*e) == MODIFY_EXPR
				      && TREE_OPERAND (*e, 0) == needle))
				break;
			      ++e;
			    }
			  gcc_assert (!tsi_end_p (e));
			}
		      tsi_link_after (&e, tmp, TSI_SAME_STMT);

		      /* As the cleanup is in BIND_EXPR_BODY, GOMP_free is added
			 here; for C/C++ it will be added in the 'cleanup'
			 section after gimplification.  But Fortran already has
			 a try-finally block.  */
		      sl = TREE_VALUE (TREE_CHAIN (TREE_VALUE (attr)));
		      e = tsi_last (sl);
		      tmp = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
		      tmp = build_call_expr_loc (EXPR_LOCATION (*e), tmp, 2, v,
						 build_zero_cst (ptr_type_node));
		      tsi_link_after (&e, tmp, TSI_SAME_STMT);
		      tmp = build_clobber (TREE_TYPE (v), CLOBBER_STORAGE_END);
		      tmp = fold_build2_loc (loc, MODIFY_EXPR, TREE_TYPE (v), v,
					     fold_convert (TREE_TYPE (v), tmp));
		      ++e;
		      tsi_link_after (&e, tmp, TSI_SAME_STMT);
		    }
		  else
		    {
		      gcc_assert (TREE_CODE (BIND_EXPR_BODY (bind_expr))
				  == STATEMENT_LIST);
		      tree_stmt_iterator e;
		      e = tsi_start (BIND_EXPR_BODY (bind_expr));
		      while (!tsi_end_p (e))
			{
			  if ((TREE_CODE (*e) == DECL_EXPR
			       && TREE_OPERAND (*e, 0) == t)
			      || (TREE_CODE (*e) == CLEANUP_POINT_EXPR
				  && (TREE_CODE (TREE_OPERAND (*e, 0))
				      == DECL_EXPR)
				  && (TREE_OPERAND (TREE_OPERAND (*e, 0), 0)
				      == t)))
			    break;
			  ++e;
			}
		      gcc_assert (!tsi_end_p (e));
		      tsi_link_before (&e, tmp, TSI_SAME_STMT);
		    }
		}
	    }

	  /* Mark variable as local.  */
	  if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t))
	    {
	      if (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL)
		{
		  int flag = GOVD_LOCAL;
		  if (ctx->region_type == ORT_SIMD
		      && TREE_ADDRESSABLE (t)
		      && !TREE_STATIC (t))
		    {
		      if (TREE_CODE (DECL_SIZE_UNIT (t)) != INTEGER_CST)
			ctx->add_safelen1 = true;
		      else
			flag = GOVD_PRIVATE;
		    }
		  omp_add_variable (ctx, t, flag | GOVD_SEEN);
		}
	      /* Static locals inside of target construct or offloaded
		 routines need to be "omp declare target".  */
	      if (TREE_STATIC (t))
		for (; ctx; ctx = ctx->outer_context)
		  if ((ctx->region_type & ORT_TARGET) != 0)
		    {
		      if (!lookup_attribute ("omp declare target",
					     DECL_ATTRIBUTES (t)))
			{
			  tree id = get_identifier ("omp declare target");
			  DECL_ATTRIBUTES (t)
			    = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (t));
			  varpool_node *node = varpool_node::get (t);
			  if (node)
			    {
			      node->offloadable = 1;
			      if (ENABLE_OFFLOADING && !DECL_EXTERNAL (t))
				{
				  g->have_offload = true;
				  if (!in_lto_p)
				    vec_safe_push (offload_vars, t);
				}
			    }
			}
		      break;
		    }
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  gimplify_ctxp->keep_stack = false;
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t)
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl)
	{
	  if (flag_openmp
	      && DECL_HAS_VALUE_EXPR_P (t)
	      && TREE_USED (t)
	      && lookup_attribute ("omp allocate", DECL_ATTRIBUTES (t)))
	    {
	      /* For Fortran, TREE_CHAIN (TREE_VALUE (attr)) is set, which
		 means the GOMP_free call has already been added above;
		 "omp allocate" is then removed from DECL_ATTRIBUTES.  */
	      tree v = TREE_OPERAND (DECL_VALUE_EXPR (t), 0);
	      tree tmp = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
	      tmp = build_call_expr_loc (end_locus, tmp, 2, v,
					 build_zero_cst (ptr_type_node));
	      gimplify_and_add (tmp, &cleanup);
	      gimple *clobber_stmt;
	      tmp = build_clobber (TREE_TYPE (v), CLOBBER_STORAGE_END);
	      clobber_stmt = gimple_build_assign (v, tmp);
	      gimple_set_location (clobber_stmt, end_locus);
	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	    }
	  if (!DECL_HARD_REGISTER (t)
	      && !TREE_THIS_VOLATILE (t)
	      && !DECL_HAS_VALUE_EXPR_P (t)
	      /* Only care for variables that have to be in memory.  Others
		 will be rewritten into SSA names, hence moved to the
		 top-level.  */
	      && !is_gimple_reg (t)
	      && flag_stack_reuse != SR_NONE)
	    {
	      tree clobber = build_clobber (TREE_TYPE (t), CLOBBER_STORAGE_END);
	      gimple *clobber_stmt;
	      clobber_stmt = gimple_build_assign (t, clobber);
	      gimple_set_location (clobber_stmt, end_locus);
	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	    }

	  if (flag_openacc && oacc_declare_returns != NULL)
	    {
	      tree key = t;
	      if (DECL_HAS_VALUE_EXPR_P (key))
		{
		  key = DECL_VALUE_EXPR (key);
		  if (INDIRECT_REF_P (key))
		    key = TREE_OPERAND (key, 0);
		}
	      tree *c = oacc_declare_returns->get (key);
	      if (c != NULL)
		{
		  if (ret_clauses)
		    OMP_CLAUSE_CHAIN (*c) = ret_clauses;

		  ret_clauses = unshare_expr (*c);

		  oacc_declare_returns->remove (key);

		  if (oacc_declare_returns->is_empty ())
		    {
		      delete oacc_declare_returns;
		      oacc_declare_returns = NULL;
		    }
		}
	    }
	}

      if (asan_poisoned_variables != NULL
	  && asan_poisoned_variables->contains (t))
	{
	  asan_poisoned_variables->remove (t);
	  asan_poison_variable (t, true, &cleanup);
	}

      if (gimplify_ctxp->live_switch_vars != NULL
	  && gimplify_ctxp->live_switch_vars->contains (t))
	gimplify_ctxp->live_switch_vars->remove (t);
    }

  /* If the code both contains VLAs and calls alloca, then we cannot reclaim
     the stack space allocated to the VLAs.  */
  if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  if (ret_clauses)
    {
      gomp_target *stmt;
      gimple_stmt_iterator si = gsi_start (cleanup);

      stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				      ret_clauses);
      gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
    }

  if (cleanup)
    {
      gtry *gs;
      gimple_seq new_body;

      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
			     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
  if (!gimplify_ctxp->keep_stack)
    gimplify_ctxp->keep_stack = old_keep_stack;
  gimplify_ctxp->save_stack = old_save_stack;

  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
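
/* End-to-end sketch of this function's effect (illustrative GIMPLE, not
   dump output): a block such as

     { int n = f (); int a[n]; g (a); }

   becomes a GIMPLE_BIND whose body is wrapped in a try/finally, with the
   VLA's stack released and out-of-scope locals clobbered on the way out:

     saved_stack.1 = __builtin_stack_save ();
     try
       {
	 ...
	 g (&a);
       }
     finally
       {
	 a = {CLOBBER};
	 __builtin_stack_restore (saved_stack.1);
       }  */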

/* Maybe add early return predict statement to PRE_P sequence.  */

static void
maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
{
  /* If we are not in a conditional context, add PREDICT statement.  */
  if (gimple_conditional_context ())
    {
      gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
					      NOT_TAKEN);
      gimplify_seq_add_stmt (pre_p, predict);
    }
}

/* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
   GIMPLE value, it is assigned to a new temporary and the statement is
   re-written to return the temporary.

   PRE_P points to the sequence where side effects that must happen before
   STMT should be stored.  */

static enum gimplify_status
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
{
  greturn *ret;
  tree ret_expr = TREE_OPERAND (stmt, 0);
  tree result_decl, result;

  if (ret_expr == error_mark_node)
    return GS_ERROR;

  if (!ret_expr
      || TREE_CODE (ret_expr) == RESULT_DECL)
    {
      maybe_add_early_return_predict_stmt (pre_p);
      greturn *ret = gimple_build_return (ret_expr);
      copy_warning (ret, stmt);
      gimplify_seq_add_stmt (pre_p, ret);
      return GS_ALL_DONE;
    }

  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
    result_decl = NULL_TREE;
  else if (TREE_CODE (ret_expr) == COMPOUND_EXPR)
    {
      /* Used in C++ for handling EH cleanup of the return value if a local
	 cleanup throws.  Assume the front-end knows what it's doing.  */
      result_decl = DECL_RESULT (current_function_decl);
      /* But crash if we end up trying to modify ret_expr below.  */
      ret_expr = NULL_TREE;
    }
  else
    {
      result_decl = TREE_OPERAND (ret_expr, 0);

      /* See through a return by reference.  */
      if (INDIRECT_REF_P (result_decl))
	result_decl = TREE_OPERAND (result_decl, 0);

      gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
		   || TREE_CODE (ret_expr) == INIT_EXPR)
		  && TREE_CODE (result_decl) == RESULT_DECL);
    }

  /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
     Recall that aggregate_value_p is FALSE for any aggregate type that is
     returned in registers.  If we're returning values in registers, then
     we don't want to extend the lifetime of the RESULT_DECL, particularly
     across another call.  In addition, for those aggregates for which
     hard_function_value generates a PARALLEL, we'll die during normal
     expansion of structure assignments; there's special code in expand_return
     to handle this case that does not exist in expand_expr.  */
  if (!result_decl)
    result = NULL_TREE;
  else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
    {
      if (!poly_int_tree_p (DECL_SIZE (result_decl)))
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
	     should be effectively allocated by the caller, i.e. all calls to
	     this function must be subject to the Return Slot Optimization.  */
	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
	}
      result = result_decl;
    }
  else if (gimplify_ctxp->return_temp)
    result = gimplify_ctxp->return_temp;
  else
    {
      result = create_tmp_reg (TREE_TYPE (result_decl));

      /* ??? With complex control flow (usually involving abnormal edges),
	 we can wind up warning about an uninitialized value for this.  Due
	 to how this variable is constructed and initialized, this is never
	 true.  Give up and never warn.  */
      suppress_warning (result, OPT_Wuninitialized);

      gimplify_ctxp->return_temp = result;
    }

  /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
     Then gimplify the whole thing.  */
  if (result != result_decl)
    TREE_OPERAND (ret_expr, 0) = result;

  gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);

  maybe_add_early_return_predict_stmt (pre_p);
  ret = gimple_build_return (result);
  copy_warning (ret, stmt);
  gimplify_seq_add_stmt (pre_p, ret);

  return GS_ALL_DONE;
}
1919
1920/* Gimplify a variable-length array DECL. */
1921
1922static void
1923gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1924{
1925 /* This is a variable-sized decl. Simplify its size and mark it
1926 for deferred expansion. */
1927 tree t, addr, ptr_type;
1928
1929 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1930 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1931
1932 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1933 if (DECL_HAS_VALUE_EXPR_P (decl))
1934 return;
1935
1936 /* All occurrences of this decl in final gimplified code will be
1937 replaced by indirection. Setting DECL_VALUE_EXPR does two
1938 things: First, it lets the rest of the gimplifier know what
1939 replacement to use. Second, it lets the debug info know
1940 where to find the value. */
1941 ptr_type = build_pointer_type (TREE_TYPE (decl));
1942 addr = create_tmp_var (ptr_type, get_name (decl));
1943 DECL_IGNORED_P (addr) = 0;
1944 t = build_fold_indirect_ref (addr);
1945 TREE_THIS_NOTRAP (t) = 1;
1946 SET_DECL_VALUE_EXPR (decl, t);
1947 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1948
1949 t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
1950 max_int_size_in_bytes (TREE_TYPE (decl)));
1951 /* The call has been built for a variable-sized object. */
1952 CALL_ALLOCA_FOR_VAR_P (t) = 1;
1953 t = fold_convert (ptr_type, t);
1954 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1955
1956 gimplify_and_add (t, seq_p);
1957
1958 /* Record the dynamic allocation associated with DECL if requested. */
1959 if (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC)
1960 record_dynamic_alloc (decl_or_exp: decl);
1961}
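/* For illustration, a VLA declaration such as
 int a[n];
 is lowered to roughly (a sketch; names and the exact builtin variant
 are invented):
 D.1234 = (sizetype) n * 4;
 a.1 = __builtin_alloca_with_align (D.1234, ...);
 with DECL_VALUE_EXPR (a) == *a.1, so later uses of 'a' become
 indirections through the temporary pointer. */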
1962
1963/* A helper function to be called via walk_tree. Mark all labels under *TP
1964 as being forced. To be called for DECL_INITIAL of static variables. */
1965
1966static tree
1967force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1968{
1969 if (TYPE_P (*tp))
1970 *walk_subtrees = 0;
1971 if (TREE_CODE (*tp) == LABEL_DECL)
1972 {
1973 FORCED_LABEL (*tp) = 1;
1974 cfun->has_forced_label_in_static = 1;
1975 }
1976
1977 return NULL_TREE;
1978}
1979
1980/* Generate an initialization to automatic variable DECL based on INIT_TYPE.
1981 Build a call to internal const function DEFERRED_INIT:
1982 1st argument: SIZE of the DECL;
1983 2nd argument: INIT_TYPE;
1984 3rd argument: NAME of the DECL;
1985
1986 as LHS = DEFERRED_INIT (SIZE of the DECL, INIT_TYPE, NAME of the DECL). */
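/* E.g. for 'int i;' with -ftrivial-auto-var-init=zero this emits,
 roughly (a sketch of the dump form; 2 is AUTO_INIT_ZERO):
 i = .DEFERRED_INIT (4, 2, &"i"[0]); */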
1987
1988static void
1989gimple_add_init_for_auto_var (tree decl,
1990 enum auto_init_type init_type,
1991 gimple_seq *seq_p)
1992{
1993 gcc_assert (auto_var_p (decl));
1994 gcc_assert (init_type > AUTO_INIT_UNINITIALIZED);
1995 location_t loc = EXPR_LOCATION (decl);
1996 tree decl_size = TYPE_SIZE_UNIT (TREE_TYPE (decl));
1997
1998 tree init_type_node
1999 = build_int_cst (integer_type_node, (int) init_type);
2000
2001 tree decl_name = NULL_TREE;
 2002 if (DECL_NAME (decl))
 2004 decl_name = build_string_literal (DECL_NAME (decl));
 2006 else
2007 {
2008 char decl_name_anonymous[3 + (HOST_BITS_PER_INT + 2) / 3];
2009 sprintf (s: decl_name_anonymous, format: "D.%u", DECL_UID (decl));
2010 decl_name = build_string_literal (p: decl_name_anonymous);
2011 }
2012
2013 tree call = build_call_expr_internal_loc (loc, IFN_DEFERRED_INIT,
2014 TREE_TYPE (decl), 3,
2015 decl_size, init_type_node,
2016 decl_name);
2017
2018 gimplify_assign (decl, call, seq_p);
2019}
2020
2021/* Generate padding initialization for automatic variable DECL.
 2022 C guarantees that brace-init with fewer initializers than members of
 2023 the aggregate will initialize the rest of the aggregate as-if it were
 2024 static initialization. In turn, static initialization guarantees
 2025 that padding is initialized to zero. So, we always initialize the
 2026 paddings to zeroes regardless of INIT_TYPE.
 2027 To do the padding initialization, we insert a call to
 2028 __builtin_clear_padding (&decl, 0, for_auto_init = true).
 2029 Note, we add an additional dummy argument 'for_auto_init' to
 2030 __builtin_clear_padding to distinguish whether this call is for
 2031 automatic variable initialization or not.
 2032 */
2033static void
2034gimple_add_padding_init_for_auto_var (tree decl, bool is_vla,
2035 gimple_seq *seq_p)
2036{
2037 tree addr_of_decl = NULL_TREE;
2038 tree fn = builtin_decl_explicit (fncode: BUILT_IN_CLEAR_PADDING);
2039
2040 if (is_vla)
2041 {
 2042 /* The temporary address variable for this VLA should have been
 2043 created by gimplify_vla_decl. */
2044 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
2045 gcc_assert (INDIRECT_REF_P (DECL_VALUE_EXPR (decl)));
2046 addr_of_decl = TREE_OPERAND (DECL_VALUE_EXPR (decl), 0);
2047 }
2048 else
2049 {
2050 mark_addressable (decl);
2051 addr_of_decl = build_fold_addr_expr (decl);
2052 }
2053
2054 gimple *call = gimple_build_call (fn, 2, addr_of_decl,
2055 build_one_cst (TREE_TYPE (addr_of_decl)));
2056 gimplify_seq_add_stmt (seq_p, gs: call);
2057}
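/* E.g. for
 struct S { char c; int i; } s;
 this emits roughly (a sketch):
 __builtin_clear_padding (&s, 1);
 where the second, nonzero argument plays the role of the
 'for_auto_init' flag mentioned above. */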
2058
2059/* Return true if DECL needs to be automatically initialized by the
 2060 compiler. */
2061static bool
2062is_var_need_auto_init (tree decl)
2063{
2064 if (auto_var_p (decl)
2065 && (TREE_CODE (decl) != VAR_DECL
2066 || !DECL_HARD_REGISTER (decl))
2067 && (flag_auto_var_init > AUTO_INIT_UNINITIALIZED)
2068 && (!lookup_attribute (attr_name: "uninitialized", DECL_ATTRIBUTES (decl)))
2069 && !OPAQUE_TYPE_P (TREE_TYPE (decl))
2070 && !is_empty_type (TREE_TYPE (decl)))
2071 return true;
2072 return false;
2073}
2074
2075/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
2076 and initialization explicit. */
2077
2078static enum gimplify_status
2079gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
2080{
2081 tree stmt = *stmt_p;
2082 tree decl = DECL_EXPR_DECL (stmt);
2083
2084 *stmt_p = NULL_TREE;
2085
2086 if (TREE_TYPE (decl) == error_mark_node)
2087 return GS_ERROR;
2088
2089 if ((TREE_CODE (decl) == TYPE_DECL
2090 || VAR_P (decl))
2091 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
2092 {
2093 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
2094 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
2095 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
2096 }
2097
2098 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
2099 in case its size expressions contain problematic nodes like CALL_EXPR. */
2100 if (TREE_CODE (decl) == TYPE_DECL
2101 && DECL_ORIGINAL_TYPE (decl)
2102 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
2103 {
2104 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
2105 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
2106 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
2107 }
2108
2109 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
2110 {
2111 tree init = DECL_INITIAL (decl);
2112 bool is_vla = false;
 2113 /* Check whether a decl has a FE-created VALUE_EXPR here, BEFORE
 2114 gimplify_vla_decl creates a VALUE_EXPR for a VLA decl.
 2115 If the decl has a VALUE_EXPR that was created by the FE (usually
 2116 the C++ FE), it's a proxy variable, and the FE has already
 2117 initialized its VALUE_EXPR, so we should not initialize it again. */
2118 bool decl_had_value_expr_p = DECL_HAS_VALUE_EXPR_P (decl);
2119
2120 poly_uint64 size;
2121 if (!poly_int_tree_p (DECL_SIZE_UNIT (decl), value: &size)
2122 || (!TREE_STATIC (decl)
2123 && flag_stack_check == GENERIC_STACK_CHECK
2124 && maybe_gt (size,
2125 (unsigned HOST_WIDE_INT) STACK_CHECK_MAX_VAR_SIZE)))
2126 {
2127 gimplify_vla_decl (decl, seq_p);
2128 is_vla = true;
2129 }
2130
2131 if (asan_poisoned_variables
2132 && !is_vla
2133 && TREE_ADDRESSABLE (decl)
2134 && !TREE_STATIC (decl)
2135 && !DECL_HAS_VALUE_EXPR_P (decl)
2136 && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
2137 && dbg_cnt (index: asan_use_after_scope)
2138 && !gimplify_omp_ctxp
2139 /* GNAT introduces temporaries to hold return values of calls in
2140 initializers of variables defined in other units, so the
2141 declaration of the variable is discarded completely. We do not
2142 want to issue poison calls for such dropped variables. */
2143 && (DECL_SEEN_IN_BIND_EXPR_P (decl)
2144 || (DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)))
2145 {
2146 asan_poisoned_variables->add (k: decl);
2147 asan_poison_variable (decl, poison: false, seq_p);
2148 if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
2149 gimplify_ctxp->live_switch_vars->add (k: decl);
2150 }
2151
2152 /* Some front ends do not explicitly declare all anonymous
2153 artificial variables. We compensate here by declaring the
2154 variables, though it would be better if the front ends would
2155 explicitly declare them. */
2156 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
2157 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
2158 gimple_add_tmp_var (tmp: decl);
2159
2160 if (init && init != error_mark_node)
2161 {
2162 if (!TREE_STATIC (decl))
2163 {
2164 DECL_INITIAL (decl) = NULL_TREE;
2165 init = build2 (INIT_EXPR, void_type_node, decl, init);
2166 gimplify_and_add (t: init, seq_p);
2167 ggc_free (init);
2168 /* Clear TREE_READONLY if we really have an initialization. */
2169 if (!DECL_INITIAL (decl)
2170 && !omp_privatize_by_reference (decl))
2171 TREE_READONLY (decl) = 0;
2172 }
2173 else
2174 /* We must still examine initializers for static variables
2175 as they may contain a label address. */
2176 walk_tree (&init, force_labels_r, NULL, NULL);
2177 }
 2178 /* When there is no explicit initializer, and if the user requested
 2179 it, insert an artificial initializer for this automatic
 2180 variable. */
2181 else if (is_var_need_auto_init (decl)
2182 && !decl_had_value_expr_p)
2183 {
2184 gimple_add_init_for_auto_var (decl,
2185 flag_auto_var_init,
2186 seq_p);
 2187 /* Expanding the call to .DEFERRED_INIT above applies block
 2188 initialization to the whole space covered by this variable.
 2189 As a result, all the paddings will be initialized to zeroes
 2190 for zero initialization and to the 0xFE byte-repeatable pattern
 2191 for pattern initialization.
 2192 In order to make the paddings zeroes for pattern init too, we
 2193 add a call to __builtin_clear_padding to clear the paddings
 2194 to zero, for compatibility with Clang.
 2195 We cannot insert this call if the variable is a gimple register,
 2196 since __builtin_clear_padding takes the address of the
 2197 variable. As a result, if a long double/_Complex long double
 2198 variable is later spilled onto the stack, its padding is 0xFE. */
2199 if (flag_auto_var_init == AUTO_INIT_PATTERN
2200 && !is_gimple_reg (decl)
2201 && clear_padding_type_may_have_padding_p (TREE_TYPE (decl)))
2202 gimple_add_padding_init_for_auto_var (decl, is_vla, seq_p);
2203 }
2204 }
2205
2206 return GS_ALL_DONE;
2207}
2208
2209/* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
2210 and replacing the LOOP_EXPR with goto, but if the loop contains an
2211 EXIT_EXPR, we need to append a label for it to jump to. */
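/* For illustration, LOOP_EXPR <body> becomes roughly (a sketch; label
 names are invented):
 L.start:
 ... body ...
 goto L.start;
 L.exit: // emitted only if the body contained an EXIT_EXPR
 */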
2212
2213static enum gimplify_status
2214gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
2215{
2216 tree saved_label = gimplify_ctxp->exit_label;
2217 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
2218
2219 gimplify_seq_add_stmt (seq_p: pre_p, gs: gimple_build_label (label: start_label));
2220
2221 gimplify_ctxp->exit_label = NULL_TREE;
2222
2223 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), seq_p: pre_p);
2224
2225 gimplify_seq_add_stmt (seq_p: pre_p, gs: gimple_build_goto (dest: start_label));
2226
2227 if (gimplify_ctxp->exit_label)
2228 gimplify_seq_add_stmt (seq_p: pre_p,
2229 gs: gimple_build_label (label: gimplify_ctxp->exit_label));
2230
2231 gimplify_ctxp->exit_label = saved_label;
2232
2233 *expr_p = NULL;
2234 return GS_ALL_DONE;
2235}
2236
2237/* Gimplify a statement list onto a sequence. These may be created either
2238 by an enlightened front-end, or by shortcut_cond_expr. */
2239
2240static enum gimplify_status
2241gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
2242{
2243 tree temp = voidify_wrapper_expr (wrapper: *expr_p, NULL);
2244
2245 tree_stmt_iterator i = tsi_start (t: *expr_p);
2246
2247 while (!tsi_end_p (i))
2248 {
2249 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
2250 tsi_delink (&i);
2251 }
2252
2253 if (temp)
2254 {
2255 *expr_p = temp;
2256 return GS_OK;
2257 }
2258
2259 return GS_ALL_DONE;
2260}
2261
2262
2263/* Emit a warning for the unreachable statement STMT if needed.
 2264 Return the gimple itself when the warning is emitted, otherwise
 2265 return NULL. */
2266static gimple *
2267emit_warn_switch_unreachable (gimple *stmt)
2268{
2269 if (gimple_code (g: stmt) == GIMPLE_GOTO
2270 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
2271 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
2272 /* Don't warn for compiler-generated gotos. These occur
2273 in Duff's devices, for example. */
2274 return NULL;
2275 else if ((flag_auto_var_init > AUTO_INIT_UNINITIALIZED)
2276 && ((gimple_call_internal_p (gs: stmt, fn: IFN_DEFERRED_INIT))
2277 || (gimple_call_builtin_p (stmt, BUILT_IN_CLEAR_PADDING)
2278 && (bool) TREE_INT_CST_LOW (gimple_call_arg (stmt, 1)))
2279 || (is_gimple_assign (gs: stmt)
2280 && gimple_assign_single_p (gs: stmt)
2281 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
2282 && gimple_call_internal_p (
2283 SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt)),
2284 fn: IFN_DEFERRED_INIT))))
2285 /* Don't warn for compiler-generated initializations for
2286 -ftrivial-auto-var-init.
2287 There are 3 cases:
2288 case 1: a call to .DEFERRED_INIT;
2289 case 2: a call to __builtin_clear_padding with the 2nd argument is
2290 present and non-zero;
2291 case 3: a gimple assign store right after the call to .DEFERRED_INIT
2292 that has the LHS of .DEFERRED_INIT as the RHS as following:
2293 _1 = .DEFERRED_INIT (4, 2, &"i1"[0]);
2294 i1 = _1. */
2295 return NULL;
2296 else
2297 warning_at (gimple_location (g: stmt), OPT_Wswitch_unreachable,
2298 "statement will never be executed");
2299 return stmt;
2300}
2301
2302/* Callback for walk_gimple_seq. */
2303
2304static tree
2305warn_switch_unreachable_and_auto_init_r (gimple_stmt_iterator *gsi_p,
2306 bool *handled_ops_p,
2307 struct walk_stmt_info *wi)
2308{
2309 gimple *stmt = gsi_stmt (i: *gsi_p);
2310 bool unreachable_issued = wi->info != NULL;
2311
2312 *handled_ops_p = true;
2313 switch (gimple_code (g: stmt))
2314 {
2315 case GIMPLE_TRY:
2316 /* A compiler-generated cleanup or a user-written try block.
2317 If it's empty, don't dive into it--that would result in
2318 worse location info. */
2319 if (gimple_try_eval (gs: stmt) == NULL)
2320 {
2321 if (warn_switch_unreachable && !unreachable_issued)
2322 wi->info = emit_warn_switch_unreachable (stmt);
2323
2324 /* Stop when auto var init warning is not on. */
2325 if (!warn_trivial_auto_var_init)
2326 return integer_zero_node;
2327 }
2328 /* Fall through. */
2329 case GIMPLE_BIND:
2330 case GIMPLE_CATCH:
2331 case GIMPLE_EH_FILTER:
2332 case GIMPLE_TRANSACTION:
2333 /* Walk the sub-statements. */
2334 *handled_ops_p = false;
2335 break;
2336
2337 case GIMPLE_DEBUG:
2338 /* Ignore these. We may generate them before declarations that
2339 are never executed. If there's something to warn about,
2340 there will be non-debug stmts too, and we'll catch those. */
2341 break;
2342
2343 case GIMPLE_LABEL:
 2344 /* Stop at the first label. */
2345 return integer_zero_node;
2346 case GIMPLE_CALL:
2347 if (gimple_call_internal_p (gs: stmt, fn: IFN_ASAN_MARK))
2348 {
2349 *handled_ops_p = false;
2350 break;
2351 }
2352 if (warn_trivial_auto_var_init
2353 && flag_auto_var_init > AUTO_INIT_UNINITIALIZED
2354 && gimple_call_internal_p (gs: stmt, fn: IFN_DEFERRED_INIT))
2355 {
2356 /* Get the variable name from the 3rd argument of call. */
2357 tree var_name = gimple_call_arg (gs: stmt, index: 2);
2358 var_name = TREE_OPERAND (TREE_OPERAND (var_name, 0), 0);
2359 const char *var_name_str = TREE_STRING_POINTER (var_name);
2360
2361 warning_at (gimple_location (g: stmt), OPT_Wtrivial_auto_var_init,
2362 "%qs cannot be initialized with"
2363 "%<-ftrivial-auto-var_init%>",
2364 var_name_str);
2365 break;
2366 }
2367
2368 /* Fall through. */
2369 default:
 2370 /* Check the first "real" statement (not a decl/lexical scope/...) and
 2371 issue a warning if needed. */
2372 if (warn_switch_unreachable && !unreachable_issued)
2373 wi->info = emit_warn_switch_unreachable (stmt);
2374 /* Stop when auto var init warning is not on. */
2375 if (!warn_trivial_auto_var_init)
2376 return integer_zero_node;
2377 break;
2378 }
2379 return NULL_TREE;
2380}
2381
2382
2383/* Possibly warn about unreachable statements between switch's controlling
 2384 expression and the first case. Also warn when -ftrivial-auto-var-init
 2385 cannot initialize the auto variable in such a situation.
 2386 SEQ is the body of a switch expression. */
2387
2388static void
2389maybe_warn_switch_unreachable_and_auto_init (gimple_seq seq)
2390{
2391 if ((!warn_switch_unreachable && !warn_trivial_auto_var_init)
2392 /* This warning doesn't play well with Fortran when optimizations
2393 are on. */
2394 || lang_GNU_Fortran ()
2395 || seq == NULL)
2396 return;
2397
2398 struct walk_stmt_info wi;
2399
2400 memset (s: &wi, c: 0, n: sizeof (wi));
2401 walk_gimple_seq (seq, warn_switch_unreachable_and_auto_init_r, NULL, &wi);
2402}
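/* For example (a sketch), -Wswitch-unreachable warns here, since the
 statement before the first case label can never execute:
 switch (x)
 {
 i = 1; // warning: statement will never be executed
 case 0:
 break;
 } */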
2403
2404
2405/* A label entry that pairs label and a location. */
2406struct label_entry
2407{
2408 tree label;
2409 location_t loc;
2410};
2411
2412/* Find LABEL in vector of label entries VEC. */
2413
2414static struct label_entry *
2415find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
2416{
2417 unsigned int i;
2418 struct label_entry *l;
2419
2420 FOR_EACH_VEC_ELT (*vec, i, l)
2421 if (l->label == label)
2422 return l;
2423 return NULL;
2424}
2425
2426/* Return true if LABEL, a LABEL_DECL, represents a case label
2427 in a vector of labels CASES. */
2428
2429static bool
2430case_label_p (const vec<tree> *cases, tree label)
2431{
2432 unsigned int i;
2433 tree l;
2434
2435 FOR_EACH_VEC_ELT (*cases, i, l)
2436 if (CASE_LABEL (l) == label)
2437 return true;
2438 return false;
2439}
2440
2441/* Find the last nondebug statement in a scope STMT. */
2442
2443static gimple *
2444last_stmt_in_scope (gimple *stmt)
2445{
2446 if (!stmt)
2447 return NULL;
2448
2449 switch (gimple_code (g: stmt))
2450 {
2451 case GIMPLE_BIND:
2452 {
2453 gbind *bind = as_a <gbind *> (p: stmt);
2454 stmt = gimple_seq_last_nondebug_stmt (s: gimple_bind_body (gs: bind));
2455 return last_stmt_in_scope (stmt);
2456 }
2457
2458 case GIMPLE_TRY:
2459 {
2460 gtry *try_stmt = as_a <gtry *> (p: stmt);
2461 stmt = gimple_seq_last_nondebug_stmt (s: gimple_try_eval (gs: try_stmt));
2462 gimple *last_eval = last_stmt_in_scope (stmt);
2463 if (gimple_stmt_may_fallthru (last_eval)
2464 && (last_eval == NULL
2465 || !gimple_call_internal_p (gs: last_eval, fn: IFN_FALLTHROUGH))
2466 && gimple_try_kind (gs: try_stmt) == GIMPLE_TRY_FINALLY)
2467 {
2468 stmt = gimple_seq_last_nondebug_stmt (s: gimple_try_cleanup (gs: try_stmt));
2469 return last_stmt_in_scope (stmt);
2470 }
2471 else
2472 return last_eval;
2473 }
2474
2475 case GIMPLE_DEBUG:
2476 gcc_unreachable ();
2477
2478 default:
2479 return stmt;
2480 }
2481}
2482
2483/* Collect labels that may fall through into LABELS and return the statement
2484 preceding another case label, or a user-defined label. Store a location
2485 useful to give warnings at *PREVLOC (usually the location of the returned
2486 statement or of its surrounding scope). */
2487
2488static gimple *
2489collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
2490 auto_vec <struct label_entry> *labels,
2491 location_t *prevloc)
2492{
2493 gimple *prev = NULL;
2494
2495 *prevloc = UNKNOWN_LOCATION;
2496 do
2497 {
2498 if (gimple_code (g: gsi_stmt (i: *gsi_p)) == GIMPLE_BIND)
2499 {
2500 /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
2501 which starts on a GIMPLE_SWITCH and ends with a break label.
2502 Handle that as a single statement that can fall through. */
2503 gbind *bind = as_a <gbind *> (p: gsi_stmt (i: *gsi_p));
2504 gimple *first = gimple_seq_first_stmt (s: gimple_bind_body (gs: bind));
2505 gimple *last = gimple_seq_last_stmt (s: gimple_bind_body (gs: bind));
2506 if (last
2507 && gimple_code (g: first) == GIMPLE_SWITCH
2508 && gimple_code (g: last) == GIMPLE_LABEL)
2509 {
2510 tree label = gimple_label_label (gs: as_a <glabel *> (p: last));
2511 if (SWITCH_BREAK_LABEL_P (label))
2512 {
2513 prev = bind;
2514 gsi_next (i: gsi_p);
2515 continue;
2516 }
2517 }
2518 }
2519 if (gimple_code (g: gsi_stmt (i: *gsi_p)) == GIMPLE_BIND
2520 || gimple_code (g: gsi_stmt (i: *gsi_p)) == GIMPLE_TRY)
2521 {
2522 /* Nested scope. Only look at the last statement of
2523 the innermost scope. */
2524 location_t bind_loc = gimple_location (g: gsi_stmt (i: *gsi_p));
2525 gimple *last = last_stmt_in_scope (stmt: gsi_stmt (i: *gsi_p));
2526 if (last)
2527 {
2528 prev = last;
2529 /* It might be a label without a location. Use the
2530 location of the scope then. */
2531 if (!gimple_has_location (g: prev))
2532 *prevloc = bind_loc;
2533 }
2534 gsi_next (i: gsi_p);
2535 continue;
2536 }
2537
2538 /* Ifs are tricky. */
2539 if (gimple_code (g: gsi_stmt (i: *gsi_p)) == GIMPLE_COND)
2540 {
2541 gcond *cond_stmt = as_a <gcond *> (p: gsi_stmt (i: *gsi_p));
2542 tree false_lab = gimple_cond_false_label (gs: cond_stmt);
2543 location_t if_loc = gimple_location (g: cond_stmt);
2544
2545 /* If we have e.g.
2546 if (i > 1) goto <D.2259>; else goto D;
2547 we can't do much with the else-branch. */
2548 if (!DECL_ARTIFICIAL (false_lab))
2549 break;
2550
2551 /* Go on until the false label, then one step back. */
2552 for (; !gsi_end_p (i: *gsi_p); gsi_next (i: gsi_p))
2553 {
2554 gimple *stmt = gsi_stmt (i: *gsi_p);
2555 if (gimple_code (g: stmt) == GIMPLE_LABEL
2556 && gimple_label_label (gs: as_a <glabel *> (p: stmt)) == false_lab)
2557 break;
2558 }
2559
2560 /* Not found? Oops. */
2561 if (gsi_end_p (i: *gsi_p))
2562 break;
2563
2564 /* A dead label can't fall through. */
2565 if (!UNUSED_LABEL_P (false_lab))
2566 {
2567 struct label_entry l = { .label: false_lab, .loc: if_loc };
2568 labels->safe_push (obj: l);
2569 }
2570
2571 /* Go to the last statement of the then branch. */
2572 gsi_prev (i: gsi_p);
2573
2574 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
2575 <D.1759>:
2576 <stmt>;
2577 goto <D.1761>;
2578 <D.1760>:
2579 */
2580 if (gimple_code (g: gsi_stmt (i: *gsi_p)) == GIMPLE_GOTO
2581 && !gimple_has_location (g: gsi_stmt (i: *gsi_p)))
2582 {
2583 /* Look at the statement before, it might be
2584 attribute fallthrough, in which case don't warn. */
2585 gsi_prev (i: gsi_p);
2586 bool fallthru_before_dest
2587 = gimple_call_internal_p (gs: gsi_stmt (i: *gsi_p), fn: IFN_FALLTHROUGH);
2588 gsi_next (i: gsi_p);
2589 tree goto_dest = gimple_goto_dest (gs: gsi_stmt (i: *gsi_p));
2590 if (!fallthru_before_dest)
2591 {
2592 struct label_entry l = { .label: goto_dest, .loc: if_loc };
2593 labels->safe_push (obj: l);
2594 }
2595 }
2596 /* This case is about
2597 if (1 != 0) goto <D.2022>; else goto <D.2023>;
2598 <D.2022>:
2599 n = n + 1; // #1
2600 <D.2023>: // #2
2601 <D.1988>: // #3
2602 where #2 is UNUSED_LABEL_P and we want to warn about #1 falling
2603 through to #3. So set PREV to #1. */
2604 else if (UNUSED_LABEL_P (false_lab))
2605 prev = gsi_stmt (i: *gsi_p);
2606
2607 /* And move back. */
2608 gsi_next (i: gsi_p);
2609 }
2610
2611 /* Remember the last statement. Skip labels that are of no interest
2612 to us. */
2613 if (gimple_code (g: gsi_stmt (i: *gsi_p)) == GIMPLE_LABEL)
2614 {
2615 tree label = gimple_label_label (gs: as_a <glabel *> (p: gsi_stmt (i: *gsi_p)));
2616 if (find_label_entry (vec: labels, label))
2617 prev = gsi_stmt (i: *gsi_p);
2618 }
2619 else if (gimple_call_internal_p (gs: gsi_stmt (i: *gsi_p), fn: IFN_ASAN_MARK))
2620 ;
2621 else if (gimple_code (g: gsi_stmt (i: *gsi_p)) == GIMPLE_PREDICT)
2622 ;
2623 else if (!is_gimple_debug (gs: gsi_stmt (i: *gsi_p)))
2624 prev = gsi_stmt (i: *gsi_p);
2625 gsi_next (i: gsi_p);
2626 }
2627 while (!gsi_end_p (i: *gsi_p)
2628 /* Stop if we find a case or a user-defined label. */
2629 && (gimple_code (g: gsi_stmt (i: *gsi_p)) != GIMPLE_LABEL
2630 || !gimple_has_location (g: gsi_stmt (i: *gsi_p))));
2631
2632 if (prev && gimple_has_location (g: prev))
2633 *prevloc = gimple_location (g: prev);
2634 return prev;
2635}
2636
2637/* Return true if the switch fallthrough warning should occur. LABEL is
2638 the label statement that we're falling through to. */
2639
2640static bool
2641should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2642{
2643 gimple_stmt_iterator gsi = *gsi_p;
2644
2645 /* Don't warn if the label is marked with a "falls through" comment. */
2646 if (FALLTHROUGH_LABEL_P (label))
2647 return false;
2648
2649 /* Don't warn for non-case labels followed by a statement:
2650 case 0:
2651 foo ();
2652 label:
2653 bar ();
2654 as these are likely intentional. */
2655 if (!case_label_p (cases: &gimplify_ctxp->case_labels, label))
2656 {
2657 tree l;
2658 while (!gsi_end_p (i: gsi)
2659 && gimple_code (g: gsi_stmt (i: gsi)) == GIMPLE_LABEL
2660 && (l = gimple_label_label (gs: as_a <glabel *> (p: gsi_stmt (i: gsi))))
2661 && !case_label_p (cases: &gimplify_ctxp->case_labels, label: l))
2662 gsi_next_nondebug (i: &gsi);
2663 if (gsi_end_p (i: gsi) || gimple_code (g: gsi_stmt (i: gsi)) != GIMPLE_LABEL)
2664 return false;
2665 }
2666
2667 /* Don't warn for terminated branches, i.e. when the subsequent case labels
2668 immediately breaks. */
2669 gsi = *gsi_p;
2670
2671 /* Skip all immediately following labels. */
2672 while (!gsi_end_p (i: gsi)
2673 && (gimple_code (g: gsi_stmt (i: gsi)) == GIMPLE_LABEL
2674 || gimple_code (g: gsi_stmt (i: gsi)) == GIMPLE_PREDICT))
2675 gsi_next_nondebug (i: &gsi);
2676
2677 /* { ... something; default:; } */
2678 if (gsi_end_p (i: gsi)
2679 /* { ... something; default: break; } or
2680 { ... something; default: goto L; } */
2681 || gimple_code (g: gsi_stmt (i: gsi)) == GIMPLE_GOTO
2682 /* { ... something; default: return; } */
2683 || gimple_code (g: gsi_stmt (i: gsi)) == GIMPLE_RETURN)
2684 return false;
2685
2686 return true;
2687}
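/* For example (a sketch), -Wimplicit-fallthrough warns about case 0
 below, because its body may fall through into a case that does real
 work (a branch that immediately breaks would not be warned about):
 switch (n)
 {
 case 0:
 n++; // warning: this statement may fall through
 case 1:
 n--;
 break;
 } */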
2688
2689/* Callback for walk_gimple_seq. */
2690
2691static tree
2692warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2693 struct walk_stmt_info *)
2694{
2695 gimple *stmt = gsi_stmt (i: *gsi_p);
2696
2697 *handled_ops_p = true;
2698 switch (gimple_code (g: stmt))
2699 {
2700 case GIMPLE_TRY:
2701 case GIMPLE_BIND:
2702 case GIMPLE_CATCH:
2703 case GIMPLE_EH_FILTER:
2704 case GIMPLE_TRANSACTION:
2705 /* Walk the sub-statements. */
2706 *handled_ops_p = false;
2707 break;
2708
2709 /* Find a sequence of form:
2710
2711 GIMPLE_LABEL
2712 [...]
2713 <may fallthru stmt>
2714 GIMPLE_LABEL
2715
2716 and possibly warn. */
2717 case GIMPLE_LABEL:
2718 {
2719 /* Found a label. Skip all immediately following labels. */
2720 while (!gsi_end_p (i: *gsi_p)
2721 && gimple_code (g: gsi_stmt (i: *gsi_p)) == GIMPLE_LABEL)
2722 gsi_next_nondebug (i: gsi_p);
2723
2724 /* There might be no more statements. */
2725 if (gsi_end_p (i: *gsi_p))
2726 return integer_zero_node;
2727
2728 /* Vector of labels that fall through. */
2729 auto_vec <struct label_entry> labels;
2730 location_t prevloc;
2731 gimple *prev = collect_fallthrough_labels (gsi_p, labels: &labels, prevloc: &prevloc);
2732
2733 /* There might be no more statements. */
2734 if (gsi_end_p (i: *gsi_p))
2735 return integer_zero_node;
2736
2737 gimple *next = gsi_stmt (i: *gsi_p);
2738 tree label;
2739 /* If what follows is a label, then we may have a fallthrough. */
2740 if (gimple_code (g: next) == GIMPLE_LABEL
2741 && gimple_has_location (g: next)
2742 && (label = gimple_label_label (gs: as_a <glabel *> (p: next)))
2743 && prev != NULL)
2744 {
2745 struct label_entry *l;
2746 bool warned_p = false;
2747 auto_diagnostic_group d;
2748 if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2749 /* Quiet. */;
2750 else if (gimple_code (g: prev) == GIMPLE_LABEL
2751 && (label = gimple_label_label (gs: as_a <glabel *> (p: prev)))
2752 && (l = find_label_entry (vec: &labels, label)))
2753 warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
2754 "this statement may fall through");
2755 else if (!gimple_call_internal_p (gs: prev, fn: IFN_FALLTHROUGH)
2756 /* Try to be clever and don't warn when the statement
2757 can't actually fall through. */
2758 && gimple_stmt_may_fallthru (prev)
2759 && prevloc != UNKNOWN_LOCATION)
2760 warned_p = warning_at (prevloc,
2761 OPT_Wimplicit_fallthrough_,
2762 "this statement may fall through");
2763 if (warned_p)
2764 inform (gimple_location (g: next), "here");
2765
2766 /* Mark this label as processed so as to prevent multiple
2767 warnings in nested switches. */
2768 FALLTHROUGH_LABEL_P (label) = true;
2769
2770 /* So that next warn_implicit_fallthrough_r will start looking for
2771 a new sequence starting with this label. */
2772 gsi_prev (i: gsi_p);
2773 }
2774 }
2775 break;
2776 default:
2777 break;
2778 }
2779 return NULL_TREE;
2780}
2781
2782/* Warn when a switch case falls through. */
2783
2784static void
2785maybe_warn_implicit_fallthrough (gimple_seq seq)
2786{
2787 if (!warn_implicit_fallthrough)
2788 return;
2789
2790 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2791 if (!(lang_GNU_C ()
2792 || lang_GNU_CXX ()
2793 || lang_GNU_OBJC ()))
2794 return;
2795
2796 struct walk_stmt_info wi;
2797 memset (s: &wi, c: 0, n: sizeof (wi));
2798 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2799}
2800
2801/* Callback for walk_gimple_seq. */
2802
2803static tree
2804expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2805 struct walk_stmt_info *wi)
2806{
2807 gimple *stmt = gsi_stmt (i: *gsi_p);
2808
2809 *handled_ops_p = true;
2810 switch (gimple_code (g: stmt))
2811 {
2812 case GIMPLE_TRY:
2813 case GIMPLE_BIND:
2814 case GIMPLE_CATCH:
2815 case GIMPLE_EH_FILTER:
2816 case GIMPLE_TRANSACTION:
2817 /* Walk the sub-statements. */
2818 *handled_ops_p = false;
2819 break;
2820 case GIMPLE_CALL:
2821 static_cast<location_t *>(wi->info)[0] = UNKNOWN_LOCATION;
2822 if (gimple_call_internal_p (gs: stmt, fn: IFN_FALLTHROUGH))
2823 {
2824 location_t loc = gimple_location (g: stmt);
2825 gsi_remove (gsi_p, true);
2826 wi->removed_stmt = true;
2827
 2828 /* The nothrow flag is added by genericize_c_loop to mark a
 2829 fallthrough statement at the end of some loop's body. Those should
 2830 always be diagnosed, either because they indeed don't precede
 2831 a case label or default label, or because the next statement
 2832 is not within the same iteration statement. */
2833 if ((stmt->subcode & GF_CALL_NOTHROW) != 0)
2834 {
2835 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2836 "a case label or default label");
2837 break;
2838 }
2839
2840 if (gsi_end_p (i: *gsi_p))
2841 {
2842 static_cast<location_t *>(wi->info)[0] = BUILTINS_LOCATION;
2843 static_cast<location_t *>(wi->info)[1] = loc;
2844 break;
2845 }
2846
2847 bool found = false;
2848
2849 gimple_stmt_iterator gsi2 = *gsi_p;
2850 stmt = gsi_stmt (i: gsi2);
2851 if (gimple_code (g: stmt) == GIMPLE_GOTO && !gimple_has_location (g: stmt))
2852 {
2853 /* Go on until the artificial label. */
2854 tree goto_dest = gimple_goto_dest (gs: stmt);
2855 for (; !gsi_end_p (i: gsi2); gsi_next (i: &gsi2))
2856 {
2857 if (gimple_code (g: gsi_stmt (i: gsi2)) == GIMPLE_LABEL
2858 && gimple_label_label (gs: as_a <glabel *> (p: gsi_stmt (i: gsi2)))
2859 == goto_dest)
2860 break;
2861 }
2862
2863 /* Not found? Stop. */
2864 if (gsi_end_p (i: gsi2))
2865 break;
2866
2867 /* Look one past it. */
2868 gsi_next (i: &gsi2);
2869 }
2870
2871 /* We're looking for a case label or default label here. */
2872 while (!gsi_end_p (i: gsi2))
2873 {
2874 stmt = gsi_stmt (i: gsi2);
2875 if (gimple_code (g: stmt) == GIMPLE_LABEL)
2876 {
2877 tree label = gimple_label_label (gs: as_a <glabel *> (p: stmt));
2878 if (gimple_has_location (g: stmt) && DECL_ARTIFICIAL (label))
2879 {
2880 found = true;
2881 break;
2882 }
2883 }
2884 else if (gimple_call_internal_p (gs: stmt, fn: IFN_ASAN_MARK))
2885 ;
2886 else if (!is_gimple_debug (gs: stmt))
2887 /* Anything else is not expected. */
2888 break;
2889 gsi_next (i: &gsi2);
2890 }
2891 if (!found)
2892 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2893 "a case label or default label");
2894 }
2895 break;
2896 default:
2897 static_cast<location_t *>(wi->info)[0] = UNKNOWN_LOCATION;
2898 break;
2899 }
2900 return NULL_TREE;
2901}
2902
2903/* Expand all FALLTHROUGH () calls in SEQ. */
2904
2905static void
2906expand_FALLTHROUGH (gimple_seq *seq_p)
2907{
2908 struct walk_stmt_info wi;
2909 location_t loc[2];
2910 memset (s: &wi, c: 0, n: sizeof (wi));
2911 loc[0] = UNKNOWN_LOCATION;
2912 loc[1] = UNKNOWN_LOCATION;
2913 wi.info = (void *) &loc[0];
2914 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2915 if (loc[0] != UNKNOWN_LOCATION)
2916 /* We've found [[fallthrough]]; at the end of a switch, which the C++
2917 standard says is ill-formed; see [dcl.attr.fallthrough]. */
2918 pedwarn (loc[1], 0, "attribute %<fallthrough%> not preceding "
2919 "a case label or default label");
2920}
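/* For example (a sketch), both uses below are diagnosed with
 "attribute 'fallthrough' not preceding a case label or default label":
 switch (n)
 {
 case 0:
 __attribute__ ((fallthrough));
 n++; // a plain statement, not a case label, follows
 case 1:
 [[fallthrough]]; // at the end of the switch
 } */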
2921
2922
2923/* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2924 branch to. */
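/* For illustration, a sketch of the resulting GIMPLE (dump-style;
 labels are invented):
 switch (x) <default: <D.3>, case 1: <D.1>, case 2: <D.2>>
 followed by the gimplified body, in which the case labels have
 become plain labels. */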
2925
2926static enum gimplify_status
2927gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
2928{
2929 tree switch_expr = *expr_p;
2930 gimple_seq switch_body_seq = NULL;
2931 enum gimplify_status ret;
2932 tree index_type = TREE_TYPE (switch_expr);
2933 if (index_type == NULL_TREE)
2934 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
2935
2936 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
2937 fb_rvalue);
2938 if (ret == GS_ERROR || ret == GS_UNHANDLED)
2939 return ret;
2940
2941 if (SWITCH_BODY (switch_expr))
2942 {
2943 vec<tree> labels;
2944 vec<tree> saved_labels;
2945 hash_set<tree> *saved_live_switch_vars = NULL;
2946 tree default_case = NULL_TREE;
2947 gswitch *switch_stmt;
2948
2949 /* Save old labels, get new ones from body, then restore the old
2950 labels. Save all the things from the switch body to append after. */
2951 saved_labels = gimplify_ctxp->case_labels;
2952 gimplify_ctxp->case_labels.create (nelems: 8);
2953
2954 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
2955 saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
2956 tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
2957 if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
2958 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
2959 else
2960 gimplify_ctxp->live_switch_vars = NULL;
2961
2962 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
2963 gimplify_ctxp->in_switch_expr = true;
2964
2965 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
2966
2967 gimplify_ctxp->in_switch_expr = old_in_switch_expr;
2968 maybe_warn_switch_unreachable_and_auto_init (seq: switch_body_seq);
2969 maybe_warn_implicit_fallthrough (seq: switch_body_seq);
2970 /* Only do this for the outermost GIMPLE_SWITCH. */
2971 if (!gimplify_ctxp->in_switch_expr)
2972 expand_FALLTHROUGH (seq_p: &switch_body_seq);
2973
2974 labels = gimplify_ctxp->case_labels;
2975 gimplify_ctxp->case_labels = saved_labels;
2976
2977 if (gimplify_ctxp->live_switch_vars)
2978 {
2979 gcc_assert (gimplify_ctxp->live_switch_vars->is_empty ());
2980 delete gimplify_ctxp->live_switch_vars;
2981 }
2982 gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
2983
2984 preprocess_case_label_vec_for_gimple (labels, index_type,
2985 &default_case);
2986
2987 bool add_bind = false;
2988 if (!default_case)
2989 {
2990 glabel *new_default;
2991
2992 default_case
2993 = build_case_label (NULL_TREE, NULL_TREE,
2994 create_artificial_label (UNKNOWN_LOCATION));
2995 if (old_in_switch_expr)
2996 {
2997 SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1;
2998 add_bind = true;
2999 }
3000 new_default = gimple_build_label (CASE_LABEL (default_case));
3001 gimplify_seq_add_stmt (seq_p: &switch_body_seq, gs: new_default);
3002 }
3003 else if (old_in_switch_expr)
3004 {
3005 gimple *last = gimple_seq_last_stmt (s: switch_body_seq);
3006 if (last && gimple_code (g: last) == GIMPLE_LABEL)
3007 {
3008 tree label = gimple_label_label (gs: as_a <glabel *> (p: last));
3009 if (SWITCH_BREAK_LABEL_P (label))
3010 add_bind = true;
3011 }
3012 }
3013
3014 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
3015 default_case, labels);
3016 gimple_set_location (g: switch_stmt, EXPR_LOCATION (switch_expr));
3017 /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
3018 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
3019 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
3020 so that we can easily find the start and end of the switch
3021 statement. */
3022 if (add_bind)
3023 {
3024 gimple_seq bind_body = NULL;
3025 gimplify_seq_add_stmt (seq_p: &bind_body, gs: switch_stmt);
3026 gimple_seq_add_seq (&bind_body, switch_body_seq);
3027 gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE);
3028 gimple_set_location (g: bind, EXPR_LOCATION (switch_expr));
3029 gimplify_seq_add_stmt (seq_p: pre_p, gs: bind);
3030 }
3031 else
3032 {
3033 gimplify_seq_add_stmt (seq_p: pre_p, gs: switch_stmt);
3034 gimplify_seq_add_seq (dst_p: pre_p, src: switch_body_seq);
3035 }
3036 labels.release ();
3037 }
3038 else
3039 gcc_unreachable ();
3040
3041 return GS_ALL_DONE;
3042}
3043
3044/* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
3045
3046static enum gimplify_status
3047gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
3048{
3049 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
3050 == current_function_decl);
3051
3052 tree label = LABEL_EXPR_LABEL (*expr_p);
3053 glabel *label_stmt = gimple_build_label (label);
3054 gimple_set_location (g: label_stmt, EXPR_LOCATION (*expr_p));
3055 gimplify_seq_add_stmt (seq_p: pre_p, gs: label_stmt);
3056
3057 if (lookup_attribute (attr_name: "cold", DECL_ATTRIBUTES (label)))
3058 gimple_seq_add_stmt (pre_p, gimple_build_predict (predictor: PRED_COLD_LABEL,
3059 outcome: NOT_TAKEN));
3060 else if (lookup_attribute (attr_name: "hot", DECL_ATTRIBUTES (label)))
3061 gimple_seq_add_stmt (pre_p, gimple_build_predict (predictor: PRED_HOT_LABEL,
3062 outcome: TAKEN));
3063
3064 return GS_ALL_DONE;
3065}
3066
3067/* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
3068
3069static enum gimplify_status
3070gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
3071{
3072 struct gimplify_ctx *ctxp;
3073 glabel *label_stmt;
3074
3075 /* Invalid programs can play Duff's Device type games with, for example,
3076 #pragma omp parallel. At least in the C front end, we don't
3077 detect such invalid branches until after gimplification, in the
3078 diagnose_omp_blocks pass. */
3079 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
3080 if (ctxp->case_labels.exists ())
3081 break;
3082
3083 tree label = CASE_LABEL (*expr_p);
3084 label_stmt = gimple_build_label (label);
3085 gimple_set_location (g: label_stmt, EXPR_LOCATION (*expr_p));
3086 ctxp->case_labels.safe_push (obj: *expr_p);
3087 gimplify_seq_add_stmt (seq_p: pre_p, gs: label_stmt);
3088
3089 if (lookup_attribute (attr_name: "cold", DECL_ATTRIBUTES (label)))
3090 gimple_seq_add_stmt (pre_p, gimple_build_predict (predictor: PRED_COLD_LABEL,
3091 outcome: NOT_TAKEN));
3092 else if (lookup_attribute (attr_name: "hot", DECL_ATTRIBUTES (label)))
3093 gimple_seq_add_stmt (pre_p, gimple_build_predict (predictor: PRED_HOT_LABEL,
3094 outcome: TAKEN));
3095
3096 return GS_ALL_DONE;
3097}
3098
3099/* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
3100 if necessary. */
3101
3102tree
3103build_and_jump (tree *label_p)
3104{
3105 if (label_p == NULL)
3106 /* If there's nowhere to jump, just fall through. */
3107 return NULL_TREE;
3108
3109 if (*label_p == NULL_TREE)
3110 {
3111 tree label = create_artificial_label (UNKNOWN_LOCATION);
3112 *label_p = label;
3113 }
3114
3115 return build1 (GOTO_EXPR, void_type_node, *label_p);
3116}
3117
3118/* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
3119 This also involves building a label to jump to and communicating it to
3120 gimplify_loop_expr through gimplify_ctxp->exit_label. */
3121
3122static enum gimplify_status
3123gimplify_exit_expr (tree *expr_p)
3124{
3125 tree cond = TREE_OPERAND (*expr_p, 0);
3126 tree expr;
3127
3128 expr = build_and_jump (label_p: &gimplify_ctxp->exit_label);
3129 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
3130 *expr_p = expr;
3131
3132 return GS_OK;
3133}
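/* E.g. EXIT_EXPR <cond> becomes roughly (a sketch):
 if (cond) goto exit_label;
 where exit_label is the shared gimplify_ctxp->exit_label that
 gimplify_loop_expr emits after the loop. */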
3134
3135/* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
3136 different from its canonical type, wrap the whole thing inside a
3137 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
3138 type.
3139
3140 The canonical type of a COMPONENT_REF is the type of the field being
3141 referenced--unless the field is a bit-field which can be read directly
3142 in a smaller mode, in which case the canonical type is the
3143 sign-appropriate type corresponding to that mode. */
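/* E.g. for (a sketch)
 struct S { signed int b : 8; } s;
 an rvalue read of s.b whose expression type differs from the
 canonical one is rewritten so that the COMPONENT_REF itself has the
 narrower sign-appropriate type, wrapped in a NOP_EXPR back to the
 original type. */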
3144
3145static void
3146canonicalize_component_ref (tree *expr_p)
3147{
3148 tree expr = *expr_p;
3149 tree type;
3150
3151 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
3152
3153 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
3154 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
3155 else
3156 type = TREE_TYPE (TREE_OPERAND (expr, 1));
3157
 3158 /* One could argue that all the stuff below is not necessary for
 3159 the non-bitfield case, and that it should be declared a FE error
 3160 if type adjustment were needed. */
3161 if (TREE_TYPE (expr) != type)
3162 {
3163#ifdef ENABLE_TYPES_CHECKING
3164 tree old_type = TREE_TYPE (expr);
3165#endif
3166 int type_quals;
3167
3168 /* We need to preserve qualifiers and propagate them from
3169 operand 0. */
3170 type_quals = TYPE_QUALS (type)
3171 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
3172 if (TYPE_QUALS (type) != type_quals)
3173 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
3174
3175 /* Set the type of the COMPONENT_REF to the underlying type. */
3176 TREE_TYPE (expr) = type;
3177
3178#ifdef ENABLE_TYPES_CHECKING
3179 /* It is now a FE error, if the conversion from the canonical
3180 type to the original expression type is not useless. */
3181 gcc_assert (useless_type_conversion_p (old_type, type));
3182#endif
3183 }
3184}
3185
3186/* If a NOP conversion is changing a pointer to array of foo to a pointer
3187 to foo, embed that change in the ADDR_EXPR by converting
3188 T array[U];
3189 (T *)&array
3190 ==>
3191 &array[L]
3192 where L is the lower bound. For simplicity, only do this for constant
3193 lower bound.
3194 The constraint is that the type of &array[L] is trivially convertible
3195 to T *. */
3196
3197static void
3198canonicalize_addr_expr (tree *expr_p)
3199{
3200 tree expr = *expr_p;
3201 tree addr_expr = TREE_OPERAND (expr, 0);
3202 tree datype, ddatype, pddatype;
3203
3204 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
3205 if (!POINTER_TYPE_P (TREE_TYPE (expr))
3206 || TREE_CODE (addr_expr) != ADDR_EXPR)
3207 return;
3208
3209 /* The addr_expr type should be a pointer to an array. */
3210 datype = TREE_TYPE (TREE_TYPE (addr_expr));
3211 if (TREE_CODE (datype) != ARRAY_TYPE)
3212 return;
3213
3214 /* The pointer to element type shall be trivially convertible to
3215 the expression pointer type. */
3216 ddatype = TREE_TYPE (datype);
3217 pddatype = build_pointer_type (ddatype);
3218 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
3219 pddatype))
3220 return;
3221
3222 /* The lower bound and element sizes must be constant. */
3223 if (!TYPE_SIZE_UNIT (ddatype)
3224 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
3225 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
3226 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
3227 return;
3228
3229 /* All checks succeeded. Build a new node to merge the cast. */
3230 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
3231 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
3232 NULL_TREE, NULL_TREE);
3233 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
3234
3235 /* We can have stripped a required restrict qualifier above. */
3236 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
3237 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
3238}
3239
3240/* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
3241 underneath as appropriate. */
3242
3243static enum gimplify_status
3244gimplify_conversion (tree *expr_p)
3245{
3246 location_t loc = EXPR_LOCATION (*expr_p);
3247 gcc_assert (CONVERT_EXPR_P (*expr_p));
3248
3249 /* Then strip away all but the outermost conversion. */
3250 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
3251
3252 /* And remove the outermost conversion if it's useless. */
3253 if (tree_ssa_useless_type_conversion (*expr_p))
3254 *expr_p = TREE_OPERAND (*expr_p, 0);
3255
3256 /* If we still have a conversion at the toplevel,
3257 then canonicalize some constructs. */
3258 if (CONVERT_EXPR_P (*expr_p))
3259 {
3260 tree sub = TREE_OPERAND (*expr_p, 0);
3261
3262 /* If a NOP conversion is changing the type of a COMPONENT_REF
3263 expression, then canonicalize its type now in order to expose more
3264 redundant conversions. */
3265 if (TREE_CODE (sub) == COMPONENT_REF)
3266 canonicalize_component_ref (expr_p: &TREE_OPERAND (*expr_p, 0));
3267
3268 /* If a NOP conversion is changing a pointer to array of foo
3269 to a pointer to foo, embed that change in the ADDR_EXPR. */
3270 else if (TREE_CODE (sub) == ADDR_EXPR)
3271 canonicalize_addr_expr (expr_p);
3272 }
3273
3274 /* If we have a conversion to a non-register type force the
3275 use of a VIEW_CONVERT_EXPR instead. */
3276 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
3277 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
3278 TREE_OPERAND (*expr_p, 0));
3279
3280 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
3281 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
3282 TREE_SET_CODE (*expr_p, NOP_EXPR);
3283
3284 return GS_OK;
3285}
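/* E.g. (a sketch): for 'int x', the expression
 (int) (unsigned int) x
 first has the inner sign-changing conversion stripped, and the
 remaining (int) x conversion is useless, so the whole thing
 gimplifies to plain 'x'. */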
3286
3287/* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
3288 DECL_VALUE_EXPR, and it's worth re-examining things. */
3289
3290static enum gimplify_status
3291gimplify_var_or_parm_decl (tree *expr_p)
3292{
3293 tree decl = *expr_p;
3294
3295 /* ??? If this is a local variable, and it has not been seen in any
3296 outer BIND_EXPR, then it's probably the result of a duplicate
3297 declaration, for which we've already issued an error. It would
3298 be really nice if the front end wouldn't leak these at all.
3299 Currently the only known culprit is C++ destructors, as seen
3300 in g++.old-deja/g++.jason/binding.C.
 3301 Another possible culprit is size expressions for variably modified
 3302 types which are lost in the FE or not gimplified correctly. */
3303 if (VAR_P (decl)
3304 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
3305 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
3306 && decl_function_context (decl) == current_function_decl)
3307 {
3308 gcc_assert (seen_error ());
3309 return GS_ERROR;
3310 }
3311
3312 /* When within an OMP context, notice uses of variables. */
3313 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
3314 return GS_ALL_DONE;
3315
3316 /* If the decl is an alias for another expression, substitute it now. */
3317 if (DECL_HAS_VALUE_EXPR_P (decl))
3318 {
3319 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
3320 return GS_OK;
3321 }
3322
3323 return GS_ALL_DONE;
3324}
3325
3326/* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
3327
3328static void
3329recalculate_side_effects (tree t)
3330{
3331 enum tree_code code = TREE_CODE (t);
3332 int len = TREE_OPERAND_LENGTH (t);
3333 int i;
3334
3335 switch (TREE_CODE_CLASS (code))
3336 {
3337 case tcc_expression:
3338 switch (code)
3339 {
3340 case INIT_EXPR:
3341 case MODIFY_EXPR:
3342 case VA_ARG_EXPR:
3343 case PREDECREMENT_EXPR:
3344 case PREINCREMENT_EXPR:
3345 case POSTDECREMENT_EXPR:
3346 case POSTINCREMENT_EXPR:
3347 /* All of these have side-effects, no matter what their
3348 operands are. */
3349 return;
3350
3351 default:
3352 break;
3353 }
3354 /* Fall through. */
3355
3356 case tcc_comparison: /* a comparison expression */
3357 case tcc_unary: /* a unary arithmetic expression */
3358 case tcc_binary: /* a binary arithmetic expression */
3359 case tcc_reference: /* a reference */
3360 case tcc_vl_exp: /* a function call */
3361 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
3362 for (i = 0; i < len; ++i)
3363 {
3364 tree op = TREE_OPERAND (t, i);
3365 if (op && TREE_SIDE_EFFECTS (op))
3366 TREE_SIDE_EFFECTS (t) = 1;
3367 }
3368 break;
3369
3370 case tcc_constant:
3371 /* No side-effects. */
3372 return;
3373
3374 default:
3375 if (code == SSA_NAME)
3376 /* No side-effects. */
3377 return;
3378 gcc_unreachable ();
3379 }
3380}
3381
3382/* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
3383 node *EXPR_P.
3384
3385 compound_lval
3386 : min_lval '[' val ']'
3387 | min_lval '.' ID
3388 | compound_lval '[' val ']'
3389 | compound_lval '.' ID
3390
3391 This is not part of the original SIMPLE definition, which separates
3392 array and member references, but it seems reasonable to handle them
3393 together. Also, this way we don't run into problems with union
3394 aliasing; gcc requires that for accesses through a union to alias, the
3395 union reference must be explicit, which was not always the case when we
3396 were splitting up array and member refs.
3397
3398 PRE_P points to the sequence where side effects that must happen before
3399 *EXPR_P should be stored.
3400
3401 POST_P points to the sequence where side effects that must happen after
3402 *EXPR_P should be stored. */
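/* For example (a sketch; the temporary name is invented), the rvalue
 use
 a[i + 1].f
 is handled as one compound lvalue, with the index gimplified into
 D.1234 = i + 1;
 ... = a[D.1234].f; */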
3403
3404static enum gimplify_status
3405gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3406 fallback_t fallback)
3407{
3408 tree *p;
3409 enum gimplify_status ret = GS_ALL_DONE, tret;
3410 int i;
3411 location_t loc = EXPR_LOCATION (*expr_p);
3412 tree expr = *expr_p;
3413
3414 /* Create a stack of the subexpressions so later we can walk them in
3415 order from inner to outer. */
3416 auto_vec<tree, 10> expr_stack;
3417
3418 /* We can handle anything that get_inner_reference can deal with. */
3419 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
3420 {
3421 restart:
3422 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
3423 if (TREE_CODE (*p) == INDIRECT_REF)
3424 *p = fold_indirect_ref_loc (loc, *p);
3425
3426 if (handled_component_p (t: *p))
3427 ;
3428 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
3429 additional COMPONENT_REFs. */
3430 else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
3431 && gimplify_var_or_parm_decl (expr_p: p) == GS_OK)
3432 goto restart;
3433 else
3434 break;
3435
3436 expr_stack.safe_push (obj: *p);
3437 }
3438
3439 gcc_assert (expr_stack.length ());
3440
3441 /* Now EXPR_STACK is a stack of pointers to all the refs we've
3442 walked through and P points to the innermost expression.
3443
 3444 Java requires that we elaborate nodes in source order. That
3445 means we must gimplify the inner expression followed by each of
3446 the indices, in order. But we can't gimplify the inner
3447 expression until we deal with any variable bounds, sizes, or
3448 positions in order to deal with PLACEHOLDER_EXPRs.
3449
3450 The base expression may contain a statement expression that
3451 has declarations used in size expressions, so has to be
3452 gimplified before gimplifying the size expressions.
3453
3454 So we do this in three steps. First we deal with variable
3455 bounds, sizes, and positions, then we gimplify the base and
3456 ensure it is memory if needed, then we deal with the annotations
3457 for any variables in the components and any indices, from left
3458 to right. */

  bool need_non_reg = false;
  for (i = expr_stack.length () - 1; i >= 0; i--)
    {
      tree t = expr_stack[i];

      if (error_operand_p (TREE_OPERAND (t, 0)))
        return GS_ERROR;

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
        {
          /* Deal with the low bound and element type size and put them into
             the ARRAY_REF.  If these values are set, they have already been
             gimplified.  */
          if (TREE_OPERAND (t, 2) == NULL_TREE)
            {
              tree low = unshare_expr (array_ref_low_bound (t));
              if (!is_gimple_min_invariant (low))
                TREE_OPERAND (t, 2) = low;
            }

          if (TREE_OPERAND (t, 3) == NULL_TREE)
            {
              tree elmt_size = array_ref_element_size (t);
              if (!is_gimple_min_invariant (elmt_size))
                {
                  elmt_size = unshare_expr (elmt_size);
                  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
                  tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));

                  /* Divide the element size by the alignment of the element
                     type (above).  */
                  elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR,
                                              elmt_size, factor);

                  TREE_OPERAND (t, 3) = elmt_size;
                }
            }
          need_non_reg = true;
        }
      else if (TREE_CODE (t) == COMPONENT_REF)
        {
          /* Set the field offset into T and gimplify it.  */
          if (TREE_OPERAND (t, 2) == NULL_TREE)
            {
              tree offset = component_ref_field_offset (t);
              if (!is_gimple_min_invariant (offset))
                {
                  offset = unshare_expr (offset);
                  tree field = TREE_OPERAND (t, 1);
                  tree factor
                    = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);

                  /* Divide the offset by its alignment.  */
                  offset = size_binop_loc (loc, EXACT_DIV_EXPR,
                                           offset, factor);

                  TREE_OPERAND (t, 2) = offset;
                }
            }
          need_non_reg = true;
        }
      else if (!is_gimple_reg_type (TREE_TYPE (t)))
        /* When the result of an operation, in particular a VIEW_CONVERT_EXPR,
           is a non-register type then require the base object to be a
           non-register as well.  */
        need_non_reg = true;
    }

  /* Step 2 is to gimplify the base expression.  Make sure lvalue is set
     so as to match the min_lval predicate.  Failure to do so may result
     in the creation of large aggregate temporaries.  */
  tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
                        fallback | fb_lvalue);
  ret = MIN (ret, tret);
  if (ret == GS_ERROR)
    return GS_ERROR;

  /* Step 2a: if we have component references we do not support on
     registers then make sure the base isn't a register.  Of course
     we can only do so if an rvalue is OK.  */
  if (need_non_reg && (fallback & fb_rvalue))
    prepare_gimple_addressable (p, pre_p);

  /* Step 3: gimplify size expressions and the indices and operands of
     ARRAY_REF.  During this loop we also remove any useless conversions.
     If we operate on a register also make sure to properly gimplify
     to individual operations.  */

  bool reg_operations = is_gimple_reg (*p);
  for (; expr_stack.length () > 0; )
    {
      tree t = expr_stack.pop ();

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
        {
          gcc_assert (!reg_operations);

          /* Gimplify the low bound and element type size.  */
          tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
                                is_gimple_reg, fb_rvalue);
          ret = MIN (ret, tret);

          tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
                                is_gimple_reg, fb_rvalue);
          ret = MIN (ret, tret);

          /* Gimplify the dimension.  */
          tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
                                is_gimple_val, fb_rvalue);
          ret = MIN (ret, tret);
        }
      else if (TREE_CODE (t) == COMPONENT_REF)
        {
          gcc_assert (!reg_operations);

          tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
                                is_gimple_reg, fb_rvalue);
          ret = MIN (ret, tret);
        }
      else if (reg_operations)
        {
          tret = gimplify_expr (&TREE_OPERAND (t, 0), pre_p, post_p,
                                is_gimple_val, fb_rvalue);
          ret = MIN (ret, tret);
        }

      STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));

      /* The innermost expression P may have originally had
         TREE_SIDE_EFFECTS set which would have caused all the outer
         expressions in *EXPR_P leading to P to also have had
         TREE_SIDE_EFFECTS set.  */
      recalculate_side_effects (t);
    }

  /* If the outermost expression is a COMPONENT_REF, canonicalize its type.  */
  if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
    canonicalize_component_ref (expr_p);

  expr_stack.release ();

  gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);

  return ret;
}

/* Gimplify the self modifying expression pointed to by EXPR_P
   (++, --, +=, -=).

   PRE_P points to the list where side effects that must happen before
     *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
     *EXPR_P should be stored.

   WANT_VALUE is nonzero iff we want to use the value of this expression
     in another expression.

   ARITH_TYPE is the type the computation should be performed in.  */

enum gimplify_status
gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
                        bool want_value, tree arith_type)
{
  enum tree_code code;
  tree lhs, lvalue, rhs, t1;
  gimple_seq post = NULL, *orig_post_p = post_p;
  bool postfix;
  enum tree_code arith_code;
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  code = TREE_CODE (*expr_p);

  gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
              || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);

  /* Prefix or postfix?  */
  if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
    /* Faster to treat as prefix if result is not used.  */
    postfix = want_value;
  else
    postfix = false;

  /* For postfix, make sure the inner expression's post side effects
     are executed after side effects from this expression.  */
  if (postfix)
    post_p = &post;

  /* Add or subtract?  */
  if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
    arith_code = PLUS_EXPR;
  else
    arith_code = MINUS_EXPR;

  /* Gimplify the LHS into a GIMPLE lvalue.  */
  lvalue = TREE_OPERAND (*expr_p, 0);
  ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Extract the operands to the arithmetic operation.  */
  lhs = lvalue;
  rhs = TREE_OPERAND (*expr_p, 1);

  /* For postfix operators, we evaluate the LHS to an rvalue and then use
     that as the result value and in the postqueue operation.  */
  if (postfix)
    {
      ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
      if (ret == GS_ERROR)
        return ret;

      lhs = get_initialized_tmp_var (lhs, pre_p);
    }

  /* For pointer increment and decrement, use POINTER_PLUS_EXPR.  */
  if (POINTER_TYPE_P (TREE_TYPE (lhs)))
    {
      rhs = convert_to_ptrofftype_loc (loc, rhs);
      if (arith_code == MINUS_EXPR)
        rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
      t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
    }
  else
    t1 = fold_convert (TREE_TYPE (*expr_p),
                       fold_build2 (arith_code, arith_type,
                                    fold_convert (arith_type, lhs),
                                    fold_convert (arith_type, rhs)));

  if (postfix)
    {
      gimplify_assign (lvalue, t1, pre_p);
      gimplify_seq_add_seq (orig_post_p, post);
      *expr_p = lhs;
      return GS_ALL_DONE;
    }
  else
    {
      *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
      return GS_OK;
    }
}
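
/* For illustration only (a hedged sketch): given the C statement

     b = a++;

   WANT_VALUE is true, so the postfix case above produces roughly

     a.0 = a;        <- the LHS saved to a temporary
     a = a.0 + 1;    <- the update, queued in PRE_P
     b = a.0;        <- *EXPR_P replaced by the saved value

   whereas `++a;` with the result unused is simply rebuilt as the
   MODIFY_EXPR `a = a + 1;`.  */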

/* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR.  */

static void
maybe_with_size_expr (tree *expr_p)
{
  tree expr = *expr_p;
  tree type = TREE_TYPE (expr);
  tree size;

  /* If we've already wrapped this or the type is error_mark_node, we can't do
     anything.  */
  if (TREE_CODE (expr) == WITH_SIZE_EXPR
      || type == error_mark_node)
    return;

  /* If the size isn't known or is a constant, we have nothing to do.  */
  size = TYPE_SIZE_UNIT (type);
  if (!size || poly_int_tree_p (size))
    return;

  /* Otherwise, make a WITH_SIZE_EXPR.  */
  size = unshare_expr (size);
  size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
  *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
}
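
/* For illustration only (a sketch; front ends such as Ada produce such
   values): if VAL has a variable-size type whose TYPE_SIZE_UNIT is
   `n * 4`, an assignment `dest = val` is rewritten here as

     dest = WITH_SIZE_EXPR <val, n * 4>;

   so that later stages still know the runtime size of the value; see
   gimplify_modify_expr_to_memcpy below for how such an assignment is
   ultimately emitted.  */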

/* Helper for gimplify_call_expr.  Gimplify a single argument *ARG_P.
   Store any side-effects in PRE_P.  CALL_LOCATION is the location of
   the CALL_EXPR.  If ALLOW_SSA is set the actual parameter may be
   gimplified to an SSA name.  */

enum gimplify_status
gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
              bool allow_ssa)
{
  bool (*test) (tree);
  fallback_t fb;

  /* In general, we allow lvalues for function arguments to avoid
     extra overhead of copying large aggregates out of even larger
     aggregates into temporaries only to copy the temporaries to
     the argument list.  Make optimizers happy by pulling out to
     temporaries those types that fit in registers.  */
  if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
    test = is_gimple_val, fb = fb_rvalue;
  else
    {
      test = is_gimple_lvalue, fb = fb_either;
      /* Also strip a TARGET_EXPR that would force an extra copy.  */
      if (TREE_CODE (*arg_p) == TARGET_EXPR)
        {
          tree init = TARGET_EXPR_INITIAL (*arg_p);
          if (init
              && !VOID_TYPE_P (TREE_TYPE (init)))
            *arg_p = init;
        }
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (arg_p);

  /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c.  */
  /* Make sure arguments have the same location as the function call
     itself.  */
  protected_set_expr_location (*arg_p, call_location);

  /* There is a sequence point before a function call.  Side effects in
     the argument list must occur before the actual call.  So, when
     gimplifying arguments, force gimplify_expr to use an internal
     post queue which is then appended to the end of PRE_P.  */
  return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
}
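
/* For illustration only (a sketch; `use' is a hypothetical callee): for

     struct big { char buf[256]; };
     extern void use (struct big, int);
     ...
     use (p->b, x + 1);

   the aggregate argument p->b is left as an lvalue, so the call reads it
   in place instead of copying 256 bytes through a temporary, while the
   scalar x + 1 is pulled out to a register temporary:

     D.1 = x + 1;
     use (p->b, D.1);  */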

/* Don't fold inside offloading or taskreg regions: it can break code by
   adding decl references that weren't in the source.  We'll do it during
   omplower pass instead.  */

static bool
maybe_fold_stmt (gimple_stmt_iterator *gsi)
{
  struct gimplify_omp_ctx *ctx;
  for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
    if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
      return false;
    else if ((ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
      return false;
  /* Delay folding of builtins until the IL is in consistent state
     so the diagnostic machinery can do a better job.  */
  if (gimple_call_builtin_p (gsi_stmt (*gsi)))
    return false;
  return fold_stmt (gsi);
}

/* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
   WANT_VALUE is true if the result of the call is desired.  */

static enum gimplify_status
gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
{
  tree fndecl, parms, p, fnptrtype;
  enum gimplify_status ret;
  int i, nargs;
  gcall *call;
  bool builtin_va_start_p = false;
  location_t loc = EXPR_LOCATION (*expr_p);

  gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);

  /* For reliable diagnostics during inlining, it is necessary that
     every call_expr be annotated with file and line.  */
  if (! EXPR_HAS_LOCATION (*expr_p))
    SET_EXPR_LOCATION (*expr_p, input_location);

  /* Gimplify internal functions created in the FEs.  */
  if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
    {
      if (want_value)
        return GS_ALL_DONE;

      nargs = call_expr_nargs (*expr_p);
      enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
      auto_vec<tree> vargs (nargs);

      if (ifn == IFN_ASSUME)
        {
          if (simple_condition_p (CALL_EXPR_ARG (*expr_p, 0)))
            {
              /* If the [[assume (cond)]]; condition is simple
                 enough and can be evaluated unconditionally
                 without side-effects, expand it as
                 if (!cond) __builtin_unreachable ();  */
              tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
              *expr_p = build3 (COND_EXPR, void_type_node,
                                CALL_EXPR_ARG (*expr_p, 0), void_node,
                                build_call_expr_loc (EXPR_LOCATION (*expr_p),
                                                     fndecl, 0));
              return GS_OK;
            }
          /* If not optimizing, ignore the assumptions.  */
          if (!optimize || seen_error ())
            {
              *expr_p = NULL_TREE;
              return GS_ALL_DONE;
            }
          /* Temporarily, until gimple lowering, transform
               .ASSUME (cond);
             into:
               [[assume (guard)]]
               {
                 guard = cond;
               }
             such that gimple lowering can outline the condition into
             a separate function easily.  */
          tree guard = create_tmp_var (boolean_type_node);
          *expr_p = build2 (MODIFY_EXPR, void_type_node, guard,
                            gimple_boolify (CALL_EXPR_ARG (*expr_p, 0)));
          *expr_p = build3 (BIND_EXPR, void_type_node, NULL, *expr_p, NULL);
          push_gimplify_context ();
          gimple_seq body = NULL;
          gimple *g = gimplify_and_return_first (*expr_p, &body);
          pop_gimplify_context (g);
          g = gimple_build_assume (guard, body);
          gimple_set_location (g, loc);
          gimplify_seq_add_stmt (pre_p, g);
          *expr_p = NULL_TREE;
          return GS_ALL_DONE;
        }

      for (i = 0; i < nargs; i++)
        {
          gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
                        EXPR_LOCATION (*expr_p));
          vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
        }

      gcall *call = gimple_build_call_internal_vec (ifn, vargs);
      gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
      gimplify_seq_add_stmt (pre_p, call);
      return GS_ALL_DONE;
    }

  /* This may be a call to a builtin function.

     Builtin function calls may be transformed into different
     (and more efficient) builtin function calls under certain
     circumstances.  Unfortunately, gimplification can muck things
     up enough that the builtin expanders are not aware that certain
     transformations are still valid.

     So we attempt transformation/gimplification of the call before
     we gimplify the CALL_EXPR.  At this time we do not manage to
     transform all calls in the same manner as the expanders do, but
     we do transform most of them.  */
  fndecl = get_callee_fndecl (*expr_p);
  if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_BUILT_IN_ALLOCA:
        /* If the call has been built for a variable-sized object, then we
           want to restore the stack level when the enclosing BIND_EXPR is
           exited to reclaim the allocated space; otherwise, we precisely
           need to do the opposite and preserve the latest stack level.  */
        if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
          gimplify_ctxp->save_stack = true;
        else
          gimplify_ctxp->keep_stack = true;
        break;

      case BUILT_IN_VA_START:
        {
          builtin_va_start_p = true;
          if (call_expr_nargs (*expr_p) < 2)
            {
              error ("too few arguments to function %<va_start%>");
              *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
              return GS_OK;
            }

          if (fold_builtin_next_arg (*expr_p, true))
            {
              *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
              return GS_OK;
            }
          break;
        }

      case BUILT_IN_EH_RETURN:
        cfun->calls_eh_return = true;
        break;

      case BUILT_IN_CLEAR_PADDING:
        if (call_expr_nargs (*expr_p) == 1)
          {
            /* Remember the original type of the argument in an internal
               dummy second argument, as in GIMPLE pointer conversions are
               useless.  Also mark this call as not for automatic
               initialization in the internal dummy third argument.  */
            p = CALL_EXPR_ARG (*expr_p, 0);
            *expr_p
              = build_call_expr_loc (EXPR_LOCATION (*expr_p), fndecl, 2, p,
                                     build_zero_cst (TREE_TYPE (p)));
            return GS_OK;
          }
        break;

      default:
        ;
      }
  if (fndecl && fndecl_built_in_p (fndecl))
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
      if (new_tree && new_tree != *expr_p)
        {
          /* There was a transformation of this call which computes the
             same value, but in a more efficient way.  Return and try
             again.  */
          *expr_p = new_tree;
          return GS_OK;
        }
    }

  /* Remember the original function pointer type.  */
  fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));

  if (flag_openmp
      && fndecl
      && cfun
      && (cfun->curr_properties & PROP_gimple_any) == 0)
    {
      tree variant = omp_resolve_declare_variant (fndecl);
      if (variant != fndecl)
        CALL_EXPR_FN (*expr_p) = build1 (ADDR_EXPR, fnptrtype, variant);
    }

  /* There is a sequence point before the call, so any side effects in
     the calling expression must occur before the actual call.  Force
     gimplify_expr to use an internal post queue.  */
  ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
                       is_gimple_call_addr, fb_rvalue);

  if (ret == GS_ERROR)
    return GS_ERROR;

  nargs = call_expr_nargs (*expr_p);

  /* Get argument types for verification.  */
  fndecl = get_callee_fndecl (*expr_p);
  parms = NULL_TREE;
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else
    parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));

  if (fndecl && DECL_ARGUMENTS (fndecl))
    p = DECL_ARGUMENTS (fndecl);
  else if (parms)
    p = parms;
  else
    p = NULL_TREE;
  for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
    ;

  /* If the last argument is __builtin_va_arg_pack () and it is not
     passed as a named argument, decrease the number of CALL_EXPR
     arguments and set instead the CALL_EXPR_VA_ARG_PACK flag.  */
  if (!p
      && i < nargs
      && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
    {
      tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
      tree last_arg_fndecl = get_callee_fndecl (last_arg);

      if (last_arg_fndecl
          && fndecl_built_in_p (last_arg_fndecl, BUILT_IN_VA_ARG_PACK))
        {
          tree call = *expr_p;

          --nargs;
          *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
                                          CALL_EXPR_FN (call),
                                          nargs, CALL_EXPR_ARGP (call));

          /* Copy all CALL_EXPR flags, location and block, except
             CALL_EXPR_VA_ARG_PACK flag.  */
          CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
          CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
          CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
            = CALL_EXPR_RETURN_SLOT_OPT (call);
          CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
          SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));

          /* Set CALL_EXPR_VA_ARG_PACK.  */
          CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
        }
    }

  /* If the call returns twice then after building the CFG the call
     argument computations will no longer dominate the call because
     we add an abnormal incoming edge to the call.  So do not use SSA
     vars there.  */
  bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;

  /* Gimplify the function arguments.  */
  if (nargs > 0)
    {
      for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
           PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
           PUSH_ARGS_REVERSED ? i-- : i++)
        {
          enum gimplify_status t;

          /* Avoid gimplifying the second argument to va_start, which needs to
             be the plain PARM_DECL.  */
          if ((i != 1) || !builtin_va_start_p)
            {
              t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
                                EXPR_LOCATION (*expr_p), ! returns_twice);

              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
    }

  /* Gimplify the static chain.  */
  if (CALL_EXPR_STATIC_CHAIN (*expr_p))
    {
      if (fndecl && !DECL_STATIC_CHAIN (fndecl))
        CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
      else
        {
          enum gimplify_status t;
          t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
                            EXPR_LOCATION (*expr_p), ! returns_twice);
          if (t == GS_ERROR)
            ret = GS_ERROR;
        }
    }

  /* Verify the function result.  */
  if (want_value && fndecl
      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
    {
      error_at (loc, "using result of function returning %<void%>");
      ret = GS_ERROR;
    }

  /* Try this again in case gimplification exposed something.  */
  if (ret != GS_ERROR)
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);

      if (new_tree && new_tree != *expr_p)
        {
          /* There was a transformation of this call which computes the
             same value, but in a more efficient way.  Return and try
             again.  */
          *expr_p = new_tree;
          return GS_OK;
        }
    }
  else
    {
      *expr_p = error_mark_node;
      return GS_ERROR;
    }

  /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS
     on the call.  This allows us to eliminate redundant or useless
     calls to "const" functions.  */
  if (TREE_CODE (*expr_p) == CALL_EXPR)
    {
      int flags = call_expr_flags (*expr_p);
      if (flags & (ECF_CONST | ECF_PURE)
          /* An infinite loop is considered a side effect.  */
          && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
        TREE_SIDE_EFFECTS (*expr_p) = 0;
    }

  /* If the value is not needed by the caller, emit a new GIMPLE_CALL
     and clear *EXPR_P.  Otherwise, leave *EXPR_P in its gimplified
     form and delegate the creation of a GIMPLE_CALL to
     gimplify_modify_expr.  This is always possible because when
     WANT_VALUE is true, the caller wants the result of this call into
     a temporary, which means that we will emit an INIT_EXPR in
     internal_get_tmp_var which will then be handled by
     gimplify_modify_expr.  */
  if (!want_value)
    {
      /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
         have to do is replicate it as a GIMPLE_CALL tuple.  */
      gimple_stmt_iterator gsi;
      call = gimple_build_call_from_tree (*expr_p, fnptrtype);
      notice_special_calls (call);
      gimplify_seq_add_stmt (pre_p, call);
      gsi = gsi_last (*pre_p);
      maybe_fold_stmt (&gsi);
      *expr_p = NULL_TREE;
    }
  else
    /* Remember the original function type.  */
    CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
                                     CALL_EXPR_FN (*expr_p));

  return ret;
}

/* Handle shortcut semantics in the predicate operand of a COND_EXPR by
   rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.

   TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
   condition is true or false, respectively.  If null, we should generate
   our own to skip over the evaluation of this specific expression.

   LOCUS is the source location of the COND_EXPR.

   CONDITION_UID is a discriminator tag used by condition coverage to map
   basic conditions to their corresponding full Boolean function.

   This function is the tree equivalent of do_jump.

   shortcut_cond_r should only be called by shortcut_cond_expr.  */

static tree
shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
                 location_t locus, unsigned condition_uid)
{
  tree local_label = NULL_TREE;
  tree t, expr = NULL;

  /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
     retain the shortcut semantics.  Just insert the gotos here;
     shortcut_cond_expr will append the real blocks later.  */
  if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a && b) into

         if (a); else goto no;
         if (b) goto yes; else goto no;
         (no:)  */

      if (false_label_p == NULL)
        false_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus,
                           condition_uid);
      append_to_statement_list (t, &expr);

      /* Set the source location of the && on the second 'if'.  */
      new_locus = rexpr_location (pred, locus);
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
                           new_locus, condition_uid);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a || b) into

         if (a) goto yes;
         if (b) goto yes; else goto no;
         (yes:)  */

      if (true_label_p == NULL)
        true_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus,
                           condition_uid);
      append_to_statement_list (t, &expr);

      /* Set the source location of the || on the second 'if'.  */
      new_locus = rexpr_location (pred, locus);
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
                           new_locus, condition_uid);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == COND_EXPR
           && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
           && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
    {
      location_t new_locus;

      /* As long as we're messing with gotos, turn if (a ? b : c) into
         if (a)
           if (b) goto yes; else goto no;
         else
           if (c) goto yes; else goto no;

         Don't do this if one of the arms has void type, which can happen
         in C++ when the arm is throw.  */

      /* Keep the original source location on the first 'if'.  Set the source
         location of the ? on the second 'if'.  */
      new_locus = rexpr_location (pred, locus);
      expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
                     shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
                                      false_label_p, locus, condition_uid),
                     shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
                                      false_label_p, new_locus,
                                      condition_uid));
      SET_EXPR_UID (expr, condition_uid);
    }
  else
    {
      expr = build3 (COND_EXPR, void_type_node, pred,
                     build_and_jump (true_label_p),
                     build_and_jump (false_label_p));
      SET_EXPR_LOCATION (expr, locus);
      SET_EXPR_UID (expr, condition_uid);
    }

  if (local_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, local_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
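
/* For illustration only (a sketch of the recursion above): with TRUE_LABEL_P
   and FALSE_LABEL_P pointing at labels `yes' and `no', the predicate

     a && (b || c)

   is rewritten into the goto chain

     if (a); else goto no;
     if (b) goto yes;
     if (c) goto yes; else goto no;

   with all three resulting COND_EXPRs tagged with the same
   CONDITION_UID.  */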

/* If EXPR is a GOTO_EXPR, return it.  If it is a STATEMENT_LIST, skip
   any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
   statement, if it is the last one.  Otherwise, return NULL.  */

static tree
find_goto (tree expr)
{
  if (!expr)
    return NULL_TREE;

  if (TREE_CODE (expr) == GOTO_EXPR)
    return expr;

  if (TREE_CODE (expr) != STATEMENT_LIST)
    return NULL_TREE;

  tree_stmt_iterator i = tsi_start (expr);

  while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
    tsi_next (&i);

  if (!tsi_one_before_end_p (i))
    return NULL_TREE;

  return find_goto (tsi_stmt (i));
}

/* Same as find_goto, except that it returns NULL if the destination
   is not a LABEL_DECL.  */

static inline tree
find_goto_label (tree expr)
{
  tree dest = find_goto (expr);
  if (dest && TREE_CODE (GOTO_DESTINATION (dest)) == LABEL_DECL)
    return dest;
  return NULL_TREE;
}


/* Given a multi-term condition (ANDIF, ORIF), walk the predicate PRED and tag
   every basic condition with CONDITION_UID.  Two basic conditions share the
   CONDITION_UID discriminator when they belong to the same predicate, which
   condition coverage uses.  Doing this as an explicit step makes for a
   simpler implementation than weaving it into the splitting code, as the
   splitting code eventually calls the entry point gimplify_expr, which would
   complicate the bookkeeping.  */
static void
tag_shortcut_cond (tree pred, unsigned condition_uid)
{
  if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR
      || TREE_CODE (pred) == TRUTH_ORIF_EXPR)
    {
      tree fst = TREE_OPERAND (pred, 0);
      tree lst = TREE_OPERAND (pred, 1);

      if (TREE_CODE (fst) == TRUTH_ANDIF_EXPR
          || TREE_CODE (fst) == TRUTH_ORIF_EXPR)
        tag_shortcut_cond (fst, condition_uid);
      else if (TREE_CODE (fst) == COND_EXPR)
        SET_EXPR_UID (fst, condition_uid);

      if (TREE_CODE (lst) == TRUTH_ANDIF_EXPR
          || TREE_CODE (lst) == TRUTH_ORIF_EXPR)
        tag_shortcut_cond (lst, condition_uid);
      else if (TREE_CODE (lst) == COND_EXPR)
        SET_EXPR_UID (lst, condition_uid);
    }
}

/* Given a conditional expression EXPR with short-circuit boolean
   predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
   predicate apart into the equivalent sequence of conditionals.
   CONDITION_UID is the tag/discriminator for this EXPR; all basic
   conditions in the expression will be given the same CONDITION_UID.  */
static tree
shortcut_cond_expr (tree expr, unsigned condition_uid)
{
  tree pred = TREE_OPERAND (expr, 0);
  tree then_ = TREE_OPERAND (expr, 1);
  tree else_ = TREE_OPERAND (expr, 2);
  tree true_label, false_label, end_label, t;
  tree *true_label_p;
  tree *false_label_p;
  bool emit_end, emit_false, jump_over_else;
  bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
  bool else_se = else_ && TREE_SIDE_EFFECTS (else_);

  tag_shortcut_cond (pred, condition_uid);

  /* First do simple transformations.  */
  if (!else_se)
    {
      /* If there is no 'else', turn
           if (a && b) then c
         into
           if (a) if (b) then c.  */
      while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
        {
          /* Keep the original source location on the first 'if'.  */
          location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
          TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
          /* Set the source location of the && on the second 'if'.  */
          if (rexpr_has_location (pred))
            SET_EXPR_LOCATION (expr, rexpr_location (pred));
          then_ = shortcut_cond_expr (expr, condition_uid);
          then_se = then_ && TREE_SIDE_EFFECTS (then_);
          pred = TREE_OPERAND (pred, 0);
          expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
          SET_EXPR_LOCATION (expr, locus);
        }
    }

  if (!then_se)
    {
      /* If there is no 'then', turn
           if (a || b); else d
         into
           if (a); else if (b); else d.  */
      while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
        {
          /* Keep the original source location on the first 'if'.  */
          location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
          TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
          /* Set the source location of the || on the second 'if'.  */
          if (rexpr_has_location (pred))
            SET_EXPR_LOCATION (expr, rexpr_location (pred));
          else_ = shortcut_cond_expr (expr, condition_uid);
          else_se = else_ && TREE_SIDE_EFFECTS (else_);
          pred = TREE_OPERAND (pred, 0);
          expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
          SET_EXPR_LOCATION (expr, locus);
        }
    }

  /* The expr tree should also have the expression id set.  */
  SET_EXPR_UID (expr, condition_uid);

  /* If we're done, great.  */
  if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
      && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
    return expr;

  /* Otherwise we need to mess with gotos.  Change
       if (a) c; else d;
     to
       if (a); else goto no;
       c; goto end;
       no: d; end:
     and recursively gimplify the condition.  */

  true_label = false_label = end_label = NULL_TREE;

  /* If our arms just jump somewhere, hijack those labels so we don't
     generate jumps to jumps.  */

  if (tree then_goto = find_goto_label (then_))
    {
      true_label = GOTO_DESTINATION (then_goto);
      then_ = NULL;
      then_se = false;
    }

  if (tree else_goto = find_goto_label (else_))
    {
      false_label = GOTO_DESTINATION (else_goto);
      else_ = NULL;
      else_se = false;
    }

  /* If we aren't hijacking a label for the 'then' branch, it falls
     through.  */
  if (true_label)
    true_label_p = &true_label;
  else
    true_label_p = NULL;

  /* The 'else' branch also needs a label if it contains interesting code.  */
  if (false_label || else_se)
    false_label_p = &false_label;
  else
    false_label_p = NULL;

  /* If there was nothing else in our arms, just forward the label(s).  */
  if (!then_se && !else_se)
    return shortcut_cond_r (pred, true_label_p, false_label_p,
                            EXPR_LOC_OR_LOC (expr, input_location),
                            condition_uid);

  /* If our last subexpression already has a terminal label, reuse it.  */
  if (else_se)
    t = expr_last (else_);
  else if (then_se)
    t = expr_last (then_);
  else
    t = NULL;
  if (t && TREE_CODE (t) == LABEL_EXPR)
    end_label = LABEL_EXPR_LABEL (t);

  /* If we don't care about jumping to the 'else' branch, jump to the end
     if the condition is false.  */
  if (!false_label_p)
    false_label_p = &end_label;

  /* We only want to emit these labels if we aren't hijacking them.  */
  emit_end = (end_label == NULL_TREE);
  emit_false = (false_label == NULL_TREE);

  /* We only emit the jump over the else clause if we have to--if the
     then clause may fall through.  Otherwise we can wind up with a
     useless jump and a useless label at the end of gimplified code,
     which will cause us to think that this conditional as a whole
     falls through even if it doesn't.  If we then inline a function
     which ends with such a condition, that can cause us to issue an
     inappropriate warning about control reaching the end of a
     non-void function.  */
  jump_over_else = block_may_fallthru (then_);

  pred = shortcut_cond_r (pred, true_label_p, false_label_p,
                          EXPR_LOC_OR_LOC (expr, input_location),
                          condition_uid);

  expr = NULL;
  append_to_statement_list (pred, &expr);

  append_to_statement_list (then_, &expr);
  if (else_se)
    {
      if (jump_over_else)
        {
          tree last = expr_last (expr);
          t = build_and_jump (&end_label);
          if (rexpr_has_location (last))
            SET_EXPR_LOCATION (t, rexpr_location (last));
          append_to_statement_list (t, &expr);
        }
      if (emit_false)
        {
          t = build1 (LABEL_EXPR, void_type_node, false_label);
          append_to_statement_list (t, &expr);
        }
      append_to_statement_list (else_, &expr);
    }
  if (emit_end && end_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, end_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
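
/* For illustration only (a sketch): the statement

     if (a && b)
       c ();
     else
       d ();

   leaves this function as, roughly,

     if (a); else goto no;
     if (b); else goto no;
     c (); goto end;
     no: d ();
     end:

   after the splitting above and the goto chain built by shortcut_cond_r.  */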

/* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE.  */

tree
gimple_boolify (tree expr)
{
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);

  if (TREE_CODE (expr) == NE_EXPR
      && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
      && integer_zerop (TREE_OPERAND (expr, 1)))
    {
      tree call = TREE_OPERAND (expr, 0);
      tree fn = get_callee_fndecl (call);

      /* For __builtin_expect ((long) (x), y) recurse into x as well
         if x is truth_value_p.  */
      if (fn
          && fndecl_built_in_p (fn, BUILT_IN_EXPECT)
          && call_expr_nargs (call) == 2)
        {
          tree arg = CALL_EXPR_ARG (call, 0);
          if (arg)
            {
              if (TREE_CODE (arg) == NOP_EXPR
                  && TREE_TYPE (arg) == TREE_TYPE (call))
                arg = TREE_OPERAND (arg, 0);
              if (truth_value_p (TREE_CODE (arg)))
                {
                  arg = gimple_boolify (arg);
                  CALL_EXPR_ARG (call, 0)
                    = fold_convert_loc (loc, TREE_TYPE (call), arg);
                }
            }
        }
    }

  switch (TREE_CODE (expr))
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Also boolify the arguments of truth exprs.  */
      TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
      /* FALLTHRU */

    case TRUTH_NOT_EXPR:
      TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

      /* These expressions always produce boolean results.  */
      if (TREE_CODE (type) != BOOLEAN_TYPE)
        TREE_TYPE (expr) = boolean_type_node;
      return expr;

    case ANNOTATE_EXPR:
      switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
        {
        case annot_expr_ivdep_kind:
        case annot_expr_unroll_kind:
        case annot_expr_no_vector_kind:
        case annot_expr_vector_kind:
        case annot_expr_parallel_kind:
        case annot_expr_maybe_infinite_kind:
          TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
          if (TREE_CODE (type) != BOOLEAN_TYPE)
            TREE_TYPE (expr) = boolean_type_node;
          return expr;
        default:
          gcc_unreachable ();
        }

    default:
      if (COMPARISON_CLASS_P (expr))
        {
          /* These expressions always produce boolean results.  */
          if (TREE_CODE (type) != BOOLEAN_TYPE)
            TREE_TYPE (expr) = boolean_type_node;
          return expr;
        }
      /* Other expressions that get here must have boolean values, but
         might need to be converted to the appropriate mode.  */
      if (TREE_CODE (type) == BOOLEAN_TYPE)
        return expr;
      return fold_convert_loc (loc, boolean_type_node, expr);
    }
}
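
/* For illustration only (a sketch): for an int-valued condition such as

     int x = ...;
     if (x & 4) ...

   an `(x & 4) != 0` comparison merely has its type rewritten to
   BOOLEAN_TYPE, while a bare `x & 4` falls through to the default case
   and is wrapped in a conversion to boolean_type_node.  Truth operators
   like `a && b` get their operands boolified recursively.  */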

/* Given a conditional expression *EXPR_P without side effects, gimplify
   its operands.  New statements are inserted into PRE_P.  */

static enum gimplify_status
gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p, cond;
  enum gimplify_status ret, tret;
  enum tree_code code;

  cond = gimple_boolify (COND_EXPR_COND (expr));

  /* We need to handle && and || specially, as their gimplification
     creates pure cond_exprs, thus leading to an infinite cycle otherwise.  */
  code = TREE_CODE (cond);
  if (code == TRUTH_ANDIF_EXPR)
    TREE_SET_CODE (cond, TRUTH_AND_EXPR);
  else if (code == TRUTH_ORIF_EXPR)
    TREE_SET_CODE (cond, TRUTH_OR_EXPR);
  ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_val, fb_rvalue);
  COND_EXPR_COND (*expr_p) = cond;

  tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
                        is_gimple_val, fb_rvalue);
  ret = MIN (ret, tret);
  tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
                        is_gimple_val, fb_rvalue);

  return MIN (ret, tret);
}
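
/* For illustration only (a sketch): when both arms are side-effect free
   and cannot trap, e.g.

     x = p ? a + 1 : b;

   the COND_EXPR is kept as an rvalue and only its operands are flattened:

     D.1 = a + 1;
     x = p ? D.1 : b;

   Note the non-short-circuit rewrite above: a `p && q` condition becomes
   the eager TRUTH_AND_EXPR, which is only valid here because both
   operands are known to be evaluable unconditionally.  */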

/* Return true if evaluating EXPR could trap.
   EXPR is GENERIC, while tree_could_trap_p can be called
   only on GIMPLE.  */

bool
generic_expr_could_trap_p (tree expr)
{
  unsigned i, n;

  if (!expr || is_gimple_val (expr))
    return false;

  if (!EXPR_P (expr) || tree_could_trap_p (expr))
    return true;

  n = TREE_OPERAND_LENGTH (expr);
  for (i = 0; i < n; i++)
    if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
      return true;

  return false;
}

/* Associate the condition STMT with the discriminator UID.  STMTs that are
   broken down with ANDIF/ORIF from the same Boolean expression should be
   given the same UID; 'if (a && b && c) { if (d || e) ... } ...' should
   yield { a: 1, b: 1, c: 1, d: 2, e: 2 } when gimplification is done.  This
   is used for condition coverage.  */
static void
gimple_associate_condition_with_expr (struct function *fn, gcond *stmt,
                                      unsigned uid)
{
  if (!condition_coverage_flag)
    return;

  if (!fn->cond_uids)
    fn->cond_uids = new hash_map <gcond*, unsigned> ();

  fn->cond_uids->put (stmt, uid);
}

/* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
   into

   if (p)                       if (p)
     t1 = a;                      a;
   else              or        else
     t1 = b;                      b;
   t1;

   The second form is used when *EXPR_P is of type void.

   PRE_P points to the list where side effects that must happen before
     *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
{
  tree expr = *expr_p;
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);
  tree tmp, arm1, arm2;
  enum gimplify_status ret;
  tree label_true, label_false, label_cont;
  bool have_then_clause_p, have_else_clause_p;
  gcond *cond_stmt;
  enum tree_code pred_code;
  gimple_seq seq = NULL;

  /* If this COND_EXPR has a value, copy the values into a temporary within
     the arms.  */
  if (!VOID_TYPE_P (type))
    {
      tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
      tree result;

      /* If either an rvalue is ok or we do not require an lvalue, create the
         temporary.  But we cannot do that if the type is addressable.  */
      if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
          && !TREE_ADDRESSABLE (type))
        {
          if (gimplify_ctxp->allow_rhs_cond_expr
              /* If either branch has side effects or could trap, it can't be
                 evaluated unconditionally.  */
              && !TREE_SIDE_EFFECTS (then_)
              && !generic_expr_could_trap_p (then_)
              && !TREE_SIDE_EFFECTS (else_)
              && !generic_expr_could_trap_p (else_))
            return gimplify_pure_cond_expr (expr_p, pre_p);

          tmp = create_tmp_var (type, "iftmp");
          result = tmp;
        }

      /* Otherwise, only create and copy references to the values.  */
      else
        {
          type = build_pointer_type (type);

          if (!VOID_TYPE_P (TREE_TYPE (then_)))
            then_ = build_fold_addr_expr_loc (loc, then_);

          if (!VOID_TYPE_P (TREE_TYPE (else_)))
            else_ = build_fold_addr_expr_loc (loc, else_);

          expr
            = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);

          tmp = create_tmp_var (type, "iftmp");
          result = build_simple_mem_ref_loc (loc, tmp);
        }

      /* Build the new then clause, `tmp = then_;'.  But don't build the
         assignment if the value is void; in C++ it can be if it's a throw.  */
      if (!VOID_TYPE_P (TREE_TYPE (then_)))
        TREE_OPERAND (expr, 1) = build2 (INIT_EXPR, type, tmp, then_);

      /* Similarly, build the new else clause, `tmp = else_;'.  */
      if (!VOID_TYPE_P (TREE_TYPE (else_)))
        TREE_OPERAND (expr, 2) = build2 (INIT_EXPR, type, tmp, else_);

      TREE_TYPE (expr) = void_type_node;
      recalculate_side_effects (expr);

      /* Move the COND_EXPR to the prequeue.  */
      gimplify_stmt (&expr, pre_p);

      *expr_p = result;
      return GS_ALL_DONE;
    }

  /* Remove any COMPOUND_EXPR so the following cases will be caught.  */
  STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
    gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);

  /* Make sure the condition has BOOLEAN_TYPE.  */
  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

  /* Break apart && and || conditions.  */
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
      || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
    {
      expr = shortcut_cond_expr (expr, next_cond_uid ());

      if (expr != *expr_p)
        {
          *expr_p = expr;

          /* We can't rely on gimplify_expr to re-gimplify the expanded
             form properly, as cleanups might cause the target labels to be
             wrapped in a TRY_FINALLY_EXPR.  To prevent that, we need to
             set up a conditional context.  */
          gimple_push_condition ();
          gimplify_stmt (expr_p, &seq);
          gimple_pop_condition (pre_p);
          gimple_seq_add_seq (pre_p, seq);

          return GS_ALL_DONE;
        }
    }

  /* Now do the normal gimplification.  */

  /* Gimplify condition.  */
  ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
                       is_gimple_condexpr_for_cond, fb_rvalue);
  if (ret == GS_ERROR)
    return GS_ERROR;
  gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);

  gimple_push_condition ();

  have_then_clause_p = have_else_clause_p = false;
  label_true = find_goto_label (TREE_OPERAND (expr, 1));
  if (label_true
      && DECL_CONTEXT (GOTO_DESTINATION (label_true)) == current_function_decl
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
         have different locations, otherwise we end up with incorrect
         location information on the branches.  */
      && (optimize
          || !EXPR_HAS_LOCATION (expr)
          || !rexpr_has_location (label_true)
          || EXPR_LOCATION (expr) == rexpr_location (label_true)))
    {
      have_then_clause_p = true;
      label_true = GOTO_DESTINATION (label_true);
    }
  else
    label_true = create_artificial_label (UNKNOWN_LOCATION);
  label_false = find_goto_label (TREE_OPERAND (expr, 2));
  if (label_false
      && DECL_CONTEXT (GOTO_DESTINATION (label_false)) == current_function_decl
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
         have different locations, otherwise we end up with incorrect
         location information on the branches.  */
      && (optimize
          || !EXPR_HAS_LOCATION (expr)
          || !rexpr_has_location (label_false)
          || EXPR_LOCATION (expr) == rexpr_location (label_false)))
    {
      have_else_clause_p = true;
      label_false = GOTO_DESTINATION (label_false);
    }
  else
    label_false = create_artificial_label (UNKNOWN_LOCATION);

  unsigned cond_uid = EXPR_COND_UID (expr);
  if (cond_uid == 0)
    cond_uid = next_cond_uid ();

  gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
                                 &arm2);
  cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
                                 label_false);
  gimple_set_location (cond_stmt, EXPR_LOCATION (expr));
  gimple_associate_condition_with_expr (cfun, cond_stmt, cond_uid);
  copy_warning (cond_stmt, COND_EXPR_COND (expr));
  gimplify_seq_add_stmt (&seq, cond_stmt);
  gimple_stmt_iterator gsi = gsi_last (seq);
  maybe_fold_stmt (&gsi);

  label_cont = NULL_TREE;
  if (!have_then_clause_p)
    {
      /* For if (...) {} else { code; } put label_true after
         the else block.  */
      if (TREE_OPERAND (expr, 1) == NULL_TREE
          && !have_else_clause_p
          && TREE_OPERAND (expr, 2) != NULL_TREE)
        {
          /* For if (0) {} else { code; } tell -Wimplicit-fallthrough
             handling that label_cont == label_true can be only reached
             through fallthrough from { code; }.  */
          if (integer_zerop (COND_EXPR_COND (expr)))
            UNUSED_LABEL_P (label_true) = 1;
          label_cont = label_true;
        }
      else
        {
          bool then_side_effects
            = (TREE_OPERAND (expr, 1)
               && TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 1)));
          gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
          have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
          /* For if (...) { code; } else {} or
             if (...) { code; } else goto label; or
             if (...) { code; return; } else { ... }
             label_cont isn't needed.  */
          if (!have_else_clause_p
              && TREE_OPERAND (expr, 2) != NULL_TREE
              && gimple_seq_may_fallthru (seq))
            {
              gimple *g;
              label_cont = create_artificial_label (UNKNOWN_LOCATION);

              /* For if (0) { non-side-effect-code } else { code }
                 tell -Wimplicit-fallthrough handling that label_cont can
                 be only reached through fallthrough from { code }.  */
              if (integer_zerop (COND_EXPR_COND (expr)))
                {
                  UNUSED_LABEL_P (label_true) = 1;
                  if (!then_side_effects)
                    UNUSED_LABEL_P (label_cont) = 1;
                }

              g = gimple_build_goto (label_cont);

              /* GIMPLE_COND's are very low level; they have embedded
                 gotos.  This particular embedded goto should not be marked
                 with the location of the original COND_EXPR, as it would
                 correspond to the COND_EXPR's condition, not the ELSE or the
                 THEN arms.  To avoid marking it with the wrong location, flag
                 it as "no location".  */
              gimple_set_do_not_emit_location (g);

              gimplify_seq_add_stmt (&seq, g);
            }
        }
    }
  if (!have_else_clause_p)
    {
      /* For if (1) { code } or if (1) { code } else { non-side-effect-code }
         tell -Wimplicit-fallthrough handling that label_false can be only
         reached through fallthrough from { code }.  */
      if (integer_nonzerop (COND_EXPR_COND (expr))
          && (TREE_OPERAND (expr, 2) == NULL_TREE
              || !TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 2))))
        UNUSED_LABEL_P (label_false) = 1;
      gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
      have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
    }
  if (label_cont)
    gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));

  gimple_pop_condition (pre_p);
  gimple_seq_add_seq (pre_p, seq);

  if (ret == GS_ERROR)
    ; /* Do nothing.  */
  else if (have_then_clause_p || have_else_clause_p)
    ret = GS_ALL_DONE;
  else
    {
      /* Both arms are empty; replace the COND_EXPR with its predicate.  */
      expr = TREE_OPERAND (expr, 0);
      gimplify_stmt (&expr, pre_p);
    }

  *expr_p = NULL;
  return ret;
}
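
/* For illustration only (a sketch of the output): a valueless

     if (x > 0)
       f ();
     else
       g ();

   leaves this function as roughly the GIMPLE

     if (x > 0) goto <D.1>; else goto <D.2>;
     <D.1>: f ();
     goto <D.3>;
     <D.2>: g ();
     <D.3>:

   while a value-producing `y = c ? a : b;` first has its arms rewritten
   into `iftmp = a;' / `iftmp = b;' assignments by the !VOID_TYPE_P block
   above, with iftmp replacing the COND_EXPR.  */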

/* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
   to be marked addressable.

   We cannot rely on such an expression being directly markable if a temporary
   has been created by the gimplification.  In this case, we create another
   temporary and initialize it with a copy, which will become a store after we
   mark it addressable.  This can happen if the front-end passed us something
   that it could not mark addressable yet, like a Fortran pass-by-reference
   parameter (int) floatvar.  */

static void
prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
{
  while (handled_component_p (*expr_p))
    expr_p = &TREE_OPERAND (*expr_p, 0);

  /* Do not allow an SSA name as the temporary.  */
  if (is_gimple_reg (*expr_p))
    *expr_p = internal_get_tmp_var (*expr_p, seq_p, NULL, false, false, true);
}

/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
   a call to __builtin_memcpy.  */

static enum gimplify_status
gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
                                gimple_seq *seq_p)
{
  tree t, to, to_ptr, from, from_ptr;
  gcall *gs;
  location_t loc = EXPR_LOCATION (*expr_p);

  to = TREE_OPERAND (*expr_p, 0);
  from = TREE_OPERAND (*expr_p, 1);
  gcc_assert (ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (to)))
              && ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (from))));

  /* Mark the RHS addressable.  Beware that it may not be possible to do so
     directly if a temporary has been created by the gimplification.  */
  prepare_gimple_addressable (&from, seq_p);

  mark_addressable (from);
  from_ptr = build_fold_addr_expr_loc (loc, from);
  gimplify_arg (&from_ptr, seq_p, loc);

  mark_addressable (to);
  to_ptr = build_fold_addr_expr_loc (loc, to);
  gimplify_arg (&to_ptr, seq_p, loc);

  t = builtin_decl_implicit (BUILT_IN_MEMCPY);

  gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
  gimple_call_set_alloca_for_var (gs, true);

  if (want_value)
    {
      /* tmp = memcpy() */
      t = create_tmp_var (TREE_TYPE (to_ptr));
      gimple_call_set_lhs (gs, t);
      gimplify_seq_add_stmt (seq_p, gs);

      *expr_p = build_simple_mem_ref (t);
      return GS_ALL_DONE;
    }

  gimplify_seq_add_stmt (seq_p, gs);
  *expr_p = NULL;
  return GS_ALL_DONE;
}
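
/* For illustration only (a sketch): an assignment between objects of
   variable-size type, e.g.

     *dest = *src;   (with SIZE computed as, say, n * 4)

   becomes

     __builtin_memcpy (dest, src, n * 4);

   and when the value of the assignment is itself needed, the call is given
   an lhs and *EXPR_P is replaced by a MEM_REF of the returned pointer.  */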

/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
   a call to __builtin_memset.  In this case we know that the RHS is
   a CONSTRUCTOR with an empty element list.  */

static enum gimplify_status
gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
                                gimple_seq *seq_p)
{
  tree t, from, to, to_ptr;
  gcall *gs;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Assert our assumptions, to abort instead of producing wrong code
     silently if they are not met.  Beware that the RHS CONSTRUCTOR might
     not be immediately exposed.  */
  from = TREE_OPERAND (*expr_p, 1);
  if (TREE_CODE (from) == WITH_SIZE_EXPR)
    from = TREE_OPERAND (from, 0);

  gcc_assert (TREE_CODE (from) == CONSTRUCTOR
              && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));

  /* Now proceed.  */
  to = TREE_OPERAND (*expr_p, 0);
  gcc_assert (ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (to))));

  to_ptr = build_fold_addr_expr_loc (loc, to);
  gimplify_arg (&to_ptr, seq_p, loc);
  t = builtin_decl_implicit (BUILT_IN_MEMSET);

  gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);

  if (want_value)
    {
      /* tmp = memset() */
      t = create_tmp_var (TREE_TYPE (to_ptr));
      gimple_call_set_lhs (gs, t);
      gimplify_seq_add_stmt (seq_p, gs);

      *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
      return GS_ALL_DONE;
    }

  gimplify_seq_add_stmt (seq_p, gs);
  *expr_p = NULL;
  return GS_ALL_DONE;
}
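
/* For illustration only (a sketch): an empty-CONSTRUCTOR initialization of
   a large or variable-size object, e.g.

     struct S s = {};

   can be emitted as

     __builtin_memset (&s, 0, sizeof (struct S));

   rather than as a series of member stores.  */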

/* A subroutine of gimplify_init_ctor_preeval.  Called via walk_tree,
   determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
   assignment.  Return non-null if we detect a potential overlap.  */

struct gimplify_init_ctor_preeval_data
{
  /* The base decl of the lhs object.  May be NULL, in which case we
     have to assume the lhs is indirect.  */
  tree lhs_base_decl;

  /* The alias set of the lhs object.  */
  alias_set_type lhs_alias_set;
};

static tree
gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
{
  struct gimplify_init_ctor_preeval_data *data
    = (struct gimplify_init_ctor_preeval_data *) xdata;
  tree t = *tp;

  /* If we find the base object, obviously we have overlap.  */
  if (data->lhs_base_decl == t)
    return t;

  /* If the constructor component is indirect, determine if we have a
     potential overlap with the lhs.  The only bits of information we
     have to go on at this point are addressability and alias sets.  */
  if ((INDIRECT_REF_P (t)
       || TREE_CODE (t) == MEM_REF)
      && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
      && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
    return t;

  /* If the constructor component is a call, determine if it can hide a
     potential overlap with the lhs through an INDIRECT_REF like above.
     ??? Ugh - this is completely broken.  In fact this whole analysis
     doesn't look conservative.  */
  if (TREE_CODE (t) == CALL_EXPR)
    {
      tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));

      for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
        if (POINTER_TYPE_P (TREE_VALUE (type))
            && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
            && alias_sets_conflict_p (data->lhs_alias_set,
                                      get_alias_set
                                        (TREE_TYPE (TREE_VALUE (type)))))
          return t;
    }

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  return NULL;
}
5130
5131/* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
5132 force values that overlap with the lhs (as described by *DATA)
5133 into temporaries. */
5134
5135static void
5136gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5137 struct gimplify_init_ctor_preeval_data *data)
5138{
5139 enum gimplify_status one;
5140
5141 /* If the value is constant, then there's nothing to pre-evaluate. */
5142 if (TREE_CONSTANT (*expr_p))
5143 {
5144 /* Ensure it does not have side effects, it might contain a reference to
5145 the object we're initializing. */
5146 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
5147 return;
5148 }
5149
5150 /* If the type has non-trivial constructors, we can't pre-evaluate. */
5151 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
5152 return;
5153
5154 /* Recurse for nested constructors. */
5155 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
5156 {
5157 unsigned HOST_WIDE_INT ix;
5158 constructor_elt *ce;
5159 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
5160
5161 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
5162 gimplify_init_ctor_preeval (expr_p: &ce->value, pre_p, post_p, data);
5163
5164 return;
5165 }
5166
5167 /* If this is a variable sized type, we must remember the size. */
5168 maybe_with_size_expr (expr_p);
5169
5170 /* Gimplify the constructor element to something appropriate for the rhs
5171 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
5172 the gimplifier will consider this a store to memory. Doing this
5173 gimplification now means that we won't have to deal with complicated
5174 language-specific trees, nor trees like SAVE_EXPR that can induce
5175 exponential search behavior. */
5176 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
5177 if (one == GS_ERROR)
5178 {
5179 *expr_p = NULL;
5180 return;
5181 }
5182
5183 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
5184 with the lhs, since "a = { .x=a }" doesn't make sense. This will
5185 always be true for all scalars, since is_gimple_mem_rhs insists on a
5186 temporary variable for them. */
5187 if (DECL_P (*expr_p))
5188 return;
5189
5190 /* If this is of variable size, we have no choice but to assume it doesn't
5191 overlap since we can't make a temporary for it. */
5192 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
5193 return;
5194
5195 /* Otherwise, we must search for overlap ... */
5196 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
5197 return;
5198
5199 /* ... and if found, force the value into a temporary. */
5200  *expr_p = get_formal_tmp_var (*expr_p, pre_p);
5201}
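
/* As an illustration (hypothetical C, field names made up), given

     struct S x;
     x = (struct S) { .a = x.b, .b = x.a };

   lowering the constructor element-wise without pre-evaluation would
   produce

     x.a = x.b;
     x.b = x.a;

   where the second store reads the just-written x.a rather than the
   original value.  Pre-evaluating the overlapping reads instead yields

     t1 = x.b;
     t2 = x.a;
     x.a = t1;
     x.b = t2;  */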
5202
5203/* A subroutine of gimplify_init_ctor_eval. Create a loop for
5204 a RANGE_EXPR in a CONSTRUCTOR for an array.
5205
5206 var = lower;
5207 loop_entry:
5208 object[var] = value;
5209 if (var == upper)
5210 goto loop_exit;
5211 var = var + 1;
5212 goto loop_entry;
5213 loop_exit:
5214
5215 We increment var _after_ the loop exit check because we might otherwise
5216   fail if upper == TYPE_MAX_VALUE (type of upper).
5217
5218 Note that we never have to deal with SAVE_EXPRs here, because this has
5219 already been taken care of for us, in gimplify_init_ctor_preeval(). */
5220
5221static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
5222 gimple_seq *, bool);
5223
5224static void
5225gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
5226 tree value, tree array_elt_type,
5227 gimple_seq *pre_p, bool cleared)
5228{
5229 tree loop_entry_label, loop_exit_label, fall_thru_label;
5230 tree var, var_type, cref, tmp;
5231
5232 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
5233 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
5234 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
5235
5236 /* Create and initialize the index variable. */
5237 var_type = TREE_TYPE (upper);
5238 var = create_tmp_var (var_type);
5239  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
5240
5241 /* Add the loop entry label. */
5242  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
5243
5244 /* Build the reference. */
5245  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
5246 var, NULL_TREE, NULL_TREE);
5247
5248 /* If we are a constructor, just call gimplify_init_ctor_eval to do
5249 the store. Otherwise just assign value to the reference. */
5250
5251 if (TREE_CODE (value) == CONSTRUCTOR)
5252    /* NB we might have to call ourselves recursively through
5253 gimplify_init_ctor_eval if the value is a constructor. */
5254 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
5255 pre_p, cleared);
5256 else
5257 {
5258 if (gimplify_expr (&value, pre_p, NULL, is_gimple_val, fb_rvalue)
5259 != GS_ERROR)
5260	gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
5261 }
5262
5263 /* We exit the loop when the index var is equal to the upper bound. */
5264  gimplify_seq_add_stmt (pre_p,
5265			 gimple_build_cond (EQ_EXPR, var, upper,
5266					    loop_exit_label, fall_thru_label));
5267
5268  gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
5269
5270 /* Otherwise, increment the index var... */
5271 tmp = build2 (PLUS_EXPR, var_type, var,
5272 fold_convert (var_type, integer_one_node));
5273  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
5274
5275 /* ...and jump back to the loop entry. */
5276  gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
5277
5278 /* Add the loop exit label. */
5279  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
5280}
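
/* As a concrete illustration, the GNU C range initializer

     int a[16] = { [4 ... 11] = 5 };

   reaches this function with lower == 4 and upper == 11 and is lowered
   to roughly

     var = 4;
     loop_entry:
       a[var] = 5;
       if (var == 11) goto loop_exit;
       var = var + 1;
       goto loop_entry;
     loop_exit:  */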
5281
5282/* A subroutine of gimplify_init_constructor. Generate individual
5283 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
5284 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
5285 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
5286 zeroed first. */
5287
5288static void
5289gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
5290 gimple_seq *pre_p, bool cleared)
5291{
5292 tree array_elt_type = NULL;
5293 unsigned HOST_WIDE_INT ix;
5294 tree purpose, value;
5295
5296 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
5297 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
5298
5299 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
5300 {
5301 tree cref;
5302
5303 /* NULL values are created above for gimplification errors. */
5304 if (value == NULL)
5305 continue;
5306
5307 if (cleared && initializer_zerop (value))
5308 continue;
5309
5310 /* ??? Here's to hoping the front end fills in all of the indices,
5311 so we don't have to figure out what's missing ourselves. */
5312 gcc_assert (purpose);
5313
5314 /* Skip zero-sized fields, unless value has side-effects. This can
5315	 happen with calls to functions returning an empty type, which
5316 we shouldn't discard. As a number of downstream passes don't
5317 expect sets of empty type fields, we rely on the gimplification of
5318 the MODIFY_EXPR we make below to drop the assignment statement. */
5319 if (!TREE_SIDE_EFFECTS (value)
5320 && TREE_CODE (purpose) == FIELD_DECL
5321 && is_empty_type (TREE_TYPE (purpose)))
5322 continue;
5323
5324 /* If we have a RANGE_EXPR, we have to build a loop to assign the
5325 whole range. */
5326 if (TREE_CODE (purpose) == RANGE_EXPR)
5327 {
5328 tree lower = TREE_OPERAND (purpose, 0);
5329 tree upper = TREE_OPERAND (purpose, 1);
5330
5331 /* If the lower bound is equal to upper, just treat it as if
5332 upper was the index. */
5333 if (simple_cst_equal (lower, upper))
5334 purpose = upper;
5335 else
5336 {
5337 gimplify_init_ctor_eval_range (object, lower, upper, value,
5338 array_elt_type, pre_p, cleared);
5339 continue;
5340 }
5341 }
5342
5343 if (array_elt_type)
5344 {
5345 /* Do not use bitsizetype for ARRAY_REF indices. */
5346 if (TYPE_DOMAIN (TREE_TYPE (object)))
5347 purpose
5348 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
5349 purpose);
5350	  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
5351 purpose, NULL_TREE, NULL_TREE);
5352 }
5353 else
5354 {
5355 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
5356 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
5357			 unshare_expr (object), purpose, NULL_TREE);
5358 }
5359
5360 if (TREE_CODE (value) == CONSTRUCTOR
5361 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
5362	gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
5363 pre_p, cleared);
5364 else
5365 {
5366 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
5367	  gimplify_and_add (init, pre_p);
5368 ggc_free (init);
5369 }
5370 }
5371}
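
/* For example (illustrative), if the caller decided to block-clear the
   object first, the initializer

     struct { int a, b, c; } s = { 0, 1, 0 };

   only needs the single assignment

     s.b = 1;

   emitted here, since the explicit zeros are skipped when CLEARED is
   true.  */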
5372
5373/* Return the appropriate RHS predicate for this LHS. */
5374
5375gimple_predicate
5376rhs_predicate_for (tree lhs)
5377{
5378 if (is_gimple_reg (lhs))
5379 return is_gimple_reg_rhs_or_call;
5380 else
5381 return is_gimple_mem_rhs_or_call;
5382}
5383
5384/* Return the initial guess for an appropriate RHS predicate for this LHS,
5385 before the LHS has been gimplified. */
5386
5387static gimple_predicate
5388initial_rhs_predicate_for (tree lhs)
5389{
5390 if (is_gimple_reg_type (TREE_TYPE (lhs)))
5391 return is_gimple_reg_rhs_or_call;
5392 else
5393 return is_gimple_mem_rhs_or_call;
5394}
5395
5396/* Gimplify a C99 compound literal expression. This just means adding
5397 the DECL_EXPR before the current statement and using its anonymous
5398 decl instead. */
5399
5400static enum gimplify_status
5401gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
5402 bool (*gimple_test_f) (tree),
5403 fallback_t fallback)
5404{
5405 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
5406 tree decl = DECL_EXPR_DECL (decl_s);
5407 tree init = DECL_INITIAL (decl);
5408 /* Mark the decl as addressable if the compound literal
5409 expression is addressable now, otherwise it is marked too late
5410 after we gimplify the initialization expression. */
5411 if (TREE_ADDRESSABLE (*expr_p))
5412 TREE_ADDRESSABLE (decl) = 1;
5413  /* Otherwise, if we don't need an lvalue and have a literal, directly
5414 substitute it. Check if it matches the gimple predicate, as
5415 otherwise we'd generate a new temporary, and we can as well just
5416 use the decl we already have. */
5417 else if (!TREE_ADDRESSABLE (decl)
5418 && !TREE_THIS_VOLATILE (decl)
5419 && init
5420 && (fallback & fb_lvalue) == 0
5421 && gimple_test_f (init))
5422 {
5423 *expr_p = init;
5424 return GS_OK;
5425 }
5426
5427 /* If the decl is not addressable, then it is being used in some
5428 expression or on the right hand side of a statement, and it can
5429 be put into a readonly data section. */
5430 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
5431 TREE_READONLY (decl) = 1;
5432
5433 /* This decl isn't mentioned in the enclosing block, so add it to the
5434 list of temps. FIXME it seems a bit of a kludge to say that
5435 anonymous artificial vars aren't pushed, but everything else is. */
5436 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
5437    gimple_add_tmp_var (decl);
5438
5439  gimplify_and_add (decl_s, pre_p);
5440 *expr_p = decl;
5441 return GS_OK;
5442}
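
/* For instance (hypothetical C99 snippets): in

     int x = (int) { 42 };

   the initializer 42 already satisfies the gimple predicate and no
   lvalue is required, so the literal is replaced by 42 directly.  In

     int *p = &(int) { 42 };

   the literal is addressable, so the DECL_EXPR is emitted into PRE_P
   and *EXPR_P becomes the anonymous decl itself.  */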
5443
5444/* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
5445 return a new CONSTRUCTOR if something changed. */
5446
5447static tree
5448optimize_compound_literals_in_ctor (tree orig_ctor)
5449{
5450 tree ctor = orig_ctor;
5451 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
5452  unsigned int idx, num = vec_safe_length (elts);
5453
5454 for (idx = 0; idx < num; idx++)
5455 {
5456 tree value = (*elts)[idx].value;
5457 tree newval = value;
5458 if (TREE_CODE (value) == CONSTRUCTOR)
5459	newval = optimize_compound_literals_in_ctor (value);
5460 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
5461 {
5462 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
5463 tree decl = DECL_EXPR_DECL (decl_s);
5464 tree init = DECL_INITIAL (decl);
5465
5466 if (!TREE_ADDRESSABLE (value)
5467 && !TREE_ADDRESSABLE (decl)
5468 && init
5469 && TREE_CODE (init) == CONSTRUCTOR)
5470	    newval = optimize_compound_literals_in_ctor (init);
5471 }
5472 if (newval == value)
5473 continue;
5474
5475 if (ctor == orig_ctor)
5476 {
5477 ctor = copy_node (orig_ctor);
5478	  CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
5479 elts = CONSTRUCTOR_ELTS (ctor);
5480 }
5481 (*elts)[idx].value = newval;
5482 }
5483 return ctor;
5484}
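
/* E.g. (illustrative), the nested literal in

     struct T t = { .u = (struct U) { 1, 2 } };

   is flattened to

     struct T t = { .u = { 1, 2 } };

   provided neither the literal nor its anonymous decl has had its
   address taken.  */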
5485
5486/* A subroutine of gimplify_modify_expr. Break out elements of a
5487 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
5488
5489 Note that we still need to clear any elements that don't have explicit
5490 initializers, so if not all elements are initialized we keep the
5491   original MODIFY_EXPR and just remove all of the constructor elements.
5492
5493 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
5494 GS_ERROR if we would have to create a temporary when gimplifying
5495 this constructor. Otherwise, return GS_OK.
5496
5497 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
5498
5499static enum gimplify_status
5500gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5501 bool want_value, bool notify_temp_creation)
5502{
5503 tree object, ctor, type;
5504 enum gimplify_status ret;
5505 vec<constructor_elt, va_gc> *elts;
5506 bool cleared = false;
5507 bool is_empty_ctor = false;
5508 bool is_init_expr = (TREE_CODE (*expr_p) == INIT_EXPR);
5509
5510 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
5511
5512 if (!notify_temp_creation)
5513 {
5514 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5515 is_gimple_lvalue, fb_lvalue);
5516 if (ret == GS_ERROR)
5517 return ret;
5518 }
5519
5520 object = TREE_OPERAND (*expr_p, 0);
5521 ctor = TREE_OPERAND (*expr_p, 1)
5522 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
5523 type = TREE_TYPE (ctor);
5524 elts = CONSTRUCTOR_ELTS (ctor);
5525 ret = GS_ALL_DONE;
5526
5527 switch (TREE_CODE (type))
5528 {
5529 case RECORD_TYPE:
5530 case UNION_TYPE:
5531 case QUAL_UNION_TYPE:
5532 case ARRAY_TYPE:
5533 {
5534 /* Use readonly data for initializers of this or smaller size
5535 regardless of the num_nonzero_elements / num_unique_nonzero_elements
5536 ratio. */
5537 const HOST_WIDE_INT min_unique_size = 64;
5538 /* If num_nonzero_elements / num_unique_nonzero_elements ratio
5539 is smaller than this, use readonly data. */
5540 const int unique_nonzero_ratio = 8;
5541 /* True if a single access of the object must be ensured. This is the
5542 case if the target is volatile, the type is non-addressable and more
5543	   than one field needs to be assigned.  */
5544 const bool ensure_single_access
5545 = TREE_THIS_VOLATILE (object)
5546 && !TREE_ADDRESSABLE (type)
5547	    && vec_safe_length (elts) > 1;
5548 struct gimplify_init_ctor_preeval_data preeval_data;
5549 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
5550 HOST_WIDE_INT num_unique_nonzero_elements;
5551 bool complete_p, valid_const_initializer;
5552
5553 /* Aggregate types must lower constructors to initialization of
5554 individual elements. The exception is that a CONSTRUCTOR node
5555 with no elements indicates zero-initialization of the whole. */
5556	if (vec_safe_is_empty (elts))
5557 {
5558 if (notify_temp_creation)
5559 return GS_OK;
5560
5561 /* The var will be initialized and so appear on lhs of
5562	       assignment, so it can't be TREE_READONLY anymore.  */
5563 if (VAR_P (object))
5564 TREE_READONLY (object) = 0;
5565
5566 is_empty_ctor = true;
5567 break;
5568 }
5569
5570 /* Fetch information about the constructor to direct later processing.
5571 We might want to make static versions of it in various cases, and
5572	 can only do so if it is known to be a valid constant initializer.  */
5573 valid_const_initializer
5574 = categorize_ctor_elements (ctor, &num_nonzero_elements,
5575 &num_unique_nonzero_elements,
5576 &num_ctor_elements, &complete_p);
5577
5578 /* If a const aggregate variable is being initialized, then it
5579	 should never be a loss to promote the variable to be static.  */
5580 if (valid_const_initializer
5581 && num_nonzero_elements > 1
5582 && TREE_READONLY (object)
5583 && VAR_P (object)
5584 && !DECL_REGISTER (object)
5585 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)
5586 || DECL_MERGEABLE (object))
5587 /* For ctors that have many repeated nonzero elements
5588 represented through RANGE_EXPRs, prefer initializing
5589 those through runtime loops over copies of large amounts
5590 of data from readonly data section. */
5591 && (num_unique_nonzero_elements
5592 > num_nonzero_elements / unique_nonzero_ratio
5593 || ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
5594 <= (unsigned HOST_WIDE_INT) min_unique_size)))
5595 {
5596 if (notify_temp_creation)
5597 return GS_ERROR;
5598
5599 DECL_INITIAL (object) = ctor;
5600 TREE_STATIC (object) = 1;
5601 if (!DECL_NAME (object))
5602 DECL_NAME (object) = create_tmp_var_name ("C");
5603 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
5604
5605 /* ??? C++ doesn't automatically append a .<number> to the
5606 assembler name, and even when it does, it looks at FE private
5607 data structures to figure out what that number should be,
5608 which are not set for this variable. I suppose this is
5609 important for local statics for inline functions, which aren't
5610 "local" in the object file sense. So in order to get a unique
5611 TU-local symbol, we must invoke the lhd version now. */
5612	    lhd_set_decl_assembler_name (object);
5613
5614 *expr_p = NULL_TREE;
5615 break;
5616 }
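	  /* E.g. (illustrative), for

	       const int tbl[4] = { 1, 2, 3, 4 };

	     in a function body, the decl is promoted to a TU-local
	     static initialized directly from readonly data instead of
	     being rebuilt with four stores on every entry.  */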
5617
5618 /* The var will be initialized and so appear on lhs of
5619	 assignment, so it can't be TREE_READONLY anymore.  */
5620 if (VAR_P (object) && !notify_temp_creation)
5621 TREE_READONLY (object) = 0;
5622
5623 /* If there are "lots" of initialized elements, even discounting
5624 those that are not address constants (and thus *must* be
5625 computed at runtime), then partition the constructor into
5626 constant and non-constant parts. Block copy the constant
5627 parts in, then generate code for the non-constant parts. */
5628 /* TODO. There's code in cp/typeck.cc to do this. */
5629
5630 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
5631 /* store_constructor will ignore the clearing of variable-sized
5632 objects. Initializers for such objects must explicitly set
5633 every field that needs to be set. */
5634 cleared = false;
5635 else if (!complete_p)
5636 /* If the constructor isn't complete, clear the whole object
5637 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
5638
5639 ??? This ought not to be needed. For any element not present
5640 in the initializer, we should simply set them to zero. Except
5641 we'd need to *find* the elements that are not present, and that
5642 requires trickery to avoid quadratic compile-time behavior in
5643 large cases or excessive memory use in small cases. */
5644 cleared = !CONSTRUCTOR_NO_CLEARING (ctor);
5645 else if (num_ctor_elements - num_nonzero_elements
5646 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
5647 && num_nonzero_elements < num_ctor_elements / 4)
5648 /* If there are "lots" of zeros, it's more efficient to clear
5649 the memory and then set the nonzero elements. */
5650 cleared = true;
5651 else if (ensure_single_access && num_nonzero_elements == 0)
5652 /* If a single access to the target must be ensured and all elements
5653 are zero, then it's optimal to clear whatever their number. */
5654 cleared = true;
5655 else
5656 cleared = false;
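
	/* E.g. (illustrative), an initializer such as

	     int a[100] = { [3] = 1 };

	   has far more zero than nonzero elements, so CLEARED is set:
	   the whole array is block-cleared and only the assignment
	   a[3] = 1 is emitted afterwards.  */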
5657
5658 /* If there are "lots" of initialized elements, and all of them
5659 are valid address constants, then the entire initializer can
5660 be dropped to memory, and then memcpy'd out. Don't do this
5661 for sparse arrays, though, as it's more efficient to follow
5662 the standard CONSTRUCTOR behavior of memset followed by
5663 individual element initialization. Also don't do this for small
5664 all-zero initializers (which aren't big enough to merit
5665 clearing), and don't try to make bitwise copies of
5666 TREE_ADDRESSABLE types. */
5667 if (valid_const_initializer
5668 && complete_p
5669 && !(cleared || num_nonzero_elements == 0)
5670 && !TREE_ADDRESSABLE (type))
5671 {
5672 HOST_WIDE_INT size = int_size_in_bytes (type);
5673 unsigned int align;
5674
5675 /* ??? We can still get unbounded array types, at least
5676 from the C++ front end. This seems wrong, but attempt
5677 to work around it for now. */
5678 if (size < 0)
5679 {
5680 size = int_size_in_bytes (TREE_TYPE (object));
5681 if (size >= 0)
5682 TREE_TYPE (ctor) = type = TREE_TYPE (object);
5683 }
5684
5685 /* Find the maximum alignment we can assume for the object. */
5686 /* ??? Make use of DECL_OFFSET_ALIGN. */
5687 if (DECL_P (object))
5688 align = DECL_ALIGN (object);
5689 else
5690 align = TYPE_ALIGN (type);
5691
5692 /* Do a block move either if the size is so small as to make
5693 each individual move a sub-unit move on average, or if it
5694 is so large as to make individual moves inefficient. */
5695 if (size > 0
5696 && num_nonzero_elements > 1
5697 /* For ctors that have many repeated nonzero elements
5698 represented through RANGE_EXPRs, prefer initializing
5699 those through runtime loops over copies of large amounts
5700 of data from readonly data section. */
5701 && (num_unique_nonzero_elements
5702 > num_nonzero_elements / unique_nonzero_ratio
5703 || size <= min_unique_size)
5704 && (size < num_nonzero_elements
5705 || !can_move_by_pieces (size, align)))
5706 {
5707 if (notify_temp_creation)
5708 return GS_ERROR;
5709
5710 walk_tree (&ctor, force_labels_r, NULL, NULL);
5711 ctor = tree_output_constant_def (ctor);
5712 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
5713 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
5714 TREE_OPERAND (*expr_p, 1) = ctor;
5715
5716 /* This is no longer an assignment of a CONSTRUCTOR, but
5717 we still may have processing to do on the LHS. So
5718 pretend we didn't do anything here to let that happen. */
5719 return GS_UNHANDLED;
5720 }
5721 }
5722
5723 /* If a single access to the target must be ensured and there are
5724 nonzero elements or the zero elements are not assigned en masse,
5725 initialize the target from a temporary. */
5726 if (ensure_single_access && (num_nonzero_elements > 0 || !cleared))
5727 {
5728 if (notify_temp_creation)
5729 return GS_ERROR;
5730
5731 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
5732 TREE_OPERAND (*expr_p, 0) = temp;
5733 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
5734 *expr_p,
5735 build2 (MODIFY_EXPR, void_type_node,
5736 object, temp));
5737 return GS_OK;
5738 }
5739
5740 if (notify_temp_creation)
5741 return GS_OK;
5742
5743 /* If there are nonzero elements and if needed, pre-evaluate to capture
5744 elements overlapping with the lhs into temporaries. We must do this
5745	 before clearing to fetch the values before they are zeroed out.  */
5746 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
5747 {
5748	    preeval_data.lhs_base_decl = get_base_address (object);
5749 if (!DECL_P (preeval_data.lhs_base_decl))
5750 preeval_data.lhs_base_decl = NULL;
5751 preeval_data.lhs_alias_set = get_alias_set (object);
5752
5753	    gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
5754					pre_p, post_p, &preeval_data);
5755 }
5756
5757 bool ctor_has_side_effects_p
5758 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
5759
5760 if (cleared)
5761 {
5762 /* Zap the CONSTRUCTOR element list, which simplifies this case.
5763 Note that we still have to gimplify, in order to handle the
5764 case of variable sized types. Avoid shared tree structures. */
5765 CONSTRUCTOR_ELTS (ctor) = NULL;
5766 TREE_SIDE_EFFECTS (ctor) = 0;
5767	    object = unshare_expr (object);
5768 gimplify_stmt (expr_p, pre_p);
5769 }
5770
5771 /* If we have not block cleared the object, or if there are nonzero
5772 elements in the constructor, or if the constructor has side effects,
5773 add assignments to the individual scalar fields of the object. */
5774 if (!cleared
5775 || num_nonzero_elements > 0
5776 || ctor_has_side_effects_p)
5777 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
5778
5779 *expr_p = NULL_TREE;
5780 }
5781 break;
5782
5783 case COMPLEX_TYPE:
5784 {
5785 tree r, i;
5786
5787 if (notify_temp_creation)
5788 return GS_OK;
5789
5790 /* Extract the real and imaginary parts out of the ctor. */
5791 gcc_assert (elts->length () == 2);
5792 r = (*elts)[0].value;
5793 i = (*elts)[1].value;
5794 if (r == NULL || i == NULL)
5795 {
5796 tree zero = build_zero_cst (TREE_TYPE (type));
5797 if (r == NULL)
5798 r = zero;
5799 if (i == NULL)
5800 i = zero;
5801 }
5802
5803 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
5804 represent creation of a complex value. */
5805 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
5806 {
5807 ctor = build_complex (type, r, i);
5808 TREE_OPERAND (*expr_p, 1) = ctor;
5809 }
5810 else
5811 {
5812 ctor = build2 (COMPLEX_EXPR, type, r, i);
5813 TREE_OPERAND (*expr_p, 1) = ctor;
5814 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
5815 pre_p,
5816 post_p,
5817 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
5818 fb_rvalue);
5819 }
5820 }
5821 break;
5822
5823 case VECTOR_TYPE:
5824 {
5825 unsigned HOST_WIDE_INT ix;
5826 constructor_elt *ce;
5827
5828 if (notify_temp_creation)
5829 return GS_OK;
5830
5831 /* Vector types use CONSTRUCTOR all the way through gimple
5832 compilation as a general initializer. */
5833 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
5834 {
5835 enum gimplify_status tret;
5836 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
5837 fb_rvalue);
5838 if (tret == GS_ERROR)
5839 ret = GS_ERROR;
5840 else if (TREE_STATIC (ctor)
5841 && !initializer_constant_valid_p (ce->value,
5842 TREE_TYPE (ce->value)))
5843 TREE_STATIC (ctor) = 0;
5844 }
5845 recompute_constructor_flags (ctor);
5846
5847 /* Go ahead and simplify constant constructors to VECTOR_CST. */
5848 if (TREE_CONSTANT (ctor))
5849 {
5850 bool constant_p = true;
5851 tree value;
5852
5853 /* Even when ctor is constant, it might contain non-*_CST
5854 elements, such as addresses or trapping values like
5855 1.0/0.0 - 1.0/0.0. Such expressions don't belong
5856 in VECTOR_CST nodes. */
5857 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
5858 if (!CONSTANT_CLASS_P (value))
5859 {
5860 constant_p = false;
5861 break;
5862 }
5863
5864 if (constant_p)
5865 {
5866 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
5867 break;
5868 }
5869 }
5870
5871 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
5872	  TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
5873 }
5874 break;
5875
5876 default:
5877 /* So how did we get a CONSTRUCTOR for a scalar type? */
5878 gcc_unreachable ();
5879 }
5880
5881 if (ret == GS_ERROR)
5882 return GS_ERROR;
5883 /* If we have gimplified both sides of the initializer but have
5884 not emitted an assignment, do so now. */
5885 if (*expr_p
5886 /* If the type is an empty type, we don't need to emit the
5887 assignment. */
5888 && !is_empty_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
5889 {
5890 tree lhs = TREE_OPERAND (*expr_p, 0);
5891 tree rhs = TREE_OPERAND (*expr_p, 1);
5892 if (want_value && object == lhs)
5893	lhs = unshare_expr (lhs);
5894 gassign *init = gimple_build_assign (lhs, rhs);
5895      gimplify_seq_add_stmt (pre_p, init);
5896 }
5897 if (want_value)
5898 {
5899 *expr_p = object;
5900 ret = GS_OK;
5901 }
5902 else
5903 {
5904 *expr_p = NULL;
5905 ret = GS_ALL_DONE;
5906 }
5907
5908 /* If the user requests to initialize automatic variables, we
5909 should initialize paddings inside the variable. Add a call to
5910   __builtin_clear_padding (&object, 0, for_auto_init = true) to
5911 initialize paddings of object always to zero regardless of
5912 INIT_TYPE. Note, we will not insert this call if the aggregate
5913   variable has been completely cleared already or is initialized
5914 with an empty constructor. We cannot insert this call if the
5915 variable is a gimple register since __builtin_clear_padding will take
5916 the address of the variable. As a result, if a long double/_Complex long
5917   double variable is spilled onto the stack later, its padding cannot
5918 be cleared with __builtin_clear_padding. We should clear its padding
5919 when it is spilled into memory. */
5920 if (is_init_expr
5921 && !is_gimple_reg (object)
5922 && clear_padding_type_may_have_padding_p (type)
5923 && ((AGGREGATE_TYPE_P (type) && !cleared && !is_empty_ctor)
5924 || !AGGREGATE_TYPE_P (type))
5925      && is_var_need_auto_init (object))
5926    gimple_add_padding_init_for_auto_var (object, false, pre_p);
5927
5928 return ret;
5929}
5930
5931/* Given a pointer value OP0, return a simplified version of an
5932 indirection through OP0, or NULL_TREE if no simplification is
5933 possible. This may only be applied to a rhs of an expression.
5934 Note that the resulting type may be different from the type pointed
5935 to in the sense that it is still compatible from the langhooks
5936 point of view. */
5937
5938static tree
5939gimple_fold_indirect_ref_rhs (tree t)
5940{
5941 return gimple_fold_indirect_ref (t);
5942}
5943
5944/* Subroutine of gimplify_modify_expr to do simplifications of
5945 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5946 something changes. */
5947
5948static enum gimplify_status
5949gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
5950 gimple_seq *pre_p, gimple_seq *post_p,
5951 bool want_value)
5952{
5953 enum gimplify_status ret = GS_UNHANDLED;
5954 bool changed;
5955
5956 do
5957 {
5958 changed = false;
5959 switch (TREE_CODE (*from_p))
5960 {
5961 case VAR_DECL:
5962 /* If we're assigning from a read-only variable initialized with
5963 a constructor and not volatile, do the direct assignment from
5964 the constructor, but only if the target is not volatile either
5965 since this latter assignment might end up being done on a per
5966 field basis. However, if the target is volatile and the type
5967 is aggregate and non-addressable, gimplify_init_constructor
5968 knows that it needs to ensure a single access to the target
5969 and it will return GS_OK only in this case. */
5970 if (TREE_READONLY (*from_p)
5971 && DECL_INITIAL (*from_p)
5972 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR
5973 && !TREE_THIS_VOLATILE (*from_p)
5974 && (!TREE_THIS_VOLATILE (*to_p)
5975 || (AGGREGATE_TYPE_P (TREE_TYPE (*to_p))
5976 && !TREE_ADDRESSABLE (TREE_TYPE (*to_p)))))
5977 {
5978 tree old_from = *from_p;
5979 enum gimplify_status subret;
5980
5981 /* Move the constructor into the RHS. */
5982 *from_p = unshare_expr (DECL_INITIAL (*from_p));
5983
5984 /* Let's see if gimplify_init_constructor will need to put
5985 it in memory. */
5986 subret = gimplify_init_constructor (expr_p, NULL, NULL,
5987						      false, true);
5988 if (subret == GS_ERROR)
5989 {
5990 /* If so, revert the change. */
5991 *from_p = old_from;
5992 }
5993 else
5994 {
5995 ret = GS_OK;
5996 changed = true;
5997 }
5998 }
5999 break;
6000 case INDIRECT_REF:
6001 if (!TREE_ADDRESSABLE (TREE_TYPE (*from_p)))
6002 /* If we have code like
6003
6004 *(const A*)(A*)&x
6005
6006 where the type of "x" is a (possibly cv-qualified variant
6007 of "A"), treat the entire expression as identical to "x".
6008 This kind of code arises in C++ when an object is bound
6009 to a const reference, and if "x" is a TARGET_EXPR we want
6010 to take advantage of the optimization below. But not if
6011 the type is TREE_ADDRESSABLE; then C++17 says that the
6012 TARGET_EXPR needs to be a temporary. */
6013 if (tree t
6014 = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0)))
6015 {
6016 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
6017 if (TREE_THIS_VOLATILE (t) != volatile_p)
6018 {
6019 if (DECL_P (t))
6020 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
6021 build_fold_addr_expr (t));
6022 if (REFERENCE_CLASS_P (t))
6023 TREE_THIS_VOLATILE (t) = volatile_p;
6024 }
6025 *from_p = t;
6026 ret = GS_OK;
6027 changed = true;
6028 }
6029 break;
6030
6031 case TARGET_EXPR:
6032 {
6033 /* If we are initializing something from a TARGET_EXPR, strip the
6034 TARGET_EXPR and initialize it directly, if possible. This can't
6035 be done if the initializer is void, since that implies that the
6036 temporary is set in some non-trivial way.
6037
6038 ??? What about code that pulls out the temp and uses it
6039 elsewhere? I think that such code never uses the TARGET_EXPR as
6040 an initializer. If I'm wrong, we'll die because the temp won't
6041 have any RTL. In that case, I guess we'll need to replace
6042 references somehow. */
6043 tree init = TARGET_EXPR_INITIAL (*from_p);
6044
6045 if (init
6046 && (TREE_CODE (*expr_p) != MODIFY_EXPR
6047 || !TARGET_EXPR_NO_ELIDE (*from_p))
6048 && !VOID_TYPE_P (TREE_TYPE (init)))
6049 {
6050 *from_p = init;
6051 ret = GS_OK;
6052 changed = true;
6053 }
6054 }
6055 break;
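
	  /* For the TARGET_EXPR case above, e.g. (illustrative)

	       s = TARGET_EXPR <D.1234, f ()>;

	     is rewritten to initialize s directly,

	       s = f ();

	     eliding the D.1234 temporary, provided the initializer is
	     not void and TARGET_EXPR_NO_ELIDE is not set.  */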
6056
6057 case COMPOUND_EXPR:
6058 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
6059 caught. */
6060 gimplify_compound_expr (from_p, pre_p, true);
6061 ret = GS_OK;
6062 changed = true;
6063 break;
6064
6065 case CONSTRUCTOR:
6066 /* If we already made some changes, let the front end have a
6067 crack at this before we break it down. */
6068 if (ret != GS_UNHANDLED)
6069 break;
6070
6071 /* If we're initializing from a CONSTRUCTOR, break this into
6072 individual MODIFY_EXPRs. */
6073 ret = gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
6074					      false);
6075 return ret;
6076
6077 case COND_EXPR:
6078 /* If we're assigning to a non-register type, push the assignment
6079 down into the branches. This is mandatory for ADDRESSABLE types,
6080 since we cannot generate temporaries for such, but it saves a
6081 copy in other cases as well. */
6082 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
6083 {
6084 /* This code should mirror the code in gimplify_cond_expr. */
6085 enum tree_code code = TREE_CODE (*expr_p);
6086 tree cond = *from_p;
6087 tree result = *to_p;
6088
6089 ret = gimplify_expr (&result, pre_p, post_p,
6090 is_gimple_lvalue, fb_lvalue);
6091 if (ret != GS_ERROR)
6092 ret = GS_OK;
6093
6094 /* If we are going to write RESULT more than once, clear
6095 TREE_READONLY flag, otherwise we might incorrectly promote
6096 the variable to static const and initialize it at compile
6097 time in one of the branches. */
6098 if (VAR_P (result)
6099 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
6100 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
6101 TREE_READONLY (result) = 0;
6102 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
6103 TREE_OPERAND (cond, 1)
6104 = build2 (code, void_type_node, result,
6105 TREE_OPERAND (cond, 1));
6106 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
6107 TREE_OPERAND (cond, 2)
6108		= build2 (code, void_type_node, unshare_expr (result),
6109 TREE_OPERAND (cond, 2));
6110
6111 TREE_TYPE (cond) = void_type_node;
6112	    recalculate_side_effects (cond);
6113
6114 if (want_value)
6115 {
6116		gimplify_and_add (cond, pre_p);
6117		*expr_p = unshare_expr (result);
6118 }
6119 else
6120 *expr_p = cond;
6121 return ret;
6122 }
6123 break;
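
	/* For the COND_EXPR case above, e.g. (illustrative)

	     s = c ? f () : g ();

	   with s of non-register type becomes

	     if (c) s = f (); else s = g ();

	   so no aggregate temporary is needed for the selected value.  */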
6124
6125 case CALL_EXPR:
6126 /* For calls that return in memory, give *to_p as the CALL_EXPR's
6127 return slot so that we don't generate a temporary. */
6128 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
6129 && aggregate_value_p (*from_p, *from_p))
6130 {
6131 bool use_target;
6132
6133	    if (!(rhs_predicate_for (*to_p))(*from_p))
6134 /* If we need a temporary, *to_p isn't accurate. */
6135 use_target = false;
6136 /* It's OK to use the return slot directly unless it's an NRV. */
6137 else if (TREE_CODE (*to_p) == RESULT_DECL
6138 && DECL_NAME (*to_p) == NULL_TREE
6139 && needs_to_live_in_memory (*to_p))
6140 use_target = true;
6141 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
6142 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
6143 /* Don't force regs into memory. */
6144 use_target = false;
6145 else if (TREE_CODE (*expr_p) == INIT_EXPR)
6146 /* It's OK to use the target directly if it's being
6147 initialized. */
6148 use_target = true;
6149 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
6150 != INTEGER_CST)
6151 /* Always use the target and thus RSO for variable-sized types.
6152 GIMPLE cannot deal with a variable-sized assignment
6153 embedded in a call statement. */
6154 use_target = true;
6155 else if (TREE_CODE (*to_p) != SSA_NAME
6156		   && (!is_gimple_variable (*to_p)
6157 || needs_to_live_in_memory (*to_p)))
6158 /* Don't use the original target if it's already addressable;
6159 if its address escapes, and the called function uses the
6160 NRV optimization, a conforming program could see *to_p
6161 change before the called function returns; see c++/19317.
6162 When optimizing, the return_slot pass marks more functions
6163 as safe after we have escape info. */
6164 use_target = false;
6165 else
6166 use_target = true;
6167
6168 if (use_target)
6169 {
6170 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
6171 mark_addressable (*to_p);
6172 }
6173 }
6174 break;
6175
6176 case WITH_SIZE_EXPR:
6177 /* Likewise for calls that return an aggregate of non-constant size,
6178 since we would not be able to generate a temporary at all. */
6179 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
6180 {
6181 *from_p = TREE_OPERAND (*from_p, 0);
6182 /* We don't change ret in this case because the
6183 WITH_SIZE_EXPR might have been added in
6184 gimplify_modify_expr, so returning GS_OK would lead to an
6185 infinite loop. */
6186 changed = true;
6187 }
6188 break;
6189
6190 /* If we're initializing from a container, push the initialization
6191 inside it. */
6192 case CLEANUP_POINT_EXPR:
6193 case BIND_EXPR:
6194 case STATEMENT_LIST:
6195 {
6196 tree wrap = *from_p;
6197 tree t;
6198
6199 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
6200 fb_lvalue);
6201 if (ret != GS_ERROR)
6202 ret = GS_OK;
6203
6204	    t = voidify_wrapper_expr (wrap, *expr_p);
6205 gcc_assert (t == *expr_p);
6206
6207 if (want_value)
6208 {
6209		gimplify_and_add (wrap, pre_p);
6210		*expr_p = unshare_expr (*to_p);
6211 }
6212 else
6213 *expr_p = wrap;
6214 return GS_OK;
6215 }
6216
6217 case NOP_EXPR:
6218 /* Pull out compound literal expressions from a NOP_EXPR.
6219 Those are created in the C FE to drop qualifiers during
6220 lvalue conversion. */
6221 if ((TREE_CODE (TREE_OPERAND (*from_p, 0)) == COMPOUND_LITERAL_EXPR)
6222 && tree_ssa_useless_type_conversion (*from_p))
6223 {
6224 *from_p = TREE_OPERAND (*from_p, 0);
6225 ret = GS_OK;
6226 changed = true;
6227 }
6228 break;
6229
6230 case COMPOUND_LITERAL_EXPR:
6231 {
6232 tree complit = TREE_OPERAND (*expr_p, 1);
6233 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
6234 tree decl = DECL_EXPR_DECL (decl_s);
6235 tree init = DECL_INITIAL (decl);
6236
6237 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
6238 into struct T x = { 0, 1, 2 } if the address of the
6239 compound literal has never been taken. */
6240 if (!TREE_ADDRESSABLE (complit)
6241 && !TREE_ADDRESSABLE (decl)
6242 && init)
6243 {
6244 *expr_p = copy_node (*expr_p);
6245 TREE_OPERAND (*expr_p, 1) = init;
6246 return GS_OK;
6247 }
6248	  }
	  break;
6249
6250 default:
6251 break;
6252 }
6253 }
6254 while (changed);
6255
6256 return ret;
6257}
6258
6259
6260/* Return true if T looks like a valid GIMPLE statement. */
6261
6262static bool
6263is_gimple_stmt (tree t)
6264{
6265 const enum tree_code code = TREE_CODE (t);
6266
6267 switch (code)
6268 {
6269 case NOP_EXPR:
6270 /* The only valid NOP_EXPR is the empty statement. */
6271 return IS_EMPTY_STMT (t);
6272
6273 case BIND_EXPR:
6274 case COND_EXPR:
6275 /* These are only valid if they're void. */
6276 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
6277
6278 case SWITCH_EXPR:
6279 case GOTO_EXPR:
6280 case RETURN_EXPR:
6281 case LABEL_EXPR:
6282 case CASE_LABEL_EXPR:
6283 case TRY_CATCH_EXPR:
6284 case TRY_FINALLY_EXPR:
6285 case EH_FILTER_EXPR:
6286 case CATCH_EXPR:
6287 case ASM_EXPR:
6288 case STATEMENT_LIST:
6289 case OACC_PARALLEL:
6290 case OACC_KERNELS:
6291 case OACC_SERIAL:
6292 case OACC_DATA:
6293 case OACC_HOST_DATA:
6294 case OACC_DECLARE:
6295 case OACC_UPDATE:
6296 case OACC_ENTER_DATA:
6297 case OACC_EXIT_DATA:
6298 case OACC_CACHE:
6299 case OMP_PARALLEL:
6300 case OMP_FOR:
6301 case OMP_SIMD:
6302 case OMP_DISTRIBUTE:
6303 case OMP_LOOP:
6304 case OACC_LOOP:
6305 case OMP_SCAN:
6306 case OMP_SCOPE:
6307 case OMP_SECTIONS:
6308 case OMP_SECTION:
6309 case OMP_STRUCTURED_BLOCK:
6310 case OMP_SINGLE:
6311 case OMP_MASTER:
6312 case OMP_MASKED:
6313 case OMP_TASKGROUP:
6314 case OMP_ORDERED:
6315 case OMP_CRITICAL:
6316 case OMP_TASK:
6317 case OMP_TARGET:
6318 case OMP_TARGET_DATA:
6319 case OMP_TARGET_UPDATE:
6320 case OMP_TARGET_ENTER_DATA:
6321 case OMP_TARGET_EXIT_DATA:
6322 case OMP_TASKLOOP:
6323 case OMP_TEAMS:
6324 /* These are always void. */
6325 return true;
6326
6327 case CALL_EXPR:
6328 case MODIFY_EXPR:
6329 case PREDICT_EXPR:
6330 /* These are valid regardless of their type. */
6331 return true;
6332
6333 default:
6334 return false;
6335 }
6336}
6337
6338
6339/* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
6340 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a gimple register.
6341
6342 IMPORTANT NOTE: This promotion is performed by introducing a load of the
6343 other, unmodified part of the complex object just before the total store.
6344 As a consequence, if the object is still uninitialized, an undefined value
6345 will be loaded into a register, which may result in a spurious exception
6346 if the register is floating-point and the value happens to be a signaling
6347 NaN for example. Then the fully-fledged complex operations lowering pass
6348 followed by a DCE pass are necessary in order to fix things up. */
6349
6350static enum gimplify_status
6351gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
6352 bool want_value)
6353{
6354 enum tree_code code, ocode;
6355 tree lhs, rhs, new_rhs, other, realpart, imagpart;
6356
6357 lhs = TREE_OPERAND (*expr_p, 0);
6358 rhs = TREE_OPERAND (*expr_p, 1);
6359 code = TREE_CODE (lhs);
6360 lhs = TREE_OPERAND (lhs, 0);
6361
6362 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
6363 other = build1 (ocode, TREE_TYPE (rhs), lhs);
6364 suppress_warning (other);
6365  other = get_formal_tmp_var (other, pre_p);
6366
6367 realpart = code == REALPART_EXPR ? rhs : other;
6368 imagpart = code == REALPART_EXPR ? other : rhs;
6369
6370 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
6371 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
6372 else
6373 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
6374
6375  gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
6376 *expr_p = (want_value) ? rhs : NULL_TREE;
6377
6378 return GS_ALL_DONE;
6379}
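
/* E.g. (illustrative), for a complex variable c in a register,

     __real__ c = x;

   becomes the total store

     t = __imag__ c;
     c = COMPLEX_EXPR <x, t>;

   loading the unmodified imaginary part just before the store.  */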
6380
6381/* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
6382
6383 modify_expr
6384 : varname '=' rhs
6385 | '*' ID '=' rhs
6386
6387 PRE_P points to the list where side effects that must happen before
6388 *EXPR_P should be stored.
6389
6390 POST_P points to the list where side effects that must happen after
6391 *EXPR_P should be stored.
6392
6393 WANT_VALUE is nonzero iff we want to use the value of this expression
6394 in another expression. */
6395
6396static enum gimplify_status
6397gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
6398 bool want_value)
6399{
6400 tree *from_p = &TREE_OPERAND (*expr_p, 1);
6401 tree *to_p = &TREE_OPERAND (*expr_p, 0);
6402 enum gimplify_status ret = GS_UNHANDLED;
6403 gimple *assign;
6404 location_t loc = EXPR_LOCATION (*expr_p);
6405 gimple_stmt_iterator gsi;
6406
6407  if (error_operand_p (*from_p) || error_operand_p (*to_p))
6408 return GS_ERROR;
6409
6410 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
6411 || TREE_CODE (*expr_p) == INIT_EXPR);
6412
6413 /* Trying to simplify a clobber using normal logic doesn't work,
6414 so handle it here. */
6415 if (TREE_CLOBBER_P (*from_p))
6416 {
6417 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
6418 if (ret == GS_ERROR)
6419 return ret;
6420 gcc_assert (!want_value);
6421 if (!VAR_P (*to_p) && TREE_CODE (*to_p) != MEM_REF)
6422 {
6423 tree addr = get_initialized_tmp_var (build_fold_addr_expr (*to_p),
6424 pre_p, post_p);
6425 *to_p = build_simple_mem_ref_loc (EXPR_LOCATION (*to_p), addr);
6426 }
6427      gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
6428 *expr_p = NULL;
6429 return GS_ALL_DONE;
6430 }
6431
6432 /* Convert initialization from an empty variable-size CONSTRUCTOR to
6433 memset. */
6434 if (TREE_TYPE (*from_p) != error_mark_node
6435 && TYPE_SIZE_UNIT (TREE_TYPE (*from_p))
6436 && !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (*from_p)))
6437 && TREE_CODE (*from_p) == CONSTRUCTOR
6438 && CONSTRUCTOR_NELTS (*from_p) == 0)
6439 {
6440      maybe_with_size_expr (from_p);
6441 gcc_assert (TREE_CODE (*from_p) == WITH_SIZE_EXPR);
6442 return gimplify_modify_expr_to_memset (expr_p,
6443 TREE_OPERAND (*from_p, 1),
6444					      want_value, pre_p);
6445 }
6446
6447 /* Insert pointer conversions required by the middle-end that are not
6448 required by the frontend. This fixes middle-end type checking for
6449     example, gcc.dg/redecl-6.c.  */
6450 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
6451 {
6452 STRIP_USELESS_TYPE_CONVERSION (*from_p);
6453 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
6454 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
6455 }
6456
6457 /* See if any simplifications can be done based on what the RHS is. */
6458 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
6459 want_value);
6460 if (ret != GS_UNHANDLED)
6461 return ret;
6462
6463 /* For empty types only gimplify the left hand side and right hand
6464 side as statements and throw away the assignment. Do this after
6465 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
6466 types properly. */
6467 if (is_empty_type (TREE_TYPE (*from_p))
6468 && !want_value
6469 /* Don't do this for calls that return addressable types, expand_call
6470 relies on those having a lhs. */
6471 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
6472 && TREE_CODE (*from_p) == CALL_EXPR))
6473 {
6474 gimplify_stmt (from_p, pre_p);
6475 gimplify_stmt (to_p, pre_p);
6476 *expr_p = NULL_TREE;
6477 return GS_ALL_DONE;
6478 }
6479
6480 /* If the value being copied is of variable width, compute the length
6481 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
6482 before gimplifying any of the operands so that we can resolve any
6483 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
6484 the size of the expression to be copied, not of the destination, so
6485 that is what we must do here. */
6486  maybe_with_size_expr (from_p);
6487
6488 /* As a special case, we have to temporarily allow for assignments
6489 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
6490 a toplevel statement, when gimplifying the GENERIC expression
6491 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
6492 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
6493
6494 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
6495 prevent gimplify_expr from trying to create a new temporary for
6496 foo's LHS, we tell it that it should only gimplify until it
6497 reaches the CALL_EXPR. On return from gimplify_expr, the newly
6498 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
6499 and all we need to do here is set 'a' to be its LHS. */
6500
6501 /* Gimplify the RHS first for C++17 and bug 71104. */
6502  gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
6503 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
6504 if (ret == GS_ERROR)
6505 return ret;
6506
6507 /* Then gimplify the LHS. */
6508 /* If we gimplified the RHS to a CALL_EXPR and that call may return
6509 twice we have to make sure to gimplify into non-SSA as otherwise
6510 the abnormal edge added later will make those defs not dominate
6511 their uses.
6512 ??? Technically this applies only to the registers used in the
6513 resulting non-register *TO_P. */
6514 bool saved_into_ssa = gimplify_ctxp->into_ssa;
6515 if (saved_into_ssa
6516 && TREE_CODE (*from_p) == CALL_EXPR
6517 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
6518 gimplify_ctxp->into_ssa = false;
6519 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
6520 gimplify_ctxp->into_ssa = saved_into_ssa;
6521 if (ret == GS_ERROR)
6522 return ret;
6523
6524 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
6525 guess for the predicate was wrong. */
6526  gimple_predicate final_pred = rhs_predicate_for (*to_p);
6527 if (final_pred != initial_pred)
6528 {
6529 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
6530 if (ret == GS_ERROR)
6531 return ret;
6532 }
6533
6534  /* In case of a va_arg internal fn wrapped in a WITH_SIZE_EXPR, add the type
6535 size as argument to the call. */
6536 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
6537 {
6538 tree call = TREE_OPERAND (*from_p, 0);
6539 tree vlasize = TREE_OPERAND (*from_p, 1);
6540
6541 if (TREE_CODE (call) == CALL_EXPR
6542 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
6543 {
6544 int nargs = call_expr_nargs (call);
6545 tree type = TREE_TYPE (call);
6546 tree ap = CALL_EXPR_ARG (call, 0);
6547 tree tag = CALL_EXPR_ARG (call, 1);
6548 tree aptag = CALL_EXPR_ARG (call, 2);
6549 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
6550 IFN_VA_ARG, type,
6551 nargs + 1, ap, tag,
6552 aptag, vlasize);
6553 TREE_OPERAND (*from_p, 0) = newcall;
6554 }
6555 }
6556
6557 /* Now see if the above changed *from_p to something we handle specially. */
6558 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
6559 want_value);
6560 if (ret != GS_UNHANDLED)
6561 return ret;
6562
6563  /* If we've got a variable-sized assignment between two lvalues (i.e. one
6564     that does not involve a call), then we can make things a bit more
6565     straightforward by converting the assignment to memcpy or memset.  */
6566 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
6567 {
6568 tree from = TREE_OPERAND (*from_p, 0);
6569 tree size = TREE_OPERAND (*from_p, 1);
6570
6571 if (TREE_CODE (from) == CONSTRUCTOR)
6572	return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
6573      else if (is_gimple_addressable (from)
6574 && ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (*to_p)))
6575 && ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (from))))
6576 {
6577 *from_p = from;
6578 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
6579						  pre_p);
6580 }
6581 }
6582
6583 /* Transform partial stores to non-addressable complex variables into
6584 total stores. This allows us to use real instead of virtual operands
6585 for these variables, which improves optimization. */
6586 if ((TREE_CODE (*to_p) == REALPART_EXPR
6587 || TREE_CODE (*to_p) == IMAGPART_EXPR)
6588 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
6589 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
6590
6591 /* Try to alleviate the effects of the gimplification creating artificial
6592 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
6593 make sure not to create DECL_DEBUG_EXPR links across functions. */
6594 if (!gimplify_ctxp->into_ssa
6595 && VAR_P (*from_p)
6596 && DECL_IGNORED_P (*from_p)
6597 && DECL_P (*to_p)
6598 && !DECL_IGNORED_P (*to_p)
6599 && decl_function_context (*to_p) == current_function_decl
6600 && decl_function_context (*from_p) == current_function_decl)
6601 {
6602 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
6603 DECL_NAME (*from_p)
6604 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
6605 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
6606 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
6607 }
6608
6609 if (want_value && TREE_THIS_VOLATILE (*to_p))
6610    *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
6611
6612 if (TREE_CODE (*from_p) == CALL_EXPR)
6613 {
6614 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
6615 instead of a GIMPLE_ASSIGN. */
6616 gcall *call_stmt;
6617 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
6618 {
6619 /* Gimplify internal functions created in the FEs. */
6620 int nargs = call_expr_nargs (*from_p), i;
6621 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
6622 auto_vec<tree> vargs (nargs);
6623
6624 for (i = 0; i < nargs; i++)
6625 {
6626	      gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
6627 EXPR_LOCATION (*from_p));
6628 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
6629 }
6630 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
6631	  gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
6632	  gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
6633 }
6634 else
6635 {
6636 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
6637 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
6638 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
6639 tree fndecl = get_callee_fndecl (*from_p);
6640 if (fndecl
6641	      && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
6642 && call_expr_nargs (*from_p) == 3)
6643 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
6644 CALL_EXPR_ARG (*from_p, 0),
6645 CALL_EXPR_ARG (*from_p, 1),
6646 CALL_EXPR_ARG (*from_p, 2));
6647 else
6648 {
6649 call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
6650 }
6651 }
6652 notice_special_calls (call_stmt);
6653      if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
6654	gimple_call_set_lhs (call_stmt, *to_p);
6655 else if (TREE_CODE (*to_p) == SSA_NAME)
6656 /* The above is somewhat premature, avoid ICEing later for a
6657 SSA name w/o a definition. We may have uses in the GIMPLE IL.
6658 ??? This doesn't make it a default-def. */
6659 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
6660
6661 assign = call_stmt;
6662 }
6663 else
6664 {
6665 assign = gimple_build_assign (*to_p, *from_p);
6666      gimple_set_location (assign, EXPR_LOCATION (*expr_p));
6667 if (COMPARISON_CLASS_P (*from_p))
6668 copy_warning (assign, *from_p);
6669 }
6670
6671 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
6672 {
6673 /* We should have got an SSA name from the start. */
6674 gcc_assert (TREE_CODE (*to_p) == SSA_NAME
6675 || ! gimple_in_ssa_p (cfun));
6676 }
6677
6678  gimplify_seq_add_stmt (pre_p, assign);
6679  gsi = gsi_last (*pre_p);
6680  maybe_fold_stmt (&gsi);
6681
6682 if (want_value)
6683 {
6684      *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
6685 return GS_OK;
6686 }
6687 else
6688 *expr_p = NULL;
6689
6690 return GS_ALL_DONE;
6691}
6692
6693/* Gimplify a comparison between two variable-sized objects. Do this
6694 with a call to BUILT_IN_MEMCMP. */
6695
6696static enum gimplify_status
6697gimplify_variable_sized_compare (tree *expr_p)
6698{
6699 location_t loc = EXPR_LOCATION (*expr_p);
6700 tree op0 = TREE_OPERAND (*expr_p, 0);
6701 tree op1 = TREE_OPERAND (*expr_p, 1);
6702 tree t, arg, dest, src, expr;
6703
6704 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
6705  arg = unshare_expr (arg);
6706 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
6707 src = build_fold_addr_expr_loc (loc, op1);
6708 dest = build_fold_addr_expr_loc (loc, op0);
6709  t = builtin_decl_implicit (BUILT_IN_MEMCMP);
6710 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
6711
6712 expr
6713 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
6714 SET_EXPR_LOCATION (expr, loc);
6715 *expr_p = expr;
6716
6717 return GS_OK;
6718}
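
/* E.g. (illustrative), a front end comparing two objects of the same
   variable-sized type lowers

     op0 == op1

   to

     __builtin_memcmp (&op0, &op1, size) == 0

   where size is OP0's TYPE_SIZE_UNIT with any PLACEHOLDER_EXPRs
   substituted.  A != comparison is rewritten the same way against 0.  */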
6719
6720/* Gimplify a comparison between two aggregate objects of integral scalar
6721 mode as a comparison between the bitwise equivalent scalar values. */
6722
6723static enum gimplify_status
6724gimplify_scalar_mode_aggregate_compare (tree *expr_p)
6725{
6726 location_t loc = EXPR_LOCATION (*expr_p);
6727 tree op0 = TREE_OPERAND (*expr_p, 0);
6728 tree op1 = TREE_OPERAND (*expr_p, 1);
6729
6730 tree type = TREE_TYPE (op0);
6731 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
6732
6733 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
6734 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
6735
6736 *expr_p
6737 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
6738
6739 return GS_OK;
6740}
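
/* E.g. (illustrative), an equality comparison of two 4-byte aggregates

     struct P { short x; short y; } a, b;

   whose type has SImode is rewritten as a comparison of the underlying
   bit patterns:

     VIEW_CONVERT_EXPR<unsigned int>(a) == VIEW_CONVERT_EXPR<unsigned int>(b)  */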
6741
6742/* Gimplify an expression sequence. This function gimplifies each
6743 expression and rewrites the original expression with the last
6744 expression of the sequence in GIMPLE form.
6745
6746 PRE_P points to the list where the side effects for all the
6747 expressions in the sequence will be emitted.
6748
6749 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
6750
6751static enum gimplify_status
6752gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
6753{
6754 tree t = *expr_p;
6755
6756 do
6757 {
6758 tree *sub_p = &TREE_OPERAND (t, 0);
6759
6760 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
6761	gimplify_compound_expr (sub_p, pre_p, false);
6762 else
6763 gimplify_stmt (sub_p, pre_p);
6764
6765 t = TREE_OPERAND (t, 1);
6766 }
6767 while (TREE_CODE (t) == COMPOUND_EXPR);
6768
6769 *expr_p = t;
6770 if (want_value)
6771 return GS_OK;
6772 else
6773 {
6774 gimplify_stmt (expr_p, pre_p);
6775 return GS_ALL_DONE;
6776 }
6777}
6778
6779/* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
6780 gimplify. After gimplification, EXPR_P will point to a new temporary
6781 that holds the original value of the SAVE_EXPR node.
6782
6783 PRE_P points to the list where side effects that must happen before
6784 *EXPR_P should be stored. */
6785
6786static enum gimplify_status
6787gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6788{
6789 enum gimplify_status ret = GS_ALL_DONE;
6790 tree val;
6791
6792 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
6793 val = TREE_OPERAND (*expr_p, 0);
6794
6795 if (val && TREE_TYPE (val) == error_mark_node)
6796 return GS_ERROR;
6797
6798 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
6799 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
6800 {
6801 /* The operand may be a void-valued expression. It is
6802 being executed only for its side-effects. */
6803 if (TREE_TYPE (val) == void_type_node)
6804 {
6805 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6806 is_gimple_stmt, fb_none);
6807 val = NULL;
6808 }
6809 else
6810 /* The temporary may not be an SSA name as later abnormal and EH
6811 control flow may invalidate use/def domination. When in SSA
6812 form then assume there are no such issues and SAVE_EXPRs only
6813 appear via GENERIC foldings. */
	val = get_initialized_tmp_var (val, pre_p, post_p,
				       gimple_in_ssa_p (cfun));
6816
6817 TREE_OPERAND (*expr_p, 0) = val;
6818 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
6819 }
6820
6821 *expr_p = val;
6822
6823 return ret;
6824}
6825
6826/* Rewrite the ADDR_EXPR node pointed to by EXPR_P
6827
6828 unary_expr
6829 : ...
6830 | '&' varname
6831 ...
6832
6833 PRE_P points to the list where side effects that must happen before
6834 *EXPR_P should be stored.
6835
6836 POST_P points to the list where side effects that must happen after
6837 *EXPR_P should be stored. */
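
/* For example (a sketch): '&*ptr' collapses back to 'ptr' (plus a
   conversion if the qualifiers differ), '&MEM_REF[ptr, 0]' is treated
   the same way, and for other operands the gimplifier first makes the
   operand addressable and then takes its address.  */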
6838
6839static enum gimplify_status
6840gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6841{
6842 tree expr = *expr_p;
6843 tree op0 = TREE_OPERAND (expr, 0);
6844 enum gimplify_status ret;
6845 location_t loc = EXPR_LOCATION (*expr_p);
6846
6847 switch (TREE_CODE (op0))
6848 {
6849 case INDIRECT_REF:
6850 do_indirect_ref:
6851 /* Check if we are dealing with an expression of the form '&*ptr'.
6852 While the front end folds away '&*ptr' into 'ptr', these
6853 expressions may be generated internally by the compiler (e.g.,
6854 builtins like __builtin_va_end). */
6855 /* Caution: the silent array decomposition semantics we allow for
6856 ADDR_EXPR means we can't always discard the pair. */
6857 /* Gimplification of the ADDR_EXPR operand may drop
6858 cv-qualification conversions, so make sure we add them if
6859 needed. */
6860 {
6861 tree op00 = TREE_OPERAND (op0, 0);
6862 tree t_expr = TREE_TYPE (expr);
6863 tree t_op00 = TREE_TYPE (op00);
6864
6865 if (!useless_type_conversion_p (t_expr, t_op00))
6866 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
6867 *expr_p = op00;
6868 ret = GS_OK;
6869 }
6870 break;
6871
6872 case VIEW_CONVERT_EXPR:
6873 /* Take the address of our operand and then convert it to the type of
6874 this ADDR_EXPR.
6875
     ??? The interactions of VIEW_CONVERT_EXPR and aliasing are not at
     all clear.  The impact of this transformation is even less clear.  */
6878
6879 /* If the operand is a useless conversion, look through it. Doing so
6880 guarantees that the ADDR_EXPR and its operand will remain of the
6881 same type. */
6882 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
6883 op0 = TREE_OPERAND (op0, 0);
6884
6885 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
6886 build_fold_addr_expr_loc (loc,
6887 TREE_OPERAND (op0, 0)));
6888 ret = GS_OK;
6889 break;
6890
6891 case MEM_REF:
6892 if (integer_zerop (TREE_OPERAND (op0, 1)))
6893 goto do_indirect_ref;
6894
6895 /* fall through */
6896
6897 default:
6898 /* If we see a call to a declared builtin or see its address
6899 being taken (we can unify those cases here) then we can mark
6900 the builtin for implicit generation by GCC. */
6901 if (TREE_CODE (op0) == FUNCTION_DECL
	  && fndecl_built_in_p (op0, BUILT_IN_NORMAL)
	  && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
	set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
6905
6906 /* We use fb_either here because the C frontend sometimes takes
6907 the address of a call that returns a struct; see
6908 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
6909 the implied temporary explicit. */
6910
6911 /* Make the operand addressable. */
6912 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
6913 is_gimple_addressable, fb_either);
6914 if (ret == GS_ERROR)
6915 break;
6916
6917 /* Then mark it. Beware that it may not be possible to do so directly
6918 if a temporary has been created by the gimplification. */
      prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
6920
6921 op0 = TREE_OPERAND (expr, 0);
6922
6923 /* For various reasons, the gimplification of the expression
6924 may have made a new INDIRECT_REF. */
6925 if (INDIRECT_REF_P (op0)
6926 || (TREE_CODE (op0) == MEM_REF
6927 && integer_zerop (TREE_OPERAND (op0, 1))))
6928 goto do_indirect_ref;
6929
6930 mark_addressable (TREE_OPERAND (expr, 0));
6931
6932 /* The FEs may end up building ADDR_EXPRs early on a decl with
6933 an incomplete type. Re-build ADDR_EXPRs in canonical form
6934 here. */
6935 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
6936 *expr_p = build_fold_addr_expr (op0);
6937
6938 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
6939 recompute_tree_invariant_for_addr_expr (*expr_p);
6940
6941 /* If we re-built the ADDR_EXPR add a conversion to the original type
6942 if required. */
6943 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
6944 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
6945
6946 break;
6947 }
6948
6949 return ret;
6950}
6951
6952/* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
6953 value; output operands should be a gimple lvalue. */
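
/* For example (a sketch): an in/out operand such as

     asm ("..." : "+r" (x));

   is split below into an output "=r" (x) plus a matching input "0" (x),
   which gives the optimizers independent def and use sites for X.  */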
6954
6955static enum gimplify_status
6956gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6957{
6958 tree expr;
6959 int noutputs;
6960 const char **oconstraints;
6961 int i;
6962 tree link;
6963 const char *constraint;
6964 bool allows_mem, allows_reg, is_inout;
6965 enum gimplify_status ret, tret;
6966 gasm *stmt;
6967 vec<tree, va_gc> *inputs;
6968 vec<tree, va_gc> *outputs;
6969 vec<tree, va_gc> *clobbers;
6970 vec<tree, va_gc> *labels;
6971 tree link_next;
6972
6973 expr = *expr_p;
6974 noutputs = list_length (ASM_OUTPUTS (expr));
6975 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
6976
6977 inputs = NULL;
6978 outputs = NULL;
6979 clobbers = NULL;
6980 labels = NULL;
6981
6982 ret = GS_ALL_DONE;
6983 link_next = NULL_TREE;
6984 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
6985 {
6986 bool ok;
6987 size_t constraint_len;
6988
6989 link_next = TREE_CHAIN (link);
6990
6991 oconstraints[i]
6992 = constraint
6993 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      constraint_len = strlen (constraint);
6995 if (constraint_len == 0)
6996 continue;
6997
6998 ok = parse_output_constraint (&constraint, i, 0, 0,
6999 &allows_mem, &allows_reg, &is_inout);
7000 if (!ok)
7001 {
7002 ret = GS_ERROR;
7003 is_inout = false;
7004 }
7005
7006 /* If we can't make copies, we can only accept memory.
7007 Similarly for VLAs. */
7008 tree outtype = TREE_TYPE (TREE_VALUE (link));
7009 if (outtype != error_mark_node
7010 && (TREE_ADDRESSABLE (outtype)
7011 || !COMPLETE_TYPE_P (outtype)
7012 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (outtype))))
7013 {
7014 if (allows_mem)
7015 allows_reg = 0;
7016 else
7017 {
7018 error ("impossible constraint in %<asm%>");
7019 error ("non-memory output %d must stay in memory", i);
7020 return GS_ERROR;
7021 }
7022 }
7023
7024 if (!allows_reg && allows_mem)
7025 mark_addressable (TREE_VALUE (link));
7026
7027 tree orig = TREE_VALUE (link);
7028 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
7029 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
7030 fb_lvalue | fb_mayfail);
7031 if (tret == GS_ERROR)
7032 {
7033 if (orig != error_mark_node)
7034 error ("invalid lvalue in %<asm%> output %d", i);
7035 ret = tret;
7036 }
7037
7038 /* If the constraint does not allow memory make sure we gimplify
7039 it to a register if it is not already but its base is. This
7040 happens for complex and vector components. */
7041 if (!allows_mem)
7042 {
7043 tree op = TREE_VALUE (link);
7044 if (! is_gimple_val (op)
7045 && is_gimple_reg_type (TREE_TYPE (op))
	      && is_gimple_reg (get_base_address (op)))
7047 {
7048 tree tem = create_tmp_reg (TREE_TYPE (op));
7049 tree ass;
7050 if (is_inout)
7051 {
7052 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
				tem, unshare_expr (op));
		  gimplify_and_add (ass, pre_p);
		}
	      ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
	      gimplify_and_add (ass, post_p);
7058
7059 TREE_VALUE (link) = tem;
7060 tret = GS_OK;
7061 }
7062 }
7063
      vec_safe_push (outputs, link);
7065 TREE_CHAIN (link) = NULL_TREE;
7066
7067 if (is_inout)
7068 {
7069 /* An input/output operand. To give the optimizers more
7070 flexibility, split it into separate input and output
7071 operands. */
7072 tree input;
7073 /* Buffer big enough to format a 32-bit UINT_MAX into. */
7074 char buf[11];
7075
7076 /* Turn the in/out constraint into an output constraint. */
7077 char *p = xstrdup (constraint);
7078 p[0] = '=';
7079 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
7080
7081 /* And add a matching input constraint. */
7082 if (allows_reg)
7083 {
	      sprintf (buf, "%u", i);

	      /* If there are multiple alternatives in the constraint,
		 handle each of them individually.  Those that allow a
		 register will be replaced with the operand number; the
		 others stay unchanged.  */
	      if (strchr (p, ',') != NULL)
		{
		  size_t len = 0, buflen = strlen (buf);
7093 char *beg, *end, *str, *dst;
7094
7095 for (beg = p + 1;;)
7096 {
		      end = strchr (beg, ',');
		      if (end == NULL)
			end = strchr (beg, '\0');
7100 if ((size_t) (end - beg) < buflen)
7101 len += buflen + 1;
7102 else
7103 len += end - beg + 1;
7104 if (*end)
7105 beg = end + 1;
7106 else
7107 break;
7108 }
7109
7110 str = (char *) alloca (len);
7111 for (beg = p + 1, dst = str;;)
7112 {
7113 const char *tem;
7114 bool mem_p, reg_p, inout_p;
7115
		      end = strchr (beg, ',');
7117 if (end)
7118 *end = '\0';
7119 beg[-1] = '=';
7120 tem = beg - 1;
7121 parse_output_constraint (&tem, i, 0, 0,
7122 &mem_p, &reg_p, &inout_p);
7123 if (dst != str)
7124 *dst++ = ',';
7125 if (reg_p)
7126 {
			  memcpy (dst, buf, buflen);
7128 dst += buflen;
7129 }
7130 else
7131 {
7132 if (end)
7133 len = end - beg;
7134 else
			    len = strlen (beg);
			  memcpy (dst, beg, len);
7137 dst += len;
7138 }
7139 if (end)
7140 beg = end + 1;
7141 else
7142 break;
7143 }
7144 *dst = '\0';
7145 input = build_string (dst - str, str);
7146 }
7147 else
	    input = build_string (strlen (buf), buf);
7149 }
7150 else
7151 input = build_string (constraint_len - 1, constraint + 1);
7152
	  free (p);
7154
7155 input = build_tree_list (build_tree_list (NULL_TREE, input),
7156 unshare_expr (TREE_VALUE (link)));
7157 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
7158 }
7159 }
7160
7161 link_next = NULL_TREE;
7162 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
7163 {
7164 link_next = TREE_CHAIN (link);
7165 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
7166 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
7167 oconstraints, &allows_mem, &allows_reg);
7168
7169 /* If we can't make copies, we can only accept memory. */
7170 tree intype = TREE_TYPE (TREE_VALUE (link));
7171 if (intype != error_mark_node
7172 && (TREE_ADDRESSABLE (intype)
7173 || !COMPLETE_TYPE_P (intype)
7174 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (intype))))
7175 {
7176 if (allows_mem)
7177 allows_reg = 0;
7178 else
7179 {
7180 error ("impossible constraint in %<asm%>");
7181 error ("non-memory input %d must stay in memory", i);
7182 return GS_ERROR;
7183 }
7184 }
7185
7186 /* If the operand is a memory input, it should be an lvalue. */
7187 if (!allows_reg && allows_mem)
7188 {
7189 tree inputv = TREE_VALUE (link);
7190 STRIP_NOPS (inputv);
7191 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
7192 || TREE_CODE (inputv) == PREINCREMENT_EXPR
7193 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
7194 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
7195 || TREE_CODE (inputv) == MODIFY_EXPR)
7196 TREE_VALUE (link) = error_mark_node;
7197 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
7198 is_gimple_lvalue, fb_lvalue | fb_mayfail);
7199 if (tret != GS_ERROR)
7200 {
	      /* Unlike output operands, memory inputs are not guaranteed
		 to be lvalues by the FE, and while the expressions are
		 marked addressable there, if it is e.g. a statement
		 expression, temporaries in it might not end up being
		 addressable.  They might already be used in the IL and
		 thus it is too late to make them addressable now.  */
7207 tree x = TREE_VALUE (link);
	      while (handled_component_p (x))
7209 x = TREE_OPERAND (x, 0);
7210 if (TREE_CODE (x) == MEM_REF
7211 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
7212 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
7213 if ((VAR_P (x)
7214 || TREE_CODE (x) == PARM_DECL
7215 || TREE_CODE (x) == RESULT_DECL)
7216 && !TREE_ADDRESSABLE (x)
7217 && is_gimple_reg (x))
7218 {
7219 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
7220 input_location), 0,
7221 "memory input %d is not directly addressable",
7222 i);
		  prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
7224 }
7225 }
7226 mark_addressable (TREE_VALUE (link));
7227 if (tret == GS_ERROR)
7228 {
7229 if (inputv != error_mark_node)
7230 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
7231 "memory input %d is not directly addressable", i);
7232 ret = tret;
7233 }
7234 }
7235 else
7236 {
7237 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
7238 is_gimple_asm_val, fb_rvalue);
7239 if (tret == GS_ERROR)
7240 ret = tret;
7241 }
7242
7243 TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (inputs, link);
7245 }
7246
7247 link_next = NULL_TREE;
7248 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
7249 {
7250 link_next = TREE_CHAIN (link);
7251 TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (clobbers, link);
7253 }
7254
7255 link_next = NULL_TREE;
7256 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
7257 {
7258 link_next = TREE_CHAIN (link);
7259 TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (labels, link);
7261 }
7262
7263 /* Do not add ASMs with errors to the gimple IL stream. */
7264 if (ret != GS_ERROR)
7265 {
7266 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
7267 inputs, outputs, clobbers, labels);
7268
7269 /* asm is volatile if it was marked by the user as volatile or
7270 there are no outputs or this is an asm goto. */
      gimple_asm_set_volatile (stmt,
			       ASM_VOLATILE_P (expr)
			       || noutputs == 0
			       || labels);
      gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
      gimple_asm_set_inline (stmt, ASM_INLINE_P (expr));

      gimplify_seq_add_stmt (pre_p, stmt);
7279 }
7280
7281 return ret;
7282}
7283
7284/* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
7285 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
7286 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
7287 return to this function.
7288
7289 FIXME should we complexify the prequeue handling instead? Or use flags
7290 for all the cleanups and let the optimizer tighten them up? The current
7291 code seems pretty fragile; it will break on a cleanup within any
7292 non-conditional nesting. But any such nesting would be broken, anyway;
7293 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
7294 and continues out of it. We can do that at the RTL level, though, so
7295 having an optimizer to tighten up try/finally regions would be a Good
7296 Thing. */
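
/* For example (a sketch): a cleanup point whose body gimplified to

     stmt1;
     WCE <cleanup>;   <-- GIMPLE_WITH_CLEANUP_EXPR
     stmt2;

   is rewritten below into

     stmt1;
     try { stmt2; } finally { cleanup; }  */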
7297
7298static enum gimplify_status
7299gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
7300{
7301 gimple_stmt_iterator iter;
7302 gimple_seq body_sequence = NULL;
7303
  tree temp = voidify_wrapper_expr (*expr_p, NULL);
7305
7306 /* We only care about the number of conditions between the innermost
7307 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
7308 any cleanups collected outside the CLEANUP_POINT_EXPR. */
7309 int old_conds = gimplify_ctxp->conditions;
7310 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
7311 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
7312 gimplify_ctxp->conditions = 0;
7313 gimplify_ctxp->conditional_cleanups = NULL;
7314 gimplify_ctxp->in_cleanup_point_expr = true;
7315
7316 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
7317
7318 gimplify_ctxp->conditions = old_conds;
7319 gimplify_ctxp->conditional_cleanups = old_cleanups;
7320 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
7321
  for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
    {
      gimple *wce = gsi_stmt (iter);

      if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
	{
	  if (gsi_one_before_end_p (iter))
	    {
	      /* Note that gsi_insert_seq_before and gsi_remove do not
		 scan operands, unlike some other sequence mutators.  */
	      if (!gimple_wce_cleanup_eh_only (wce))
		gsi_insert_seq_before_without_update (&iter,
						      gimple_wce_cleanup (wce),
						      GSI_SAME_STMT);
	      gsi_remove (&iter, true);
	      break;
	    }
	  else
	    {
	      gtry *gtry;
	      gimple_seq seq;
	      enum gimple_try_flags kind;

	      if (gimple_wce_cleanup_eh_only (wce))
		kind = GIMPLE_TRY_CATCH;
	      else
		kind = GIMPLE_TRY_FINALLY;
	      seq = gsi_split_seq_after (iter);

	      gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
	      /* Do not use gsi_replace here, as it may scan operands.
		 We want to do a simple structural modification only.  */
	      gsi_set_stmt (&iter, gtry);
	      iter = gsi_start (gtry->eval);
	    }
	}
      else
	gsi_next (&iter);
    }

  gimplify_seq_add_seq (pre_p, body_sequence);
7363 if (temp)
7364 {
7365 *expr_p = temp;
7366 return GS_OK;
7367 }
7368 else
7369 {
7370 *expr_p = NULL;
7371 return GS_ALL_DONE;
7372 }
7373}
7374
7375/* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
7376 is the cleanup action required. EH_ONLY is true if the cleanup should
7377 only be executed if an exception is thrown, not on normal exit.
7378 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
7379 only valid for clobbers. */
7380
7381static void
7382gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
7383 bool force_uncond = false)
7384{
7385 gimple *wce;
7386 gimple_seq cleanup_stmts = NULL;
7387
  /* Errors can result in improperly nested cleanups, which causes
     confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR.  */
7390 if (seen_error ())
7391 return;
7392
7393 if (gimple_conditional_context ())
7394 {
7395 /* If we're in a conditional context, this is more complex. We only
7396 want to run the cleanup if we actually ran the initialization that
7397 necessitates it, but we want to run it after the end of the
7398 conditional context. So we wrap the try/finally around the
7399 condition and use a flag to determine whether or not to actually
7400 run the destructor. Thus
7401
7402 test ? f(A()) : 0
7403
7404 becomes (approximately)
7405
7406 flag = 0;
7407 try {
7408 if (test) { A::A(temp); flag = 1; val = f(temp); }
7409 else { val = 0; }
7410 } finally {
7411 if (flag) A::~A(temp);
7412 }
7413 val
7414 */
      if (force_uncond)
	{
	  gimplify_stmt (&cleanup, &cleanup_stmts);
	  wce = gimple_build_wce (cleanup_stmts);
	  gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
	}
      else
	{
	  tree flag = create_tmp_var (boolean_type_node, "cleanup");
	  gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
	  gassign *ftrue = gimple_build_assign (flag, boolean_true_node);

	  cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
	  gimplify_stmt (&cleanup, &cleanup_stmts);
	  wce = gimple_build_wce (cleanup_stmts);
	  gimple_wce_set_cleanup_eh_only (wce, eh_only);

	  gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
	  gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
	  gimplify_seq_add_stmt (pre_p, ftrue);

	  /* Because of this manipulation, and the EH edges that jump
	     threading cannot redirect, the temporary (VAR) will appear
	     to be used uninitialized.  Don't warn.  */
	  suppress_warning (var, OPT_Wuninitialized);
	}
    }
  else
    {
      gimplify_stmt (&cleanup, &cleanup_stmts);
      wce = gimple_build_wce (cleanup_stmts);
      gimple_wce_set_cleanup_eh_only (wce, eh_only);
      gimplify_seq_add_stmt (pre_p, wce);
    }
}
7450
7451/* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
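
/* For example (a sketch; D.1234 stands for the TARGET_EXPR_SLOT
   temporary): TARGET_EXPR <D.1234, f ()> gimplifies approximately to

     D.1234 = f ();
     ... uses of D.1234 ...
     D.1234 = {CLOBBER};   <-- with -fstack-reuse=all, at scope end

   plus any registered cleanup for the temporary.  */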
7452
7453static enum gimplify_status
7454gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
7455{
7456 tree targ = *expr_p;
7457 tree temp = TARGET_EXPR_SLOT (targ);
7458 tree init = TARGET_EXPR_INITIAL (targ);
7459 enum gimplify_status ret;
7460
7461 bool unpoison_empty_seq = false;
7462 gimple_stmt_iterator unpoison_it;
7463
7464 if (init)
7465 {
7466 gimple_seq init_pre_p = NULL;
7467
      /* TARGET_EXPR temps aren't part of the enclosing block, so add them
	 to the temps list.  Also handle variable-length TARGET_EXPRs.  */
      if (!poly_int_tree_p (DECL_SIZE (temp)))
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
	    gimplify_type_sizes (TREE_TYPE (temp), &init_pre_p);
	  /* FIXME: this is correct only when the size of the type does
	     not depend on expressions evaluated in init.  */
	  gimplify_vla_decl (temp, &init_pre_p);
	}
7478 else
7479 {
	  /* Save the location where we need to place unpoisoning, since the
	     variable may later become needs_to_live_in_memory.  */
	  unpoison_it = gsi_last (*pre_p);
	  unpoison_empty_seq = gsi_end_p (unpoison_it);

	  gimple_add_tmp_var (temp);
7486 }
7487
7488 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
7489 expression is supposed to initialize the slot. */
7490 if (VOID_TYPE_P (TREE_TYPE (init)))
7491 ret = gimplify_expr (&init, &init_pre_p, post_p, is_gimple_stmt,
7492 fb_none);
7493 else
7494 {
7495 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
7496 init = init_expr;
7497 ret = gimplify_expr (&init, &init_pre_p, post_p, is_gimple_stmt,
7498 fb_none);
7499 init = NULL;
7500 ggc_free (init_expr);
7501 }
7502 if (ret == GS_ERROR)
7503 {
7504 /* PR c++/28266 Make sure this is expanded only once. */
7505 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
7506 return GS_ERROR;
7507 }
7508
7509 if (init)
      gimplify_and_add (init, &init_pre_p);
7511
7512 /* Add a clobber for the temporary going out of scope, like
7513 gimplify_bind_expr. But only if we did not promote the
7514 temporary to static storage. */
7515 if (gimplify_ctxp->in_cleanup_point_expr
7516 && !TREE_STATIC (temp)
7517 && needs_to_live_in_memory (temp))
7518 {
	  if (flag_stack_reuse == SR_ALL)
	    {
	      tree clobber = build_clobber (TREE_TYPE (temp),
					    CLOBBER_STORAGE_END);
	      clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
	      gimple_push_cleanup (temp, clobber, false, pre_p, true);
	    }
	  if (asan_poisoned_variables
	      && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
	      && !TREE_STATIC (temp)
	      && dbg_cnt (asan_use_after_scope)
	      && !gimplify_omp_ctxp)
	    {
	      tree asan_cleanup = build_asan_poison_call_expr (temp);
	      if (asan_cleanup)
		{
		  if (unpoison_empty_seq)
		    unpoison_it = gsi_start (*pre_p);

		  asan_poison_variable (temp, false, &unpoison_it,
					unpoison_empty_seq);
		  gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
		}
	    }
7543 }
7544
7545 gimple_seq_add_seq (pre_p, init_pre_p);
7546
7547 /* If needed, push the cleanup for the temp. */
7548 if (TARGET_EXPR_CLEANUP (targ))
	gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
			     CLEANUP_EH_ONLY (targ), pre_p);
7551
7552 /* Only expand this once. */
7553 TREE_OPERAND (targ, 3) = init;
7554 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
7555 }
7556 else
7557 /* We should have expanded this before. */
7558 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
7559
7560 *expr_p = temp;
7561 return GS_OK;
7562}
7563
7564/* Gimplification of expression trees. */
7565
7566/* Gimplify an expression which appears at statement context. The
7567 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
7568 NULL, a new sequence is allocated.
7569
7570 Return true if we actually added a statement to the queue. */
7571
7572bool
7573gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
7574{
7575 gimple_seq_node last;
7576
  last = gimple_seq_last (*seq_p);
  gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
  return last != gimple_seq_last (*seq_p);
7580}
7581
/* Add FIRSTPRIVATE entries for DECL in CTX and the surrounding OpenMP
   parallels.  If entries already exist, force them to be some flavor of
   private.  If there is no enclosing parallel, do nothing.  */
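
/* For example (a sketch): given a variable-length array

     int vla[n];
   #pragma omp parallel shared(vla)
     ... uses of vla ...

   the gimplified size temporaries of VLA's type are made firstprivate
   on the parallel so each thread can recompute the type layout, even
   though VLA itself stays shared via its pointer replacement.  */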
7585
7586void
7587omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
7588{
7589 splay_tree_node n;
7590
7591 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
7592 return;
7593
7594 do
7595 {
7596 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7597 if (n != NULL)
7598 {
7599 if (n->value & GOVD_SHARED)
7600 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
7601 else if (n->value & GOVD_MAP)
7602 n->value |= GOVD_MAP_TO_ONLY;
7603 else
7604 return;
7605 }
7606 else if ((ctx->region_type & ORT_TARGET) != 0)
7607 {
7608 if (ctx->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
7609 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
7610 else
7611 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
7612 }
7613 else if (ctx->region_type != ORT_WORKSHARE
7614 && ctx->region_type != ORT_TASKGROUP
7615 && ctx->region_type != ORT_SIMD
7616 && ctx->region_type != ORT_ACC
7617 && !(ctx->region_type & ORT_TARGET_DATA))
7618 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
7619
7620 ctx = ctx->outer_context;
7621 }
7622 while (ctx);
7623}
7624
7625/* Similarly for each of the type sizes of TYPE. */
7626
7627static void
7628omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
7629{
7630 if (type == NULL || type == error_mark_node)
7631 return;
7632 type = TYPE_MAIN_VARIANT (type);
7633
  if (ctx->privatized_types->add (type))
7635 return;
7636
7637 switch (TREE_CODE (type))
7638 {
7639 case INTEGER_TYPE:
7640 case ENUMERAL_TYPE:
7641 case BOOLEAN_TYPE:
7642 case REAL_TYPE:
7643 case FIXED_POINT_TYPE:
7644 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
7645 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
7646 break;
7647
7648 case ARRAY_TYPE:
7649 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
7650 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
7651 break;
7652
7653 case RECORD_TYPE:
7654 case UNION_TYPE:
7655 case QUAL_UNION_TYPE:
7656 {
7657 tree field;
7658 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
7659 if (TREE_CODE (field) == FIELD_DECL)
7660 {
7661 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
7662 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
7663 }
7664 }
7665 break;
7666
7667 case POINTER_TYPE:
7668 case REFERENCE_TYPE:
7669 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
7670 break;
7671
7672 default:
7673 break;
7674 }
7675
7676 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
7677 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
7678 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
7679}
7680
7681/* Add an entry for DECL in the OMP context CTX with FLAGS. */
7682
7683static void
7684omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
7685{
7686 splay_tree_node n;
7687 unsigned int nflags;
7688 tree t;
7689
  if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
7691 return;
7692
  /* Never elide decls whose type has TREE_ADDRESSABLE set.  This means
     there are constructors involved somewhere.  The exception is a shared
     clause: nothing is privatized in that case.  */
7696 if ((flags & GOVD_SHARED) == 0
7697 && (TREE_ADDRESSABLE (TREE_TYPE (decl))
7698 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
7699 flags |= GOVD_SEEN;
7700
7701 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7702 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7703 {
7704 /* We shouldn't be re-adding the decl with the same data
7705 sharing class. */
7706 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
7707 nflags = n->value | flags;
7708 /* The only combination of data sharing classes we should see is
7709 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
7710 reduction variables to be used in data sharing clauses. */
7711 gcc_assert ((ctx->region_type & ORT_ACC) != 0
7712 || ((nflags & GOVD_DATA_SHARE_CLASS)
7713 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
7714 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
7715 n->value = nflags;
7716 return;
7717 }
7718
7719 /* When adding a variable-sized variable, we have to handle all sorts
7720 of additional bits of data: the pointer replacement variable, and
7721 the parameters of the type. */
7722 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7723 {
7724 /* Add the pointer replacement variable as PRIVATE if the variable
7725 replacement is private, else FIRSTPRIVATE since we'll need the
7726 address of the original variable either for SHARED, or for the
7727 copy into or out of the context. */
7728 if (!(flags & GOVD_LOCAL) && ctx->region_type != ORT_TASKGROUP)
7729 {
7730 if (flags & GOVD_MAP)
7731 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
7732 else if (flags & GOVD_PRIVATE)
7733 nflags = GOVD_PRIVATE;
7734 else if (((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7735 && (flags & GOVD_FIRSTPRIVATE))
7736 || (ctx->region_type == ORT_TARGET_DATA
7737 && (flags & GOVD_DATA_SHARE_CLASS) == 0))
7738 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
7739 else
7740 nflags = GOVD_FIRSTPRIVATE;
7741 nflags |= flags & GOVD_SEEN;
7742 t = DECL_VALUE_EXPR (decl);
7743 gcc_assert (INDIRECT_REF_P (t));
7744 t = TREE_OPERAND (t, 0);
7745 gcc_assert (DECL_P (t));
	  omp_add_variable (ctx, t, nflags);
7747 }
7748
7749 /* Add all of the variable and type parameters (which should have
7750 been gimplified to a formal temporary) as FIRSTPRIVATE. */
7751 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
7752 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
7753 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
7754
7755 /* The variable-sized variable itself is never SHARED, only some form
7756 of PRIVATE. The sharing would take place via the pointer variable
7757 which we remapped above. */
7758 if (flags & GOVD_SHARED)
7759 flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
7760 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
7761
7762 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
7763 alloca statement we generate for the variable, so make sure it
7764 is available. This isn't automatically needed for the SHARED
7765 case, since we won't be allocating local storage then.
7766 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
7767 in this case omp_notice_variable will be called later
7768 on when it is gimplified. */
7769 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
7770 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
7771 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
7772 }
7773 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
7774 && omp_privatize_by_reference (decl))
7775 {
7776 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
7777
7778 /* Similar to the direct variable sized case above, we'll need the
7779 size of references being privatized. */
7780 if ((flags & GOVD_SHARED) == 0)
7781 {
7782 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7783 if (t && DECL_P (t))
7784 omp_notice_variable (ctx, t, true);
7785 }
7786 }
7787
7788 if (n != NULL)
7789 n->value |= flags;
7790 else
7791 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
7792
7793 /* For reductions clauses in OpenACC loop directives, by default create a
7794 copy clause on the enclosing parallel construct for carrying back the
7795 results. */
7796 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
7797 {
7798 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
7799 while (outer_ctx)
7800 {
7801 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
7802 if (n != NULL)
7803 {
7804 /* Ignore local variables and explicitly declared clauses. */
7805 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
7806 break;
7807 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
7808 {
7809 /* According to the OpenACC spec, such a reduction variable
7810 should already have a copy map on a kernels construct,
7811 verify that here. */
7812 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
7813 && (n->value & GOVD_MAP));
7814 }
7815 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7816 {
7817 /* Remove firstprivate and make it a copy map. */
7818 n->value &= ~GOVD_FIRSTPRIVATE;
7819 n->value |= GOVD_MAP;
7820 }
7821 }
7822 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7823 {
7824 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
7825 GOVD_MAP | GOVD_SEEN);
7826 break;
7827 }
7828 outer_ctx = outer_ctx->outer_context;
7829 }
7830 }
7831}
7832
7833/* Notice a threadprivate variable DECL used in OMP context CTX.
7834 This just prints out diagnostics about threadprivate variable uses
7835 in untied tasks. If DECL2 is non-NULL, prevent this warning
7836 on that variable. */
7837
7838static bool
7839omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
7840 tree decl2)
7841{
7842 splay_tree_node n;
7843 struct gimplify_omp_ctx *octx;
7844
7845 for (octx = ctx; octx; octx = octx->outer_context)
7846 if ((octx->region_type & ORT_TARGET) != 0
7847 || octx->order_concurrent)
7848 {
7849 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
7850 if (n == NULL)
7851 {
7852 if (octx->order_concurrent)
7853 {
7854 error ("threadprivate variable %qE used in a region with"
7855 " %<order(concurrent)%> clause", DECL_NAME (decl));
7856 inform (octx->location, "enclosing region");
7857 }
7858 else
7859 {
7860 error ("threadprivate variable %qE used in target region",
7861 DECL_NAME (decl));
7862 inform (octx->location, "enclosing target region");
7863 }
7864 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
7865 }
7866 if (decl2)
7867 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
7868 }
7869
7870 if (ctx->region_type != ORT_UNTIED_TASK)
7871 return false;
7872 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7873 if (n == NULL)
7874 {
7875 error ("threadprivate variable %qE used in untied task",
7876 DECL_NAME (decl));
7877 inform (ctx->location, "enclosing task");
7878 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
7879 }
7880 if (decl2)
7881 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
7882 return false;
7883}
7884
7885/* Return true if global var DECL is device resident. */
7886
7887static bool
7888device_resident_p (tree decl)
7889{
  tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
7891
7892 if (!attr)
7893 return false;
7894
7895 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
7896 {
7897 tree c = TREE_VALUE (t);
7898 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
7899 return true;
7900 }
7901
7902 return false;
7903}
7904
7905/* Return true if DECL has an ACC DECLARE attribute. */
7906
7907static bool
7908is_oacc_declared (tree decl)
7909{
7910 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
  tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
7912 return declared != NULL_TREE;
7913}
7914
7915/* Determine outer default flags for DECL mentioned in an OMP region
7916 but not declared in an enclosing clause.
7917
7918 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
7919 remapped firstprivate instead of shared. To some extent this is
7920 addressed in omp_firstprivatize_type_sizes, but not
7921 effectively. */
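
/* For instance (a sketch):

     int x = 0;
   #pragma omp parallel default(none)
     x++;

   reaches the OMP_CLAUSE_DEFAULT_NONE case below and emits
   "'x' not specified in enclosing 'parallel'", whereas under the
   implicit default X would simply get GOVD_SHARED.  */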
7922
7923static unsigned
7924omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
7925 bool in_code, unsigned flags)
7926{
7927 enum omp_clause_default_kind default_kind = ctx->default_kind;
7928 enum omp_clause_default_kind kind;
7929
7930 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
7931 if (ctx->region_type & ORT_TASK)
7932 {
      tree detach_clause = omp_find_clause (ctx->clauses, OMP_CLAUSE_DETACH);
7934
7935 /* The event-handle specified by a detach clause should always be firstprivate,
7936 regardless of the current default. */
7937 if (detach_clause && OMP_CLAUSE_DECL (detach_clause) == decl)
7938 kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
7939 }
7940 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
7941 default_kind = kind;
7942 else if (VAR_P (decl) && TREE_STATIC (decl) && DECL_IN_CONSTANT_POOL (decl))
7943 default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  /* For C/C++ default({,first}private), variables with static storage
     duration declared in a namespace or at global scope and referenced in
     the construct must be explicitly specified, i.e. this acts as
     default(none).  */
  else if ((default_kind == OMP_CLAUSE_DEFAULT_PRIVATE
	    || default_kind == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
	   && VAR_P (decl)
	   && is_global_var (decl)
7951 && (DECL_FILE_SCOPE_P (decl)
7952 || (DECL_CONTEXT (decl)
7953 && TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL))
7954 && !lang_GNU_Fortran ())
7955 default_kind = OMP_CLAUSE_DEFAULT_NONE;
7956
7957 switch (default_kind)
7958 {
7959 case OMP_CLAUSE_DEFAULT_NONE:
7960 {
7961 const char *rtype;
7962
7963 if (ctx->region_type & ORT_PARALLEL)
7964 rtype = "parallel";
7965 else if ((ctx->region_type & ORT_TASKLOOP) == ORT_TASKLOOP)
7966 rtype = "taskloop";
7967 else if (ctx->region_type & ORT_TASK)
7968 rtype = "task";
7969 else if (ctx->region_type & ORT_TEAMS)
7970 rtype = "teams";
7971 else
7972 gcc_unreachable ();
7973
7974 error ("%qE not specified in enclosing %qs",
7975 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
7976 inform (ctx->location, "enclosing %qs", rtype);
7977 }
7978 /* FALLTHRU */
7979 case OMP_CLAUSE_DEFAULT_SHARED:
7980 flags |= GOVD_SHARED;
7981 break;
7982 case OMP_CLAUSE_DEFAULT_PRIVATE:
7983 flags |= GOVD_PRIVATE;
7984 break;
7985 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
7986 flags |= GOVD_FIRSTPRIVATE;
7987 break;
7988 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
7989 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
7990 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
7991 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
7992 {
7993 omp_notice_variable (octx, decl, in_code);
7994 for (; octx; octx = octx->outer_context)
7995 {
7996 splay_tree_node n2;
7997
7998 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
7999 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
8000 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
8001 continue;
8002 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
8003 {
8004 flags |= GOVD_FIRSTPRIVATE;
8005 goto found_outer;
8006 }
8007 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
8008 {
8009 flags |= GOVD_SHARED;
8010 goto found_outer;
8011 }
8012 }
8013 }
8014
8015 if (TREE_CODE (decl) == PARM_DECL
      || (!is_global_var (decl)
8017 && DECL_CONTEXT (decl) == current_function_decl))
8018 flags |= GOVD_FIRSTPRIVATE;
8019 else
8020 flags |= GOVD_SHARED;
8021 found_outer:
8022 break;
8023
8024 default:
8025 gcc_unreachable ();
8026 }
8027
8028 return flags;
8029}
8030
8031/* Return string name for types of OpenACC constructs from ORT_* values. */
8032
8033static const char *
8034oacc_region_type_name (enum omp_region_type region_type)
8035{
8036 switch (region_type)
8037 {
8038 case ORT_ACC_DATA:
8039 return "data";
8040 case ORT_ACC_PARALLEL:
8041 return "parallel";
8042 case ORT_ACC_KERNELS:
8043 return "kernels";
8044 case ORT_ACC_SERIAL:
8045 return "serial";
8046 default:
8047 gcc_unreachable ();
8048 }
8049}
8050
8051/* Determine outer default flags for DECL mentioned in an OACC region
8052 but not declared in an enclosing clause. */
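
/* For example (a sketch): in

     int n = 100;
     double a[100];
   #pragma acc parallel
     ... uses of n and a ...

   the scalar N defaults to 'firstprivate' while the aggregate A
   defaults to a 'present_or_copy' map, per the switch below.  */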
8053
8054static unsigned
8055oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
8056{
8057 struct gimplify_omp_ctx *ctx_default = ctx;
8058 /* If no 'default' clause appears on this compute construct... */
8059 if (ctx_default->default_kind == OMP_CLAUSE_DEFAULT_SHARED)
8060 {
8061 /* ..., see if one appears on a lexically containing 'data'
8062 construct. */
8063 while ((ctx_default = ctx_default->outer_context))
8064 {
8065 if (ctx_default->region_type == ORT_ACC_DATA
8066 && ctx_default->default_kind != OMP_CLAUSE_DEFAULT_SHARED)
8067 break;
8068 }
8069 /* If not, reset. */
8070 if (!ctx_default)
8071 ctx_default = ctx;
8072 }
8073
8074 bool on_device = false;
8075 bool is_private = false;
8076 bool declared = is_oacc_declared (decl);
8077 tree type = TREE_TYPE (decl);
8078
8079 if (omp_privatize_by_reference (decl))
8080 type = TREE_TYPE (type);
8081
  /* For Fortran COMMON blocks, only used variables in those blocks are
     transferred and remapped.  The block itself will have a private clause
     to avoid transferring the data twice.
     The hook evaluates to false by default.  For a variable in Fortran's
     COMMON or EQUIVALENCE block, it returns 'true' (as we have shared=false)
     - as only the variables in such a COMMON/EQUIVALENCE block shall be
     privatized, not the whole block.  For C++ and Fortran, it can also be
     true under certain other conditions, if DECL_HAS_VALUE_EXPR.  */
8090 if (RECORD_OR_UNION_TYPE_P (type))
8091 is_private = lang_hooks.decls.omp_disregard_value_expr (decl, false);
8092
8093 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
      && is_global_var (decl)
8095 && device_resident_p (decl)
8096 && !is_private)
8097 {
8098 on_device = true;
8099 flags |= GOVD_MAP_TO_ONLY;
8100 }
8101
8102 switch (ctx->region_type)
8103 {
8104 case ORT_ACC_KERNELS:
8105 if (is_private)
8106 flags |= GOVD_FIRSTPRIVATE;
8107 else if (AGGREGATE_TYPE_P (type))
8108 {
8109 /* Aggregates default to 'present_or_copy', or 'present'. */
8110 if (ctx_default->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
8111 flags |= GOVD_MAP;
8112 else
8113 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
8114 }
8115 else
8116 /* Scalars default to 'copy'. */
8117 flags |= GOVD_MAP | GOVD_MAP_FORCE;
8118
8119 break;
8120
8121 case ORT_ACC_PARALLEL:
8122 case ORT_ACC_SERIAL:
8123 if (is_private)
8124 flags |= GOVD_FIRSTPRIVATE;
8125 else if (on_device || declared)
8126 flags |= GOVD_MAP;
8127 else if (AGGREGATE_TYPE_P (type))
8128 {
8129 /* Aggregates default to 'present_or_copy', or 'present'. */
8130 if (ctx_default->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
8131 flags |= GOVD_MAP;
8132 else
8133 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
8134 }
8135 else
8136 /* Scalars default to 'firstprivate'. */
8137 flags |= GOVD_FIRSTPRIVATE;
8138
8139 break;
8140
8141 default:
8142 gcc_unreachable ();
8143 }
8144
8145 if (DECL_ARTIFICIAL (decl))
8146 ; /* We can get compiler-generated decls, and should not complain
8147 about them. */
8148 else if (ctx_default->default_kind == OMP_CLAUSE_DEFAULT_NONE)
8149 {
      error ("%qE not specified in enclosing OpenACC %qs construct",
	     DECL_NAME (lang_hooks.decls.omp_report_decl (decl)),
	     oacc_region_type_name (ctx->region_type));
      if (ctx_default != ctx)
	inform (ctx->location, "enclosing OpenACC %qs construct and",
		oacc_region_type_name (ctx->region_type));
      inform (ctx_default->location,
	      "enclosing OpenACC %qs construct with %qs clause",
	      oacc_region_type_name (ctx_default->region_type),
	      "default(none)");
8160 }
8161 else if (ctx_default->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
8162 ; /* Handled above. */
8163 else
8164 gcc_checking_assert (ctx_default->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
8165
8166 return flags;
8167}
8168
8169/* Record the fact that DECL was used within the OMP context CTX.
8170 IN_CODE is true when real code uses DECL, and false when we should
8171 merely emit default(none) errors. Return true if DECL is going to
8172 be remapped and thus DECL shouldn't be gimplified into its
8173 DECL_VALUE_EXPR (if any). */
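
/* A sketch of the flow: for

   #pragma omp parallel
     x++;

   the use of X in the body reaches this function with IN_CODE true;
   GOVD_SEEN plus a default data-sharing class is recorded for X in the
   parallel's context, and the walk recurses outwards so enclosing
   constructs notice the use as well.  */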
8174
8175static bool
8176omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
8177{
8178 splay_tree_node n;
8179 unsigned flags = in_code ? GOVD_SEEN : 0;
8180 bool ret = false, shared;
8181
  if (error_operand_p (decl))
8183 return false;
8184
8185 if (DECL_ARTIFICIAL (decl))
8186 {
      tree attr = lookup_attribute ("omp allocate var", DECL_ATTRIBUTES (decl));
8188 if (attr)
8189 decl = TREE_VALUE (TREE_VALUE (attr));
8190 }
8191
8192 if (ctx->region_type == ORT_NONE)
8193 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
8194
  if (is_global_var (decl))
8196 {
8197 /* Threadprivate variables are predetermined. */
8198 if (DECL_THREAD_LOCAL_P (decl))
8199 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
8200
8201 if (DECL_HAS_VALUE_EXPR_P (decl))
8202 {
8203 if (ctx->region_type & ORT_ACC)
	/* For OpenACC, defer expansion of value to avoid transferring
	   privatized common block data instead of im-/explicitly transferred
	   variables which are in common blocks.  */
8207 ;
8208 else
8209 {
8210 tree value = get_base_address (DECL_VALUE_EXPR (decl));
8211
8212 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
	    return omp_notice_threadprivate_variable (ctx, decl, value);
8214 }
8215 }
8216
8217 if (gimplify_omp_ctxp->outer_context == NULL
8218 && VAR_P (decl)
	&& oacc_get_fn_attrib (current_function_decl))
8220 {
8221 location_t loc = DECL_SOURCE_LOCATION (decl);
8222
	  if (lookup_attribute ("omp declare target link",
8224 DECL_ATTRIBUTES (decl)))
8225 {
8226 error_at (loc,
8227 "%qE with %<link%> clause used in %<routine%> function",
8228 DECL_NAME (decl));
8229 return false;
8230 }
	  else if (!lookup_attribute ("omp declare target",
8232 DECL_ATTRIBUTES (decl)))
8233 {
8234 error_at (loc,
8235 "%qE requires a %<declare%> directive for use "
8236 "in a %<routine%> function", DECL_NAME (decl));
8237 return false;
8238 }
8239 }
8240 }
8241
8242 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
8243 if ((ctx->region_type & ORT_TARGET) != 0)
8244 {
8245 if (n == NULL)
8246 {
8247 unsigned nflags = flags;
8248 if ((ctx->region_type & ORT_ACC) == 0)
8249 {
8250 bool is_declare_target = false;
	      if (is_global_var (decl)
8252 && varpool_node::get_create (decl)->offloadable)
8253 {
8254 struct gimplify_omp_ctx *octx;
8255 for (octx = ctx->outer_context;
8256 octx; octx = octx->outer_context)
8257 {
8258 n = splay_tree_lookup (octx->variables,
8259 (splay_tree_key)decl);
8260 if (n
8261 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
8262 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
8263 break;
8264 }
8265 is_declare_target = octx == NULL;
8266 }
8267 if (!is_declare_target)
8268 {
8269 int gdmk;
8270 enum omp_clause_defaultmap_kind kind;
8271 if (lang_hooks.decls.omp_allocatable_p (decl))
8272 gdmk = GDMK_ALLOCATABLE;
8273 else if (lang_hooks.decls.omp_scalar_target_p (decl))
8274 gdmk = GDMK_SCALAR_TARGET;
8275 else if (lang_hooks.decls.omp_scalar_p (decl, false))
8276 gdmk = GDMK_SCALAR;
8277 else if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
8278 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
8279 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
8280 == POINTER_TYPE)))
8281 gdmk = GDMK_POINTER;
8282 else
8283 gdmk = GDMK_AGGREGATE;
8284 kind = lang_hooks.decls.omp_predetermined_mapping (decl);
8285 if (kind != OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED)
8286 {
8287 if (kind == OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE)
8288 nflags |= GOVD_FIRSTPRIVATE;
8289 else if (kind == OMP_CLAUSE_DEFAULTMAP_TO)
8290 nflags |= GOVD_MAP | GOVD_MAP_TO_ONLY;
8291 else
8292 gcc_unreachable ();
8293 }
8294 else if (ctx->defaultmap[gdmk] == 0)
8295 {
8296 tree d = lang_hooks.decls.omp_report_decl (decl);
8297 error ("%qE not specified in enclosing %<target%>",
8298 DECL_NAME (d));
8299 inform (ctx->location, "enclosing %<target%>");
8300 }
8301 else if (ctx->defaultmap[gdmk]
8302 & (GOVD_MAP_0LEN_ARRAY | GOVD_FIRSTPRIVATE))
8303 nflags |= ctx->defaultmap[gdmk];
8304 else if (ctx->defaultmap[gdmk] & GOVD_MAP_FORCE_PRESENT)
8305 {
8306 gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
8307 nflags |= ctx->defaultmap[gdmk] | GOVD_MAP_ALLOC_ONLY;
8308 }
8309 else
8310 {
8311 gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
8312 nflags |= ctx->defaultmap[gdmk] & ~GOVD_MAP;
8313 }
8314 }
8315 }
8316
8317 struct gimplify_omp_ctx *octx = ctx->outer_context;
8318 if ((ctx->region_type & ORT_ACC) && octx)
8319 {
8320 /* Look in outer OpenACC contexts, to see if there's a
8321 data attribute for this variable. */
	      omp_notice_variable (octx, decl, in_code);
8323
8324 for (; octx; octx = octx->outer_context)
8325 {
8326 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
8327 break;
8328 splay_tree_node n2
8329 = splay_tree_lookup (octx->variables,
8330 (splay_tree_key) decl);
8331 if (n2)
8332 {
8333 if (octx->region_type == ORT_ACC_HOST_DATA)
8334 error ("variable %qE declared in enclosing "
8335 "%<host_data%> region", DECL_NAME (decl));
8336 nflags |= GOVD_MAP;
8337 if (octx->region_type == ORT_ACC_DATA
8338 && (n2->value & GOVD_MAP_0LEN_ARRAY))
8339 nflags |= GOVD_MAP_0LEN_ARRAY;
8340 goto found_outer;
8341 }
8342 }
8343 }
8344
8345 if ((nflags & ~(GOVD_MAP_TO_ONLY | GOVD_MAP_FROM_ONLY
8346 | GOVD_MAP_ALLOC_ONLY)) == flags)
8347 {
8348 tree type = TREE_TYPE (decl);
8349
8350 if (gimplify_omp_ctxp->target_firstprivatize_array_bases
8351 && omp_privatize_by_reference (decl))
8352 type = TREE_TYPE (type);
8353 if (!omp_mappable_type (type))
8354 {
8355 error ("%qD referenced in target region does not have "
8356 "a mappable type", decl);
8357 nflags |= GOVD_MAP | GOVD_EXPLICIT;
8358 }
8359 else
8360 {
8361 if ((ctx->region_type & ORT_ACC) != 0)
8362 nflags = oacc_default_clause (ctx, decl, flags);
8363 else
8364 nflags |= GOVD_MAP;
8365 }
8366 }
8367 found_outer:
      omp_add_variable (ctx, decl, nflags);
8369 if (ctx->region_type & ORT_ACC)
8370 /* For OpenACC, as remarked above, defer expansion. */
8371 shared = false;
8372 else
8373 shared = (nflags & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0;
8374 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
8375 }
8376 else
8377 {
8378 if (ctx->region_type & ORT_ACC)
8379 /* For OpenACC, as remarked above, defer expansion. */
8380 shared = false;
8381 else
8382 shared = ((n->value | flags)
8383 & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0;
8384 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
8385 /* If nothing changed, there's nothing left to do. */
8386 if ((n->value & flags) == flags)
8387 return ret;
8388 flags |= n->value;
8389 n->value = flags;
8390 }
8391 goto do_outer;
8392 }
8393
8394 if (n == NULL)
8395 {
8396 if (ctx->region_type == ORT_WORKSHARE
8397 || ctx->region_type == ORT_TASKGROUP
8398 || ctx->region_type == ORT_SIMD
8399 || ctx->region_type == ORT_ACC
8400 || (ctx->region_type & ORT_TARGET_DATA) != 0)
8401 goto do_outer;
8402
8403 flags = omp_default_clause (ctx, decl, in_code, flags);
8404
8405 if ((flags & GOVD_PRIVATE)
8406 && lang_hooks.decls.omp_private_outer_ref (decl))
8407 flags |= GOVD_PRIVATE_OUTER_REF;
8408
8409 omp_add_variable (ctx, decl, flags);
8410
8411 shared = (flags & GOVD_SHARED) != 0;
8412 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
8413 goto do_outer;
8414 }
8415
  /* Don't mark as GOVD_SEEN addressable temporaries seen only in simd
     lb, b or incr expressions; those shouldn't be turned into simd arrays.  */
8418 if (ctx->region_type == ORT_SIMD
8419 && ctx->in_for_exprs
8420 && ((n->value & (GOVD_PRIVATE | GOVD_SEEN | GOVD_EXPLICIT))
8421 == GOVD_PRIVATE))
8422 flags &= ~GOVD_SEEN;
8423
8424 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
8425 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
8426 && DECL_SIZE (decl))
8427 {
8428 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
8429 {
8430 splay_tree_node n2;
8431 tree t = DECL_VALUE_EXPR (decl);
8432 gcc_assert (INDIRECT_REF_P (t));
8433 t = TREE_OPERAND (t, 0);
8434 gcc_assert (DECL_P (t));
8435 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
8436 n2->value |= GOVD_SEEN;
8437 }
8438 else if (omp_privatize_by_reference (decl)
8439 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
8440 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
8441 != INTEGER_CST))
8442 {
8443 splay_tree_node n2;
8444 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
8445 gcc_assert (DECL_P (t));
8446 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
8447 if (n2)
	    omp_notice_variable (ctx, t, true);
8449 }
8450 }
8451
8452 if (ctx->region_type & ORT_ACC)
8453 /* For OpenACC, as remarked above, defer expansion. */
8454 shared = false;
8455 else
8456 shared = ((flags | n->value) & GOVD_SHARED) != 0;
8457 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
8458
8459 /* If nothing changed, there's nothing left to do. */
8460 if ((n->value & flags) == flags)
8461 return ret;
8462 flags |= n->value;
8463 n->value = flags;
8464
8465 do_outer:
8466 /* If the variable is private in the current context, then we don't
8467 need to propagate anything to an outer context. */
8468 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
8469 return ret;
8470 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8471 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8472 return ret;
8473 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
8474 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8475 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8476 return ret;
8477 if (ctx->outer_context
      && omp_notice_variable (ctx->outer_context, decl, in_code))
8479 return true;
8480 return ret;
8481}
8482
8483/* Verify that DECL is private within CTX. If there's specific information
8484 to the contrary in the innermost scope, generate an error. */
8485
8486static bool
8487omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
8488{
8489 splay_tree_node n;
8490
8491 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
8492 if (n != NULL)
8493 {
8494 if (n->value & GOVD_SHARED)
8495 {
8496 if (ctx == gimplify_omp_ctxp)
8497 {
8498 if (simd)
8499 error ("iteration variable %qE is predetermined linear",
8500 DECL_NAME (decl));
8501 else
8502 error ("iteration variable %qE should be private",
8503 DECL_NAME (decl));
8504 n->value = GOVD_PRIVATE;
8505 return true;
8506 }
8507 else
8508 return false;
8509 }
8510 else if ((n->value & GOVD_EXPLICIT) != 0
8511 && (ctx == gimplify_omp_ctxp
8512 || (ctx->region_type == ORT_COMBINED_PARALLEL
8513 && gimplify_omp_ctxp->outer_context == ctx)))
8514 {
8515 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
8516 error ("iteration variable %qE should not be firstprivate",
8517 DECL_NAME (decl));
8518 else if ((n->value & GOVD_REDUCTION) != 0)
8519 error ("iteration variable %qE should not be reduction",
8520 DECL_NAME (decl));
8521 else if (simd != 1 && (n->value & GOVD_LINEAR) != 0)
8522 error ("iteration variable %qE should not be linear",
8523 DECL_NAME (decl));
8524 }
8525 return (ctx == gimplify_omp_ctxp
8526 || (ctx->region_type == ORT_COMBINED_PARALLEL
8527 && gimplify_omp_ctxp->outer_context == ctx));
8528 }
8529
8530 if (ctx->region_type != ORT_WORKSHARE
8531 && ctx->region_type != ORT_TASKGROUP
8532 && ctx->region_type != ORT_SIMD
8533 && ctx->region_type != ORT_ACC)
8534 return false;
8535 else if (ctx->outer_context)
8536 return omp_is_private (ctx->outer_context, decl, simd);
8537 return false;
8538}
8539
8540/* Return true if DECL is private within a parallel region
8541 that binds to the current construct's context, or appears in such a
8542 parallel region's REDUCTION clause. */
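
/* As an illustrative sketch (not taken from the testsuite), given

     #pragma omp parallel private (x)
     {
       #pragma omp single copyprivate (x)
       ...
     }

   the lookup walks out from the 'single' context, finds x recorded with
   GOVD_PRIVATE (and thus without GOVD_SHARED) in the enclosing parallel
   context, and returns true. */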
8543
8544static bool
8545omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
8546{
8547 splay_tree_node n;
8548
8549 do
8550 {
8551 ctx = ctx->outer_context;
8552 if (ctx == NULL)
8553 {
8554 if (is_global_var (decl))
8555 return false;
8556
8557 /* References might be private, but they might be shared too.
8558 When checking for copyprivate, assume they might be private;
8559 otherwise assume they might be shared. */
8560 if (copyprivate)
8561 return true;
8562
8563 if (omp_privatize_by_reference (decl))
8564 return false;
8565
8566 /* Treat C++ privatized non-static data members outside
8567 of the privatization the same. */
8568 if (omp_member_access_dummy_var (decl))
8569 return false;
8570
8571 return true;
8572 }
8573
8574 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8575
8576 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
8577 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
8578 {
8579 if ((ctx->region_type & ORT_TARGET_DATA) != 0
8580 || n == NULL
8581 || (n->value & GOVD_MAP) == 0)
8582 continue;
8583 return false;
8584 }
8585
8586 if (n != NULL)
8587 {
8588 if ((n->value & GOVD_LOCAL) != 0
8589 && omp_member_access_dummy_var (decl))
8590 return false;
8591 return (n->value & GOVD_SHARED) == 0;
8592 }
8593
8594 if (ctx->region_type == ORT_WORKSHARE
8595 || ctx->region_type == ORT_TASKGROUP
8596 || ctx->region_type == ORT_SIMD
8597 || ctx->region_type == ORT_ACC)
8598 continue;
8599
8600 break;
8601 }
8602 while (1);
8603 return false;
8604}
8605
8606/* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
8607
8608static tree
8609find_decl_expr (tree *tp, int *walk_subtrees, void *data)
8610{
8611 tree t = *tp;
8612
8613 /* If this is the DECL_EXPR for our DECL, return it. */
8614 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
8615 return t;
8616
8617 if (IS_TYPE_OR_DECL_P (t))
8618 *walk_subtrees = 0;
8619 return NULL_TREE;
8620}
8621
8622
8623/* Gimplify the affinity clause but effectively ignore it.
8624 Generate:
8625 var = begin;
8626 if ((step > 0) ? var <= end : var > end)
8627 locator_var_expr; */
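
/* For instance (an illustrative clause, not from the testsuite),

     #pragma omp task affinity (iterator (i = 0 : 4 : 2) : a[i])

   expands to roughly

     i = 0;
     if (2 > 0 ? i <= 4 : i > 4)
       a[i];

   inside the iterator's BIND_EXPR, so that only the side effects of
   evaluating the locator expressions remain. */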
8628
8629static void
8630gimplify_omp_affinity (tree *list_p, gimple_seq *pre_p)
8631{
8632 tree last_iter = NULL_TREE;
8633 tree last_bind = NULL_TREE;
8634 tree label = NULL_TREE;
8635 tree *last_body = NULL;
8636 for (tree c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8637 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY)
8638 {
8639 tree t = OMP_CLAUSE_DECL (c);
8640 if (TREE_CODE (t) == TREE_LIST
8641 && TREE_PURPOSE (t)
8642 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8643 {
8644 if (TREE_VALUE (t) == null_pointer_node)
8645 continue;
8646 if (TREE_PURPOSE (t) != last_iter)
8647 {
8648 if (last_bind)
8649 {
8650 append_to_statement_list (label, last_body);
8651 gimplify_and_add (last_bind, pre_p);
8652 last_bind = NULL_TREE;
8653 }
8654 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8655 {
8656 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
8657 is_gimple_val, fb_rvalue) == GS_ERROR
8658 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
8659 is_gimple_val, fb_rvalue) == GS_ERROR
8660 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
8661 is_gimple_val, fb_rvalue) == GS_ERROR
8662 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
8663 is_gimple_val, fb_rvalue)
8664 == GS_ERROR))
8665 return;
8666 }
8667 last_iter = TREE_PURPOSE (t);
8668 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
8669 last_bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block),
8670 NULL, block);
8671 last_body = &BIND_EXPR_BODY (last_bind);
8672 tree cond = NULL_TREE;
8673 location_t loc = OMP_CLAUSE_LOCATION (c);
8674 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8675 {
8676 tree var = TREE_VEC_ELT (it, 0);
8677 tree begin = TREE_VEC_ELT (it, 1);
8678 tree end = TREE_VEC_ELT (it, 2);
8679 tree step = TREE_VEC_ELT (it, 3);
8680 loc = DECL_SOURCE_LOCATION (var);
8681 tree tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8682 var, begin);
8683 append_to_statement_list_force (tem, last_body);
8684
8685 tree cond1 = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8686 step, build_zero_cst (TREE_TYPE (step)));
8687 tree cond2 = fold_build2_loc (loc, LE_EXPR, boolean_type_node,
8688 var, end);
8689 tree cond3 = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8690 var, end);
8691 cond1 = fold_build3_loc (loc, COND_EXPR, boolean_type_node,
8692 cond1, cond2, cond3);
8693 if (cond)
8694 cond = fold_build2_loc (loc, TRUTH_AND_EXPR,
8695 boolean_type_node, cond, cond1);
8696 else
8697 cond = cond1;
8698 }
8699 tree cont_label = create_artificial_label (loc);
8700 label = build1 (LABEL_EXPR, void_type_node, cont_label);
8701 tree tem = fold_build3_loc (loc, COND_EXPR, void_type_node, cond,
8702 void_node,
8703 build_and_jump (&cont_label));
8704 append_to_statement_list_force (tem, last_body);
8705 }
8706 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
8707 {
8708 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t), 0),
8709 last_body);
8710 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
8711 }
8712 if (error_operand_p (TREE_VALUE (t)))
8713 return;
8714 append_to_statement_list_force (TREE_VALUE (t), last_body);
8715 TREE_VALUE (t) = null_pointer_node;
8716 }
8717 else
8718 {
8719 if (last_bind)
8720 {
8721 append_to_statement_list (label, last_body);
8722 gimplify_and_add (last_bind, pre_p);
8723 last_bind = NULL_TREE;
8724 }
8725 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8726 {
8727 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8728 NULL, is_gimple_val, fb_rvalue);
8729 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8730 }
8731 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8732 return;
8733 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8734 is_gimple_lvalue, fb_lvalue) == GS_ERROR)
8735 return;
8736 gimplify_and_add (OMP_CLAUSE_DECL (c), pre_p);
8737 }
8738 }
8739 if (last_bind)
8740 {
8741 append_to_statement_list (label, last_body);
8742 gimplify_and_add (last_bind, pre_p);
8743 }
8744 return;
8745}
8746
8747/* If *LIST_P contains any OpenMP depend clauses with iterators,
8748 lower all the depend clauses by populating the corresponding depend
8749 array. Returns 0 if there are no such depend clauses, or
8750 2 if all depend clauses should be removed, 1 otherwise. */
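
/* A sketch of the depend array layout this populates, assuming the new
   format (i.e. at least one mutexinoutset, depobj or inoutset entry is
   present, so IS_OLD below is false):

     array[0] = 0
     array[1] = total number of depend addresses
     array[2] = number of out/inout addresses
     array[3] = number of mutexinoutset addresses
     array[4] = number of in addresses
     array[5...] = the addresses themselves, grouped by kind

   In the old format only the total and the out/inout count precede the
   addresses. Each inoutset address is stored as a pair (address,
   GOMP_DEPEND_INOUTSET) at the tail of the array, with the main slot
   holding a pointer to that pair. */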
8751
8752static int
8753gimplify_omp_depend (tree *list_p, gimple_seq *pre_p)
8754{
8755 tree c;
8756 gimple *g;
8757 size_t n[5] = { 0, 0, 0, 0, 0 };
8758 bool unused[5];
8759 tree counts[5] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
8760 tree last_iter = NULL_TREE, last_count = NULL_TREE;
8761 size_t i, j;
8762 location_t first_loc = UNKNOWN_LOCATION;
8763
8764 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8765 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
8766 {
8767 switch (OMP_CLAUSE_DEPEND_KIND (c))
8768 {
8769 case OMP_CLAUSE_DEPEND_IN:
8770 i = 2;
8771 break;
8772 case OMP_CLAUSE_DEPEND_OUT:
8773 case OMP_CLAUSE_DEPEND_INOUT:
8774 i = 0;
8775 break;
8776 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8777 i = 1;
8778 break;
8779 case OMP_CLAUSE_DEPEND_DEPOBJ:
8780 i = 3;
8781 break;
8782 case OMP_CLAUSE_DEPEND_INOUTSET:
8783 i = 4;
8784 break;
8785 default:
8786 gcc_unreachable ();
8787 }
8788 tree t = OMP_CLAUSE_DECL (c);
8789 if (first_loc == UNKNOWN_LOCATION)
8790 first_loc = OMP_CLAUSE_LOCATION (c);
8791 if (TREE_CODE (t) == TREE_LIST
8792 && TREE_PURPOSE (t)
8793 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8794 {
8795 if (TREE_PURPOSE (t) != last_iter)
8796 {
8797 tree tcnt = size_one_node;
8798 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8799 {
8800 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
8801 is_gimple_val, fb_rvalue) == GS_ERROR
8802 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
8803 is_gimple_val, fb_rvalue) == GS_ERROR
8804 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
8805 is_gimple_val, fb_rvalue) == GS_ERROR
8806 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
8807 is_gimple_val, fb_rvalue)
8808 == GS_ERROR))
8809 return 2;
8810 tree var = TREE_VEC_ELT (it, 0);
8811 tree begin = TREE_VEC_ELT (it, 1);
8812 tree end = TREE_VEC_ELT (it, 2);
8813 tree step = TREE_VEC_ELT (it, 3);
8814 tree orig_step = TREE_VEC_ELT (it, 4);
8815 tree type = TREE_TYPE (var);
8816 tree stype = TREE_TYPE (step);
8817 location_t loc = DECL_SOURCE_LOCATION (var);
8818 tree endmbegin;
8819 /* Compute count for this iterator as
8820 orig_step > 0
8821 ? (begin < end ? (end - begin + (step - 1)) / step : 0)
8822 : (begin > end ? (end - begin + (step + 1)) / step : 0)
8823 and compute product of those for the entire depend
8824 clause. */
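/* E.g. with begin = 0, end = 10, step = 3 and a positive orig_step
   (illustrative numbers only): (10 - 0 + (3 - 1)) / 3 == 4
   iterations, namely 0, 3, 6 and 9. */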
8825 if (POINTER_TYPE_P (type))
8826 endmbegin = fold_build2_loc (loc, POINTER_DIFF_EXPR,
8827 stype, end, begin);
8828 else
8829 endmbegin = fold_build2_loc (loc, MINUS_EXPR, type,
8830 end, begin);
8831 tree stepm1 = fold_build2_loc (loc, MINUS_EXPR, stype,
8832 step,
8833 build_int_cst (stype, 1));
8834 tree stepp1 = fold_build2_loc (loc, PLUS_EXPR, stype, step,
8835 build_int_cst (stype, 1));
8836 tree pos = fold_build2_loc (loc, PLUS_EXPR, stype,
8837 unshare_expr (endmbegin),
8838 stepm1);
8839 pos = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
8840 pos, step);
8841 tree neg = fold_build2_loc (loc, PLUS_EXPR, stype,
8842 endmbegin, stepp1);
8843 if (TYPE_UNSIGNED (stype))
8844 {
8845 neg = fold_build1_loc (loc, NEGATE_EXPR, stype, neg);
8846 step = fold_build1_loc (loc, NEGATE_EXPR, stype, step);
8847 }
8848 neg = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
8849 neg, step);
8850 step = NULL_TREE;
8851 tree cond = fold_build2_loc (loc, LT_EXPR,
8852 boolean_type_node,
8853 begin, end);
8854 pos = fold_build3_loc (loc, COND_EXPR, stype, cond, pos,
8855 build_int_cst (stype, 0));
8856 cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node,
8857 end, begin);
8858 neg = fold_build3_loc (loc, COND_EXPR, stype, cond, neg,
8859 build_int_cst (stype, 0));
8860 tree osteptype = TREE_TYPE (orig_step);
8861 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8862 orig_step,
8863 build_int_cst (osteptype, 0));
8864 tree cnt = fold_build3_loc (loc, COND_EXPR, stype,
8865 cond, pos, neg);
8866 cnt = fold_convert_loc (loc, sizetype, cnt);
8867 if (gimplify_expr (&cnt, pre_p, NULL, is_gimple_val,
8868 fb_rvalue) == GS_ERROR)
8869 return 2;
8870 tcnt = size_binop_loc (loc, MULT_EXPR, tcnt, cnt);
8871 }
8872 if (gimplify_expr (&tcnt, pre_p, NULL, is_gimple_val,
8873 fb_rvalue) == GS_ERROR)
8874 return 2;
8875 last_iter = TREE_PURPOSE (t);
8876 last_count = tcnt;
8877 }
8878 if (counts[i] == NULL_TREE)
8879 counts[i] = last_count;
8880 else
8881 counts[i] = size_binop_loc (OMP_CLAUSE_LOCATION (c),
8882 PLUS_EXPR, counts[i], last_count);
8883 }
8884 else
8885 n[i]++;
8886 }
8887 for (i = 0; i < 5; i++)
8888 if (counts[i])
8889 break;
8890 if (i == 5)
8891 return 0;
8892
8893 tree total = size_zero_node;
8894 for (i = 0; i < 5; i++)
8895 {
8896 unused[i] = counts[i] == NULL_TREE && n[i] == 0;
8897 if (counts[i] == NULL_TREE)
8898 counts[i] = size_zero_node;
8899 if (n[i])
8900 counts[i] = size_binop (PLUS_EXPR, counts[i], size_int (n[i]));
8901 if (gimplify_expr (&counts[i], pre_p, NULL, is_gimple_val,
8902 fb_rvalue) == GS_ERROR)
8903 return 2;
8904 total = size_binop (PLUS_EXPR, total, counts[i]);
8905 }
8906
8907 if (gimplify_expr (&total, pre_p, NULL, is_gimple_val, fb_rvalue)
8908 == GS_ERROR)
8909 return 2;
8910 bool is_old = unused[1] && unused[3] && unused[4];
8911 tree totalpx = size_binop (PLUS_EXPR, unshare_expr (total),
8912 size_int (is_old ? 1 : 4));
8913 if (!unused[4])
8914 totalpx = size_binop (PLUS_EXPR, totalpx,
8915 size_binop (MULT_EXPR, counts[4], size_int (2)));
8916 tree type = build_array_type (ptr_type_node, build_index_type (totalpx));
8917 tree array = create_tmp_var_raw (type);
8918 TREE_ADDRESSABLE (array) = 1;
8919 if (!poly_int_tree_p (totalpx))
8920 {
8921 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array)))
8922 gimplify_type_sizes (TREE_TYPE (array), pre_p);
8923 if (gimplify_omp_ctxp)
8924 {
8925 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8926 while (ctx
8927 && (ctx->region_type == ORT_WORKSHARE
8928 || ctx->region_type == ORT_TASKGROUP
8929 || ctx->region_type == ORT_SIMD
8930 || ctx->region_type == ORT_ACC))
8931 ctx = ctx->outer_context;
8932 if (ctx)
8933 omp_add_variable (ctx, array, GOVD_LOCAL | GOVD_SEEN);
8934 }
8935 gimplify_vla_decl (array, pre_p);
8936 }
8937 else
8938 gimple_add_tmp_var (array);
8939 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
8940 NULL_TREE);
8941 tree tem;
8942 if (!is_old)
8943 {
8944 tem = build2 (MODIFY_EXPR, void_type_node, r,
8945 build_int_cst (ptr_type_node, 0));
8946 gimplify_and_add (tem, pre_p);
8947 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
8948 NULL_TREE);
8949 }
8950 tem = build2 (MODIFY_EXPR, void_type_node, r,
8951 fold_convert (ptr_type_node, total));
8952 gimplify_and_add (tem, pre_p);
8953 for (i = 1; i < (is_old ? 2 : 4); i++)
8954 {
8955 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (i + !is_old),
8956 NULL_TREE, NULL_TREE);
8957 tem = build2 (MODIFY_EXPR, void_type_node, r, counts[i - 1]);
8958 gimplify_and_add (tem, pre_p);
8959 }
8960
8961 tree cnts[6];
8962 for (j = 5; j; j--)
8963 if (!unused[j - 1])
8964 break;
8965 for (i = 0; i < 5; i++)
8966 {
8967 if (i && (i >= j || unused[i - 1]))
8968 {
8969 cnts[i] = cnts[i - 1];
8970 continue;
8971 }
8972 cnts[i] = create_tmp_var (sizetype);
8973 if (i == 0)
8974 g = gimple_build_assign (cnts[i], size_int (is_old ? 2 : 5));
8975 else
8976 {
8977 tree t;
8978 if (is_old)
8979 t = size_binop (PLUS_EXPR, counts[0], size_int (2));
8980 else
8981 t = size_binop (PLUS_EXPR, cnts[i - 1], counts[i - 1]);
8982 if (gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue)
8983 == GS_ERROR)
8984 return 2;
8985 g = gimple_build_assign (cnts[i], t);
8986 }
8987 gimple_seq_add_stmt (pre_p, g);
8988 }
8989 if (unused[4])
8990 cnts[5] = NULL_TREE;
8991 else
8992 {
8993 tree t = size_binop (PLUS_EXPR, total, size_int (5));
8994 cnts[5] = create_tmp_var (sizetype);
8995 g = gimple_build_assign (cnts[5], t);
8996 gimple_seq_add_stmt (pre_p, g);
8997 }
8998
8999 last_iter = NULL_TREE;
9000 tree last_bind = NULL_TREE;
9001 tree *last_body = NULL;
9002 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
9003 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
9004 {
9005 switch (OMP_CLAUSE_DEPEND_KIND (c))
9006 {
9007 case OMP_CLAUSE_DEPEND_IN:
9008 i = 2;
9009 break;
9010 case OMP_CLAUSE_DEPEND_OUT:
9011 case OMP_CLAUSE_DEPEND_INOUT:
9012 i = 0;
9013 break;
9014 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
9015 i = 1;
9016 break;
9017 case OMP_CLAUSE_DEPEND_DEPOBJ:
9018 i = 3;
9019 break;
9020 case OMP_CLAUSE_DEPEND_INOUTSET:
9021 i = 4;
9022 break;
9023 default:
9024 gcc_unreachable ();
9025 }
9026 tree t = OMP_CLAUSE_DECL (c);
9027 if (TREE_CODE (t) == TREE_LIST
9028 && TREE_PURPOSE (t)
9029 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
9030 {
9031 if (TREE_PURPOSE (t) != last_iter)
9032 {
9033 if (last_bind)
9034 gimplify_and_add (last_bind, pre_p);
9035 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
9036 last_bind = build3 (BIND_EXPR, void_type_node,
9037 BLOCK_VARS (block), NULL, block);
9038 TREE_SIDE_EFFECTS (last_bind) = 1;
9039 SET_EXPR_LOCATION (last_bind, OMP_CLAUSE_LOCATION (c));
9040 tree *p = &BIND_EXPR_BODY (last_bind);
9041 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
9042 {
9043 tree var = TREE_VEC_ELT (it, 0);
9044 tree begin = TREE_VEC_ELT (it, 1);
9045 tree end = TREE_VEC_ELT (it, 2);
9046 tree step = TREE_VEC_ELT (it, 3);
9047 tree orig_step = TREE_VEC_ELT (it, 4);
9048 tree type = TREE_TYPE (var);
9049 location_t loc = DECL_SOURCE_LOCATION (var);
9050 /* Emit:
9051 var = begin;
9052 goto cond_label;
9053 beg_label:
9054 ...
9055 var = var + step;
9056 cond_label:
9057 if (orig_step > 0) {
9058 if (var < end) goto beg_label;
9059 } else {
9060 if (var > end) goto beg_label;
9061 }
9062 for each iterator, with inner iterators added to
9063 the ... above. */
9064 tree beg_label = create_artificial_label (loc);
9065 tree cond_label = NULL_TREE;
9066 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
9067 var, begin);
9068 append_to_statement_list_force (tem, p);
9069 tem = build_and_jump (&cond_label);
9070 append_to_statement_list_force (tem, p);
9071 tem = build1 (LABEL_EXPR, void_type_node, beg_label);
9072 append_to_statement_list (tem, p);
9073 tree bind = build3 (BIND_EXPR, void_type_node, NULL_TREE,
9074 NULL_TREE, NULL_TREE);
9075 TREE_SIDE_EFFECTS (bind) = 1;
9076 SET_EXPR_LOCATION (bind, loc);
9077 append_to_statement_list_force (bind, p);
9078 if (POINTER_TYPE_P (type))
9079 tem = build2_loc (loc, POINTER_PLUS_EXPR, type,
9080 var, fold_convert_loc (loc, sizetype,
9081 step));
9082 else
9083 tem = build2_loc (loc, PLUS_EXPR, type, var, step);
9084 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
9085 var, tem);
9086 append_to_statement_list_force (tem, p);
9087 tem = build1 (LABEL_EXPR, void_type_node, cond_label);
9088 append_to_statement_list (tem, p);
9089 tree cond = fold_build2_loc (loc, LT_EXPR,
9090 boolean_type_node,
9091 var, end);
9092 tree pos
9093 = fold_build3_loc (loc, COND_EXPR, void_type_node,
9094 cond, build_and_jump (&beg_label),
9095 void_node);
9096 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
9097 var, end);
9098 tree neg
9099 = fold_build3_loc (loc, COND_EXPR, void_type_node,
9100 cond, build_and_jump (&beg_label),
9101 void_node);
9102 tree osteptype = TREE_TYPE (orig_step);
9103 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
9104 orig_step,
9105 build_int_cst (osteptype, 0));
9106 tem = fold_build3_loc (loc, COND_EXPR, void_type_node,
9107 cond, pos, neg);
9108 append_to_statement_list_force (tem, p);
9109 p = &BIND_EXPR_BODY (bind);
9110 }
9111 last_body = p;
9112 }
9113 last_iter = TREE_PURPOSE (t);
9114 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
9115 {
9116 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t),
9117 0), last_body);
9118 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
9119 }
9120 if (error_operand_p (TREE_VALUE (t)))
9121 return 2;
9122 if (TREE_VALUE (t) != null_pointer_node)
9123 TREE_VALUE (t) = build_fold_addr_expr (TREE_VALUE (t));
9124 if (i == 4)
9125 {
9126 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
9127 NULL_TREE, NULL_TREE);
9128 tree r2 = build4 (ARRAY_REF, ptr_type_node, array, cnts[5],
9129 NULL_TREE, NULL_TREE);
9130 r2 = build_fold_addr_expr_with_type (r2, ptr_type_node);
9131 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
9132 void_type_node, r, r2);
9133 append_to_statement_list_force (tem, last_body);
9134 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
9135 void_type_node, cnts[i],
9136 size_binop (PLUS_EXPR, cnts[i],
9137 size_int (1)));
9138 append_to_statement_list_force (tem, last_body);
9139 i = 5;
9140 }
9141 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
9142 NULL_TREE, NULL_TREE);
9143 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
9144 void_type_node, r, TREE_VALUE (t));
9145 append_to_statement_list_force (tem, last_body);
9146 if (i == 5)
9147 {
9148 r = build4 (ARRAY_REF, ptr_type_node, array,
9149 size_binop (PLUS_EXPR, cnts[i], size_int (1)),
9150 NULL_TREE, NULL_TREE);
9151 tem = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
9152 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
9153 void_type_node, r, tem);
9154 append_to_statement_list_force (tem, last_body);
9155 }
9156 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
9157 void_type_node, cnts[i],
9158 size_binop (PLUS_EXPR, cnts[i],
9159 size_int (1 + (i == 5))));
9160 append_to_statement_list_force (tem, last_body);
9161 TREE_VALUE (t) = null_pointer_node;
9162 }
9163 else
9164 {
9165 if (last_bind)
9166 {
9167 gimplify_and_add (last_bind, pre_p);
9168 last_bind = NULL_TREE;
9169 }
9170 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
9171 {
9172 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
9173 NULL, is_gimple_val, fb_rvalue);
9174 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
9175 }
9176 if (error_operand_p (OMP_CLAUSE_DECL (c)))
9177 return 2;
9178 if (OMP_CLAUSE_DECL (c) != null_pointer_node)
9179 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
9180 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
9181 is_gimple_val, fb_rvalue) == GS_ERROR)
9182 return 2;
9183 if (i == 4)
9184 {
9185 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
9186 NULL_TREE, NULL_TREE);
9187 tree r2 = build4 (ARRAY_REF, ptr_type_node, array, cnts[5],
9188 NULL_TREE, NULL_TREE);
9189 r2 = build_fold_addr_expr_with_type (r2, ptr_type_node);
9190 tem = build2 (MODIFY_EXPR, void_type_node, r, r2);
9191 gimplify_and_add (tem, pre_p);
9192 g = gimple_build_assign (cnts[i], size_binop (PLUS_EXPR,
9193 cnts[i],
9194 size_int (1)));
9195 gimple_seq_add_stmt (pre_p, g);
9196 i = 5;
9197 }
9198 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
9199 NULL_TREE, NULL_TREE);
9200 tem = build2 (MODIFY_EXPR, void_type_node, r, OMP_CLAUSE_DECL (c));
9201 gimplify_and_add (tem, pre_p);
9202 if (i == 5)
9203 {
9204 r = build4 (ARRAY_REF, ptr_type_node, array,
9205 size_binop (PLUS_EXPR, cnts[i], size_int (1)),
9206 NULL_TREE, NULL_TREE);
9207 tem = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
9208 tem = build2 (MODIFY_EXPR, void_type_node, r, tem);
9210 gimplify_and_add (tem, pre_p);
9211 }
9212 g = gimple_build_assign (cnts[i],
9213 size_binop (PLUS_EXPR, cnts[i],
9214 size_int (1 + (i == 5))));
9215 gimple_seq_add_stmt (pre_p, g);
9216 }
9217 }
9218 if (last_bind)
9219 gimplify_and_add (last_bind, pre_p);
9220 tree cond = boolean_false_node;
9221 if (is_old)
9222 {
9223 if (!unused[0])
9224 cond = build2_loc (first_loc, NE_EXPR, boolean_type_node, cnts[0],
9225 size_binop_loc (first_loc, PLUS_EXPR, counts[0],
9226 size_int (2)));
9227 if (!unused[2])
9228 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
9229 build2_loc (first_loc, NE_EXPR, boolean_type_node,
9230 cnts[2],
9231 size_binop_loc (first_loc, PLUS_EXPR,
9232 totalpx,
9233 size_int (1))));
9234 }
9235 else
9236 {
9237 tree prev = size_int (5);
9238 for (i = 0; i < 5; i++)
9239 {
9240 if (unused[i])
9241 continue;
9242 prev = size_binop_loc (first_loc, PLUS_EXPR, counts[i], prev);
9243 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
9244 build2_loc (first_loc, NE_EXPR, boolean_type_node,
9245 cnts[i], unshare_expr (prev)));
9246 }
9247 }
9248 tem = build3_loc (first_loc, COND_EXPR, void_type_node, cond,
9249 build_call_expr_loc (first_loc,
9250 builtin_decl_explicit (BUILT_IN_TRAP),
9251 0), void_node);
9252 gimplify_and_add (tem, pre_p);
9253 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
9254 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
9255 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
9256 OMP_CLAUSE_CHAIN (c) = *list_p;
9257 *list_p = c;
9258 return 1;
9259}
9260
9261/* True if mapping node C maps, or unmaps, a (Fortran) array descriptor. */
9262
9263static bool
9264omp_map_clause_descriptor_p (tree c)
9265{
9266 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
9267 return false;
9268
9269 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET)
9270 return true;
9271
9272 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_RELEASE
9273 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DELETE)
9274 && OMP_CLAUSE_RELEASE_DESCRIPTOR (c))
9275 return true;
9276
9277 return false;
9278}
9279
9280/* For a set of mappings describing an array section pointed to by a struct
9281 (or derived type, etc.) component, create an "alloc" or "release" node to
9282 insert into a list following a GOMP_MAP_STRUCT node. For some types of
9283 mapping (e.g. Fortran arrays with descriptors), an additional mapping may
9284 be created that is inserted into the list of mapping nodes attached to the
9285 directive being processed -- not part of the sorted list of nodes after
9286 GOMP_MAP_STRUCT.
9287
9288 CODE is the code of the directive being processed. GRP_START and GRP_END
9289 are the first and last of two or three nodes representing this array section
9290 mapping (e.g. a data movement node like GOMP_MAP_{TO,FROM}, optionally a
9291 GOMP_MAP_TO_PSET, and finally a GOMP_MAP_ALWAYS_POINTER). EXTRA_NODE is
9292 filled with the additional node described above, if needed.
9293
9294 This function does not add the new nodes to any lists itself. It is the
9295 responsibility of the caller to do that. */
9296
9297static tree
9298build_omp_struct_comp_nodes (enum tree_code code, tree grp_start, tree grp_end,
9299 tree *extra_node)
9300{
9301 enum gomp_map_kind mkind
9302 = (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA)
9303 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
9304
9305 gcc_assert (grp_start != grp_end);
9306
9307 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
9308 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
9309 OMP_CLAUSE_DECL (c2) = unshare_expr (OMP_CLAUSE_DECL (grp_end));
9310 OMP_CLAUSE_CHAIN (c2) = NULL_TREE;
9311 tree grp_mid = NULL_TREE;
9312 if (OMP_CLAUSE_CHAIN (grp_start) != grp_end)
9313 grp_mid = OMP_CLAUSE_CHAIN (grp_start);
9314
9315 if (grp_mid && omp_map_clause_descriptor_p (grp_mid))
9316 OMP_CLAUSE_SIZE (c2) = OMP_CLAUSE_SIZE (grp_mid);
9317 else
9318 OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (ptr_type_node);
9319
9320 if (grp_mid
9321 && OMP_CLAUSE_CODE (grp_mid) == OMP_CLAUSE_MAP
9322 && OMP_CLAUSE_MAP_KIND (grp_mid) == GOMP_MAP_ALWAYS_POINTER)
9323 {
9324 tree c3
9325 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
9326 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
9327 OMP_CLAUSE_DECL (c3) = unshare_expr (OMP_CLAUSE_DECL (grp_mid));
9328 OMP_CLAUSE_SIZE (c3) = TYPE_SIZE_UNIT (ptr_type_node);
9329 OMP_CLAUSE_CHAIN (c3) = NULL_TREE;
9330
9331 *extra_node = c3;
9332 }
9333 else
9334 *extra_node = NULL_TREE;
9335
9336 return c2;
9337}
9338
9339 /* Strip ARRAY_REFs or an indirect ref off BASE, find the containing object,
9340 and set *BITPOSP to the bit offset and *POFFSETP to the byte offset of
9341 the access, relative to that object. Set *VARIABLE_OFFSET to true if any
9342 part of the offset is not a compile-time constant (in which case only the
9343 constant part is reflected in *POFFSETP), and to false otherwise. */
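
/* For example (an illustrative case): for an access to a byte-aligned
   field at offset 8 in a variable 'v', this returns 'v' with *BITPOSP
   set to 64, *POFFSETP set to 8 and *VARIABLE_OFFSET set to false; for
   v.a[i].b with a non-constant 'i', *VARIABLE_OFFSET is set to true
   instead. */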
9345
9346static tree
9347extract_base_bit_offset (tree base, poly_int64 *bitposp,
9348 poly_offset_int *poffsetp,
9349 bool *variable_offset)
9350{
9351 tree offset;
9352 poly_int64 bitsize, bitpos;
9353 machine_mode mode;
9354 int unsignedp, reversep, volatilep = 0;
9355 poly_offset_int poffset;
9356
9357 STRIP_NOPS (base);
9358
9359 base = get_inner_reference (base, &bitsize, &bitpos, &offset, &mode,
9360 &unsignedp, &reversep, &volatilep);
9361
9362 STRIP_NOPS (base);
9363
9364 if (offset && poly_int_tree_p (offset))
9365 {
9366 poffset = wi::to_poly_offset (offset);
9367 *variable_offset = false;
9368 }
9369 else
9370 {
9371 poffset = 0;
9372 *variable_offset = (offset != NULL_TREE);
9373 }
9374
9375 if (maybe_ne (bitpos, 0))
9376 poffset += bits_to_bytes_round_down (bitpos);
9377
9378 *bitposp = bitpos;
9379 *poffsetp = poffset;
9380
9381 return base;
9382}
9383
9384/* Used for topological sorting of mapping groups. UNVISITED means we haven't
9385 started processing the group yet. The TEMPORARY mark is used when we first
9386 encounter a group on a depth-first traversal, and the PERMANENT mark is used
9387 when we have processed all the group's children (i.e. all the base pointers
9388 referred to by the group's mapping nodes, recursively). */
9389
9390enum omp_tsort_mark {
9391 UNVISITED,
9392 TEMPORARY,
9393 PERMANENT
9394};
9395
9396/* Hash for trees based on operand_equal_p. Like tree_operand_hash
9397 but ignores side effects in the equality comparisons. */
9398
9399struct tree_operand_hash_no_se : tree_operand_hash
9400{
9401 static inline bool equal (const value_type &,
9402 const compare_type &);
9403};
9404
9405inline bool
9406tree_operand_hash_no_se::equal (const value_type &t1,
9407 const compare_type &t2)
9408{
9409 return operand_equal_p (t1, t2, OEP_MATCH_SIDE_EFFECTS);
9410}
9411
9412/* A group of OMP_CLAUSE_MAP nodes that correspond to a single "map"
9413 clause. */
9414
9415struct omp_mapping_group {
9416 tree *grp_start;
9417 tree grp_end;
9418 omp_tsort_mark mark;
9419 /* If we've removed the group but need to reindex, mark the group as
9420 deleted. */
9421 bool deleted;
9422 /* The group points to an already-created "GOMP_MAP_STRUCT
9423 GOMP_MAP_ATTACH_DETACH" pair. */
9424 bool reprocess_struct;
9425 /* The group should use "zero-length" allocations for pointers that are not
9426 mapped "to" on the same directive. */
9427 bool fragile;
9428 struct omp_mapping_group *sibling;
9429 struct omp_mapping_group *next;
9430};
9431
9432DEBUG_FUNCTION void
9433debug_mapping_group (omp_mapping_group *grp)
9434{
9435 tree tmp = OMP_CLAUSE_CHAIN (grp->grp_end);
9436 OMP_CLAUSE_CHAIN (grp->grp_end) = NULL;
9437 debug_generic_expr (*grp->grp_start);
9438 OMP_CLAUSE_CHAIN (grp->grp_end) = tmp;
9439}
9440
9441/* Return the OpenMP "base pointer" of an expression EXPR, or NULL if there
9442 isn't one. */
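
/* For example, for an expression like p->arr[5] (with 'p' a pointer to
   a struct) the base pointer is 'p'; a plain s.x has no base pointer,
   so NULL_TREE is returned. (Illustrative cases only.) */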
9443
9444static tree
9445omp_get_base_pointer (tree expr)
9446{
9447 while (TREE_CODE (expr) == ARRAY_REF
9448 || TREE_CODE (expr) == COMPONENT_REF)
9449 expr = TREE_OPERAND (expr, 0);
9450
9451 if (INDIRECT_REF_P (expr)
9452 || (TREE_CODE (expr) == MEM_REF
9453 && integer_zerop (TREE_OPERAND (expr, 1))))
9454 {
9455 expr = TREE_OPERAND (expr, 0);
9456 while (TREE_CODE (expr) == COMPOUND_EXPR)
9457 expr = TREE_OPERAND (expr, 1);
9458 if (TREE_CODE (expr) == POINTER_PLUS_EXPR)
9459 expr = TREE_OPERAND (expr, 0);
9460 if (TREE_CODE (expr) == SAVE_EXPR)
9461 expr = TREE_OPERAND (expr, 0);
9462 STRIP_NOPS (expr);
9463 return expr;
9464 }
9465
9466 return NULL_TREE;
9467}
9468
9469/* An attach or detach operation depends directly on the address being
9470 attached/detached. Return that address, or none if there are no
9471 attachments/detachments. */
9472
9473static tree
9474omp_get_attachment (omp_mapping_group *grp)
9475{
9476 tree node = *grp->grp_start;
9477
9478 switch (OMP_CLAUSE_MAP_KIND (node))
9479 {
9480 case GOMP_MAP_TO:
9481 case GOMP_MAP_FROM:
9482 case GOMP_MAP_TOFROM:
9483 case GOMP_MAP_ALWAYS_FROM:
9484 case GOMP_MAP_ALWAYS_TO:
9485 case GOMP_MAP_ALWAYS_TOFROM:
9486 case GOMP_MAP_FORCE_FROM:
9487 case GOMP_MAP_FORCE_TO:
9488 case GOMP_MAP_FORCE_TOFROM:
9489 case GOMP_MAP_FORCE_PRESENT:
9490 case GOMP_MAP_PRESENT_ALLOC:
9491 case GOMP_MAP_PRESENT_FROM:
9492 case GOMP_MAP_PRESENT_TO:
9493 case GOMP_MAP_PRESENT_TOFROM:
9494 case GOMP_MAP_ALWAYS_PRESENT_FROM:
9495 case GOMP_MAP_ALWAYS_PRESENT_TO:
9496 case GOMP_MAP_ALWAYS_PRESENT_TOFROM:
9497 case GOMP_MAP_ALLOC:
9498 case GOMP_MAP_RELEASE:
9499 case GOMP_MAP_DELETE:
9500 case GOMP_MAP_FORCE_ALLOC:
9501 if (node == grp->grp_end)
9502 return NULL_TREE;
9503
9504 node = OMP_CLAUSE_CHAIN (node);
9505 if (node && omp_map_clause_descriptor_p (node))
9506 {
9507 gcc_assert (node != grp->grp_end);
9508 node = OMP_CLAUSE_CHAIN (node);
9509 }
9510 if (node)
9511 switch (OMP_CLAUSE_MAP_KIND (node))
9512 {
9513 case GOMP_MAP_POINTER:
9514 case GOMP_MAP_ALWAYS_POINTER:
9515 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9516 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9517 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
9518 return NULL_TREE;
9519
9520 case GOMP_MAP_ATTACH_DETACH:
9521 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9522 case GOMP_MAP_DETACH:
9523 return OMP_CLAUSE_DECL (node);
9524
9525 default:
9526 internal_error ("unexpected mapping node");
9527 }
9528 return error_mark_node;
9529
9530 case GOMP_MAP_TO_PSET:
9531 gcc_assert (node != grp->grp_end);
9532 node = OMP_CLAUSE_CHAIN (node);
9533 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_ATTACH
9534 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_DETACH)
9535 return OMP_CLAUSE_DECL (node);
9536 else
9537 internal_error ("unexpected mapping node");
9538 return error_mark_node;
9539
9540 case GOMP_MAP_ATTACH:
9541 case GOMP_MAP_DETACH:
9542 node = OMP_CLAUSE_CHAIN (node);
9543 if (!node || *grp->grp_start == grp->grp_end)
9544 return OMP_CLAUSE_DECL (*grp->grp_start);
9545 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
9546 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9547 return OMP_CLAUSE_DECL (*grp->grp_start);
9548 else
9549 internal_error ("unexpected mapping node");
9550 return error_mark_node;
9551
9552 case GOMP_MAP_STRUCT:
9553 case GOMP_MAP_STRUCT_UNORD:
9554 case GOMP_MAP_FORCE_DEVICEPTR:
9555 case GOMP_MAP_DEVICE_RESIDENT:
9556 case GOMP_MAP_LINK:
9557 case GOMP_MAP_IF_PRESENT:
9558 case GOMP_MAP_FIRSTPRIVATE:
9559 case GOMP_MAP_FIRSTPRIVATE_INT:
9560 case GOMP_MAP_USE_DEVICE_PTR:
9561 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9562 return NULL_TREE;
9563
9564 default:
9565 internal_error ("unexpected mapping node");
9566 }
9567
9568 return error_mark_node;
9569}
9570
9571/* Given a pointer START_P to the start of a group of related (e.g. pointer)
9572 mappings, return the chain pointer to the end of that group in the list. */
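
/* As an illustrative shape (the details vary by language and directive),
   an array-section clause such as "map(to: p[0:n])" may be represented
   by the node pair

     GOMP_MAP_TO [*p, len n]  GOMP_MAP_ATTACH_DETACH [&p]

   and, given a pointer to the TO node, this returns the address of the
   chain pointer holding the ATTACH_DETACH node. */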
9573
9574static tree *
9575omp_group_last (tree *start_p)
9576{
9577 tree c = *start_p, nc, *grp_last_p = start_p;
9578
9579 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP);
9580
9581 nc = OMP_CLAUSE_CHAIN (c);
9582
9583 if (!nc || OMP_CLAUSE_CODE (nc) != OMP_CLAUSE_MAP)
9584 return grp_last_p;
9585
9586 switch (OMP_CLAUSE_MAP_KIND (c))
9587 {
9588 default:
9589 while (nc
9590 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
9591 && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9592 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER
9593 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH_DETACH
9594 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
9595 || (OMP_CLAUSE_MAP_KIND (nc)
9596 == GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION)
9597 || (OMP_CLAUSE_MAP_KIND (nc)
9598 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION)
9599 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_DETACH
9600 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ALWAYS_POINTER
9601 || omp_map_clause_descriptor_p (nc)))
9602 {
9603 tree nc2 = OMP_CLAUSE_CHAIN (nc);
9604 if (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_DETACH)
9605 {
9606 /* In the specific case we're doing "exit data" on an array
9607 slice of a reference-to-pointer struct component, we will see
9608 DETACH followed by ATTACH_DETACH here. We want to treat that
9609 as a single group. In other cases DETACH might represent a
9610 stand-alone "detach" clause, so we don't want to consider
9611 that part of the group. */
9612 if (nc2
9613 && OMP_CLAUSE_CODE (nc2) == OMP_CLAUSE_MAP
9614 && OMP_CLAUSE_MAP_KIND (nc2) == GOMP_MAP_ATTACH_DETACH)
9615 goto consume_two_nodes;
9616 else
9617 break;
9618 }
9619 if (nc2
9620 && OMP_CLAUSE_CODE (nc2) == OMP_CLAUSE_MAP
9621 && (OMP_CLAUSE_MAP_KIND (nc)
9622 == GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION)
9623 && OMP_CLAUSE_MAP_KIND (nc2) == GOMP_MAP_ATTACH)
9624 {
9625 consume_two_nodes:
9626 grp_last_p = &OMP_CLAUSE_CHAIN (nc);
9627 c = nc2;
9628 nc = OMP_CLAUSE_CHAIN (nc2);
9629 }
9630 else
9631 {
9632 grp_last_p = &OMP_CLAUSE_CHAIN (c);
9633 c = nc;
9634 nc = nc2;
9635 }
9636 }
9637 break;
9638
9639 case GOMP_MAP_ATTACH:
9640 case GOMP_MAP_DETACH:
9641 /* This is a weird artifact of how directives are parsed: bare attach or
9642 detach clauses get a subsequent (meaningless) FIRSTPRIVATE_POINTER or
9643 FIRSTPRIVATE_REFERENCE node. FIXME. */
9644 if (nc
9645 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
9646 && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9647 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER))
9648 grp_last_p = &OMP_CLAUSE_CHAIN (c);
9649 break;
9650
9651 case GOMP_MAP_TO_PSET:
9652 if (OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
9653 && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH
9654 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_DETACH))
9655 grp_last_p = &OMP_CLAUSE_CHAIN (c);
9656 break;
9657
9658 case GOMP_MAP_STRUCT:
9659 case GOMP_MAP_STRUCT_UNORD:
9660 {
9661 unsigned HOST_WIDE_INT num_mappings
9662 = tree_to_uhwi (OMP_CLAUSE_SIZE (c));
9663 if (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER
9664 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9665 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH_DETACH)
9666 grp_last_p = &OMP_CLAUSE_CHAIN (*grp_last_p);
9667 for (unsigned i = 0; i < num_mappings; i++)
9668 grp_last_p = &OMP_CLAUSE_CHAIN (*grp_last_p);
9669 }
9670 break;
9671 }
9672
9673 return grp_last_p;
9674}
9675
9676/* Walk through LIST_P, and return a list of groups of mappings found (e.g.
9677 OMP_CLAUSE_MAP with GOMP_MAP_{TO/FROM/TOFROM} followed by one or two
9678 associated GOMP_MAP_POINTER mappings). Return a vector of omp_mapping_group
9679 if we have at least one such group, else return NULL. */
9680
9681static void
9682omp_gather_mapping_groups_1 (tree *list_p, vec<omp_mapping_group> *groups,
9683 tree gather_sentinel)
9684{
9685 for (tree *cp = list_p;
9686 *cp && *cp != gather_sentinel;
9687 cp = &OMP_CLAUSE_CHAIN (*cp))
9688 {
9689 if (OMP_CLAUSE_CODE (*cp) != OMP_CLAUSE_MAP)
9690 continue;
9691
9692 tree *grp_last_p = omp_group_last (cp);
9693 omp_mapping_group grp;
9694
9695 grp.grp_start = cp;
9696 grp.grp_end = *grp_last_p;
9697 grp.mark = UNVISITED;
9698 grp.sibling = NULL;
9699 grp.deleted = false;
9700 grp.reprocess_struct = false;
9701 grp.fragile = false;
9702 grp.next = NULL;
9703 groups->safe_push (grp);
9704
9705 cp = grp_last_p;
9706 }
9707}
9708
9709static vec<omp_mapping_group> *
9710omp_gather_mapping_groups (tree *list_p)
9711{
9712 vec<omp_mapping_group> *groups = new vec<omp_mapping_group> ();
9713
9714 omp_gather_mapping_groups_1 (list_p, groups, NULL_TREE);
9715
9716 if (groups->length () > 0)
9717 return groups;
9718 else
9719 {
9720 delete groups;
9721 return NULL;
9722 }
9723}
9724
9725/* A pointer mapping group GRP may define a block of memory starting at some
9726 base address, and maybe also define a firstprivate pointer or firstprivate
9727 reference that points to that block. The return value is a node containing
9728 the former, and the *FIRSTPRIVATE pointer is set if we have the latter.
9729 If we define several base pointers, i.e. for a GOMP_MAP_STRUCT mapping,
9730 return the number of consecutive chained nodes in CHAINED. */
9731
9732static tree
9733omp_group_base (omp_mapping_group *grp, unsigned int *chained,
9734 tree *firstprivate)
9735{
9736 tree node = *grp->grp_start;
9737
9738 *firstprivate = NULL_TREE;
9739 *chained = 1;
9740
9741 switch (OMP_CLAUSE_MAP_KIND (node))
9742 {
9743 case GOMP_MAP_TO:
9744 case GOMP_MAP_FROM:
9745 case GOMP_MAP_TOFROM:
9746 case GOMP_MAP_ALWAYS_FROM:
9747 case GOMP_MAP_ALWAYS_TO:
9748 case GOMP_MAP_ALWAYS_TOFROM:
9749 case GOMP_MAP_FORCE_FROM:
9750 case GOMP_MAP_FORCE_TO:
9751 case GOMP_MAP_FORCE_TOFROM:
9752 case GOMP_MAP_FORCE_PRESENT:
9753 case GOMP_MAP_PRESENT_ALLOC:
9754 case GOMP_MAP_PRESENT_FROM:
9755 case GOMP_MAP_PRESENT_TO:
9756 case GOMP_MAP_PRESENT_TOFROM:
9757 case GOMP_MAP_ALWAYS_PRESENT_FROM:
9758 case GOMP_MAP_ALWAYS_PRESENT_TO:
9759 case GOMP_MAP_ALWAYS_PRESENT_TOFROM:
9760 case GOMP_MAP_ALLOC:
9761 case GOMP_MAP_RELEASE:
9762 case GOMP_MAP_DELETE:
9763 case GOMP_MAP_FORCE_ALLOC:
9764 case GOMP_MAP_IF_PRESENT:
9765 if (node == grp->grp_end)
9766 return node;
9767
9768 node = OMP_CLAUSE_CHAIN (node);
9769 if (!node)
9770 internal_error ("unexpected mapping node");
9771 if (omp_map_clause_descriptor_p (node))
9772 {
9773 if (node == grp->grp_end)
9774 return *grp->grp_start;
9775 node = OMP_CLAUSE_CHAIN (node);
9776 }
9777 switch (OMP_CLAUSE_MAP_KIND (node))
9778 {
9779 case GOMP_MAP_POINTER:
9780 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9781 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9782 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
9783 *firstprivate = OMP_CLAUSE_DECL (node);
9784 return *grp->grp_start;
9785
9786 case GOMP_MAP_ALWAYS_POINTER:
9787 case GOMP_MAP_ATTACH_DETACH:
9788 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9789 case GOMP_MAP_DETACH:
9790 return *grp->grp_start;
9791
9792 default:
9793 internal_error ("unexpected mapping node");
9794 }
9795 return error_mark_node;
9796
9797 case GOMP_MAP_TO_PSET:
9798 gcc_assert (node != grp->grp_end);
9799 node = OMP_CLAUSE_CHAIN (node);
9800 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_ATTACH
9801 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_DETACH)
9802 return NULL_TREE;
9803 else
9804 internal_error ("unexpected mapping node");
9805 return error_mark_node;
9806
9807 case GOMP_MAP_ATTACH:
9808 case GOMP_MAP_DETACH:
9809 node = OMP_CLAUSE_CHAIN (node);
9810 if (!node || *grp->grp_start == grp->grp_end)
9811 return NULL_TREE;
9812 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
9813 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9814 {
9815 /* We're mapping the base pointer itself in a bare attach or detach
9816 node. This is a side effect of how parsing works, and the mapping
9817 will be removed anyway (at least for enter/exit data directives).
9818 We should ignore the mapping here. FIXME. */
9819 return NULL_TREE;
9820 }
9821 else
9822 internal_error ("unexpected mapping node");
9823 return error_mark_node;
9824
9825 case GOMP_MAP_STRUCT:
9826 case GOMP_MAP_STRUCT_UNORD:
9827 {
9828 unsigned HOST_WIDE_INT num_mappings
9829 = tree_to_uhwi (OMP_CLAUSE_SIZE (node));
9830 node = OMP_CLAUSE_CHAIN (node);
9831 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
9832 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9833 {
9834 *firstprivate = OMP_CLAUSE_DECL (node);
9835 node = OMP_CLAUSE_CHAIN (node);
9836 }
9837 else if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_ATTACH_DETACH)
9838 node = OMP_CLAUSE_CHAIN (node);
9839 *chained = num_mappings;
9840 return node;
9841 }
9842
9843 case GOMP_MAP_FORCE_DEVICEPTR:
9844 case GOMP_MAP_DEVICE_RESIDENT:
9845 case GOMP_MAP_LINK:
9846 case GOMP_MAP_FIRSTPRIVATE:
9847 case GOMP_MAP_FIRSTPRIVATE_INT:
9848 case GOMP_MAP_USE_DEVICE_PTR:
9849 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9850 return NULL_TREE;
9851
9852 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9853 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9854 case GOMP_MAP_POINTER:
9855 case GOMP_MAP_ALWAYS_POINTER:
9856 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
9857 /* These shouldn't appear by themselves. */
9858 if (!seen_error ())
9859 internal_error ("unexpected pointer mapping node");
9860 return error_mark_node;
9861
9862 default:
9863 gcc_unreachable ();
9864 }
9865
9866 return error_mark_node;
9867}
9868
9869/* Given a vector of omp_mapping_groups, build a hash table so we can look up
9870 nodes by tree_operand_hash_no_se. */
9871
9872static void
9873omp_index_mapping_groups_1 (hash_map<tree_operand_hash_no_se,
9874 omp_mapping_group *> *grpmap,
9875 vec<omp_mapping_group> *groups,
9876 tree reindex_sentinel)
9877{
9878 omp_mapping_group *grp;
9879 unsigned int i;
9880 bool reindexing = reindex_sentinel != NULL_TREE, above_hwm = false;
9881
9882 FOR_EACH_VEC_ELT (*groups, i, grp)
9883 {
9884 if (reindexing && *grp->grp_start == reindex_sentinel)
9885 above_hwm = true;
9886
9887 if (reindexing && !above_hwm)
9888 continue;
9889
9890 if (grp->reprocess_struct)
9891 continue;
9892
9893 tree fpp;
9894 unsigned int chained;
9895 tree node = omp_group_base (grp, &chained, &fpp);
9896
9897 if (node == error_mark_node || (!node && !fpp))
9898 continue;
9899
9900 for (unsigned j = 0;
9901 node && j < chained;
9902 node = OMP_CLAUSE_CHAIN (node), j++)
9903 {
9904 tree decl = OMP_CLAUSE_DECL (node);
9905 /* Sometimes we see zero-offset MEM_REF instead of INDIRECT_REF,
9906 meaning node-hash lookups don't work. This is a workaround for
9907 that, but ideally we should just create the INDIRECT_REF at
9908 source instead. FIXME. */
9909 if (TREE_CODE (decl) == MEM_REF
9910 && integer_zerop (TREE_OPERAND (decl, 1)))
9911 decl = build_fold_indirect_ref (TREE_OPERAND (decl, 0));
9912
9913 omp_mapping_group **prev = grpmap->get (decl);
9914
9915 if (prev && *prev == grp)
9916 /* Empty. */;
9917 else if (prev)
9918 {
9919 /* Mapping the same thing twice is normally diagnosed as an error,
9920 but can happen under some circumstances, e.g. in pr99928-16.c,
9921 the directive:
9922
9923 #pragma omp target simd reduction(+:a[:3]) \
9924 map(always, tofrom: a[:6])
9925 ...
9926
9927 will result in two "a[0]" mappings (of different sizes). */
9928
9929 grp->sibling = (*prev)->sibling;
9930 (*prev)->sibling = grp;
9931 }
9932 else
9933 grpmap->put (decl, grp);
9934 }
9935
9936 if (!fpp)
9937 continue;
9938
9939 omp_mapping_group **prev = grpmap->get (fpp);
9940 if (prev && *prev != grp)
9941 {
9942 grp->sibling = (*prev)->sibling;
9943 (*prev)->sibling = grp;
9944 }
9945 else
9946 grpmap->put (fpp, grp);
9947 }
9948}
9949
9950static hash_map<tree_operand_hash_no_se, omp_mapping_group *> *
9951omp_index_mapping_groups (vec<omp_mapping_group> *groups)
9952{
9953 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap
9954 = new hash_map<tree_operand_hash_no_se, omp_mapping_group *>;
9955
9956 omp_index_mapping_groups_1 (grpmap, groups, NULL_TREE);
9957
9958 return grpmap;
9959}
9960
9961/* Rebuild group map from partially-processed clause list (during
9962 omp_build_struct_sibling_lists). We have already processed nodes up until
9963 a high-water mark (HWM). This is a bit tricky because the list is being
9964 reordered as it is scanned, but we know:
9965
9966 1. The list after HWM has not been touched yet, so we can reindex it safely.
9967
9968 2. The list before and including HWM has been altered, but remains
9969 well-formed throughout the sibling-list building operation.
9970
9971 so, we can do the reindex operation in two parts, on the processed and
9972 then the unprocessed halves of the list. */
9973
9974static hash_map<tree_operand_hash_no_se, omp_mapping_group *> *
9975omp_reindex_mapping_groups (tree *list_p,
9976 vec<omp_mapping_group> *groups,
9977 vec<omp_mapping_group> *processed_groups,
9978 tree sentinel)
9979{
9980 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap
9981 = new hash_map<tree_operand_hash_no_se, omp_mapping_group *>;
9982
9983 processed_groups->truncate (0);
9984
9985 omp_gather_mapping_groups_1 (list_p, processed_groups, sentinel);
9986 omp_index_mapping_groups_1 (grpmap, processed_groups, NULL_TREE);
9987 if (sentinel)
9988 omp_index_mapping_groups_1 (grpmap, groups, sentinel);
9989
9990 return grpmap;
9991}
9992
9993/* Find the immediately-containing struct for a component ref (etc.)
9994 expression EXPR. */
9995
9996static tree
9997omp_containing_struct (tree expr)
9998{
9999 tree expr0 = expr;
10000
10001 STRIP_NOPS (expr);
10002
10003 /* Note: don't strip NOPs unless we're also stripping off array refs or a
10004 component ref. */
10005 if (TREE_CODE (expr) != ARRAY_REF && TREE_CODE (expr) != COMPONENT_REF)
10006 return expr0;
10007
10008 while (TREE_CODE (expr) == ARRAY_REF)
10009 expr = TREE_OPERAND (expr, 0);
10010
10011 if (TREE_CODE (expr) == COMPONENT_REF)
10012 expr = TREE_OPERAND (expr, 0);
10013
10014 return expr;
10015}
10016
10017/* Return TRUE if DECL describes a component that is part of a whole structure
10018 that is mapped elsewhere in GRPMAP. *MAPPED_BY_GROUP is set to the group
10019 that maps that structure, if present. */
10020
10021static bool
10022omp_mapped_by_containing_struct (hash_map<tree_operand_hash_no_se,
10023 omp_mapping_group *> *grpmap,
10024 tree decl,
10025 omp_mapping_group **mapped_by_group)
10026{
10027 tree wsdecl = NULL_TREE;
10028
10029 *mapped_by_group = NULL;
10030
10031 while (true)
10032 {
10033 wsdecl = omp_containing_struct (decl);
10034 if (wsdecl == decl)
10035 break;
10036 omp_mapping_group **wholestruct = grpmap->get (wsdecl);
10037 if (!wholestruct
10038 && TREE_CODE (wsdecl) == MEM_REF
10039 && integer_zerop (TREE_OPERAND (wsdecl, 1)))
10040 {
10041 tree deref = TREE_OPERAND (wsdecl, 0);
10042 deref = build_fold_indirect_ref (deref);
10043 wholestruct = grpmap->get (deref);
10044 }
10045 if (wholestruct)
10046 {
10047 *mapped_by_group = *wholestruct;
10048 return true;
10049 }
10050 decl = wsdecl;
10051 }
10052
10053 return false;
10054}
10055
10056/* Helper function for omp_tsort_mapping_groups. Returns TRUE on success, or
10057 FALSE on error. */
10058
10059static bool
10060omp_tsort_mapping_groups_1 (omp_mapping_group ***outlist,
10061 vec<omp_mapping_group> *groups,
10062 hash_map<tree_operand_hash_no_se,
10063 omp_mapping_group *> *grpmap,
10064 omp_mapping_group *grp)
10065{
10066 if (grp->mark == PERMANENT)
10067 return true;
10068 if (grp->mark == TEMPORARY)
10069 {
10070 fprintf (stderr, "when processing group:\n");
10071 debug_mapping_group (grp);
10072 internal_error ("base pointer cycle detected");
10073 return false;
10074 }
10075 grp->mark = TEMPORARY;
10076
10077 tree attaches_to = omp_get_attachment (grp);
10078
10079 if (attaches_to)
10080 {
10081 omp_mapping_group **basep = grpmap->get (attaches_to);
10082
10083 if (basep && *basep != grp)
10084 {
10085 for (omp_mapping_group *w = *basep; w; w = w->sibling)
10086 if (!omp_tsort_mapping_groups_1 (outlist, groups, grpmap, w))
10087 return false;
10088 }
10089 }
10090
10091 tree decl = OMP_CLAUSE_DECL (*grp->grp_start);
10092
10093 while (decl)
10094 {
10095 tree base = omp_get_base_pointer (decl);
10096
10097 if (!base)
10098 break;
10099
10100 omp_mapping_group **innerp = grpmap->get (base);
10101 omp_mapping_group *wholestruct;
10102
10103 /* We should treat whole-structure mappings as if all (pointer, in this
10104 case) members are mapped as individual list items. Check if we have
10105 such a whole-structure mapping, if we don't have an explicit reference
10106 to the pointer member itself. */
10107 if (!innerp
10108 && TREE_CODE (base) == COMPONENT_REF
10109 && omp_mapped_by_containing_struct (grpmap, base, &wholestruct))
10110 innerp = &wholestruct;
10111
10112 if (innerp && *innerp != grp)
10113 {
10114 for (omp_mapping_group *w = *innerp; w; w = w->sibling)
10115 if (!omp_tsort_mapping_groups_1 (outlist, groups, grpmap, w))
10116 return false;
10117 break;
10118 }
10119
10120 decl = base;
10121 }
10122
10123 grp->mark = PERMANENT;
10124
10125 /* Emit grp to output list. */
10126
10127 **outlist = grp;
10128 *outlist = &grp->next;
10129
10130 return true;
10131}
10132
10133/* Topologically sort GROUPS, so that OMP 5.0-defined base pointers come
10134 before mappings that use those pointers. This is an implementation of the
10135 depth-first search algorithm, described e.g. at:
10136
10137 https://en.wikipedia.org/wiki/Topological_sorting
10138*/
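
/* E.g. for an illustrative directive such as

     #pragma omp target map(tofrom: p->x[0:n]) map(to: p)

   the group mapping 'p' is a base pointer for the group mapping
   'p->x[0:n]', so the sorted output emits the 'p' group first, whatever
   its position in the original clause list. */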
10139
10140static omp_mapping_group *
10141omp_tsort_mapping_groups (vec<omp_mapping_group> *groups,
10142 hash_map<tree_operand_hash_no_se, omp_mapping_group *>
10143 *grpmap,
10144 bool enter_exit_data)
10145{
10146 omp_mapping_group *grp, *outlist = NULL, **cursor;
10147 unsigned int i;
10148 bool saw_runtime_implicit = false;
10149
10150 cursor = &outlist;
10151
10152 FOR_EACH_VEC_ELT (*groups, i, grp)
10153 {
10154 if (grp->mark != PERMANENT)
10155 {
10156 if (OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (*grp->grp_start))
10157 {
10158 saw_runtime_implicit = true;
10159 continue;
10160 }
10161 if (!omp_tsort_mapping_groups_1 (&cursor, groups, grpmap, grp))
10162 return NULL;
10163 }
10164 }
10165
10166 if (!saw_runtime_implicit)
10167 return outlist;
10168
10169 FOR_EACH_VEC_ELT (*groups, i, grp)
10170 {
10171 if (grp->mark != PERMANENT
10172 && OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (*grp->grp_start))
10173 {
10174 /* Clear the flag for enter/exit data because it is currently
10175 meaningless for those operations in libgomp. */
10176 if (enter_exit_data)
10177 OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (*grp->grp_start) = 0;
10178
10179 if (!omp_tsort_mapping_groups_1 (&cursor, groups, grpmap, grp))
10180 return NULL;
10181 }
10182 }
10183
10184 return outlist;
10185}
10186
10187/* Split INLIST into three parts:
10188
10189 - "present" alloc/to/from groups
10190 - other to/from groups
10191 - other alloc/release/delete groups
10192
10193 These sub-lists are then concatenated together to form the final list.
10194 Each sub-list retains the order of the original list.
10195 Note that ATTACH nodes are later moved to the end of the list in
10196 gimplify_adjust_omp_clauses, for target regions. */
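
/* As a sketch, an input ordering of groups

     {alloc} {present,to} {tofrom} {release}

   is rearranged into

     {present,to} {tofrom} {alloc} {release}

   i.e. "present" groups first, then other to/from groups, then
   alloc/release/delete groups, preserving relative order within each
   class. */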
10197
10198static omp_mapping_group *
10199omp_segregate_mapping_groups (omp_mapping_group *inlist)
10200{
10201 omp_mapping_group *ard_groups = NULL, *tf_groups = NULL;
10202 omp_mapping_group *p_groups = NULL;
10203 omp_mapping_group **ard_tail = &ard_groups, **tf_tail = &tf_groups;
10204 omp_mapping_group **p_tail = &p_groups;
10205
10206 for (omp_mapping_group *w = inlist; w;)
10207 {
10208 tree c = *w->grp_start;
10209 omp_mapping_group *next = w->next;
10210
10211 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP);
10212
10213 switch (OMP_CLAUSE_MAP_KIND (c))
10214 {
10215 case GOMP_MAP_ALLOC:
10216 case GOMP_MAP_RELEASE:
10217 case GOMP_MAP_DELETE:
10218 *ard_tail = w;
10219 w->next = NULL;
10220 ard_tail = &w->next;
10221 break;
10222
10223 /* These map types are all semantically identical, so are moved into a
10224 single group. They will each be changed into GOMP_MAP_FORCE_PRESENT
10225 in gimplify_adjust_omp_clauses. */
10226 case GOMP_MAP_PRESENT_ALLOC:
10227 case GOMP_MAP_PRESENT_FROM:
10228 case GOMP_MAP_PRESENT_TO:
10229 case GOMP_MAP_PRESENT_TOFROM:
10230 *p_tail = w;
10231 w->next = NULL;
10232 p_tail = &w->next;
10233 break;
10234
10235 default:
10236 *tf_tail = w;
10237 w->next = NULL;
10238 tf_tail = &w->next;
10239 }
10240
10241 w = next;
10242 }
10243
10244 /* Now splice the lists together... */
10245 *tf_tail = ard_groups;
10246 *p_tail = tf_groups;
10247
10248 return p_groups;
10249}
10250
10251/* Given a list LIST_P containing groups of mappings given by GROUPS, reorder
10252 those groups based on the output list of omp_tsort_mapping_groups --
10253 singly-linked, threaded through each element's NEXT pointer starting at
10254 HEAD. Each list element appears exactly once in that linked list.
10255
10256 Each element of GROUPS may correspond to one or several mapping nodes.
10257 Node groups are kept together, and in the reordered list, the positions of
10258 the original groups are reused for the positions of the reordered list.
10259 Hence if we have e.g.
10260
10261 {to ptr ptr} firstprivate {tofrom ptr} ...
10262 ^ ^ ^
10263 first group non-"map" second group
10264
10265 and say the second group contains a base pointer for the first so must be
10266 moved before it, the resulting list will contain:
10267
10268 {tofrom ptr} firstprivate {to ptr ptr} ...
10269 ^ prev. second group ^ prev. first group
10270*/
10271
10272static tree *
10273omp_reorder_mapping_groups (vec<omp_mapping_group> *groups,
10274 omp_mapping_group *head,
10275 tree *list_p)
10276{
10277 omp_mapping_group *grp;
10278 unsigned int i;
10279 unsigned numgroups = groups->length ();
10280 auto_vec<tree> old_heads (numgroups);
10281 auto_vec<tree *> old_headps (numgroups);
10282 auto_vec<tree> new_heads (numgroups);
10283 auto_vec<tree> old_succs (numgroups);
10284 bool map_at_start = (list_p == (*groups)[0].grp_start);
10285
10286 tree *new_grp_tail = NULL;
10287
10288 /* Stash the start & end nodes of each mapping group before we start
10289 modifying the list. */
10290 FOR_EACH_VEC_ELT (*groups, i, grp)
10291 {
10292 old_headps.quick_push (grp->grp_start);
10293 old_heads.quick_push (*grp->grp_start);
10294 old_succs.quick_push (OMP_CLAUSE_CHAIN (grp->grp_end));
10295 }
10296
10297 /* And similarly, the heads of the groups in the order we want to rearrange
10298 the list to. */
10299 for (omp_mapping_group *w = head; w; w = w->next)
10300 new_heads.quick_push (*w->grp_start);
10301
10302 FOR_EACH_VEC_ELT (*groups, i, grp)
10303 {
10304 gcc_assert (head);
10305
10306 if (new_grp_tail && old_succs[i - 1] == old_heads[i])
10307 {
10308 /* a {b c d} {e f g} h i j (original)
10309 -->
10310 a {k l m} {e f g} h i j (inserted new group on last iter)
10311 -->
10312 a {k l m} {n o p} h i j (this time, chain last group to new one)
10313 ^new_grp_tail
10314 */
10315 *new_grp_tail = new_heads[i];
10316 }
10317 else if (new_grp_tail)
10318 {
10319 /* a {b c d} e {f g h} i j k (original)
10320 -->
10321 a {l m n} e {f g h} i j k (gap after last iter's group)
10322 -->
10323 a {l m n} e {o p q} h i j (chain last group to old successor)
10324 ^new_grp_tail
10325 */
10326 *new_grp_tail = old_succs[i - 1];
10327 *old_headps[i] = new_heads[i];
10328 }
10329 else
10330 {
10331 /* The first inserted group -- point to new group, and leave end
10332 open.
10333 a {b c d} e f
10334 -->
10335 a {g h i...
10336 */
10337 *grp->grp_start = new_heads[i];
10338 }
10339
10340 new_grp_tail = &OMP_CLAUSE_CHAIN (head->grp_end);
10341
10342 head = head->next;
10343 }
10344
10345 if (new_grp_tail)
10346 *new_grp_tail = old_succs[numgroups - 1];
10347
10348 gcc_assert (!head);
10349
10350 return map_at_start ? (*groups)[0].grp_start : list_p;
10351}

/* DECL is supposed to have lastprivate semantics in the outer contexts
   of combined/composite constructs, starting with OCTX.
   Add the needed lastprivate, shared or map clause if no data-sharing or
   mapping clause is present.  IMPLICIT_P is true if it is an implicit
   clause (IV on simd), in which case the lastprivate will not be
   copied to some constructs.  */
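/* As an illustrative sketch, for a combined construct such as:

     #pragma omp target teams distribute parallel for simd
     for (i = 0; i < n; i++) ...

   the iteration variable I may end up with an implicit lastprivate on the
   combined workshare context, a shared clause on the combined
   parallel/teams contexts, and a map clause on the combined target, per
   the cases handled below.  */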

static void
omp_lastprivate_for_combined_outer_constructs (struct gimplify_omp_ctx *octx,
                                               tree decl, bool implicit_p)
{
  struct gimplify_omp_ctx *orig_octx = octx;
  for (; octx; octx = octx->outer_context)
    {
      if ((octx->region_type == ORT_COMBINED_PARALLEL
           || (octx->region_type & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS)
          && splay_tree_lookup (octx->variables,
                                (splay_tree_key) decl) == NULL)
        {
          omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
          continue;
        }
      if ((octx->region_type & ORT_TASK) != 0
          && octx->combined_loop
          && splay_tree_lookup (octx->variables,
                                (splay_tree_key) decl) == NULL)
        {
          omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
          continue;
        }
      if (implicit_p
          && octx->region_type == ORT_WORKSHARE
          && octx->combined_loop
          && splay_tree_lookup (octx->variables,
                                (splay_tree_key) decl) == NULL
          && octx->outer_context
          && octx->outer_context->region_type == ORT_COMBINED_PARALLEL
          && splay_tree_lookup (octx->outer_context->variables,
                                (splay_tree_key) decl) == NULL)
        {
          octx = octx->outer_context;
          omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
          continue;
        }
      if ((octx->region_type == ORT_WORKSHARE || octx->region_type == ORT_ACC)
          && octx->combined_loop
          && splay_tree_lookup (octx->variables,
                                (splay_tree_key) decl) == NULL
          && !omp_check_private (octx, decl, false))
        {
          omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
          continue;
        }
      if (octx->region_type == ORT_COMBINED_TARGET)
        {
          splay_tree_node n = splay_tree_lookup (octx->variables,
                                                 (splay_tree_key) decl);
          if (n == NULL)
            {
              omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
              octx = octx->outer_context;
            }
          else if (!implicit_p
                   && (n->value & GOVD_FIRSTPRIVATE_IMPLICIT))
            {
              n->value &= ~(GOVD_FIRSTPRIVATE
                            | GOVD_FIRSTPRIVATE_IMPLICIT
                            | GOVD_EXPLICIT);
              omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
              octx = octx->outer_context;
            }
        }
      break;
    }
  if (octx && (implicit_p || octx != orig_octx))
    omp_notice_variable (octx, decl, true);
}

/* We might have indexed several groups for DECL, e.g. a "TO" mapping and also
   a "FIRSTPRIVATE" mapping.  Return the one that isn't firstprivate, etc.  */
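/* For instance (an illustrative sketch): if P is indexed both by a group
   ending in a FIRSTPRIVATE_POINTER node (synthesized for the pointer
   itself) and by an explicit "map(to: p)" group, this function returns
   the latter.  */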

static omp_mapping_group *
omp_get_nonfirstprivate_group (hash_map<tree_operand_hash_no_se,
                                        omp_mapping_group *> *grpmap,
                               tree decl, bool allow_deleted = false)
{
  omp_mapping_group **to_group_p = grpmap->get (decl);

  if (!to_group_p)
    return NULL;

  omp_mapping_group *to_group = *to_group_p;

  for (; to_group; to_group = to_group->sibling)
    {
      tree grp_end = to_group->grp_end;
      switch (OMP_CLAUSE_MAP_KIND (grp_end))
        {
        case GOMP_MAP_FIRSTPRIVATE_POINTER:
        case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
          break;

        default:
          if (allow_deleted || !to_group->deleted)
            return to_group;
        }
    }

  return NULL;
}

/* Return TRUE if the directive (whose clauses are described by the hash table
   of mapping groups, GRPMAP) maps DECL explicitly.  If TO_SPECIFICALLY is
   true, only count TO mappings.  If ALLOW_DELETED is true, ignore the
   "deleted" flag for groups.  If CONTAINED_IN_STRUCT is true, also return
   TRUE if DECL is mapped as a member of a whole-struct mapping.  */
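/* E.g. (illustrative): "map(to: s.a)" maps the component S.A explicitly;
   with CONTAINED_IN_STRUCT set, "map(to: s)" alone also counts for S.A,
   since the member is covered by the whole-struct mapping.  */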

static bool
omp_directive_maps_explicitly (hash_map<tree_operand_hash_no_se,
                                        omp_mapping_group *> *grpmap,
                               tree decl, omp_mapping_group **base_group,
                               bool to_specifically, bool allow_deleted,
                               bool contained_in_struct)
{
  omp_mapping_group *decl_group
    = omp_get_nonfirstprivate_group (grpmap, decl, allow_deleted);

  *base_group = NULL;

  if (decl_group)
    {
      tree grp_first = *decl_group->grp_start;
      /* We might be called during omp_build_struct_sibling_lists, when
         GOMP_MAP_STRUCT might have been inserted at the start of the group.
         Skip over that, and also possibly the node after it.  */
      if (OMP_CLAUSE_MAP_KIND (grp_first) == GOMP_MAP_STRUCT
          || OMP_CLAUSE_MAP_KIND (grp_first) == GOMP_MAP_STRUCT_UNORD)
        {
          grp_first = OMP_CLAUSE_CHAIN (grp_first);
          if (OMP_CLAUSE_MAP_KIND (grp_first) == GOMP_MAP_FIRSTPRIVATE_POINTER
              || (OMP_CLAUSE_MAP_KIND (grp_first)
                  == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
              || OMP_CLAUSE_MAP_KIND (grp_first) == GOMP_MAP_ATTACH_DETACH)
            grp_first = OMP_CLAUSE_CHAIN (grp_first);
        }
      enum gomp_map_kind first_kind = OMP_CLAUSE_MAP_KIND (grp_first);
      if (!to_specifically
          || GOMP_MAP_COPY_TO_P (first_kind)
          || first_kind == GOMP_MAP_ALLOC)
        {
          *base_group = decl_group;
          return true;
        }
    }

  if (contained_in_struct
      && omp_mapped_by_containing_struct (grpmap, decl, base_group))
    return true;

  return false;
}

/* If we have mappings INNER and OUTER, where INNER is a component access and
   OUTER is a mapping of the whole containing struct, check that the mappings
   are compatible.  We'll be deleting the inner mapping, so we need to make
   sure the outer mapping does (at least) the same transfers to/from the
   device as the inner mapping.  */
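/* For example (illustrative): an outer "map(tofrom: s)" is compatible
   with an inner "map(to: s.a)" -- the outer mapping performs at least the
   same transfers -- whereas an outer "map(to: s)" combined with an inner
   "map(from: s.a)" falls through the switch below and is rejected with an
   error.  */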

bool
omp_check_mapping_compatibility (location_t loc,
                                 omp_mapping_group *outer,
                                 omp_mapping_group *inner)
{
  tree first_outer = *outer->grp_start, first_inner = *inner->grp_start;

  gcc_assert (OMP_CLAUSE_CODE (first_outer) == OMP_CLAUSE_MAP);
  gcc_assert (OMP_CLAUSE_CODE (first_inner) == OMP_CLAUSE_MAP);

  enum gomp_map_kind outer_kind = OMP_CLAUSE_MAP_KIND (first_outer);
  enum gomp_map_kind inner_kind = OMP_CLAUSE_MAP_KIND (first_inner);

  if (outer_kind == inner_kind)
    return true;

  switch (outer_kind)
    {
    case GOMP_MAP_ALWAYS_TO:
      if (inner_kind == GOMP_MAP_FORCE_PRESENT
          || inner_kind == GOMP_MAP_ALLOC
          || inner_kind == GOMP_MAP_TO)
        return true;
      break;

    case GOMP_MAP_ALWAYS_FROM:
      if (inner_kind == GOMP_MAP_FORCE_PRESENT
          || inner_kind == GOMP_MAP_RELEASE
          || inner_kind == GOMP_MAP_FROM)
        return true;
      break;

    case GOMP_MAP_TO:
      if (inner_kind == GOMP_MAP_FORCE_PRESENT
          || inner_kind == GOMP_MAP_ALLOC)
        return true;
      break;

    case GOMP_MAP_FROM:
      if (inner_kind == GOMP_MAP_RELEASE
          || inner_kind == GOMP_MAP_FORCE_PRESENT)
        return true;
      break;

    case GOMP_MAP_ALWAYS_TOFROM:
    case GOMP_MAP_TOFROM:
      if (inner_kind == GOMP_MAP_FORCE_PRESENT
          || inner_kind == GOMP_MAP_ALLOC
          || inner_kind == GOMP_MAP_TO
          || inner_kind == GOMP_MAP_FROM
          || inner_kind == GOMP_MAP_TOFROM)
        return true;
      break;

    default:
      ;
    }

  error_at (loc, "data movement for component %qE is not compatible with "
            "movement for struct %qE", OMP_CLAUSE_DECL (first_inner),
            OMP_CLAUSE_DECL (first_outer));

  return false;
}

/* This function handles several cases where clauses on a mapping directive
   can interact with each other.

   If we have a FIRSTPRIVATE_POINTER node and we're also mapping the pointer
   on the same directive, change the mapping of the first node to
   ATTACH_DETACH.  We should have detected that this will happen already in
   c-omp.cc:c_omp_adjust_map_clauses and marked the appropriate decl
   as addressable.  (If we didn't, bail out.)

   If we have a FIRSTPRIVATE_REFERENCE (for a reference to pointer) and we're
   mapping the base pointer also, we may need to change the mapping type to
   ATTACH_DETACH and synthesize an alloc node for the reference itself.

   If we have an ATTACH_DETACH node, this is an array section with a pointer
   base.  If we're mapping the base on the same directive too, we can drop its
   mapping.  However, if we have a reference to pointer, make other
   appropriate adjustments to the mapping nodes instead.

   If we have an ATTACH_DETACH node with a Fortran pointer-set (array
   descriptor) mapping for a derived-type component, and we're also mapping
   the whole of the derived-type variable on another clause, the pointer-set
   mapping is removed.

   If we have a component access but we're also mapping the whole of the
   containing struct, drop the former access.

   If the expression is a component access, and we're also mapping a base
   pointer used in that component access in the same expression, change the
   mapping type of the latter to ALLOC (ready for processing by
   omp_build_struct_sibling_lists).  */
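/* A small illustrative sketch of the first case:

     #pragma omp target map(to: p[0:n]) map(to: p)

   Here the group for P[0:N] ends with a FIRSTPRIVATE_POINTER node for P,
   but P is also mapped explicitly, so that node is changed to
   ATTACH_DETACH: the pointer is attached to the mapped array section
   rather than copied in privately.  */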

void
omp_resolve_clause_dependencies (enum tree_code code,
                                 vec<omp_mapping_group> *groups,
                                 hash_map<tree_operand_hash_no_se,
                                          omp_mapping_group *> *grpmap)
{
  int i;
  omp_mapping_group *grp;
  bool repair_chain = false;

  FOR_EACH_VEC_ELT (*groups, i, grp)
    {
      tree grp_end = grp->grp_end;
      tree decl = OMP_CLAUSE_DECL (grp_end);

      gcc_assert (OMP_CLAUSE_CODE (grp_end) == OMP_CLAUSE_MAP);

      switch (OMP_CLAUSE_MAP_KIND (grp_end))
        {
        case GOMP_MAP_FIRSTPRIVATE_POINTER:
          {
            omp_mapping_group *to_group
              = omp_get_nonfirstprivate_group (grpmap, decl);

            if (!to_group || to_group == grp)
              continue;

            tree grp_first = *to_group->grp_start;
            enum gomp_map_kind first_kind = OMP_CLAUSE_MAP_KIND (grp_first);

            if ((GOMP_MAP_COPY_TO_P (first_kind)
                 || first_kind == GOMP_MAP_ALLOC)
                && (OMP_CLAUSE_MAP_KIND (to_group->grp_end)
                    != GOMP_MAP_FIRSTPRIVATE_POINTER))
              {
                gcc_assert (TREE_ADDRESSABLE (OMP_CLAUSE_DECL (grp_end)));
                OMP_CLAUSE_SET_MAP_KIND (grp_end, GOMP_MAP_ATTACH_DETACH);
              }
          }
          break;

        case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
          {
            tree ptr = build_fold_indirect_ref (decl);

            omp_mapping_group *to_group
              = omp_get_nonfirstprivate_group (grpmap, ptr);

            if (!to_group || to_group == grp)
              continue;

            tree grp_first = *to_group->grp_start;
            enum gomp_map_kind first_kind = OMP_CLAUSE_MAP_KIND (grp_first);

            if (GOMP_MAP_COPY_TO_P (first_kind)
                || first_kind == GOMP_MAP_ALLOC)
              {
                OMP_CLAUSE_SET_MAP_KIND (grp_end, GOMP_MAP_ATTACH_DETACH);
                OMP_CLAUSE_DECL (grp_end) = ptr;
                if ((OMP_CLAUSE_CHAIN (*to_group->grp_start)
                     == to_group->grp_end)
                    && (OMP_CLAUSE_MAP_KIND (to_group->grp_end)
                        == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
                  {
                    gcc_assert (TREE_ADDRESSABLE
                                  (OMP_CLAUSE_DECL (to_group->grp_end)));
                    OMP_CLAUSE_SET_MAP_KIND (to_group->grp_end,
                                             GOMP_MAP_ATTACH_DETACH);

                    location_t loc = OMP_CLAUSE_LOCATION (to_group->grp_end);
                    tree alloc
                      = build_omp_clause (loc, OMP_CLAUSE_MAP);
                    OMP_CLAUSE_SET_MAP_KIND (alloc, GOMP_MAP_ALLOC);
                    tree tmp = build_fold_addr_expr (OMP_CLAUSE_DECL
                                                      (to_group->grp_end));
                    tree char_ptr_type = build_pointer_type (char_type_node);
                    OMP_CLAUSE_DECL (alloc)
                      = build2 (MEM_REF, char_type_node,
                                tmp,
                                build_int_cst (char_ptr_type, 0));
                    OMP_CLAUSE_SIZE (alloc) = TYPE_SIZE_UNIT (TREE_TYPE (tmp));

                    OMP_CLAUSE_CHAIN (alloc)
                      = OMP_CLAUSE_CHAIN (*to_group->grp_start);
                    OMP_CLAUSE_CHAIN (*to_group->grp_start) = alloc;
                  }
              }
          }
          break;

        case GOMP_MAP_ATTACH_DETACH:
        case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
          {
            tree base_ptr, referenced_ptr_node = NULL_TREE;

            while (TREE_CODE (decl) == ARRAY_REF)
              decl = TREE_OPERAND (decl, 0);

            if (TREE_CODE (decl) == INDIRECT_REF)
              decl = TREE_OPERAND (decl, 0);

            /* Only component accesses.  */
            if (DECL_P (decl))
              continue;

            /* We want the pointer itself when checking if the base pointer
               is mapped elsewhere in the same directive -- if we have a
               reference to the pointer, don't use that.  */

            if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
                && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
              {
                referenced_ptr_node = OMP_CLAUSE_CHAIN (*grp->grp_start);
                base_ptr = OMP_CLAUSE_DECL (referenced_ptr_node);
              }
            else
              base_ptr = decl;

            gomp_map_kind zlas_kind
              = (code == OACC_EXIT_DATA || code == OMP_TARGET_EXIT_DATA)
                ? GOMP_MAP_DETACH : GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION;

            if (TREE_CODE (TREE_TYPE (base_ptr)) == POINTER_TYPE)
              {
                /* If we map the base TO, and we're doing an attachment, we
                   can skip the TO mapping altogether and create an ALLOC
                   mapping instead, since the attachment will overwrite the
                   device pointer in that location immediately anyway.
                   Otherwise, change our mapping to
                   GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION in case the
                   attachment target has not been copied to the device
                   already by some earlier directive.  */

                bool base_mapped_to = false;

                omp_mapping_group *base_group;

                if (omp_directive_maps_explicitly (grpmap, base_ptr,
                                                   &base_group, false, true,
                                                   false))
                  {
                    if (referenced_ptr_node)
                      {
                        base_mapped_to = true;
                        if ((OMP_CLAUSE_MAP_KIND (base_group->grp_end)
                             == GOMP_MAP_ATTACH_DETACH)
                            && (OMP_CLAUSE_CHAIN (*base_group->grp_start)
                                == base_group->grp_end))
                          {
                            OMP_CLAUSE_CHAIN (*base_group->grp_start)
                              = OMP_CLAUSE_CHAIN (base_group->grp_end);
                            base_group->grp_end = *base_group->grp_start;
                            repair_chain = true;
                          }
                      }
                    else
                      {
                        base_group->deleted = true;
                        OMP_CLAUSE_ATTACHMENT_MAPPING_ERASED (grp_end) = 1;
                      }
                  }

                /* We're dealing with a reference to a pointer, and we are
                   attaching both the reference and the pointer.  We know the
                   reference itself is on the target, because we are going to
                   create an ALLOC node for it in accumulate_sibling_list.
                   The pointer might be on the target already or it might
                   not, but if it isn't then it's not an error, so use
                   GOMP_MAP_ATTACH_ZLAS for it.  */
                if (!base_mapped_to && referenced_ptr_node)
                  OMP_CLAUSE_SET_MAP_KIND (referenced_ptr_node, zlas_kind);

                omp_mapping_group *struct_group;
                tree desc;
                if ((desc = OMP_CLAUSE_CHAIN (*grp->grp_start))
                    && omp_map_clause_descriptor_p (desc)
                    && omp_mapped_by_containing_struct (grpmap, decl,
                                                        &struct_group))
                  /* If we have a pointer set but we're mapping (or
                     unmapping) the whole of the containing struct, we can
                     remove the pointer set mapping.  */
                  OMP_CLAUSE_CHAIN (*grp->grp_start) = OMP_CLAUSE_CHAIN (desc);
              }
            else if (TREE_CODE (TREE_TYPE (base_ptr)) == REFERENCE_TYPE
                     && (TREE_CODE (TREE_TYPE (TREE_TYPE (base_ptr)))
                         == ARRAY_TYPE)
                     && OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION
                          (*grp->grp_start))
              OMP_CLAUSE_SET_MAP_KIND (grp->grp_end, zlas_kind);
          }
          break;

        case GOMP_MAP_ATTACH:
          /* Ignore standalone attach here.  */
          break;

        default:
          {
            omp_mapping_group *struct_group;
            if (omp_mapped_by_containing_struct (grpmap, decl, &struct_group)
                && *grp->grp_start == grp_end)
              {
                omp_check_mapping_compatibility (OMP_CLAUSE_LOCATION (grp_end),
                                                 struct_group, grp);
                /* Remove the whole of this mapping -- redundant.  */
                grp->deleted = true;
              }

            tree base = decl;
            while ((base = omp_get_base_pointer (base)))
              {
                omp_mapping_group *base_group;

                if (omp_directive_maps_explicitly (grpmap, base, &base_group,
                                                   true, true, false))
                  {
                    tree grp_first = *base_group->grp_start;
                    OMP_CLAUSE_SET_MAP_KIND (grp_first, GOMP_MAP_ALLOC);
                  }
              }
          }
        }
    }

  if (repair_chain)
    {
      /* Group start pointers may have become detached from the
         OMP_CLAUSE_CHAIN of previous groups if elements were removed from
         the end of those groups.  Fix that now.  */
      tree *new_next = NULL;
      FOR_EACH_VEC_ELT (*groups, i, grp)
        {
          if (new_next)
            grp->grp_start = new_next;

          new_next = &OMP_CLAUSE_CHAIN (grp->grp_end);
        }
    }
}

/* Similar to omp_resolve_clause_dependencies, but for OpenACC.  The only
   clause dependencies we handle for now are struct element mappings and
   whole-struct mappings on the same directive, and duplicate clause
   detection.  */
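/* E.g. (illustrative) for OpenACC:

     #pragma acc parallel copy(s.a) copy(s.a)

   the second mapping of the component S.A is diagnosed as a duplicate,
   and a component mapping that is redundant with a whole-struct mapping
   on the same directive is dropped.  */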

void
oacc_resolve_clause_dependencies (vec<omp_mapping_group> *groups,
                                  hash_map<tree_operand_hash_no_se,
                                           omp_mapping_group *> *grpmap)
{
  int i;
  omp_mapping_group *grp;
  hash_set<tree_operand_hash> *seen_components = NULL;
  hash_set<tree_operand_hash> *shown_error = NULL;

  FOR_EACH_VEC_ELT (*groups, i, grp)
    {
      tree grp_end = grp->grp_end;
      tree decl = OMP_CLAUSE_DECL (grp_end);

      gcc_assert (OMP_CLAUSE_CODE (grp_end) == OMP_CLAUSE_MAP);

      if (DECL_P (grp_end))
        continue;

      tree c = OMP_CLAUSE_DECL (*grp->grp_start);
      while (TREE_CODE (c) == ARRAY_REF)
        c = TREE_OPERAND (c, 0);
      if (TREE_CODE (c) != COMPONENT_REF)
        continue;
      if (!seen_components)
        seen_components = new hash_set<tree_operand_hash> ();
      if (!shown_error)
        shown_error = new hash_set<tree_operand_hash> ();
      if (seen_components->contains (c)
          && !shown_error->contains (c))
        {
          error_at (OMP_CLAUSE_LOCATION (grp_end),
                    "%qE appears more than once in map clauses",
                    OMP_CLAUSE_DECL (grp_end));
          shown_error->add (c);
        }
      else
        seen_components->add (c);

      omp_mapping_group *struct_group;
      if (omp_mapped_by_containing_struct (grpmap, decl, &struct_group)
          && *grp->grp_start == grp_end)
        {
          omp_check_mapping_compatibility (OMP_CLAUSE_LOCATION (grp_end),
                                           struct_group, grp);
          /* Remove the whole of this mapping -- redundant.  */
          grp->deleted = true;
        }
    }

  if (seen_components)
    delete seen_components;
  if (shown_error)
    delete shown_error;
}

/* Link node NEWNODE so it is pointed to by chain INSERT_AT.  NEWNODE's chain
   is linked to the previous node pointed to by INSERT_AT.  */
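/* Illustrative sketch: with INSERT_AT pointing at B in the chain
   A -> B -> C, inserting NEWNODE N yields A -> N -> B -> C, and the
   return value is N's chain pointer (i.e. the link to B).  */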

static tree *
omp_siblist_insert_node_after (tree newnode, tree *insert_at)
{
  OMP_CLAUSE_CHAIN (newnode) = *insert_at;
  *insert_at = newnode;
  return &OMP_CLAUSE_CHAIN (newnode);
}

/* Move NODE (which is currently pointed to by the chain OLD_POS) so it is
   pointed to by chain MOVE_AFTER instead.  */

static void
omp_siblist_move_node_after (tree node, tree *old_pos, tree *move_after)
{
  gcc_assert (node == *old_pos);
  *old_pos = OMP_CLAUSE_CHAIN (node);
  OMP_CLAUSE_CHAIN (node) = *move_after;
  *move_after = node;
}

/* Move nodes from FIRST_PTR (pointed to by previous node's chain) to
   LAST_NODE to after MOVE_AFTER chain.  Similar to the function below, but
   no new nodes are prepended to the list before splicing into the new
   position.  Return the position we should continue scanning the list at,
   or NULL to stay where we were.  */
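/* Illustrative sketch: with a chain  A -> B -> C -> D -> E,
   FIRST_PTR = &chain(A) (so the sublist is B..C with LAST_NODE = C) and
   MOVE_AFTER = &chain(D), the result is  A -> D -> B -> C -> E, and the
   return value is FIRST_PTR (whose chain slot now holds D).  */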

static tree *
omp_siblist_move_nodes_after (tree *first_ptr, tree last_node,
                              tree *move_after)
{
  if (first_ptr == move_after)
    return NULL;

  tree tmp = *first_ptr;
  *first_ptr = OMP_CLAUSE_CHAIN (last_node);
  OMP_CLAUSE_CHAIN (last_node) = *move_after;
  *move_after = tmp;

  return first_ptr;
}

/* Concatenate two lists described by [FIRST_NEW, LAST_NEW_TAIL] and
   [FIRST_PTR, LAST_NODE], and insert them in the OMP clause list after chain
   pointer MOVE_AFTER.

   The latter list was previously part of the OMP clause list, and the former
   (prepended) part is comprised of new nodes.

   We start with a list of nodes starting with a struct mapping node.  We
   rearrange the list so that the new nodes starting from FIRST_NEW, and
   whose last node's chain is LAST_NEW_TAIL, come directly after MOVE_AFTER,
   followed by the group of mapping nodes we are currently processing (from
   the chain FIRST_PTR to LAST_NODE).  The return value is the pointer to
   the next chain we should continue processing from, or NULL to stay where
   we were.

   The transformation (in the case where MOVE_AFTER and FIRST_PTR are
   different) is worked through below.  Here we are processing LAST_NODE, and
   FIRST_PTR points at the preceding mapping clause:

  #. mapping node               chain
  ---------------------------------------------------
  A. struct_node                [->B]
  B. comp_1                     [->C]
  C. comp_2                     [->D (move_after)]
  D. map_to_3                   [->E]
  E. attach_3                   [->F (first_ptr)]
  F. map_to_4                   [->G (continue_at)]
  G. attach_4 (last_node)       [->H]
  H. ...

     *last_new_tail = *first_ptr;

  I. new_node (first_new)       [->F (last_new_tail)]

     *first_ptr = OMP_CLAUSE_CHAIN (last_node)

  #. mapping node               chain
  ----------------------------------------------------
  A. struct_node                [->B]
  B. comp_1                     [->C]
  C. comp_2                     [->D (move_after)]
  D. map_to_3                   [->E]
  E. attach_3                   [->H (first_ptr)]
  F. map_to_4                   [->G (continue_at)]
  G. attach_4 (last_node)       [->H]
  H. ...

  I. new_node (first_new)       [->F (last_new_tail)]

     OMP_CLAUSE_CHAIN (last_node) = *move_after;

  #. mapping node               chain
  ---------------------------------------------------
  A. struct_node                [->B]
  B. comp_1                     [->C]
  C. comp_2                     [->D (move_after)]
  D. map_to_3                   [->E]
  E. attach_3                   [->H (continue_at)]
  F. map_to_4                   [->G]
  G. attach_4 (last_node)       [->D]
  H. ...

  I. new_node (first_new)       [->F (last_new_tail)]

     *move_after = first_new;

  #. mapping node               chain
  ---------------------------------------------------
  A. struct_node                [->B]
  B. comp_1                     [->C]
  C. comp_2                     [->I (move_after)]
  D. map_to_3                   [->E]
  E. attach_3                   [->H (continue_at)]
  F. map_to_4                   [->G]
  G. attach_4 (last_node)       [->D]
  H. ...
  I. new_node (first_new)       [->F (last_new_tail)]

   or, in order:

  #. mapping node               chain
  ---------------------------------------------------
  A. struct_node                [->B]
  B. comp_1                     [->C]
  C. comp_2                     [->I (move_after)]
  I. new_node (first_new)       [->F (last_new_tail)]
  F. map_to_4                   [->G]
  G. attach_4 (last_node)       [->D]
  D. map_to_3                   [->E]
  E. attach_3                   [->H (continue_at)]
  H. ...
*/

static tree *
omp_siblist_move_concat_nodes_after (tree first_new, tree *last_new_tail,
                                     tree *first_ptr, tree last_node,
                                     tree *move_after)
{
  tree *continue_at = NULL;
  *last_new_tail = *first_ptr;
  if (first_ptr == move_after)
    *move_after = first_new;
  else
    {
      *first_ptr = OMP_CLAUSE_CHAIN (last_node);
      continue_at = first_ptr;
      OMP_CLAUSE_CHAIN (last_node) = *move_after;
      *move_after = first_new;
    }
  return continue_at;
}

static omp_addr_token *
omp_first_chained_access_token (vec<omp_addr_token *> &addr_tokens)
{
  using namespace omp_addr_tokenizer;
  int idx = addr_tokens.length () - 1;
  gcc_assert (idx >= 0);
  if (addr_tokens[idx]->type != ACCESS_METHOD)
    return addr_tokens[idx];
  while (idx > 0 && addr_tokens[idx - 1]->type == ACCESS_METHOD)
    idx--;
  return addr_tokens[idx];
}

/* Mapping struct members causes an additional set of nodes to be created,
   starting with GOMP_MAP_STRUCT followed by a number of mappings equal to
   the number of members being mapped, in order of ascending position
   (address or bitwise).

   We scan through the list of mapping clauses, calling this function for
   each struct member mapping we find, and build up the list of mappings
   after the initial GOMP_MAP_STRUCT node.  For pointer members, these will
   be newly-created ALLOC nodes.  For non-pointer members, the existing
   mapping is moved into place in the sorted list.

     struct {
       int *a;
       int *b;
       int c;
       int *d;
     };

     #pragma (acc|omp directive) copy(struct.a[0:n], struct.b[0:n], struct.c,
                                      struct.d[0:n])

     GOMP_MAP_STRUCT (4)
     [GOMP_MAP_FIRSTPRIVATE_REFERENCE -- for refs to structs]
     GOMP_MAP_ALLOC  (struct.a)
     GOMP_MAP_ALLOC  (struct.b)
     GOMP_MAP_TO     (struct.c)
     GOMP_MAP_ALLOC  (struct.d)
     ...

   In the case where we are mapping references to pointers, or in Fortran if
   we are mapping an array with a descriptor, additional nodes may be created
   after the struct node list also.

   The return code is either a pointer to the next node to process (if the
   list has been rearranged), else NULL to continue with the next node in the
   original list.  */

static tree *
omp_accumulate_sibling_list (enum omp_region_type region_type,
                             enum tree_code code,
                             hash_map<tree_operand_hash, tree>
                               *&struct_map_to_clause,
                             hash_map<tree_operand_hash_no_se,
                                      omp_mapping_group *> *group_map,
                             tree *grp_start_p, tree grp_end,
                             vec<omp_addr_token *> &addr_tokens, tree **inner,
                             bool *fragile_p, bool reprocessing_struct,
                             tree **added_tail)
{
  using namespace omp_addr_tokenizer;
  poly_offset_int coffset;
  poly_int64 cbitpos;
  tree ocd = OMP_CLAUSE_DECL (grp_end);
  bool openmp = !(region_type & ORT_ACC);
  bool target = (region_type & ORT_TARGET) != 0;
  tree *continue_at = NULL;

  while (TREE_CODE (ocd) == ARRAY_REF)
    ocd = TREE_OPERAND (ocd, 0);

  if (*fragile_p)
    {
      omp_mapping_group *to_group
        = omp_get_nonfirstprivate_group (group_map, ocd, true);

      if (to_group)
        return NULL;
    }

  omp_addr_token *last_token = omp_first_chained_access_token (addr_tokens);
  if (last_token->type == ACCESS_METHOD)
    {
      switch (last_token->u.access_kind)
        {
        case ACCESS_REF:
        case ACCESS_REF_TO_POINTER:
        case ACCESS_REF_TO_POINTER_OFFSET:
        case ACCESS_INDEXED_REF_TO_ARRAY:
          /* We may see either a bare reference or a dereferenced
             "convert_from_reference"-like one here.  Handle either way.  */
          if (TREE_CODE (ocd) == INDIRECT_REF)
            ocd = TREE_OPERAND (ocd, 0);
          gcc_assert (TREE_CODE (TREE_TYPE (ocd)) == REFERENCE_TYPE);
          break;

        default:
          ;
        }
    }

  bool variable_offset;
  tree base
    = extract_base_bit_offset (ocd, &cbitpos, &coffset, &variable_offset);

  int base_token;
  for (base_token = addr_tokens.length () - 1; base_token >= 0; base_token--)
    {
      if (addr_tokens[base_token]->type == ARRAY_BASE
          || addr_tokens[base_token]->type == STRUCTURE_BASE)
        break;
    }

  /* The two expressions in the assertion below aren't quite the same: if we
     have 'struct_base_decl access_indexed_array' for something like
     "myvar[2].x" then base will be "myvar" and addr_tokens[base_token]->expr
     will be "myvar[2]" -- the actual base of the structure.
     The former interpretation leads to a strange situation where we get
       struct(myvar) alloc(myvar[2].ptr1)
     That is, the array of structures is kind of treated as one big structure
     for the purposes of gathering sibling lists, etc.  */
  /* gcc_assert (base == addr_tokens[base_token]->expr); */

  bool attach_detach = ((OMP_CLAUSE_MAP_KIND (grp_end)
                         == GOMP_MAP_ATTACH_DETACH)
                        || (OMP_CLAUSE_MAP_KIND (grp_end)
                            == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION));
  bool has_descriptor = false;
  if (OMP_CLAUSE_CHAIN (*grp_start_p) != grp_end)
    {
      tree grp_mid = OMP_CLAUSE_CHAIN (*grp_start_p);
      if (grp_mid && omp_map_clause_descriptor_p (grp_mid))
        has_descriptor = true;
    }

  if (!struct_map_to_clause || struct_map_to_clause->get (base) == NULL)
    {
      enum gomp_map_kind str_kind = GOMP_MAP_STRUCT;

      if (struct_map_to_clause == NULL)
        struct_map_to_clause = new hash_map<tree_operand_hash, tree>;

      if (variable_offset)
        str_kind = GOMP_MAP_STRUCT_UNORD;

      tree l = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
                                 OMP_CLAUSE_MAP);

      OMP_CLAUSE_SET_MAP_KIND (l, str_kind);
      OMP_CLAUSE_DECL (l) = unshare_expr (base);
      OMP_CLAUSE_SIZE (l) = size_int (1);

      struct_map_to_clause->put (base, l);

      /* On first iterating through the clause list, we insert the struct
         node just before the component access node that triggers the
         initial omp_accumulate_sibling_list call for a particular sibling
         list (and it then forms the first entry in that list).  When
         reprocessing struct bases that are themselves component accesses,
         we insert the struct node on an off-side list to avoid inserting
         the new GOMP_MAP_STRUCT into the middle of the old one.  */
      tree *insert_node_pos = reprocessing_struct ? *added_tail : grp_start_p;

      if (has_descriptor)
        {
          tree desc = OMP_CLAUSE_CHAIN (*grp_start_p);
          if (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA)
            OMP_CLAUSE_SET_MAP_KIND (desc, GOMP_MAP_RELEASE);
          tree sc = *insert_node_pos;
          OMP_CLAUSE_CHAIN (l) = desc;
          OMP_CLAUSE_CHAIN (*grp_start_p) = OMP_CLAUSE_CHAIN (desc);
          OMP_CLAUSE_CHAIN (desc) = sc;
          *insert_node_pos = l;
        }
      else if (attach_detach)
        {
          tree extra_node;
          tree alloc_node
            = build_omp_struct_comp_nodes (code, *grp_start_p, grp_end,
                                           &extra_node);
          tree *tail;
          OMP_CLAUSE_CHAIN (l) = alloc_node;

          if (extra_node)
            {
              OMP_CLAUSE_CHAIN (extra_node) = *insert_node_pos;
              OMP_CLAUSE_CHAIN (alloc_node) = extra_node;
              tail = &OMP_CLAUSE_CHAIN (extra_node);
            }
          else
            {
              OMP_CLAUSE_CHAIN (alloc_node) = *insert_node_pos;
              tail = &OMP_CLAUSE_CHAIN (alloc_node);
            }

          /* For OpenMP semantics, we don't want to implicitly allocate
             space for the pointer here for non-compute regions (e.g. "enter
             data").  A FRAGILE_P node is only being created so that
             omp-low.cc is able to rewrite the struct properly.
             For references (to pointers), we want to actually allocate the
             space for the reference itself in the sorted list following the
             struct node.
             For pointers, we want to allocate space if we had an explicit
             mapping of the attachment point, but not otherwise.  */
          if (*fragile_p
              || (openmp
                  && !target
                  && attach_detach
                  && TREE_CODE (TREE_TYPE (ocd)) == POINTER_TYPE
                  && !OMP_CLAUSE_ATTACHMENT_MAPPING_ERASED (grp_end)))
            {
              if (!lang_GNU_Fortran ())
                /* In Fortran, pointers are dereferenced automatically, but
                   may be unassociated.  So we still want to allocate space
                   for the pointer (as the base for an attach operation that
                   should be present in the same directive's clause list
                   also).  */
                OMP_CLAUSE_SIZE (alloc_node) = size_zero_node;
              OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (alloc_node) = 1;
            }

          *insert_node_pos = l;

          if (reprocessing_struct)
            {
              /* When reprocessing a struct node group used as the base of a
                 subcomponent access, if we have a reference-to-pointer base,
                 we will see:
                   struct(**ptr) attach(*ptr)
                 whereas for a non-reprocess-struct group, we see, e.g.:
                   tofrom(**ptr) attach(*ptr) attach(ptr)
                 and we create the "alloc" for the second "attach", i.e.
                 for the reference itself.  When reprocessing a struct group
                 we thus change the pointer attachment into a reference
                 attachment by stripping the indirection.  (The attachment
                 of the referenced pointer must happen elsewhere, either on
                 the same directive, or otherwise.)  */
              tree adecl = OMP_CLAUSE_DECL (alloc_node);

              if ((TREE_CODE (adecl) == INDIRECT_REF
                   || (TREE_CODE (adecl) == MEM_REF
                       && integer_zerop (TREE_OPERAND (adecl, 1))))
                  && (TREE_CODE (TREE_TYPE (TREE_OPERAND (adecl, 0)))
                      == REFERENCE_TYPE)
                  && (TREE_CODE (TREE_TYPE (TREE_TYPE
                        (TREE_OPERAND (adecl, 0)))) == POINTER_TYPE))
                OMP_CLAUSE_DECL (alloc_node) = TREE_OPERAND (adecl, 0);

              *added_tail = tail;
            }
        }
      else
        {
          gcc_assert (*grp_start_p == grp_end);
          if (reprocessing_struct)
            {
              /* If we don't have an attach/detach node, this is a
                 "target data" directive or similar, not an offload region.
                 Synthesize an "alloc" node using just the initiating
                 GOMP_MAP_STRUCT decl.  */
              gomp_map_kind k = (code == OMP_TARGET_EXIT_DATA
                                 || code == OACC_EXIT_DATA)
                                ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
              tree alloc_node
                = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
                                    OMP_CLAUSE_MAP);
              OMP_CLAUSE_SET_MAP_KIND (alloc_node, k);
              OMP_CLAUSE_DECL (alloc_node) = unshare_expr (last_token->expr);
              OMP_CLAUSE_SIZE (alloc_node)
                = TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (alloc_node)));

              OMP_CLAUSE_CHAIN (alloc_node) = OMP_CLAUSE_CHAIN (l);
              OMP_CLAUSE_CHAIN (l) = alloc_node;
              *insert_node_pos = l;
              *added_tail = &OMP_CLAUSE_CHAIN (alloc_node);
            }
          else
            grp_start_p = omp_siblist_insert_node_after (l, insert_node_pos);
        }

      unsigned last_access = base_token + 1;

      while (last_access + 1 < addr_tokens.length ()
             && addr_tokens[last_access + 1]->type == ACCESS_METHOD)
        last_access++;

      if ((region_type & ORT_TARGET)
          && addr_tokens[base_token + 1]->type == ACCESS_METHOD)
        {
          bool base_ref = false;
          access_method_kinds access_kind
            = addr_tokens[last_access]->u.access_kind;

          switch (access_kind)
            {
            case ACCESS_DIRECT:
            case ACCESS_INDEXED_ARRAY:
              return NULL;

            case ACCESS_REF:
            case ACCESS_REF_TO_POINTER:
            case ACCESS_REF_TO_POINTER_OFFSET:
            case ACCESS_INDEXED_REF_TO_ARRAY:
              base_ref = true;
              break;

            default:
              ;
            }
          tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
                                      OMP_CLAUSE_MAP);
          enum gomp_map_kind mkind;
          omp_mapping_group *decl_group;
          tree use_base;
          switch (access_kind)
            {
            case ACCESS_POINTER:
            case ACCESS_POINTER_OFFSET:
              use_base = addr_tokens[last_access]->expr;
              break;
            case ACCESS_REF_TO_POINTER:
            case ACCESS_REF_TO_POINTER_OFFSET:
              use_base
                = build_fold_indirect_ref (addr_tokens[last_access]->expr);
              break;
            default:
              use_base = addr_tokens[base_token]->expr;
            }
          bool mapped_to_p
            = omp_directive_maps_explicitly (group_map, use_base, &decl_group,
                                             true, false, true);
          if (addr_tokens[base_token]->type == STRUCTURE_BASE
              && DECL_P (addr_tokens[last_access]->expr)
              && !mapped_to_p)
            mkind = base_ref ? GOMP_MAP_FIRSTPRIVATE_REFERENCE
                             : GOMP_MAP_FIRSTPRIVATE_POINTER;
          else
            mkind = GOMP_MAP_ATTACH_DETACH;

          OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
          /* If we have a reference to pointer base, we want to attach the
             pointer here, not the reference.  The reference attachment
             happens elsewhere.  */
          bool ref_to_ptr
            = (access_kind == ACCESS_REF_TO_POINTER
               || access_kind == ACCESS_REF_TO_POINTER_OFFSET);
          tree sdecl = addr_tokens[last_access]->expr;
          tree sdecl_ptr = ref_to_ptr ? build_fold_indirect_ref (sdecl)
                                      : sdecl;
          /* For the FIRSTPRIVATE_REFERENCE after the struct node, we
             want to use the reference itself for the decl, but we
             still want to use the pointer to calculate the bias.  */
          OMP_CLAUSE_DECL (c2) = (mkind == GOMP_MAP_ATTACH_DETACH)
                                 ? sdecl_ptr : sdecl;
          sdecl = sdecl_ptr;
          tree baddr = build_fold_addr_expr (base);
          baddr = fold_convert_loc (OMP_CLAUSE_LOCATION (grp_end),
                                    ptrdiff_type_node, baddr);
          tree decladdr = fold_convert_loc (OMP_CLAUSE_LOCATION (grp_end),
                                            ptrdiff_type_node, sdecl);
          OMP_CLAUSE_SIZE (c2)
            = fold_build2_loc (OMP_CLAUSE_LOCATION (grp_end), MINUS_EXPR,
                               ptrdiff_type_node, baddr, decladdr);
          /* Insert after struct node.  */
          OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
          OMP_CLAUSE_CHAIN (l) = c2;

          if (addr_tokens[base_token]->type == STRUCTURE_BASE
              && (addr_tokens[base_token]->u.structure_base_kind
                  == BASE_COMPONENT_EXPR)
              && mkind == GOMP_MAP_ATTACH_DETACH
              && addr_tokens[last_access]->u.access_kind != ACCESS_REF)
            {
              *inner = insert_node_pos;
              if (openmp)
                *fragile_p = true;
              return NULL;
            }
        }

      if (addr_tokens[base_token]->type == STRUCTURE_BASE
          && (addr_tokens[base_token]->u.structure_base_kind
              == BASE_COMPONENT_EXPR)
          && addr_tokens[last_access]->u.access_kind == ACCESS_REF)
        *inner = insert_node_pos;

      return NULL;
    }
  else if (struct_map_to_clause)
    {
      tree *osc = struct_map_to_clause->get (base);
      tree *sc = NULL, *scp = NULL;
      bool unordered = false;

      if (osc && OMP_CLAUSE_MAP_KIND (*osc) == GOMP_MAP_STRUCT_UNORD)
        unordered = true;

      unsigned HOST_WIDE_INT i, elems = tree_to_uhwi (OMP_CLAUSE_SIZE (*osc));
      sc = &OMP_CLAUSE_CHAIN (*osc);
      /* The struct mapping might be immediately followed by a
         FIRSTPRIVATE_POINTER, FIRSTPRIVATE_REFERENCE or an ATTACH_DETACH --
         if it's an indirect access or a reference, or if the structure base
         is not a decl.  The FIRSTPRIVATE_* nodes are removed in omp-low.cc
         after they have been processed there, and ATTACH_DETACH nodes are
         recomputed and moved out of the GOMP_MAP_STRUCT construct once
         sibling list building is complete.  */
      if (OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_FIRSTPRIVATE_POINTER
          || OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
          || OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_ATTACH_DETACH)
        sc = &OMP_CLAUSE_CHAIN (*sc);
      for (i = 0; i < elems; i++, sc = &OMP_CLAUSE_CHAIN (*sc))
        if (attach_detach && sc == grp_start_p)
          break;
        else if (TREE_CODE (OMP_CLAUSE_DECL (*sc)) != COMPONENT_REF
                 && TREE_CODE (OMP_CLAUSE_DECL (*sc)) != INDIRECT_REF
                 && TREE_CODE (OMP_CLAUSE_DECL (*sc)) != ARRAY_REF)
          break;
        else
          {
            tree sc_decl = OMP_CLAUSE_DECL (*sc);
            poly_offset_int offset;
            poly_int64 bitpos;

            if (TREE_CODE (sc_decl) == ARRAY_REF)
              {
                while (TREE_CODE (sc_decl) == ARRAY_REF)
                  sc_decl = TREE_OPERAND (sc_decl, 0);
                if (TREE_CODE (sc_decl) != COMPONENT_REF
                    || TREE_CODE (TREE_TYPE (sc_decl)) != ARRAY_TYPE)
                  break;
              }
            else if (INDIRECT_REF_P (sc_decl)
                     && TREE_CODE (TREE_OPERAND (sc_decl, 0)) == COMPONENT_REF
                     && (TREE_CODE (TREE_TYPE (TREE_OPERAND (sc_decl, 0)))
                         == REFERENCE_TYPE))
              sc_decl = TREE_OPERAND (sc_decl, 0);

            bool variable_offset2;
            tree base2 = extract_base_bit_offset (sc_decl, &bitpos, &offset,
                                                  &variable_offset2);
            if (!base2 || !operand_equal_p (base2, base, 0))
              break;
            if (scp)
              continue;
            if (variable_offset2)
              {
                OMP_CLAUSE_SET_MAP_KIND (*osc, GOMP_MAP_STRUCT_UNORD);
                unordered = true;
                break;
              }
            else if ((region_type & ORT_ACC) != 0)
              {
                /* For OpenACC, allow (ignore) duplicate struct accesses in
                   the middle of a mapping clause, e.g. "mystruct->foo" in:
                     copy(mystruct->foo->bar) copy(mystruct->foo->qux).  */
                if (reprocessing_struct
                    && known_eq (coffset, offset)
                    && known_eq (cbitpos, bitpos))
                  return NULL;
              }
            else if (known_eq (coffset, offset)
                     && known_eq (cbitpos, bitpos))
              {
                /* Having two struct members at the same offset doesn't work,
                   so make sure we don't.  (We're allowed to ignore this.
                   Should we report the error?)  */
                /*error_at (OMP_CLAUSE_LOCATION (grp_end),
                            "duplicate struct member %qE in map clauses",
                            OMP_CLAUSE_DECL (grp_end));*/
                return NULL;
              }
            if (maybe_lt (coffset, offset)
                || (known_eq (coffset, offset)
                    && maybe_lt (cbitpos, bitpos)))
              {
                if (attach_detach)
                  scp = sc;
                else
                  break;
              }
          }

      /* If this is an unordered struct, just insert the new element at the
         end of the list.  */
      if (unordered)
        {
          for (; i < elems; i++)
            sc = &OMP_CLAUSE_CHAIN (*sc);
          scp = NULL;
        }

      OMP_CLAUSE_SIZE (*osc)
        = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc), size_one_node);

      if (reprocessing_struct)
        {
          /* If we're reprocessing a struct node, we don't want to do most of
             the list manipulation below.  We only need to handle the
             (pointer or reference) attach/detach case.  */
          tree extra_node, alloc_node;
          if (has_descriptor)
            gcc_unreachable ();
          else if (attach_detach)
            alloc_node = build_omp_struct_comp_nodes (code, *grp_start_p,
                                                      grp_end, &extra_node);
          else
            {
              /* If we don't have an attach/detach node, this is a
                 "target data" directive or similar, not an offload region.
                 Synthesize an "alloc" node using just the initiating
                 GOMP_MAP_STRUCT decl.  */
              gomp_map_kind k = (code == OMP_TARGET_EXIT_DATA
                                 || code == OACC_EXIT_DATA)
                                ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
              alloc_node
                = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
                                    OMP_CLAUSE_MAP);
              OMP_CLAUSE_SET_MAP_KIND (alloc_node, k);
              OMP_CLAUSE_DECL (alloc_node) = unshare_expr (last_token->expr);
              OMP_CLAUSE_SIZE (alloc_node)
                = TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (alloc_node)));
            }

          if (scp)
            omp_siblist_insert_node_after (alloc_node, scp);
          else
            {
              tree *new_end = omp_siblist_insert_node_after (alloc_node, sc);
              if (sc == *added_tail)
                *added_tail = new_end;
            }

          return NULL;
        }

      if (has_descriptor)
        {
          tree desc = OMP_CLAUSE_CHAIN (*grp_start_p);
          if (code == OMP_TARGET_EXIT_DATA
              || code == OACC_EXIT_DATA)
            OMP_CLAUSE_SET_MAP_KIND (desc, GOMP_MAP_RELEASE);
          omp_siblist_move_node_after (desc,
                                       &OMP_CLAUSE_CHAIN (*grp_start_p),
                                       scp ? scp : sc);
        }
      else if (attach_detach)
        {
          tree cl = NULL_TREE, extra_node;
          tree alloc_node = build_omp_struct_comp_nodes (code, *grp_start_p,
                                                         grp_end, &extra_node);
          tree *tail_chain = NULL;

          if (*fragile_p
              || (openmp
                  && !target
                  && attach_detach
                  && TREE_CODE (TREE_TYPE (ocd)) == POINTER_TYPE
                  && !OMP_CLAUSE_ATTACHMENT_MAPPING_ERASED (grp_end)))
            {
              if (!lang_GNU_Fortran ())
                OMP_CLAUSE_SIZE (alloc_node) = size_zero_node;
              OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (alloc_node) = 1;
            }

          /* Here, we have:

             grp_end : the last (or only) node in this group.
             grp_start_p : pointer to the first node in a pointer mapping
                           group up to and including GRP_END.
             sc : pointer to the chain for the end of the struct component
                  list.
             scp : pointer to the chain for the sorted position at which we
                   should insert in the middle of the struct component list
                   (else NULL to insert at end).
             alloc_node : the "alloc" node for the structure (pointer-type)
                          component.  We insert at SCP (if present), else SC
                          (the end of the struct component list).
             extra_node : a newly-synthesized node for an additional indirect
                          pointer mapping or a Fortran pointer set, if
                          needed.
             cl : first node to prepend before grp_start_p.
             tail_chain : pointer to chain of last prepended node.

             The general idea is we move the nodes for this struct mapping
             together: the alloc node goes into the sorted list directly
             after the struct mapping, and any extra nodes (together with
             the nodes mapping arrays pointed to by struct components) get
             moved after that list.  When SCP is NULL, we insert the nodes
             at SC, i.e. at the end of the struct component mapping list.
             It's important that the alloc_node comes first in that case
             because it's part of the sorted component mapping list (but
             subsequent nodes are not!).  */

          if (scp)
            omp_siblist_insert_node_after (alloc_node, scp);

          /* Make [cl,tail_chain] a list of the alloc node (if we haven't
             already inserted it) and the extra_node (if it is present).
             The list can be empty if we added alloc_node above and there is
             no extra node.  */
          if (scp && extra_node)
            {
              cl = extra_node;
              tail_chain = &OMP_CLAUSE_CHAIN (extra_node);
            }
          else if (extra_node)
            {
              OMP_CLAUSE_CHAIN (alloc_node) = extra_node;
              cl = alloc_node;
              tail_chain = &OMP_CLAUSE_CHAIN (extra_node);
            }
          else if (!scp)
            {
              cl = alloc_node;
              tail_chain = &OMP_CLAUSE_CHAIN (alloc_node);
            }

          continue_at
            = cl ? omp_siblist_move_concat_nodes_after (cl, tail_chain,
                                                        grp_start_p, grp_end,
                                                        sc)
                 : omp_siblist_move_nodes_after (grp_start_p, grp_end, sc);
        }
      else if (*sc != grp_end)
        {
          gcc_assert (*grp_start_p == grp_end);

          /* We are moving the current node back to a previous struct node:
             the node that used to point to the current node will now point
             to the next node.  */
          continue_at = grp_start_p;
          /* In the non-pointer case, the mapping clause itself is moved into
             the correct position in the struct component list, which in this
             case is just SC.  */
          omp_siblist_move_node_after (*grp_start_p, grp_start_p, sc);
        }
    }
  return continue_at;
}

/* Scan through GROUPS, and create sorted structure sibling lists without
   gimplifying.  */
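/* For instance (an illustrative sketch), for:

     #pragma omp target map(tofrom: s.a[0:n], s.c)

   the clause list is rewritten so that a GOMP_MAP_STRUCT node for S comes
   first, followed by the member mappings sorted by offset (an ALLOC node
   for the pointer S.A and the existing mapping of S.C), with the
   attach/detach node for S.A[0:N] moved after that sorted list.  */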
11714
11715static bool
11716omp_build_struct_sibling_lists (enum tree_code code,
11717 enum omp_region_type region_type,
11718 vec<omp_mapping_group> *groups,
11719 hash_map<tree_operand_hash_no_se,
11720 omp_mapping_group *> **grpmap,
11721 tree *list_p)
11722{
11723 using namespace omp_addr_tokenizer;
11724 unsigned i;
11725 omp_mapping_group *grp;
11726 hash_map<tree_operand_hash, tree> *struct_map_to_clause = NULL;
11727 bool success = true;
11728 tree *new_next = NULL;
11729 tree *tail = &OMP_CLAUSE_CHAIN ((*groups)[groups->length () - 1].grp_end);
11730 tree added_nodes = NULL_TREE;
11731 tree *added_tail = &added_nodes;
11732 auto_vec<omp_mapping_group> pre_hwm_groups;
11733
11734 FOR_EACH_VEC_ELT (*groups, i, grp)
11735 {
11736 tree c = grp->grp_end;
11737 tree decl = OMP_CLAUSE_DECL (c);
11738 tree grp_end = grp->grp_end;
11739 auto_vec<omp_addr_token *> addr_tokens;
11740 tree sentinel = OMP_CLAUSE_CHAIN (grp_end);
11741
11742 if (new_next && !grp->reprocess_struct)
11743 grp->grp_start = new_next;
11744
11745 new_next = NULL;
11746
11747 tree *grp_start_p = grp->grp_start;
11748
11749 if (DECL_P (decl))
11750 continue;
11751
11752 /* Skip groups we marked for deletion in
11753 {omp,oacc}_resolve_clause_dependencies. */
11754 if (grp->deleted)
11755 continue;
11756
11757 if (OMP_CLAUSE_CHAIN (*grp_start_p)
11758 && OMP_CLAUSE_CHAIN (*grp_start_p) != grp_end)
11759 {
11760 /* Don't process an array descriptor that isn't inside a derived type
11761 as a struct (the GOMP_MAP_POINTER following will have the form
11762 "var.data", but such mappings are handled specially). */
11763 tree grpmid = OMP_CLAUSE_CHAIN (*grp_start_p);
11764 if (omp_map_clause_descriptor_p (c: grpmid)
11765 && DECL_P (OMP_CLAUSE_DECL (grpmid)))
11766 continue;
11767 }
11768
11769 tree expr = decl;
11770
11771 while (TREE_CODE (expr) == ARRAY_REF)
11772 expr = TREE_OPERAND (expr, 0);
11773
11774 if (!omp_parse_expr (addr_tokens, expr))
11775 continue;
11776
11777 omp_addr_token *last_token
11778 = omp_first_chained_access_token (addr_tokens);
11779
11780 /* A mapping of a reference to a pointer member that doesn't specify an
11781 array section, etc., like this:
11782 *mystruct.ref_to_ptr
11783 should not be processed by the struct sibling-list handling code --
11784 it just transfers the referenced pointer.
11785
11786 In contrast, the quite similar-looking construct:
11787 *mystruct.ptr
11788 which is equivalent to e.g.
11789 mystruct.ptr[0]
11790 *does* trigger sibling-list processing.
11791
11792 An exception for the former case is for "fragile" groups where the
11793 reference itself is not handled otherwise; this is subject to special
11794 handling in omp_accumulate_sibling_list also. */
11795
11796 if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
11797 && last_token->type == ACCESS_METHOD
11798 && last_token->u.access_kind == ACCESS_REF
11799 && !grp->fragile)
11800 continue;
11801
11802 tree d = decl;
11803 if (TREE_CODE (d) == ARRAY_REF)
11804 {
11805 while (TREE_CODE (d) == ARRAY_REF)
11806 d = TREE_OPERAND (d, 0);
11807 if (TREE_CODE (d) == COMPONENT_REF
11808 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
11809 decl = d;
11810 }
11811 if (d == decl
11812 && INDIRECT_REF_P (decl)
11813 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
11814 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
11815 == REFERENCE_TYPE)
11816 && (OMP_CLAUSE_MAP_KIND (c)
11817 != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION))
11818 decl = TREE_OPERAND (decl, 0);
11819
11820 STRIP_NOPS (decl);
11821
11822 if (TREE_CODE (decl) != COMPONENT_REF)
11823 continue;
11824
11825 /* If we're mapping the whole struct in another node, skip adding this
11826 node to a sibling list. */
11827 omp_mapping_group *wholestruct;
11828 if (omp_mapped_by_containing_struct (grpmap: *grpmap, OMP_CLAUSE_DECL (c),
11829 mapped_by_group: &wholestruct))
11830 continue;
11831
11832 if (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
11833 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
11834 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH
11835 && code != OACC_UPDATE
11836 && code != OMP_TARGET_UPDATE)
11837 {
11838 if (error_operand_p (t: decl))
11839 {
11840 success = false;
11841 goto error_out;
11842 }
11843
11844 tree stype = TREE_TYPE (decl);
11845 if (TREE_CODE (stype) == REFERENCE_TYPE)
11846 stype = TREE_TYPE (stype);
11847 if (TYPE_SIZE_UNIT (stype) == NULL
11848 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
11849 {
11850 error_at (OMP_CLAUSE_LOCATION (c),
11851 "mapping field %qE of variable length "
11852 "structure", OMP_CLAUSE_DECL (c));
11853 success = false;
11854 goto error_out;
11855 }
11856
11857 tree *inner = NULL;
11858 bool fragile_p = grp->fragile;
11859
11860 new_next
11861 = omp_accumulate_sibling_list (region_type, code,
11862 struct_map_to_clause, group_map: *grpmap,
11863 grp_start_p, grp_end, addr_tokens,
11864 inner: &inner, fragile_p: &fragile_p,
11865 reprocessing_struct: grp->reprocess_struct, added_tail: &added_tail);
11866
11867 if (inner)
11868 {
11869 omp_mapping_group newgrp;
11870 newgrp.grp_start = inner;
11871 if (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (*inner))
11872 == GOMP_MAP_ATTACH_DETACH)
11873 newgrp.grp_end = OMP_CLAUSE_CHAIN (*inner);
11874 else
11875 newgrp.grp_end = *inner;
11876 newgrp.mark = UNVISITED;
11877 newgrp.sibling = NULL;
11878 newgrp.deleted = false;
11879 newgrp.reprocess_struct = true;
11880 newgrp.fragile = fragile_p;
11881 newgrp.next = NULL;
11882 groups->safe_push (obj: newgrp);
11883
11884 /* !!! Growing GROUPS might invalidate the pointers in the group
11885 map. Rebuild it here. This is a bit inefficient, but
11886 shouldn't happen very often. */
11887 delete (*grpmap);
11888 *grpmap
11889 = omp_reindex_mapping_groups (list_p, groups, processed_groups: &pre_hwm_groups,
11890 sentinel);
11891 }
11892 }
11893 }
11894
11895 /* Delete groups marked for deletion above. At this point the order of the
11896 groups may no longer correspond to the order of the underlying list,
11897 which complicates this a little. First clear out OMP_CLAUSE_DECL for
11898 deleted nodes... */
11899
11900 FOR_EACH_VEC_ELT (*groups, i, grp)
11901 if (grp->deleted)
11902 for (tree d = *grp->grp_start;
11903 d != OMP_CLAUSE_CHAIN (grp->grp_end);
11904 d = OMP_CLAUSE_CHAIN (d))
11905 OMP_CLAUSE_DECL (d) = NULL_TREE;
11906
11907 /* ...then sweep through the list removing the now-empty nodes. */
11908
11909 tail = list_p;
11910 while (*tail)
11911 {
11912 if (OMP_CLAUSE_CODE (*tail) == OMP_CLAUSE_MAP
11913 && OMP_CLAUSE_DECL (*tail) == NULL_TREE)
11914 *tail = OMP_CLAUSE_CHAIN (*tail);
11915 else
11916 tail = &OMP_CLAUSE_CHAIN (*tail);
11917 }
11918
11919 /* Tack on the struct nodes added during nested struct reprocessing. */
11920 if (added_nodes)
11921 {
11922 *tail = added_nodes;
11923 tail = added_tail;
11924 }
11925
11926 /* Now we have finished building the struct sibling lists, reprocess
11927 newly-added "attach" nodes: we need the address of the first
11928 mapped element of each struct sibling list for the bias of the attach
11929 operation -- not necessarily the base address of the whole struct. */
11930 if (struct_map_to_clause)
11931 for (hash_map<tree_operand_hash, tree>::iterator iter
11932 = struct_map_to_clause->begin ();
11933 iter != struct_map_to_clause->end ();
11934 ++iter)
11935 {
11936 tree struct_node = (*iter).second;
11937 gcc_assert (OMP_CLAUSE_CODE (struct_node) == OMP_CLAUSE_MAP);
11938 tree attach = OMP_CLAUSE_CHAIN (struct_node);
11939
11940 if (OMP_CLAUSE_CODE (attach) != OMP_CLAUSE_MAP
11941 || OMP_CLAUSE_MAP_KIND (attach) != GOMP_MAP_ATTACH_DETACH)
11942 continue;
11943
11944 OMP_CLAUSE_SET_MAP_KIND (attach, GOMP_MAP_ATTACH);
11945
11946 /* Sanity check: the standalone attach node will not work if we have
11947 an "enter data" operation (because for those, variables need to be
11948 mapped separately and attach nodes must be grouped together with the
11949 base they attach to). We should only have created the
11950 ATTACH_DETACH node after GOMP_MAP_STRUCT for a target region, so
11951 this should never be true. */
11952 gcc_assert ((region_type & ORT_TARGET) != 0);
11953
11954 /* This is the first sorted node in the struct sibling list. Use it
11955 to recalculate the correct bias to use, namely
11956 (&first_node - attach_decl).
11957 For GOMP_MAP_STRUCT_UNORD, we instead need the lowest address, e.g. the
11958 min(min(min(first,second),third),fourth) element, because the
11959 elements aren't in any particular order. */
11960 tree lowest_addr;
11961 if (OMP_CLAUSE_MAP_KIND (struct_node) == GOMP_MAP_STRUCT_UNORD)
11962 {
11963 tree first_node = OMP_CLAUSE_CHAIN (attach);
11964 unsigned HOST_WIDE_INT num_mappings
11965 = tree_to_uhwi (OMP_CLAUSE_SIZE (struct_node));
11966 lowest_addr = OMP_CLAUSE_DECL (first_node);
11967 lowest_addr = build_fold_addr_expr (lowest_addr);
11968 lowest_addr = fold_convert (pointer_sized_int_node, lowest_addr);
11969 tree next_node = OMP_CLAUSE_CHAIN (first_node);
11970 while (num_mappings > 1)
11971 {
11972 tree tmp = OMP_CLAUSE_DECL (next_node);
11973 tmp = build_fold_addr_expr (tmp);
11974 tmp = fold_convert (pointer_sized_int_node, tmp);
11975 lowest_addr = fold_build2 (MIN_EXPR, pointer_sized_int_node,
11976 lowest_addr, tmp);
11977 next_node = OMP_CLAUSE_CHAIN (next_node);
11978 num_mappings--;
11979 }
11980 lowest_addr = fold_convert (ptrdiff_type_node, lowest_addr);
11981 }
11982 else
11983 {
11984 tree first_node = OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (attach));
11985 first_node = build_fold_addr_expr (first_node);
11986 lowest_addr = fold_convert (ptrdiff_type_node, first_node);
11987 }
11988 tree attach_decl = OMP_CLAUSE_DECL (attach);
11989 attach_decl = fold_convert (ptrdiff_type_node, attach_decl);
11990 OMP_CLAUSE_SIZE (attach)
11991 = fold_build2 (MINUS_EXPR, ptrdiff_type_node, lowest_addr,
11992 attach_decl);
11993
11994 /* Remove GOMP_MAP_ATTACH node from after struct node. */
11995 OMP_CLAUSE_CHAIN (struct_node) = OMP_CLAUSE_CHAIN (attach);
11996 /* ...and re-insert it at the end of our clause list. */
11997 *tail = attach;
11998 OMP_CLAUSE_CHAIN (attach) = NULL_TREE;
11999 tail = &OMP_CLAUSE_CHAIN (attach);
12000 }
12001
12002error_out:
12003 if (struct_map_to_clause)
12004 delete struct_map_to_clause;
12005
12006 return success;
12007}
12008
12009 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
12010 omp context as well as into the enclosing (previous) contexts. */
12011
12012static void
12013gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
12014 enum omp_region_type region_type,
12015 enum tree_code code)
12016{
12017 using namespace omp_addr_tokenizer;
12018 struct gimplify_omp_ctx *ctx, *outer_ctx;
12019 tree c;
12020 tree *orig_list_p = list_p;
12021 int handled_depend_iterators = -1;
12022 int nowait = -1;
12023
12024 ctx = new_omp_context (region_type);
12025 ctx->code = code;
12026 outer_ctx = ctx->outer_context;
12027 if (code == OMP_TARGET)
12028 {
12029 if (!lang_GNU_Fortran ())
12030 ctx->defaultmap[GDMK_POINTER] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
12031 ctx->defaultmap[GDMK_SCALAR] = GOVD_FIRSTPRIVATE;
12032 ctx->defaultmap[GDMK_SCALAR_TARGET] = (lang_GNU_Fortran ()
12033 ? GOVD_MAP : GOVD_FIRSTPRIVATE);
12034 }
12035 if (!lang_GNU_Fortran ())
12036 switch (code)
12037 {
12038 case OMP_TARGET:
12039 case OMP_TARGET_DATA:
12040 case OMP_TARGET_ENTER_DATA:
12041 case OMP_TARGET_EXIT_DATA:
12042 case OACC_DECLARE:
12043 case OACC_HOST_DATA:
12044 case OACC_PARALLEL:
12045 case OACC_KERNELS:
12046 ctx->target_firstprivatize_array_bases = true;
12047 default:
12048 break;
12049 }
12050
12051 vec<omp_mapping_group> *groups = NULL;
12052 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap = NULL;
12053 unsigned grpnum = 0;
12054 tree *grp_start_p = NULL, grp_end = NULL_TREE;
12055
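/* For constructs that accept map clauses, precompute the mapping groups and
   an index over them up front; the OMP_CLAUSE_MAP handling below uses these
   to find each group's extent and any containing-struct relationships.  */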
12056 if (code == OMP_TARGET
12057 || code == OMP_TARGET_DATA
12058 || code == OMP_TARGET_ENTER_DATA
12059 || code == OMP_TARGET_EXIT_DATA
12060 || code == OACC_DATA
12061 || code == OACC_KERNELS
12062 || code == OACC_PARALLEL
12063 || code == OACC_SERIAL
12064 || code == OACC_ENTER_DATA
12065 || code == OACC_EXIT_DATA
12066 || code == OACC_UPDATE
12067 || code == OACC_DECLARE)
12068 {
12069 groups = omp_gather_mapping_groups (list_p);
12070
12071 if (groups)
12072 grpmap = omp_index_mapping_groups (groups);
12073 }
12074
12075 while ((c = *list_p) != NULL)
12076 {
12077 bool remove = false;
12078 bool notice_outer = true;
12079 bool map_descriptor;
12080 const char *check_non_private = NULL;
12081 unsigned int flags;
12082 tree decl;
12083 auto_vec<omp_addr_token *, 10> addr_tokens;
12084
12085 if (grp_end && c == OMP_CLAUSE_CHAIN (grp_end))
12086 {
12087 grp_start_p = NULL;
12088 grp_end = NULL_TREE;
12089 }
12090
12091 switch (OMP_CLAUSE_CODE (c))
12092 {
12093 case OMP_CLAUSE_PRIVATE:
12094 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
12095 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
12096 {
12097 flags |= GOVD_PRIVATE_OUTER_REF;
12098 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
12099 }
12100 else
12101 notice_outer = false;
12102 goto do_add;
12103 case OMP_CLAUSE_SHARED:
12104 flags = GOVD_SHARED | GOVD_EXPLICIT;
12105 goto do_add;
12106 case OMP_CLAUSE_FIRSTPRIVATE:
12107 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
12108 check_non_private = "firstprivate";
12109 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
12110 {
12111 gcc_assert (code == OMP_TARGET);
12112 flags |= GOVD_FIRSTPRIVATE_IMPLICIT;
12113 }
12114 goto do_add;
12115 case OMP_CLAUSE_LASTPRIVATE:
12116 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
12117 switch (code)
12118 {
12119 case OMP_DISTRIBUTE:
12120 error_at (OMP_CLAUSE_LOCATION (c),
12121 "conditional %<lastprivate%> clause on "
12122 "%qs construct", "distribute");
12123 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
12124 break;
12125 case OMP_TASKLOOP:
12126 error_at (OMP_CLAUSE_LOCATION (c),
12127 "conditional %<lastprivate%> clause on "
12128 "%qs construct", "taskloop");
12129 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
12130 break;
12131 default:
12132 break;
12133 }
12134 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
12135 if (code != OMP_LOOP)
12136 check_non_private = "lastprivate";
12137 decl = OMP_CLAUSE_DECL (c);
12138 if (error_operand_p (decl))
12139 goto do_add;
12140 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
12141 && !lang_hooks.decls.omp_scalar_p (decl, true))
12142 {
12143 error_at (OMP_CLAUSE_LOCATION (c),
12144 "non-scalar variable %qD in conditional "
12145 "%<lastprivate%> clause", decl);
12146 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
12147 }
12148 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
12149 flags |= GOVD_LASTPRIVATE_CONDITIONAL;
12150 omp_lastprivate_for_combined_outer_constructs (outer_ctx, decl,
12151 false);
12152 goto do_add;
12153 case OMP_CLAUSE_REDUCTION:
12154 if (OMP_CLAUSE_REDUCTION_TASK (c))
12155 {
12156 if (region_type == ORT_WORKSHARE || code == OMP_SCOPE)
12157 {
12158 if (nowait == -1)
12159 nowait = omp_find_clause (*list_p,
12160 OMP_CLAUSE_NOWAIT) != NULL_TREE;
12161 if (nowait
12162 && (outer_ctx == NULL
12163 || outer_ctx->region_type != ORT_COMBINED_PARALLEL))
12164 {
12165 error_at (OMP_CLAUSE_LOCATION (c),
12166 "%<task%> reduction modifier on a construct "
12167 "with a %<nowait%> clause");
12168 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
12169 }
12170 }
12171 else if ((region_type & ORT_PARALLEL) != ORT_PARALLEL)
12172 {
12173 error_at (OMP_CLAUSE_LOCATION (c),
12174 "invalid %<task%> reduction modifier on construct "
12175 "other than %<parallel%>, %qs, %<sections%> or "
12176 "%<scope%>", lang_GNU_Fortran () ? "do" : "for");
12177 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
12178 }
12179 }
12180 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
12181 switch (code)
12182 {
12183 case OMP_SECTIONS:
12184 error_at (OMP_CLAUSE_LOCATION (c),
12185 "%<inscan%> %<reduction%> clause on "
12186 "%qs construct", "sections");
12187 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
12188 break;
12189 case OMP_PARALLEL:
12190 error_at (OMP_CLAUSE_LOCATION (c),
12191 "%<inscan%> %<reduction%> clause on "
12192 "%qs construct", "parallel");
12193 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
12194 break;
12195 case OMP_TEAMS:
12196 error_at (OMP_CLAUSE_LOCATION (c),
12197 "%<inscan%> %<reduction%> clause on "
12198 "%qs construct", "teams");
12199 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
12200 break;
12201 case OMP_TASKLOOP:
12202 error_at (OMP_CLAUSE_LOCATION (c),
12203 "%<inscan%> %<reduction%> clause on "
12204 "%qs construct", "taskloop");
12205 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
12206 break;
12207 case OMP_SCOPE:
12208 error_at (OMP_CLAUSE_LOCATION (c),
12209 "%<inscan%> %<reduction%> clause on "
12210 "%qs construct", "scope");
12211 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
12212 break;
12213 default:
12214 break;
12215 }
12216 /* FALLTHRU */
12217 case OMP_CLAUSE_IN_REDUCTION:
12218 case OMP_CLAUSE_TASK_REDUCTION:
12219 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
12220 /* OpenACC permits reductions on private variables. */
12221 if (!(region_type & ORT_ACC)
12222 /* taskgroup is actually not a worksharing region. */
12223 && code != OMP_TASKGROUP)
12224 check_non_private = omp_clause_code_name[OMP_CLAUSE_CODE (c)];
12225 decl = OMP_CLAUSE_DECL (c);
12226 if (TREE_CODE (decl) == MEM_REF)
12227 {
12228 tree type = TREE_TYPE (decl);
12229 bool saved_into_ssa = gimplify_ctxp->into_ssa;
12230 gimplify_ctxp->into_ssa = false;
12231 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
12232 NULL, is_gimple_val, fb_rvalue, false)
12233 == GS_ERROR)
12234 {
12235 gimplify_ctxp->into_ssa = saved_into_ssa;
12236 remove = true;
12237 break;
12238 }
12239 gimplify_ctxp->into_ssa = saved_into_ssa;
12240 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
12241 if (DECL_P (v))
12242 {
12243 omp_firstprivatize_variable (ctx, v);
12244 omp_notice_variable (ctx, v, true);
12245 }
12246 decl = TREE_OPERAND (decl, 0);
12247 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
12248 {
12249 gimplify_ctxp->into_ssa = false;
12250 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
12251 NULL, is_gimple_val, fb_rvalue, false)
12252 == GS_ERROR)
12253 {
12254 gimplify_ctxp->into_ssa = saved_into_ssa;
12255 remove = true;
12256 break;
12257 }
12258 gimplify_ctxp->into_ssa = saved_into_ssa;
12259 v = TREE_OPERAND (decl, 1);
12260 if (DECL_P (v))
12261 {
12262 omp_firstprivatize_variable (ctx, v);
12263 omp_notice_variable (ctx, v, true);
12264 }
12265 decl = TREE_OPERAND (decl, 0);
12266 }
12267 if (TREE_CODE (decl) == ADDR_EXPR
12268 || TREE_CODE (decl) == INDIRECT_REF)
12269 decl = TREE_OPERAND (decl, 0);
12270 }
12271 goto do_add_decl;
12272 case OMP_CLAUSE_LINEAR:
12273 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
12274 is_gimple_val, fb_rvalue) == GS_ERROR)
12275 {
12276 remove = true;
12277 break;
12278 }
12279 else
12280 {
12281 if (code == OMP_SIMD
12282 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
12283 {
12284 struct gimplify_omp_ctx *octx = outer_ctx;
12285 if (octx
12286 && octx->region_type == ORT_WORKSHARE
12287 && octx->combined_loop
12288 && !octx->distribute)
12289 {
12290 if (octx->outer_context
12291 && (octx->outer_context->region_type
12292 == ORT_COMBINED_PARALLEL))
12293 octx = octx->outer_context->outer_context;
12294 else
12295 octx = octx->outer_context;
12296 }
12297 if (octx
12298 && octx->region_type == ORT_WORKSHARE
12299 && octx->combined_loop
12300 && octx->distribute)
12301 {
12302 error_at (OMP_CLAUSE_LOCATION (c),
12303 "%<linear%> clause for variable other than "
12304 "loop iterator specified on construct "
12305 "combined with %<distribute%>");
12306 remove = true;
12307 break;
12308 }
12309 }
12310 /* For a combined #pragma omp parallel for simd, we need to put
12311 lastprivate and perhaps firstprivate on the parallel too.
12312 Similarly for #pragma omp for simd. */
12313 struct gimplify_omp_ctx *octx = outer_ctx;
12314 bool taskloop_seen = false;
12315 decl = NULL_TREE;
12316 do
12317 {
12318 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
12319 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
12320 break;
12321 decl = OMP_CLAUSE_DECL (c);
12322 if (error_operand_p (decl))
12323 {
12324 decl = NULL_TREE;
12325 break;
12326 }
12327 flags = GOVD_SEEN;
12328 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
12329 flags |= GOVD_FIRSTPRIVATE;
12330 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
12331 flags |= GOVD_LASTPRIVATE;
12332 if (octx
12333 && octx->region_type == ORT_WORKSHARE
12334 && octx->combined_loop)
12335 {
12336 if (octx->outer_context
12337 && (octx->outer_context->region_type
12338 == ORT_COMBINED_PARALLEL))
12339 octx = octx->outer_context;
12340 else if (omp_check_private (octx, decl, false))
12341 break;
12342 }
12343 else if (octx
12344 && (octx->region_type & ORT_TASK) != 0
12345 && octx->combined_loop)
12346 taskloop_seen = true;
12347 else if (octx
12348 && octx->region_type == ORT_COMBINED_PARALLEL
12349 && ((ctx->region_type == ORT_WORKSHARE
12350 && octx == outer_ctx)
12351 || taskloop_seen))
12352 flags = GOVD_SEEN | GOVD_SHARED;
12353 else if (octx
12354 && ((octx->region_type & ORT_COMBINED_TEAMS)
12355 == ORT_COMBINED_TEAMS))
12356 flags = GOVD_SEEN | GOVD_SHARED;
12357 else if (octx
12358 && octx->region_type == ORT_COMBINED_TARGET)
12359 {
12360 if (flags & GOVD_LASTPRIVATE)
12361 flags = GOVD_SEEN | GOVD_MAP;
12362 }
12363 else
12364 break;
12365 splay_tree_node on
12366 = splay_tree_lookup (octx->variables,
12367 (splay_tree_key) decl);
12368 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
12369 {
12370 octx = NULL;
12371 break;
12372 }
12373 omp_add_variable (octx, decl, flags);
12374 if (octx->outer_context == NULL)
12375 break;
12376 octx = octx->outer_context;
12377 }
12378 while (1);
12379 if (octx
12380 && decl
12381 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
12382 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
12383 omp_notice_variable (octx, decl, true);
12384 }
12385 flags = GOVD_LINEAR | GOVD_EXPLICIT;
12386 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
12387 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
12388 {
12389 notice_outer = false;
12390 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
12391 }
12392 goto do_add;
12393
12394 case OMP_CLAUSE_MAP:
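	  /* On the first clause of each mapping group, record the group's
	     extent from the precomputed GROUPS vector so that the whole
	     group can be examined together below.  */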
12395 if (!grp_start_p)
12396 {
12397 grp_start_p = list_p;
12398 grp_end = (*groups)[grpnum].grp_end;
12399 grpnum++;
12400 }
12401 decl = OMP_CLAUSE_DECL (c);
12402
12403 if (error_operand_p (decl))
12404 {
12405 remove = true;
12406 break;
12407 }
12408
12409 if (!omp_parse_expr (addr_tokens, decl))
12410 {
12411 remove = true;
12412 break;
12413 }
12414
12415 if (remove)
12416 break;
12417 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
12418 {
12419 struct gimplify_omp_ctx *octx;
12420 for (octx = outer_ctx; octx; octx = octx->outer_context)
12421 {
12422 if (octx->region_type != ORT_ACC_HOST_DATA)
12423 break;
12424 splay_tree_node n2
12425 = splay_tree_lookup (octx->variables,
12426 (splay_tree_key) decl);
12427 if (n2)
12428 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
12429 "declared in enclosing %<host_data%> region",
12430 DECL_NAME (decl));
12431 }
12432 }
12433
12434 map_descriptor = false;
12435
12436 /* This condition checks if we're mapping an array descriptor that
12437 isn't inside a derived type -- these have special handling, and
12438 are not handled as structs in omp_build_struct_sibling_lists.
12439 See that function for further details. */
12440 if (*grp_start_p != grp_end
12441 && OMP_CLAUSE_CHAIN (*grp_start_p)
12442 && OMP_CLAUSE_CHAIN (*grp_start_p) != grp_end)
12443 {
12444 tree grp_mid = OMP_CLAUSE_CHAIN (*grp_start_p);
12445 if (omp_map_clause_descriptor_p (grp_mid)
12446 && DECL_P (OMP_CLAUSE_DECL (grp_mid)))
12447 map_descriptor = true;
12448 }
12449 else if (OMP_CLAUSE_CODE (grp_end) == OMP_CLAUSE_MAP
12450 && (OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_RELEASE
12451 || OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_DELETE)
12452 && OMP_CLAUSE_RELEASE_DESCRIPTOR (grp_end))
12453 map_descriptor = true;
12454
12455 /* Adding the decl for a struct access: we haven't created
12456 GOMP_MAP_STRUCT nodes yet, so this statement needs to predict
12457 whether they will be created in gimplify_adjust_omp_clauses.
12458 NOTE: Technically we should probably look through DECL_VALUE_EXPR
12459 here because something that looks like a DECL_P may actually be a
12460 struct access, e.g. variables in a lambda closure
12461 (__closure->__foo) or class members (this->foo). Currently in both
12462 those cases we map the whole of the containing object (directly in
12463 the C++ FE) though, so struct nodes are not created. */
12464 if (c == grp_end
12465 && addr_tokens[0]->type == STRUCTURE_BASE
12466 && addr_tokens[0]->u.structure_base_kind == BASE_DECL
12467 && !map_descriptor)
12468 {
12469 gcc_assert (addr_tokens[1]->type == ACCESS_METHOD);
12470 /* If we got to this struct via a chain of pointers, maybe we
12471 want to map it implicitly instead. */
12472 if (omp_access_chain_p (addr_tokens, 1))
12473 break;
12474 omp_mapping_group *wholestruct;
12475 if (!(region_type & ORT_ACC)
12476 && omp_mapped_by_containing_struct (grpmap,
12477 OMP_CLAUSE_DECL (c),
12478 &wholestruct))
12479 break;
12480 decl = addr_tokens[1]->expr;
12481 if (splay_tree_lookup (ctx->variables, (splay_tree_key) decl))
12482 break;
12483 /* Standalone attach or detach clauses for a struct element
12484 should not inhibit implicit mapping of the whole struct. */
12485 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12486 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
12487 break;
12488 flags = GOVD_MAP | GOVD_EXPLICIT;
12489
12490 gcc_assert (addr_tokens[1]->u.access_kind != ACCESS_DIRECT
12491 || TREE_ADDRESSABLE (decl));
12492 goto do_add_decl;
12493 }
12494
12495 if (!DECL_P (decl))
12496 {
12497 tree d = decl, *pd;
12498 if (TREE_CODE (d) == ARRAY_REF)
12499 {
12500 while (TREE_CODE (d) == ARRAY_REF)
12501 d = TREE_OPERAND (d, 0);
12502 if (TREE_CODE (d) == COMPONENT_REF
12503 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
12504 decl = d;
12505 }
12506 pd = &OMP_CLAUSE_DECL (c);
12507 if (d == decl
12508 && TREE_CODE (decl) == INDIRECT_REF
12509 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
12510 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
12511 == REFERENCE_TYPE)
12512 && (OMP_CLAUSE_MAP_KIND (c)
12513 != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION))
12514 {
12515 pd = &TREE_OPERAND (decl, 0);
12516 decl = TREE_OPERAND (decl, 0);
12517 }
12518
12519 if (addr_tokens[0]->type == STRUCTURE_BASE
12520 && addr_tokens[0]->u.structure_base_kind == BASE_DECL
12521 && addr_tokens[1]->type == ACCESS_METHOD
12522 && (addr_tokens[1]->u.access_kind == ACCESS_POINTER
12523 || (addr_tokens[1]->u.access_kind
12524 == ACCESS_POINTER_OFFSET))
12525 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)))
12526 {
12527 tree base = addr_tokens[1]->expr;
12528 splay_tree_node n
12529 = splay_tree_lookup (ctx->variables,
12530 (splay_tree_key) base);
12531 n->value |= GOVD_SEEN;
12532 }
12533
12534 if (code == OMP_TARGET && OMP_CLAUSE_MAP_IN_REDUCTION (c))
12535 {
12536 /* Don't gimplify *pd fully at this point, as the base
12537 will need to be adjusted during omp lowering. */
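	      /* Peel off the handled components down to the base object,
		 stacking them as we go; the loops below then gimplify any
		 variable array bounds, element sizes and field offsets of
		 the stacked components in place.  */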
12538 auto_vec<tree, 10> expr_stack;
12539 tree *p = pd;
12540 while (handled_component_p (*p)
12541 || TREE_CODE (*p) == INDIRECT_REF
12542 || TREE_CODE (*p) == ADDR_EXPR
12543 || TREE_CODE (*p) == MEM_REF
12544 || TREE_CODE (*p) == NON_LVALUE_EXPR)
12545 {
12546 expr_stack.safe_push (*p);
12547 p = &TREE_OPERAND (*p, 0);
12548 }
12549 for (int i = expr_stack.length () - 1; i >= 0; i--)
12550 {
12551 tree t = expr_stack[i];
12552 if (TREE_CODE (t) == ARRAY_REF
12553 || TREE_CODE (t) == ARRAY_RANGE_REF)
12554 {
12555 if (TREE_OPERAND (t, 2) == NULL_TREE)
12556 {
12557 tree low = unshare_expr (array_ref_low_bound (t));
12558 if (!is_gimple_min_invariant (low))
12559 {
12560 TREE_OPERAND (t, 2) = low;
12561 if (gimplify_expr (&TREE_OPERAND (t, 2),
12562 pre_p, NULL,
12563 is_gimple_reg,
12564 fb_rvalue) == GS_ERROR)
12565 remove = true;
12566 }
12567 }
12568 else if (gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
12569 NULL, is_gimple_reg,
12570 fb_rvalue) == GS_ERROR)
12571 remove = true;
12572 if (TREE_OPERAND (t, 3) == NULL_TREE)
12573 {
12574 tree elmt_size = array_ref_element_size (t);
12575 if (!is_gimple_min_invariant (elmt_size))
12576 {
12577 elmt_size = unshare_expr (elmt_size);
12578 tree elmt_type
12579 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t,
12580 0)));
12581 tree factor
12582 = size_int (TYPE_ALIGN_UNIT (elmt_type));
12583 elmt_size
12584 = size_binop (EXACT_DIV_EXPR, elmt_size,
12585 factor);
12586 TREE_OPERAND (t, 3) = elmt_size;
12587 if (gimplify_expr (&TREE_OPERAND (t, 3),
12588 pre_p, NULL,
12589 is_gimple_reg,
12590 fb_rvalue) == GS_ERROR)
12591 remove = true;
12592 }
12593 }
12594 else if (gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
12595 NULL, is_gimple_reg,
12596 fb_rvalue) == GS_ERROR)
12597 remove = true;
12598 }
12599 else if (TREE_CODE (t) == COMPONENT_REF)
12600 {
12601 if (TREE_OPERAND (t, 2) == NULL_TREE)
12602 {
12603 tree offset = component_ref_field_offset (t);
12604 if (!is_gimple_min_invariant (offset))
12605 {
12606 offset = unshare_expr (offset);
12607 tree field = TREE_OPERAND (t, 1);
12608 tree factor
12609 = size_int (DECL_OFFSET_ALIGN (field)
12610 / BITS_PER_UNIT);
12611 offset = size_binop (EXACT_DIV_EXPR, offset,
12612 factor);
12613 TREE_OPERAND (t, 2) = offset;
12614 if (gimplify_expr (&TREE_OPERAND (t, 2),
12615 pre_p, NULL,
12616 is_gimple_reg,
12617 fb_rvalue) == GS_ERROR)
12618 remove = true;
12619 }
12620 }
12621 else if (gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
12622 NULL, is_gimple_reg,
12623 fb_rvalue) == GS_ERROR)
12624 remove = true;
12625 }
12626 }
12627 for (; expr_stack.length () > 0; )
12628 {
12629 tree t = expr_stack.pop ();
12630
12631 if (TREE_CODE (t) == ARRAY_REF
12632 || TREE_CODE (t) == ARRAY_RANGE_REF)
12633 {
12634 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1))
12635 && gimplify_expr (&TREE_OPERAND (t, 1), pre_p,
12636 NULL, is_gimple_val,
12637 fb_rvalue) == GS_ERROR)
12638 remove = true;
12639 }
12640 }
12641 }
12642 break;
12643 }
12644
12645 if ((code == OMP_TARGET
12646 || code == OMP_TARGET_DATA
12647 || code == OMP_TARGET_ENTER_DATA
12648 || code == OMP_TARGET_EXIT_DATA)
12649 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
12650 {
12651 /* If we have attach/detach but the decl we have is a pointer to
12652 pointer, we're probably mapping the "base level" array
12653 implicitly. Make sure we don't add the decl as if we mapped
12654 it explicitly. That is,
12655
12656 int **arr;
12657 [...]
12658 #pragma omp target map(arr[a][b:c])
12659
12660 should *not* map "arr" explicitly. That way we get a
12661 zero-length "alloc" mapping for it, and assuming it's been
12662 mapped by some previous directive, etc., things work as they
12663 should. */
12664
12665 tree basetype = TREE_TYPE (addr_tokens[0]->expr);
12666
12667 if (TREE_CODE (basetype) == REFERENCE_TYPE)
12668 basetype = TREE_TYPE (basetype);
12669
12670 if (code == OMP_TARGET
12671 && addr_tokens[0]->type == ARRAY_BASE
12672 && addr_tokens[0]->u.structure_base_kind == BASE_DECL
12673 && TREE_CODE (basetype) == POINTER_TYPE
12674 && TREE_CODE (TREE_TYPE (basetype)) == POINTER_TYPE)
12675 break;
12676 }
12677
12678 flags = GOVD_MAP | GOVD_EXPLICIT;
12679 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
12680 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM
12681 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_PRESENT_TO
12682 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_PRESENT_TOFROM)
12683 flags |= GOVD_MAP_ALWAYS_TO;
12684
12685 goto do_add;
12686
12687 case OMP_CLAUSE_AFFINITY:
12688 gimplify_omp_affinity (list_p, pre_p);
12689 remove = true;
12690 break;
12691 case OMP_CLAUSE_DOACROSS:
12692 if (OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
12693 {
12694 tree deps = OMP_CLAUSE_DECL (c);
12695 while (deps && TREE_CODE (deps) == TREE_LIST)
12696 {
12697 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
12698 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
12699 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
12700 pre_p, NULL, is_gimple_val, fb_rvalue);
12701 deps = TREE_CHAIN (deps);
12702 }
12703 }
12704 else
12705 gcc_assert (OMP_CLAUSE_DOACROSS_KIND (c)
12706 == OMP_CLAUSE_DOACROSS_SOURCE);
12707 break;
12708 case OMP_CLAUSE_DEPEND:
12709 if (handled_depend_iterators == -1)
12710 handled_depend_iterators = gimplify_omp_depend (list_p, pre_p);
12711 if (handled_depend_iterators)
12712 {
12713 if (handled_depend_iterators == 2)
12714 remove = true;
12715 break;
12716 }
12717 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
12718 {
12719 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
12720 NULL, is_gimple_val, fb_rvalue);
12721 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
12722 }
12723 if (error_operand_p (OMP_CLAUSE_DECL (c)))
12724 {
12725 remove = true;
12726 break;
12727 }
12728 if (OMP_CLAUSE_DECL (c) != null_pointer_node)
12729 {
12730 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
12731 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
12732 is_gimple_val, fb_rvalue) == GS_ERROR)
12733 {
12734 remove = true;
12735 break;
12736 }
12737 }
12738 if (code == OMP_TASK)
12739 ctx->has_depend = true;
12740 break;
12741
12742 case OMP_CLAUSE_TO:
12743 case OMP_CLAUSE_FROM:
12744 case OMP_CLAUSE__CACHE_:
12745 decl = OMP_CLAUSE_DECL (c);
12746 if (error_operand_p (decl))
12747 {
12748 remove = true;
12749 break;
12750 }
12751 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
12752 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
12753 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
12754 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
12755 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
12756 {
12757 remove = true;
12758 break;
12759 }
12760 if (!DECL_P (decl))
12761 {
12762 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
12763 NULL, is_gimple_lvalue, fb_lvalue)
12764 == GS_ERROR)
12765 {
12766 remove = true;
12767 break;
12768 }
12769 break;
12770 }
12771 goto do_notice;
12772
12773 case OMP_CLAUSE_USE_DEVICE_PTR:
12774 case OMP_CLAUSE_USE_DEVICE_ADDR:
12775 flags = GOVD_EXPLICIT;
12776 goto do_add;
12777
12778 case OMP_CLAUSE_HAS_DEVICE_ADDR:
12779 decl = OMP_CLAUSE_DECL (c);
12780 while (TREE_CODE (decl) == INDIRECT_REF
12781 || TREE_CODE (decl) == ARRAY_REF)
12782 decl = TREE_OPERAND (decl, 0);
12783 flags = GOVD_EXPLICIT;
12784 goto do_add_decl;
12785
12786 case OMP_CLAUSE_IS_DEVICE_PTR:
12787 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
12788 goto do_add;
12789
12790 do_add:
12791 decl = OMP_CLAUSE_DECL (c);
12792 do_add_decl:
12793 if (error_operand_p (decl))
12794 {
12795 remove = true;
12796 break;
12797 }
12798 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
12799 {
12800 tree t = omp_member_access_dummy_var (decl);
12801 if (t)
12802 {
12803 tree v = DECL_VALUE_EXPR (decl);
12804 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
12805 if (outer_ctx)
12806 omp_notice_variable (outer_ctx, t, true);
12807 }
12808 }
12809 if (code == OACC_DATA
12810 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12811 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
12812 flags |= GOVD_MAP_0LEN_ARRAY;
12813 omp_add_variable (ctx, decl, flags);
12814 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
12815 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
12816 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
12817 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
12818 {
12819 struct gimplify_omp_ctx *pctx
12820 = code == OMP_TARGET ? outer_ctx : ctx;
12821 if (pctx)
12822 omp_add_variable (pctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
12823 GOVD_LOCAL | GOVD_SEEN);
12824 if (pctx
12825 && OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
12826 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
12827 find_decl_expr,
12828 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
12829 NULL) == NULL_TREE)
12830 omp_add_variable (pctx,
12831 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
12832 GOVD_LOCAL | GOVD_SEEN);
12833 gimplify_omp_ctxp = pctx;
12834 push_gimplify_context ();
12835
12836 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
12837 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
12838
12839 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
12840 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
12841 pop_gimplify_context
12842 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
12843 push_gimplify_context ();
12844 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
12845 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
12846 pop_gimplify_context
12847 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
12848 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
12849 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
12850
12851 gimplify_omp_ctxp = outer_ctx;
12852 }
12853 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12854 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
12855 {
12856 gimplify_omp_ctxp = ctx;
12857 push_gimplify_context ();
12858 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
12859 {
12860 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
12861 NULL, NULL);
12862 TREE_SIDE_EFFECTS (bind) = 1;
12863 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
12864 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
12865 }
12866 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
12867 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
12868 pop_gimplify_context
12869 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
12870 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
12871
12872 gimplify_omp_ctxp = outer_ctx;
12873 }
12874 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
12875 && OMP_CLAUSE_LINEAR_STMT (c))
12876 {
12877 gimplify_omp_ctxp = ctx;
12878 push_gimplify_context ();
12879 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
12880 {
12881 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
12882 NULL, NULL);
12883 TREE_SIDE_EFFECTS (bind) = 1;
12884 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
12885 OMP_CLAUSE_LINEAR_STMT (c) = bind;
12886 }
12887 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
12888 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
12889 pop_gimplify_context
12890 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
12891 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
12892
12893 gimplify_omp_ctxp = outer_ctx;
12894 }
12895 if (notice_outer)
12896 goto do_notice;
12897 break;
12898
12899 case OMP_CLAUSE_COPYIN:
12900 case OMP_CLAUSE_COPYPRIVATE:
12901 decl = OMP_CLAUSE_DECL (c);
12902 if (error_operand_p (decl))
12903 {
12904 remove = true;
12905 break;
12906 }
12907 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
12908 && !remove
12909 && !omp_check_private (ctx, decl, true))
12910 {
12911 remove = true;
12912 if (is_global_var (decl))
12913 {
12914 if (DECL_THREAD_LOCAL_P (decl))
12915 remove = false;
12916 else if (DECL_HAS_VALUE_EXPR_P (decl))
12917 {
12918 tree value = get_base_address (DECL_VALUE_EXPR (decl));
12919
12920 if (value
12921 && DECL_P (value)
12922 && DECL_THREAD_LOCAL_P (value))
12923 remove = false;
12924 }
12925 }
12926 if (remove)
12927 error_at (OMP_CLAUSE_LOCATION (c),
12928 "copyprivate variable %qE is not threadprivate"
12929 " or private in outer context", DECL_NAME (decl));
12930 }
12931 do_notice:
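	  /* A reduction (or first/lastprivate) on a combined construct may
	     also need the variable shared -- or, for pointer-based
	     array-section reductions, firstprivatized -- on the enclosing
	     combined parallel or teams context; arrange for that here.  */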
12932 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
12933 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
12934 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
12935 && outer_ctx
12936 && ((region_type & ORT_TASKLOOP) == ORT_TASKLOOP
12937 || (region_type == ORT_WORKSHARE
12938 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
12939 && (OMP_CLAUSE_REDUCTION_INSCAN (c)
12940 || code == OMP_LOOP)))
12941 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
12942 || (code == OMP_LOOP
12943 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
12944 && ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
12945 == ORT_COMBINED_TEAMS))))
12946 {
12947 splay_tree_node on
12948 = splay_tree_lookup (outer_ctx->variables,
12949 (splay_tree_key)decl);
12950 if (on == NULL || (on->value & GOVD_DATA_SHARE_CLASS) == 0)
12951 {
12952 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
12953 && TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
12954 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
12955 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
12956 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
12957 == POINTER_TYPE))))
12958 omp_firstprivatize_variable (outer_ctx, decl);
12959 else
12960 {
12961 omp_add_variable (outer_ctx, decl,
12962 GOVD_SEEN | GOVD_SHARED);
12963 if (outer_ctx->outer_context)
12964 omp_notice_variable (outer_ctx->outer_context, decl,
12965 true);
12966 }
12967 }
12968 }
12969 if (outer_ctx)
12970 omp_notice_variable (outer_ctx, decl, true);
12971 if (check_non_private
12972 && (region_type == ORT_WORKSHARE || code == OMP_SCOPE)
12973 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
12974 || decl == OMP_CLAUSE_DECL (c)
12975 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
12976 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
12977 == ADDR_EXPR
12978 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
12979 == POINTER_PLUS_EXPR
12980 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
12981 (OMP_CLAUSE_DECL (c), 0), 0))
12982 == ADDR_EXPR)))))
12983 && omp_check_private (ctx, decl, false))
12984 {
12985 error ("%s variable %qE is private in outer context",
12986 check_non_private, DECL_NAME (decl));
12987 remove = true;
12988 }
12989 break;
12990
12991 case OMP_CLAUSE_DETACH:
12992 flags = GOVD_FIRSTPRIVATE | GOVD_SEEN;
12993 goto do_add;
12994
12995 case OMP_CLAUSE_IF:
12996 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
12997 && OMP_CLAUSE_IF_MODIFIER (c) != code)
12998 {
12999 const char *p[2];
13000 for (int i = 0; i < 2; i++)
13001 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
13002 {
13003 case VOID_CST: p[i] = "cancel"; break;
13004 case OMP_PARALLEL: p[i] = "parallel"; break;
13005 case OMP_SIMD: p[i] = "simd"; break;
13006 case OMP_TASK: p[i] = "task"; break;
13007 case OMP_TASKLOOP: p[i] = "taskloop"; break;
13008 case OMP_TARGET_DATA: p[i] = "target data"; break;
13009 case OMP_TARGET: p[i] = "target"; break;
13010 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
13011 case OMP_TARGET_ENTER_DATA:
13012 p[i] = "target enter data"; break;
13013 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
13014 default: gcc_unreachable ();
13015 }
13016 error_at (OMP_CLAUSE_LOCATION (c),
13017 "expected %qs %<if%> clause modifier rather than %qs",
13018 p[0], p[1]);
13019 remove = true;
13020 }
13021 /* Fall through. */
13022
13023 case OMP_CLAUSE_SELF:
13024 case OMP_CLAUSE_FINAL:
13025 OMP_CLAUSE_OPERAND (c, 0)
13026 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
13027 /* Fall through. */
13028
13029 case OMP_CLAUSE_NUM_TEAMS:
13030 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS
13031 && OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
13032 && !is_gimple_min_invariant (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)))
13033 {
13034 if (error_operand_p (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)))
13035 {
13036 remove = true;
13037 break;
13038 }
13039 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
13040 = get_initialized_tmp_var (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c),
13041 pre_p, NULL, true);
13042 }
13043 /* Fall through. */
13044
13045 case OMP_CLAUSE_SCHEDULE:
13046 case OMP_CLAUSE_NUM_THREADS:
13047 case OMP_CLAUSE_THREAD_LIMIT:
13048 case OMP_CLAUSE_DIST_SCHEDULE:
13049 case OMP_CLAUSE_DEVICE:
13050 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEVICE
13051 && OMP_CLAUSE_DEVICE_ANCESTOR (c))
13052 {
13053 if (code != OMP_TARGET)
13054 {
13055 error_at (OMP_CLAUSE_LOCATION (c),
13056 "%<device%> clause with %<ancestor%> is only "
13057 "allowed on %<target%> construct");
13058 remove = true;
13059 break;
13060 }
13061
13062 tree clauses = *orig_list_p;
13063 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
13064 if (OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_DEVICE
13065 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_FIRSTPRIVATE
13066 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_PRIVATE
13067 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_DEFAULTMAP
13068 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_MAP
13069 )
13070 {
13071 error_at (OMP_CLAUSE_LOCATION (c),
13072 "with %<ancestor%>, only the %<device%>, "
13073 "%<firstprivate%>, %<private%>, %<defaultmap%>, "
13074 "and %<map%> clauses may appear on the "
13075 "construct");
13076 remove = true;
13077 break;
13078 }
13079 }
13080 /* Fall through. */
13081
13082 case OMP_CLAUSE_PRIORITY:
13083 case OMP_CLAUSE_GRAINSIZE:
13084 case OMP_CLAUSE_NUM_TASKS:
13085 case OMP_CLAUSE_FILTER:
13086 case OMP_CLAUSE_HINT:
13087 case OMP_CLAUSE_ASYNC:
13088 case OMP_CLAUSE_WAIT:
13089 case OMP_CLAUSE_NUM_GANGS:
13090 case OMP_CLAUSE_NUM_WORKERS:
13091 case OMP_CLAUSE_VECTOR_LENGTH:
13092 case OMP_CLAUSE_WORKER:
13093 case OMP_CLAUSE_VECTOR:
13094 if (OMP_CLAUSE_OPERAND (c, 0)
13095 && !is_gimple_min_invariant (OMP_CLAUSE_OPERAND (c, 0)))
13096 {
13097 if (error_operand_p (OMP_CLAUSE_OPERAND (c, 0)))
13098 {
13099 remove = true;
13100 break;
13101 }
13102 /* All these clauses care about the value, not a particular decl,
13103 so try to force it into an SSA_NAME or a fresh temporary. */
13104 OMP_CLAUSE_OPERAND (c, 0)
13105 = get_initialized_tmp_var (OMP_CLAUSE_OPERAND (c, 0),
13106 pre_p, NULL, true);
13107 }
13108 break;
13109
13110 case OMP_CLAUSE_GANG:
13111 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
13112 is_gimple_val, fb_rvalue) == GS_ERROR)
13113 remove = true;
13114 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
13115 is_gimple_val, fb_rvalue) == GS_ERROR)
13116 remove = true;
13117 break;
13118
13119 case OMP_CLAUSE_NOWAIT:
13120 nowait = 1;
13121 break;
13122
13123 case OMP_CLAUSE_ORDERED:
13124 case OMP_CLAUSE_UNTIED:
13125 case OMP_CLAUSE_COLLAPSE:
13126 case OMP_CLAUSE_TILE:
13127 case OMP_CLAUSE_AUTO:
13128 case OMP_CLAUSE_SEQ:
13129 case OMP_CLAUSE_INDEPENDENT:
13130 case OMP_CLAUSE_MERGEABLE:
13131 case OMP_CLAUSE_PROC_BIND:
13132 case OMP_CLAUSE_SAFELEN:
13133 case OMP_CLAUSE_SIMDLEN:
13134 case OMP_CLAUSE_NOGROUP:
13135 case OMP_CLAUSE_THREADS:
13136 case OMP_CLAUSE_SIMD:
13137 case OMP_CLAUSE_BIND:
13138 case OMP_CLAUSE_IF_PRESENT:
13139 case OMP_CLAUSE_FINALIZE:
13140 break;
13141
13142 case OMP_CLAUSE_ORDER:
13143 ctx->order_concurrent = true;
13144 break;
13145
13146 case OMP_CLAUSE_DEFAULTMAP:
13147 enum gimplify_defaultmap_kind gdmkmin, gdmkmax;
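	  /* First determine the range of variable categories (GDMK_*) that
	     this defaultmap clause applies to, then record the requested
	     behavior for each category in ctx->defaultmap.  */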
13148 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c))
13149 {
13150 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED:
13151 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALL:
13152 gdmkmin = GDMK_SCALAR;
13153 gdmkmax = GDMK_POINTER;
13154 break;
13155 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR:
13156 gdmkmin = GDMK_SCALAR;
13157 gdmkmax = GDMK_SCALAR_TARGET;
13158 break;
13159 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE:
13160 gdmkmin = gdmkmax = GDMK_AGGREGATE;
13161 break;
13162 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE:
13163 gdmkmin = gdmkmax = GDMK_ALLOCATABLE;
13164 break;
13165 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER:
13166 gdmkmin = gdmkmax = GDMK_POINTER;
13167 break;
13168 default:
13169 gcc_unreachable ();
13170 }
13171 for (int gdmk = gdmkmin; gdmk <= gdmkmax; gdmk++)
13172 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c))
13173 {
13174 case OMP_CLAUSE_DEFAULTMAP_ALLOC:
13175 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_ALLOC_ONLY;
13176 break;
13177 case OMP_CLAUSE_DEFAULTMAP_TO:
13178 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_TO_ONLY;
13179 break;
13180 case OMP_CLAUSE_DEFAULTMAP_FROM:
13181 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FROM_ONLY;
13182 break;
13183 case OMP_CLAUSE_DEFAULTMAP_TOFROM:
13184 ctx->defaultmap[gdmk] = GOVD_MAP;
13185 break;
13186 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE:
13187 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
13188 break;
13189 case OMP_CLAUSE_DEFAULTMAP_NONE:
13190 ctx->defaultmap[gdmk] = 0;
13191 break;
13192 case OMP_CLAUSE_DEFAULTMAP_PRESENT:
13193 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
13194 break;
13195 case OMP_CLAUSE_DEFAULTMAP_DEFAULT:
13196 switch (gdmk)
13197 {
13198 case GDMK_SCALAR:
13199 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
13200 break;
13201 case GDMK_SCALAR_TARGET:
13202 ctx->defaultmap[gdmk] = (lang_GNU_Fortran ()
13203 ? GOVD_MAP : GOVD_FIRSTPRIVATE);
13204 break;
13205 case GDMK_AGGREGATE:
13206 case GDMK_ALLOCATABLE:
13207 ctx->defaultmap[gdmk] = GOVD_MAP;
13208 break;
13209 case GDMK_POINTER:
13210 ctx->defaultmap[gdmk] = GOVD_MAP;
13211 if (!lang_GNU_Fortran ())
13212 ctx->defaultmap[gdmk] |= GOVD_MAP_0LEN_ARRAY;
13213 break;
13214 default:
13215 gcc_unreachable ();
13216 }
13217 break;
13218 default:
13219 gcc_unreachable ();
13220 }
13221 break;
13222
13223 case OMP_CLAUSE_ALIGNED:
13224 decl = OMP_CLAUSE_DECL (c);
13225 if (error_operand_p (decl))
13226 {
13227 remove = true;
13228 break;
13229 }
13230 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
13231 is_gimple_val, fb_rvalue) == GS_ERROR)
13232 {
13233 remove = true;
13234 break;
13235 }
13236 if (!is_global_var (decl)
13237 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
13238 omp_add_variable (ctx, decl, GOVD_ALIGNED);
13239 break;
13240
13241 case OMP_CLAUSE_NONTEMPORAL:
13242 decl = OMP_CLAUSE_DECL (c);
13243 if (error_operand_p (decl))
13244 {
13245 remove = true;
13246 break;
13247 }
13248 omp_add_variable (ctx, decl, GOVD_NONTEMPORAL);
13249 break;
13250
13251 case OMP_CLAUSE_ALLOCATE:
13252 decl = OMP_CLAUSE_DECL (c);
13253 if (error_operand_p (decl))
13254 {
13255 remove = true;
13256 break;
13257 }
13258 if (gimplify_expr (&OMP_CLAUSE_ALLOCATE_ALLOCATOR (c), pre_p, NULL,
13259 is_gimple_val, fb_rvalue) == GS_ERROR)
13260 {
13261 remove = true;
13262 break;
13263 }
13264 else if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
13265 || (TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
13266 == INTEGER_CST))
13267 ;
13268 else if (code == OMP_TASKLOOP
13269 || !DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
13270 OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
13271 = get_initialized_tmp_var (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
13272 pre_p, NULL, false);
13273 break;
13274
13275 case OMP_CLAUSE_DEFAULT:
13276 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
13277 break;
13278
13279 case OMP_CLAUSE_INCLUSIVE:
13280 case OMP_CLAUSE_EXCLUSIVE:
13281 decl = OMP_CLAUSE_DECL (c);
13282 {
13283 splay_tree_node n = splay_tree_lookup (outer_ctx->variables,
13284 (splay_tree_key) decl);
13285 if (n == NULL || (n->value & GOVD_REDUCTION) == 0)
13286 {
13287 error_at (OMP_CLAUSE_LOCATION (c),
13288 "%qD specified in %qs clause but not in %<inscan%> "
13289 "%<reduction%> clause on the containing construct",
13290 decl, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
13291 remove = true;
13292 }
13293 else
13294 {
13295 n->value |= GOVD_REDUCTION_INSCAN;
13296 if (outer_ctx->region_type == ORT_SIMD
13297 && outer_ctx->outer_context
13298 && outer_ctx->outer_context->region_type == ORT_WORKSHARE)
13299 {
13300 n = splay_tree_lookup (outer_ctx->outer_context->variables,
13301 (splay_tree_key) decl);
13302 if (n && (n->value & GOVD_REDUCTION) != 0)
13303 n->value |= GOVD_REDUCTION_INSCAN;
13304 }
13305 }
13306 }
13307 break;
13308
13309 case OMP_CLAUSE_NOHOST:
13310 default:
13311 gcc_unreachable ();
13312 }
13313
13314 if (code == OACC_DATA
13315 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13316 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
13317 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
13318 remove = true;
13319 if (remove)
13320 *list_p = OMP_CLAUSE_CHAIN (c);
13321 else
13322 list_p = &OMP_CLAUSE_CHAIN (c);
13323 }
13324
13325 if (groups)
13326 {
13327 delete grpmap;
13328 delete groups;
13329 }
13330
13331 ctx->clauses = *orig_list_p;
13332 gimplify_omp_ctxp = ctx;
13333}
13334
13335 /* Return true if DECL is a candidate for the shared-to-firstprivate
13336 optimization. We only consider non-addressable scalars that are
13337 not too big and are not references. */
13338
13339static bool
13340omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
13341{
13342 if (TREE_ADDRESSABLE (decl))
13343 return false;
13344 tree type = TREE_TYPE (decl);
13345 if (!is_gimple_reg_type (type)
13346 || TREE_CODE (type) == REFERENCE_TYPE
13347 || TREE_ADDRESSABLE (type))
13348 return false;
13349 /* Don't optimize too large decls, as each thread/task will have
13350 its own. */
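  /* The threshold below allows at most four pointers' worth of data,
     e.g. 32 bytes on a typical 64-bit target.  */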
13351 HOST_WIDE_INT len = int_size_in_bytes (type);
13352 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
13353 return false;
13354 if (omp_privatize_by_reference (decl))
13355 return false;
13356 return true;
13357}
13358
13359 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
13360 For a DECL satisfying omp_shared_to_firstprivate_optimizable_decl_p,
13361 mark it as GOVD_WRITTEN in outer contexts. */
13362
13363static void
13364omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
13365{
13366 for (; ctx; ctx = ctx->outer_context)
13367 {
13368 splay_tree_node n = splay_tree_lookup (ctx->variables,
13369 (splay_tree_key) decl);
13370 if (n == NULL)
13371 continue;
13372 else if (n->value & GOVD_SHARED)
13373 {
13374 n->value |= GOVD_WRITTEN;
13375 return;
13376 }
13377 else if (n->value & GOVD_DATA_SHARE_CLASS)
13378 return;
13379 }
13380}
13381
13382/* Helper callback for walk_gimple_seq to discover possible stores
13383 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
13384 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
13385 for those. */
13386
13387static tree
13388omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
13389{
13390 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
13391
13392 *walk_subtrees = 0;
13393 if (!wi->is_lhs)
13394 return NULL_TREE;
13395
13396 tree op = *tp;
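  /* Strip component references, and MEM_REFs / TARGET_MEM_REFs of an
     ADDR_EXPR, to reach the base DECL being stored to.  */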
13397 do
13398 {
13399 if (handled_component_p (op))
13400 op = TREE_OPERAND (op, 0);
13401 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
13402 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
13403 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
13404 else
13405 break;
13406 }
13407 while (1);
13408 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
13409 return NULL_TREE;
13410
13411 omp_mark_stores (gimplify_omp_ctxp, op);
13412 return NULL_TREE;
13413}
13414
13415/* Helper callback for walk_gimple_seq to discover possible stores
13416 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
13417 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
13418 for those. */
13419
13420static tree
13421omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
13422 bool *handled_ops_p,
13423 struct walk_stmt_info *wi)
13424{
13425 gimple *stmt = gsi_stmt (*gsi_p);
13426 switch (gimple_code (stmt))
13427 {
13428 /* Don't recurse on OpenMP constructs for which
13429 gimplify_adjust_omp_clauses already handled the bodies,
13430 except handle gimple_omp_for_pre_body. */
13431 case GIMPLE_OMP_FOR:
13432 *handled_ops_p = true;
13433 if (gimple_omp_for_pre_body (stmt))
13434 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
13435 omp_find_stores_stmt, omp_find_stores_op, wi);
13436 break;
13437 case GIMPLE_OMP_PARALLEL:
13438 case GIMPLE_OMP_TASK:
13439 case GIMPLE_OMP_SECTIONS:
13440 case GIMPLE_OMP_SINGLE:
13441 case GIMPLE_OMP_SCOPE:
13442 case GIMPLE_OMP_TARGET:
13443 case GIMPLE_OMP_TEAMS:
13444 case GIMPLE_OMP_CRITICAL:
13445 *handled_ops_p = true;
13446 break;
13447 default:
13448 break;
13449 }
13450 return NULL_TREE;
13451}
13452
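/* Data passed through to gimplify_adjust_omp_clauses_1 while iterating over
   the variables recorded in the gimplification context.  */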
13453struct gimplify_adjust_omp_clauses_data
13454{
13455 tree *list_p;
13456 gimple_seq *pre_p;
13457};
13458
13459/* For all variables that were not actually used within the context,
13460 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
13461
13462static int
13463gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
13464{
13465 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
13466 gimple_seq *pre_p
13467 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
13468 tree decl = (tree) n->key;
13469 unsigned flags = n->value;
13470 enum omp_clause_code code;
13471 tree clause;
13472 bool private_debug;
13473
13474 if (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
13475 && (flags & GOVD_LASTPRIVATE_CONDITIONAL) != 0)
13476 flags = GOVD_SHARED | GOVD_SEEN | GOVD_WRITTEN;
13477 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
13478 return 0;
13479 if ((flags & GOVD_SEEN) == 0)
13480 return 0;
13481 if (flags & GOVD_DEBUG_PRIVATE)
13482 {
13483 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
13484 private_debug = true;
13485 }
13486 else if (flags & GOVD_MAP)
13487 private_debug = false;
13488 else
13489 private_debug
13490 = lang_hooks.decls.omp_private_debug_clause (decl,
13491 !!(flags & GOVD_SHARED));
13492 if (private_debug)
13493 code = OMP_CLAUSE_PRIVATE;
13494 else if (flags & GOVD_MAP)
13495 {
13496 code = OMP_CLAUSE_MAP;
13497 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
13498 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
13499 {
13500 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
13501 return 0;
13502 }
13503 if (VAR_P (decl)
13504 && DECL_IN_CONSTANT_POOL (decl)
13505 && !lookup_attribute ("omp declare target",
13506 DECL_ATTRIBUTES (decl)))
13507 {
13508 tree id = get_identifier ("omp declare target");
13509 DECL_ATTRIBUTES (decl)
13510 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
13511 varpool_node *node = varpool_node::get (decl);
13512 if (node)
13513 {
13514 node->offloadable = 1;
13515 if (ENABLE_OFFLOADING)
13516 g->have_offload = true;
13517 }
13518 }
13519 }
13520 else if (flags & GOVD_SHARED)
13521 {
13522 if (is_global_var (decl))
13523 {
13524 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
13525 while (ctx != NULL)
13526 {
13527 splay_tree_node on
13528 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
13529 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
13530 | GOVD_PRIVATE | GOVD_REDUCTION
13531 | GOVD_LINEAR | GOVD_MAP)) != 0)
13532 break;
13533 ctx = ctx->outer_context;
13534 }
13535 if (ctx == NULL)
13536 return 0;
13537 }
13538 code = OMP_CLAUSE_SHARED;
13539 /* Don't optimize shared into firstprivate for read-only vars
13540 on tasks with a depend clause; we shouldn't try to copy them
13541 until the dependencies are satisfied. */
13542 if (gimplify_omp_ctxp->has_depend)
13543 flags |= GOVD_WRITTEN;
13544 }
13545 else if (flags & GOVD_PRIVATE)
13546 code = OMP_CLAUSE_PRIVATE;
13547 else if (flags & GOVD_FIRSTPRIVATE)
13548 {
13549 code = OMP_CLAUSE_FIRSTPRIVATE;
13550 if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
13551 && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
13552 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
13553 {
13554 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
13555 "%<target%> construct", decl);
13556 return 0;
13557 }
13558 }
13559 else if (flags & GOVD_LASTPRIVATE)
13560 code = OMP_CLAUSE_LASTPRIVATE;
13561 else if (flags & (GOVD_ALIGNED | GOVD_NONTEMPORAL))
13562 return 0;
13563 else if (flags & GOVD_CONDTEMP)
13564 {
13565 code = OMP_CLAUSE__CONDTEMP_;
13566 gimple_add_tmp_var (decl);
13567 }
13568 else
13569 gcc_unreachable ();
13570
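  /* A lastprivate, or a shared variable that is written to, must be marked
     as stored-to in enclosing contexts, so that the shared-to-firstprivate
     read-only optimization is not applied to it there.  */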
13571 if (((flags & GOVD_LASTPRIVATE)
13572 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
13573 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
13574 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
13575
13576 tree chain = *list_p;
13577 clause = build_omp_clause (input_location, code);
13578 OMP_CLAUSE_DECL (clause) = decl;
13579 OMP_CLAUSE_CHAIN (clause) = chain;
13580 if (private_debug)
13581 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
13582 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
13583 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
13584 else if (code == OMP_CLAUSE_SHARED
13585 && (flags & GOVD_WRITTEN) == 0
13586 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
13587 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
13588 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
13589 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
13590 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
13591 {
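      /* Build a zero-length array section mapping: a GOMP_MAP_ALLOC node
	 covering zero bytes of the pointed-to data, chained with a
	 pointer-kind node for the base pointer itself.  */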
13592 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
13593 OMP_CLAUSE_DECL (nc) = decl;
13594 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
13595 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
13596 OMP_CLAUSE_DECL (clause)
13597 = build_fold_indirect_ref_loc (input_location, decl);
13598 OMP_CLAUSE_DECL (clause)
13599 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
13600 build_int_cst (build_pointer_type (char_type_node), 0));
13601 OMP_CLAUSE_SIZE (clause) = size_zero_node;
13602 OMP_CLAUSE_SIZE (nc) = size_zero_node;
13603 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
13604 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
13605 tree dtype = TREE_TYPE (decl);
13606 if (TREE_CODE (dtype) == REFERENCE_TYPE)
13607 dtype = TREE_TYPE (dtype);
13608 /* FIRSTPRIVATE_POINTER doesn't work well if we have a
13609 multiply-indirected pointer. If we have a reference to a pointer to
13610 a pointer, it's possible that this should really be
13611 GOMP_MAP_FIRSTPRIVATE_REFERENCE -- but that also doesn't work at the
13612 moment, so stick with this. (See PR113279 and testcases
13613 baseptrs-{4,6}.C:ref2ptrptr_offset_decl_member_slice). */
13614 if (TREE_CODE (dtype) == POINTER_TYPE
13615 && TREE_CODE (TREE_TYPE (dtype)) == POINTER_TYPE)
13616 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
13617 else
13618 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
13619 OMP_CLAUSE_CHAIN (nc) = chain;
13620 OMP_CLAUSE_CHAIN (clause) = nc;
13621 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
13622 gimplify_omp_ctxp = ctx->outer_context;
13623 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
13624 pre_p, NULL, is_gimple_val, fb_rvalue);
13625 gimplify_omp_ctxp = ctx;
13626 }
13627 else if (code == OMP_CLAUSE_MAP)
13628 {
13629 int kind;
13630 /* Not all combinations of these GOVD_MAP flags are actually valid. */
13631 switch (flags & (GOVD_MAP_TO_ONLY
13632 | GOVD_MAP_FORCE
13633 | GOVD_MAP_FORCE_PRESENT
13634 | GOVD_MAP_ALLOC_ONLY
13635 | GOVD_MAP_FROM_ONLY))
13636 {
13637 case 0:
13638 kind = GOMP_MAP_TOFROM;
13639 break;
13640 case GOVD_MAP_FORCE:
13641 kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
13642 break;
13643 case GOVD_MAP_TO_ONLY:
13644 kind = GOMP_MAP_TO;
13645 break;
13646 case GOVD_MAP_FROM_ONLY:
13647 kind = GOMP_MAP_FROM;
13648 break;
13649 case GOVD_MAP_ALLOC_ONLY:
13650 kind = GOMP_MAP_ALLOC;
13651 break;
13652 case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
13653 kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
13654 break;
13655 case GOVD_MAP_FORCE_PRESENT:
13656 kind = GOMP_MAP_FORCE_PRESENT;
13657 break;
13658 case GOVD_MAP_FORCE_PRESENT | GOVD_MAP_ALLOC_ONLY:
13659 kind = GOMP_MAP_FORCE_PRESENT;
13660 break;
13661 default:
13662 gcc_unreachable ();
13663 }
13664 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
13665 /* Setting of the implicit flag for the runtime is currently disabled for
13666 OpenACC. */
13667 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0)
13668 OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (clause) = 1;
13669 if (DECL_SIZE (decl)
13670 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
13671 {
13672 tree decl2 = DECL_VALUE_EXPR (decl);
13673 gcc_assert (INDIRECT_REF_P (decl2));
13674 decl2 = TREE_OPERAND (decl2, 0);
13675 gcc_assert (DECL_P (decl2));
13676 tree mem = build_simple_mem_ref (decl2);
13677 OMP_CLAUSE_DECL (clause) = mem;
13678 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
13679 if (gimplify_omp_ctxp->outer_context)
13680 {
13681 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
13682 omp_notice_variable (ctx, decl2, true);
13683 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
13684 }
13685 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
13686 OMP_CLAUSE_MAP);
13687 OMP_CLAUSE_DECL (nc) = decl;
13688 OMP_CLAUSE_SIZE (nc) = size_zero_node;
13689 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
13690 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
13691 else
13692 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
13693 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
13694 OMP_CLAUSE_CHAIN (clause) = nc;
13695 }
13696 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
13697 && omp_privatize_by_reference (decl))
13698 {
13699 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
13700 OMP_CLAUSE_SIZE (clause)
13701 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
13702 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
13703 gimplify_omp_ctxp = ctx->outer_context;
13704 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
13705 pre_p, NULL, is_gimple_val, fb_rvalue);
13706 gimplify_omp_ctxp = ctx;
13707 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
13708 OMP_CLAUSE_MAP);
13709 OMP_CLAUSE_DECL (nc) = decl;
13710 OMP_CLAUSE_SIZE (nc) = size_zero_node;
13711 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
13712 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
13713 OMP_CLAUSE_CHAIN (clause) = nc;
13714 }
13715 else
13716 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
13717 }
13718 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
13719 {
13720 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
13721 OMP_CLAUSE_DECL (nc) = decl;
13722 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
13723 OMP_CLAUSE_CHAIN (nc) = chain;
13724 OMP_CLAUSE_CHAIN (clause) = nc;
13725 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
13726 gimplify_omp_ctxp = ctx->outer_context;
13727 lang_hooks.decls.omp_finish_clause (nc, pre_p,
13728 (ctx->region_type & ORT_ACC) != 0);
13729 gimplify_omp_ctxp = ctx;
13730 }
13731 *list_p = clause;
13732 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
13733 gimplify_omp_ctxp = ctx->outer_context;
13734 /* Don't call omp_finish_clause on implicitly added OMP_CLAUSE_PRIVATE
13735 in simd. Those are only added for the local vars inside of simd body
13736 and they don't need to be e.g. default constructible. */
13737 if (code != OMP_CLAUSE_PRIVATE || ctx->region_type != ORT_SIMD)
13738 lang_hooks.decls.omp_finish_clause (clause, pre_p,
13739 (ctx->region_type & ORT_ACC) != 0);
13740 if (gimplify_omp_ctxp)
13741 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
13742 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
13743 && DECL_P (OMP_CLAUSE_SIZE (clause)))
13744	  omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
13745			       true);
13746 gimplify_omp_ctxp = ctx;
13747 return 0;
13748}
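/* A sketch of the two-clause pattern built above for variable-sized decls:
   for a VLA "int a[n]" used inside "#pragma omp target", the map clause is
   rewritten to cover the pointed-to storage (*a.ptr, with size n * sizeof
   (int); "a.ptr" here is shorthand for the hidden base pointer in the VLA's
   DECL_VALUE_EXPR, not a real field), and a companion size-zero
   GOMP_MAP_FIRSTPRIVATE_POINTER (or GOMP_MAP_POINTER) clause is chained
   after it for that base pointer.  */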
13749
13750static void
13751gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
13752 enum tree_code code)
13753{
13754 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
13755 tree *orig_list_p = list_p;
13756 tree c, decl;
13757 bool has_inscan_reductions = false;
13758
13759 if (body)
13760 {
13761 struct gimplify_omp_ctx *octx;
13762 for (octx = ctx; octx; octx = octx->outer_context)
13763 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
13764 break;
13765 if (octx)
13766 {
13767 struct walk_stmt_info wi;
13768	  memset (&wi, 0, sizeof (wi));
13769 walk_gimple_seq (body, omp_find_stores_stmt,
13770 omp_find_stores_op, &wi);
13771 }
13772 }
13773
13774 if (ctx->add_safelen1)
13775 {
13776      /* If there are VLAs in the body of the simd loop, prevent
13777	 vectorization.  */
13778 gcc_assert (ctx->region_type == ORT_SIMD);
13779 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
13780 OMP_CLAUSE_SAFELEN_EXPR (c) = integer_one_node;
13781 OMP_CLAUSE_CHAIN (c) = *list_p;
13782 *list_p = c;
13783 list_p = &OMP_CLAUSE_CHAIN (c);
13784 }
13785
13786 if (ctx->region_type == ORT_WORKSHARE
13787 && ctx->outer_context
13788 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL)
13789 {
13790 for (c = ctx->outer_context->clauses; c; c = OMP_CLAUSE_CHAIN (c))
13791 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
13792 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
13793 {
13794 decl = OMP_CLAUSE_DECL (c);
13795 splay_tree_node n
13796 = splay_tree_lookup (ctx->outer_context->variables,
13797 (splay_tree_key) decl);
13798 gcc_checking_assert (!splay_tree_lookup (ctx->variables,
13799 (splay_tree_key) decl));
13800	    omp_add_variable (ctx, decl, n->value);
13801 tree c2 = copy_node (c);
13802 OMP_CLAUSE_CHAIN (c2) = *list_p;
13803 *list_p = c2;
13804 if ((n->value & GOVD_FIRSTPRIVATE) == 0)
13805 continue;
13806 c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
13807 OMP_CLAUSE_FIRSTPRIVATE);
13808 OMP_CLAUSE_DECL (c2) = decl;
13809 OMP_CLAUSE_CHAIN (c2) = *list_p;
13810 *list_p = c2;
13811 }
13812 }
13813
13814 if (code == OMP_TARGET
13815 || code == OMP_TARGET_DATA
13816 || code == OMP_TARGET_ENTER_DATA
13817 || code == OMP_TARGET_EXIT_DATA)
13818 {
13819 vec<omp_mapping_group> *groups;
13820 groups = omp_gather_mapping_groups (list_p);
13821 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap = NULL;
13822
13823 if (groups)
13824 {
13825 grpmap = omp_index_mapping_groups (groups);
13826
13827 omp_resolve_clause_dependencies (code, groups, grpmap);
13828	  omp_build_struct_sibling_lists (code, ctx->region_type, groups,
13829					  &grpmap, list_p);
13830
13831 omp_mapping_group *outlist = NULL;
13832
13833 delete grpmap;
13834 delete groups;
13835
13836	  /* Rebuild now that we have struct sibling lists.  */
13837 groups = omp_gather_mapping_groups (list_p);
13838 grpmap = omp_index_mapping_groups (groups);
13839
13840 bool enter_exit = (code == OMP_TARGET_ENTER_DATA
13841 || code == OMP_TARGET_EXIT_DATA);
13842
13843	  outlist = omp_tsort_mapping_groups (groups, grpmap, enter_exit);
13844	  outlist = omp_segregate_mapping_groups (outlist);
13845	  list_p = omp_reorder_mapping_groups (groups, outlist, list_p);
13846
13847 delete grpmap;
13848 delete groups;
13849 }
13850 }
13851 else if (ctx->region_type & ORT_ACC)
13852 {
13853 vec<omp_mapping_group> *groups;
13854 groups = omp_gather_mapping_groups (list_p);
13855 if (groups)
13856 {
13857 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap;
13858 grpmap = omp_index_mapping_groups (groups);
13859
13860 oacc_resolve_clause_dependencies (groups, grpmap);
13861	  omp_build_struct_sibling_lists (code, ctx->region_type, groups,
13862					  &grpmap, list_p);
13863
13864 delete groups;
13865 delete grpmap;
13866 }
13867 }
13868
13869 tree attach_list = NULL_TREE;
13870 tree *attach_tail = &attach_list;
13871
13872 tree *grp_start_p = NULL, grp_end = NULL_TREE;
13873
13874 while ((c = *list_p) != NULL)
13875 {
13876 splay_tree_node n;
13877 bool remove = false;
13878 bool move_attach = false;
13879
13880 if (grp_end && c == OMP_CLAUSE_CHAIN (grp_end))
13881 grp_end = NULL_TREE;
13882
13883 switch (OMP_CLAUSE_CODE (c))
13884 {
13885 case OMP_CLAUSE_FIRSTPRIVATE:
13886 if ((ctx->region_type & ORT_TARGET)
13887 && (ctx->region_type & ORT_ACC) == 0
13888 && TYPE_ATOMIC (strip_array_types
13889 (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
13890 {
13891 error_at (OMP_CLAUSE_LOCATION (c),
13892 "%<_Atomic%> %qD in %<firstprivate%> clause on "
13893 "%<target%> construct", OMP_CLAUSE_DECL (c));
13894 remove = true;
13895 break;
13896 }
13897 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13898 {
13899 decl = OMP_CLAUSE_DECL (c);
13900 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
13901 if ((n->value & GOVD_MAP) != 0)
13902 {
13903 remove = true;
13904 break;
13905 }
13906 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT_TARGET (c) = 0;
13907 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c) = 0;
13908 }
13909 /* FALLTHRU */
13910 case OMP_CLAUSE_PRIVATE:
13911 case OMP_CLAUSE_SHARED:
13912 case OMP_CLAUSE_LINEAR:
13913 decl = OMP_CLAUSE_DECL (c);
13914 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
13915 remove = !(n->value & GOVD_SEEN);
13916 if ((n->value & GOVD_LASTPRIVATE_CONDITIONAL) != 0
13917 && code == OMP_PARALLEL
13918 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
13919 remove = true;
13920 if (! remove)
13921 {
13922 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
13923 if ((n->value & GOVD_DEBUG_PRIVATE)
13924 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
13925 {
13926 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
13927 || ((n->value & GOVD_DATA_SHARE_CLASS)
13928 == GOVD_SHARED));
13929 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
13930 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
13931 }
13932 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
13933 && ctx->has_depend
13934 && DECL_P (decl))
13935 n->value |= GOVD_WRITTEN;
13936 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
13937 && (n->value & GOVD_WRITTEN) == 0
13938 && DECL_P (decl)
13939 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
13940 OMP_CLAUSE_SHARED_READONLY (c) = 1;
13941 else if (DECL_P (decl)
13942 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
13943 && (n->value & GOVD_WRITTEN) != 0)
13944 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
13945 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
13946 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
13947	      omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
13948 }
13949 else
13950 n->value &= ~GOVD_EXPLICIT;
13951 break;
13952
13953 case OMP_CLAUSE_LASTPRIVATE:
13954 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
13955 accurately reflect the presence of a FIRSTPRIVATE clause. */
13956 decl = OMP_CLAUSE_DECL (c);
13957 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
13958 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
13959 = (n->value & GOVD_FIRSTPRIVATE) != 0;
13960 if (code == OMP_DISTRIBUTE
13961 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
13962 {
13963 remove = true;
13964 error_at (OMP_CLAUSE_LOCATION (c),
13965 "same variable used in %<firstprivate%> and "
13966 "%<lastprivate%> clauses on %<distribute%> "
13967 "construct");
13968 }
13969 if (!remove
13970 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
13971 && DECL_P (decl)
13972 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
13973	omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
13974 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) && code == OMP_PARALLEL)
13975 remove = true;
13976 break;
13977
13978 case OMP_CLAUSE_ALIGNED:
13979 decl = OMP_CLAUSE_DECL (c);
13980	if (!is_global_var (decl))
13981 {
13982 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
13983 remove = n == NULL || !(n->value & GOVD_SEEN);
13984 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
13985 {
13986 struct gimplify_omp_ctx *octx;
13987 if (n != NULL
13988 && (n->value & (GOVD_DATA_SHARE_CLASS
13989 & ~GOVD_FIRSTPRIVATE)))
13990 remove = true;
13991 else
13992 for (octx = ctx->outer_context; octx;
13993 octx = octx->outer_context)
13994 {
13995 n = splay_tree_lookup (octx->variables,
13996 (splay_tree_key) decl);
13997 if (n == NULL)
13998 continue;
13999 if (n->value & GOVD_LOCAL)
14000 break;
14001 /* We have to avoid assigning a shared variable
14002 to itself when trying to add
14003 __builtin_assume_aligned. */
14004 if (n->value & GOVD_SHARED)
14005 {
14006 remove = true;
14007 break;
14008 }
14009 }
14010 }
14011 }
14012 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
14013 {
14014 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
14015 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
14016 remove = true;
14017 }
14018 break;
14019
14020 case OMP_CLAUSE_HAS_DEVICE_ADDR:
14021 decl = OMP_CLAUSE_DECL (c);
14022 while (INDIRECT_REF_P (decl)
14023 || TREE_CODE (decl) == ARRAY_REF)
14024 decl = TREE_OPERAND (decl, 0);
14025 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
14026 remove = n == NULL || !(n->value & GOVD_SEEN);
14027 break;
14028
14029 case OMP_CLAUSE_IS_DEVICE_PTR:
14030 case OMP_CLAUSE_NONTEMPORAL:
14031 decl = OMP_CLAUSE_DECL (c);
14032 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
14033 remove = n == NULL || !(n->value & GOVD_SEEN);
14034 break;
14035
14036 case OMP_CLAUSE_MAP:
14037 decl = OMP_CLAUSE_DECL (c);
14038 if (!grp_end)
14039 {
14040 grp_start_p = list_p;
14041	    grp_end = *omp_group_last (grp_start_p);
14042 }
14043 switch (OMP_CLAUSE_MAP_KIND (c))
14044 {
14045 case GOMP_MAP_PRESENT_ALLOC:
14046 case GOMP_MAP_PRESENT_TO:
14047 case GOMP_MAP_PRESENT_FROM:
14048 case GOMP_MAP_PRESENT_TOFROM:
14049 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_PRESENT);
14050 break;
14051 default:
14052 break;
14053 }
14054 switch (code)
14055 {
14056 case OACC_DATA:
14057 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
14058 break;
14059 /* Fallthrough. */
14060 case OACC_HOST_DATA:
14061 case OACC_ENTER_DATA:
14062 case OACC_EXIT_DATA:
14063 case OMP_TARGET_DATA:
14064 case OMP_TARGET_ENTER_DATA:
14065 case OMP_TARGET_EXIT_DATA:
14066 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
14067 || (OMP_CLAUSE_MAP_KIND (c)
14068 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
14069 /* For target {,enter ,exit }data only the array slice is
14070 mapped, but not the pointer to it. */
14071 remove = true;
14072 if (code == OMP_TARGET_EXIT_DATA
14073 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER
14074 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER))
14075 remove = true;
14076 break;
14077 case OMP_TARGET:
14078 break;
14079 default:
14080 break;
14081 }
14082 if (remove)
14083 break;
14084 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
14085 {
14086 /* Sanity check: attach/detach map kinds use the size as a bias,
14087 and it's never right to use the decl size for such
14088 mappings. */
14089 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
14090 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH
14091 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DETACH
14092 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH_DETACH
14093 && (OMP_CLAUSE_MAP_KIND (c)
14094 != GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION));
14095 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
14096 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
14097 }
14098 gimplify_omp_ctxp = ctx->outer_context;
14099 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p, NULL,
14100 is_gimple_val, fb_rvalue) == GS_ERROR)
14101 {
14102 gimplify_omp_ctxp = ctx;
14103 remove = true;
14104 break;
14105 }
14106 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
14107 || (OMP_CLAUSE_MAP_KIND (c)
14108 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
14109 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
14110 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
14111 {
14112 OMP_CLAUSE_SIZE (c)
14113 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
14114				     false);
14115 if ((ctx->region_type & ORT_TARGET) != 0)
14116 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
14117			      GOVD_FIRSTPRIVATE | GOVD_SEEN);
14118 }
14119 gimplify_omp_ctxp = ctx;
14120 /* Data clauses associated with reductions must be
14121 compatible with present_or_copy. Warn and adjust the clause
14122 if that is not the case. */
14123 if (ctx->region_type == ORT_ACC_PARALLEL
14124 || ctx->region_type == ORT_ACC_SERIAL)
14125 {
14126 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
14127 n = NULL;
14128
14129 if (DECL_P (t))
14130 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
14131
14132 if (n && (n->value & GOVD_REDUCTION))
14133 {
14134 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
14135
14136 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
14137 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
14138 && kind != GOMP_MAP_FORCE_PRESENT
14139 && kind != GOMP_MAP_POINTER)
14140 {
14141 warning_at (OMP_CLAUSE_LOCATION (c), 0,
14142 "incompatible data clause with reduction "
14143 "on %qE; promoting to %<present_or_copy%>",
14144 DECL_NAME (t));
14145 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
14146 }
14147 }
14148 }
14149 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
14150 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT_UNORD)
14151 && (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA))
14152 {
14153 remove = true;
14154 break;
14155 }
14156 /* If we have a DECL_VALUE_EXPR (e.g. this is a class member and/or
14157 a variable captured in a lambda closure), look through that now
14158 before the DECL_P check below. (A code other than COMPONENT_REF,
14159 i.e. INDIRECT_REF, will be a VLA/variable-length array
14160 section. A global var may be a variable in a common block. We
14161 don't want to do this here for either of those.) */
14162 if ((ctx->region_type & ORT_ACC) == 0
14163 && DECL_P (decl)
14164	  && !is_global_var (decl)
14165 && DECL_HAS_VALUE_EXPR_P (decl)
14166 && TREE_CODE (DECL_VALUE_EXPR (decl)) == COMPONENT_REF)
14167 decl = OMP_CLAUSE_DECL (c) = DECL_VALUE_EXPR (decl);
14168 if (TREE_CODE (decl) == TARGET_EXPR)
14169 {
14170 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
14171 is_gimple_lvalue, fb_lvalue) == GS_ERROR)
14172 remove = true;
14173 }
14174 else if (!DECL_P (decl))
14175 {
14176 if ((ctx->region_type & ORT_TARGET) != 0
14177 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
14178 {
14179 if (INDIRECT_REF_P (decl)
14180 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
14181 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
14182 == REFERENCE_TYPE))
14183 decl = TREE_OPERAND (decl, 0);
14184 if (TREE_CODE (decl) == COMPONENT_REF)
14185 {
14186 while (TREE_CODE (decl) == COMPONENT_REF)
14187 decl = TREE_OPERAND (decl, 0);
14188 if (DECL_P (decl))
14189 {
14190 n = splay_tree_lookup (ctx->variables,
14191 (splay_tree_key) decl);
14192 if (!(n->value & GOVD_SEEN))
14193 remove = true;
14194 }
14195 }
14196 }
14197
14198 tree d = decl, *pd;
14199 if (TREE_CODE (d) == ARRAY_REF)
14200 {
14201 while (TREE_CODE (d) == ARRAY_REF)
14202 d = TREE_OPERAND (d, 0);
14203 if (TREE_CODE (d) == COMPONENT_REF
14204 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
14205 decl = d;
14206 }
14207 pd = &OMP_CLAUSE_DECL (c);
14208 if (d == decl
14209 && TREE_CODE (decl) == INDIRECT_REF
14210 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
14211 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
14212 == REFERENCE_TYPE)
14213 && (OMP_CLAUSE_MAP_KIND (c)
14214 != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION))
14215 {
14216 pd = &TREE_OPERAND (decl, 0);
14217 decl = TREE_OPERAND (decl, 0);
14218 }
14219
14220 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
14221 switch (code)
14222 {
14223 case OACC_ENTER_DATA:
14224 case OACC_EXIT_DATA:
14225 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
14226 == ARRAY_TYPE)
14227 remove = true;
14228 else if (code == OACC_ENTER_DATA)
14229 goto change_to_attach;
14230 /* Fallthrough. */
14231 case OMP_TARGET_EXIT_DATA:
14232 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DETACH);
14233 break;
14234 case OACC_UPDATE:
14235 /* An "attach/detach" operation on an update directive
14236 should behave as a GOMP_MAP_ALWAYS_POINTER. Note that
14237 both GOMP_MAP_ATTACH_DETACH and GOMP_MAP_ALWAYS_POINTER
14238 kinds depend on the previous mapping (for non-TARGET
14239 regions). */
14240 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALWAYS_POINTER);
14241 break;
14242 default:
14243 change_to_attach:
14244 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ATTACH);
14245 if ((ctx->region_type & ORT_TARGET) != 0)
14246 move_attach = true;
14247 }
14248 else if ((ctx->region_type & ORT_TARGET) != 0
14249 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
14250 || (OMP_CLAUSE_MAP_KIND (c)
14251 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION)))
14252 move_attach = true;
14253
14254 /* If we have e.g. map(struct: *var), don't gimplify the
14255 argument since omp-low.cc wants to see the decl itself. */
14256 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
14257 break;
14258
14259 /* We've already partly gimplified this in
14260 gimplify_scan_omp_clauses. Don't do any more. */
14261 if (code == OMP_TARGET && OMP_CLAUSE_MAP_IN_REDUCTION (c))
14262 break;
14263
14264 gimplify_omp_ctxp = ctx->outer_context;
14265 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue,
14266 fb_lvalue) == GS_ERROR)
14267 remove = true;
14268 gimplify_omp_ctxp = ctx;
14269 break;
14270 }
14271
14272 if ((code == OMP_TARGET
14273 || code == OMP_TARGET_DATA
14274 || code == OMP_TARGET_ENTER_DATA
14275 || code == OMP_TARGET_EXIT_DATA)
14276 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
14277 {
14278 bool firstprivatize = false;
14279
14280 for (struct gimplify_omp_ctx *octx = ctx->outer_context; octx;
14281 octx = octx->outer_context)
14282 {
14283 splay_tree_node n
14284 = splay_tree_lookup (octx->variables,
14285 (splay_tree_key) OMP_CLAUSE_DECL (c));
14286 /* If this is contained in an outer OpenMP region as a
14287 firstprivate value, remove the attach/detach. */
14288 if (n && (n->value & GOVD_FIRSTPRIVATE))
14289 {
14290 firstprivatize = true;
14291 break;
14292 }
14293 }
14294
14295 enum gomp_map_kind map_kind;
14296 if (firstprivatize)
14297 map_kind = GOMP_MAP_FIRSTPRIVATE_POINTER;
14298 else if (code == OMP_TARGET_EXIT_DATA)
14299 map_kind = GOMP_MAP_DETACH;
14300 else
14301 map_kind = GOMP_MAP_ATTACH;
14302 OMP_CLAUSE_SET_MAP_KIND (c, map_kind);
14303 }
14304 else if ((ctx->region_type & ORT_ACC) != 0
14305 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
14306 {
14307 enum gomp_map_kind map_kind = (code == OACC_EXIT_DATA
14308 ? GOMP_MAP_DETACH
14309 : GOMP_MAP_ATTACH);
14310 OMP_CLAUSE_SET_MAP_KIND (c, map_kind);
14311 }
14312
14313 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
14314 if ((ctx->region_type & ORT_TARGET) != 0
14315 && !(n->value & GOVD_SEEN)
14316 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
14317	    && (!is_global_var (decl)
14318		|| !lookup_attribute ("omp declare target link",
14319 DECL_ATTRIBUTES (decl))))
14320 {
14321 remove = true;
14322	    /* For struct element mappings, if the struct is never referenced
14323	       in the target block and none of the mappings has an always
14324	       modifier, remove all the struct element mappings, which
14325	       immediately follow the GOMP_MAP_STRUCT map clause.  */
14326 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
14327 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT_UNORD)
14328 {
14329 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
14330 while (cnt--)
14331 OMP_CLAUSE_CHAIN (c)
14332 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
14333 }
14334 }
14335 else if (DECL_SIZE (decl)
14336 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
14337 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
14338 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
14339 && (OMP_CLAUSE_MAP_KIND (c)
14340 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
14341 {
14342 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
14343 for these, TREE_CODE (DECL_SIZE (decl)) will always be
14344 INTEGER_CST. */
14345 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
14346
14347 tree decl2 = DECL_VALUE_EXPR (decl);
14348 gcc_assert (INDIRECT_REF_P (decl2));
14349 decl2 = TREE_OPERAND (decl2, 0);
14350 gcc_assert (DECL_P (decl2));
14351 tree mem = build_simple_mem_ref (decl2);
14352 OMP_CLAUSE_DECL (c) = mem;
14353 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
14354 if (ctx->outer_context)
14355 {
14356	      omp_notice_variable (ctx->outer_context, decl2, true);
14357	      omp_notice_variable (ctx->outer_context,
14358				   OMP_CLAUSE_SIZE (c), true);
14359 }
14360 if (((ctx->region_type & ORT_TARGET) != 0
14361 || !ctx->target_firstprivatize_array_bases)
14362 && ((n->value & GOVD_SEEN) == 0
14363 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
14364 {
14365 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
14366 OMP_CLAUSE_MAP);
14367 OMP_CLAUSE_DECL (nc) = decl;
14368 OMP_CLAUSE_SIZE (nc) = size_zero_node;
14369 if (ctx->target_firstprivatize_array_bases)
14370 OMP_CLAUSE_SET_MAP_KIND (nc,
14371 GOMP_MAP_FIRSTPRIVATE_POINTER);
14372 else
14373 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
14374 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
14375 OMP_CLAUSE_CHAIN (c) = nc;
14376 c = nc;
14377 }
14378 }
14379 else
14380 {
14381 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
14382 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
14383 gcc_assert ((n->value & GOVD_SEEN) == 0
14384 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
14385 == 0));
14386 }
14387
14388 /* If we have a target region, we can push all the attaches to the
14389 end of the list (we may have standalone "attach" operations
14390 synthesized for GOMP_MAP_STRUCT nodes that must be processed after
14391 the attachment point AND the pointed-to block have been mapped).
14392 If we have something else, e.g. "enter data", we need to keep
14393 "attach" nodes together with the previous node they attach to so
14394 that separate "exit data" operations work properly (see
14395 libgomp/target.c). */
14396 if ((ctx->region_type & ORT_TARGET) != 0
14397 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
14398 || (OMP_CLAUSE_MAP_KIND (c)
14399 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION)))
14400 move_attach = true;
14401
14402 break;
14403
14404 case OMP_CLAUSE_TO:
14405 case OMP_CLAUSE_FROM:
14406 case OMP_CLAUSE__CACHE_:
14407 decl = OMP_CLAUSE_DECL (c);
14408 if (!DECL_P (decl))
14409 break;
14410 if (DECL_SIZE (decl)
14411 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
14412 {
14413 tree decl2 = DECL_VALUE_EXPR (decl);
14414 gcc_assert (INDIRECT_REF_P (decl2));
14415 decl2 = TREE_OPERAND (decl2, 0);
14416 gcc_assert (DECL_P (decl2));
14417 tree mem = build_simple_mem_ref (decl2);
14418 OMP_CLAUSE_DECL (c) = mem;
14419 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
14420 if (ctx->outer_context)
14421 {
14422	      omp_notice_variable (ctx->outer_context, decl2, true);
14423	      omp_notice_variable (ctx->outer_context,
14424				   OMP_CLAUSE_SIZE (c), true);
14425 }
14426 }
14427 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
14428 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
14429 break;
14430
14431 case OMP_CLAUSE_REDUCTION:
14432 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
14433 {
14434 decl = OMP_CLAUSE_DECL (c);
14435 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
14436 if ((n->value & GOVD_REDUCTION_INSCAN) == 0)
14437 {
14438 remove = true;
14439 error_at (OMP_CLAUSE_LOCATION (c),
14440 "%qD specified in %<inscan%> %<reduction%> clause "
14441 "but not in %<scan%> directive clause", decl);
14442 break;
14443 }
14444 has_inscan_reductions = true;
14445 }
14446 /* FALLTHRU */
14447 case OMP_CLAUSE_IN_REDUCTION:
14448 case OMP_CLAUSE_TASK_REDUCTION:
14449 decl = OMP_CLAUSE_DECL (c);
14450 /* OpenACC reductions need a present_or_copy data clause.
14451 Add one if necessary. Emit error when the reduction is private. */
14452 if (ctx->region_type == ORT_ACC_PARALLEL
14453 || ctx->region_type == ORT_ACC_SERIAL)
14454 {
14455 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
14456 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
14457 {
14458 remove = true;
14459 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
14460 "reduction on %qE", DECL_NAME (decl));
14461 }
14462 else if ((n->value & GOVD_MAP) == 0)
14463 {
14464 tree next = OMP_CLAUSE_CHAIN (c);
14465 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
14466 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
14467 OMP_CLAUSE_DECL (nc) = decl;
14468 OMP_CLAUSE_CHAIN (c) = nc;
14469 lang_hooks.decls.omp_finish_clause (nc, pre_p,
14470 (ctx->region_type
14471 & ORT_ACC) != 0);
14472 while (1)
14473 {
14474 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
14475 if (OMP_CLAUSE_CHAIN (nc) == NULL)
14476 break;
14477 nc = OMP_CLAUSE_CHAIN (nc);
14478 }
14479 OMP_CLAUSE_CHAIN (nc) = next;
14480 n->value |= GOVD_MAP;
14481 }
14482 }
14483 if (DECL_P (decl)
14484 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
14485	omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
14486 break;
14487
14488 case OMP_CLAUSE_ALLOCATE:
14489 decl = OMP_CLAUSE_DECL (c);
14490 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
14491 if (n != NULL && !(n->value & GOVD_SEEN))
14492 {
14493 if ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE | GOVD_LINEAR))
14494 != 0
14495 && (n->value & (GOVD_REDUCTION | GOVD_LASTPRIVATE)) == 0)
14496 remove = true;
14497 }
14498 if (!remove
14499 && OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
14500 && TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)) != INTEGER_CST
14501 && ((ctx->region_type & (ORT_PARALLEL | ORT_TARGET)) != 0
14502 || (ctx->region_type & ORT_TASKLOOP) == ORT_TASK
14503 || (ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS))
14504 {
14505 tree allocator = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
14506 n = splay_tree_lookup (ctx->variables, (splay_tree_key) allocator);
14507 if (n == NULL)
14508 {
14509 enum omp_clause_default_kind default_kind
14510 = ctx->default_kind;
14511 ctx->default_kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
14512	      omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
14513				   true);
14514 ctx->default_kind = default_kind;
14515 }
14516 else
14517	    omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
14518				 true);
14519 }
14520 break;
14521
14522 case OMP_CLAUSE_COPYIN:
14523 case OMP_CLAUSE_COPYPRIVATE:
14524 case OMP_CLAUSE_IF:
14525 case OMP_CLAUSE_SELF:
14526 case OMP_CLAUSE_NUM_THREADS:
14527 case OMP_CLAUSE_NUM_TEAMS:
14528 case OMP_CLAUSE_THREAD_LIMIT:
14529 case OMP_CLAUSE_DIST_SCHEDULE:
14530 case OMP_CLAUSE_DEVICE:
14531 case OMP_CLAUSE_SCHEDULE:
14532 case OMP_CLAUSE_NOWAIT:
14533 case OMP_CLAUSE_ORDERED:
14534 case OMP_CLAUSE_DEFAULT:
14535 case OMP_CLAUSE_UNTIED:
14536 case OMP_CLAUSE_COLLAPSE:
14537 case OMP_CLAUSE_FINAL:
14538 case OMP_CLAUSE_MERGEABLE:
14539 case OMP_CLAUSE_PROC_BIND:
14540 case OMP_CLAUSE_SAFELEN:
14541 case OMP_CLAUSE_SIMDLEN:
14542 case OMP_CLAUSE_DEPEND:
14543 case OMP_CLAUSE_DOACROSS:
14544 case OMP_CLAUSE_PRIORITY:
14545 case OMP_CLAUSE_GRAINSIZE:
14546 case OMP_CLAUSE_NUM_TASKS:
14547 case OMP_CLAUSE_NOGROUP:
14548 case OMP_CLAUSE_THREADS:
14549 case OMP_CLAUSE_SIMD:
14550 case OMP_CLAUSE_FILTER:
14551 case OMP_CLAUSE_HINT:
14552 case OMP_CLAUSE_DEFAULTMAP:
14553 case OMP_CLAUSE_ORDER:
14554 case OMP_CLAUSE_BIND:
14555 case OMP_CLAUSE_DETACH:
14556 case OMP_CLAUSE_USE_DEVICE_PTR:
14557 case OMP_CLAUSE_USE_DEVICE_ADDR:
14558 case OMP_CLAUSE_ASYNC:
14559 case OMP_CLAUSE_WAIT:
14560 case OMP_CLAUSE_INDEPENDENT:
14561 case OMP_CLAUSE_NUM_GANGS:
14562 case OMP_CLAUSE_NUM_WORKERS:
14563 case OMP_CLAUSE_VECTOR_LENGTH:
14564 case OMP_CLAUSE_GANG:
14565 case OMP_CLAUSE_WORKER:
14566 case OMP_CLAUSE_VECTOR:
14567 case OMP_CLAUSE_AUTO:
14568 case OMP_CLAUSE_SEQ:
14569 case OMP_CLAUSE_TILE:
14570 case OMP_CLAUSE_IF_PRESENT:
14571 case OMP_CLAUSE_FINALIZE:
14572 case OMP_CLAUSE_INCLUSIVE:
14573 case OMP_CLAUSE_EXCLUSIVE:
14574 break;
14575
14576 case OMP_CLAUSE_NOHOST:
14577 default:
14578 gcc_unreachable ();
14579 }
14580
14581 if (remove)
14582 *list_p = OMP_CLAUSE_CHAIN (c);
14583 else if (move_attach)
14584 {
14585 /* Remove attach node from here, separate out into its own list. */
14586 *attach_tail = c;
14587 *list_p = OMP_CLAUSE_CHAIN (c);
14588 OMP_CLAUSE_CHAIN (c) = NULL_TREE;
14589 attach_tail = &OMP_CLAUSE_CHAIN (c);
14590 }
14591 else
14592 list_p = &OMP_CLAUSE_CHAIN (c);
14593 }
14594
14595 /* Splice attach nodes at the end of the list. */
14596 if (attach_list)
14597 {
14598 *list_p = attach_list;
14599 list_p = attach_tail;
14600 }
14601
14602 /* Add in any implicit data sharing. */
14603 struct gimplify_adjust_omp_clauses_data data;
14604 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0)
14605 {
14606 /* OpenMP. Implicit clauses are added at the start of the clause list,
14607 but after any non-map clauses. */
14608 tree *implicit_add_list_p = orig_list_p;
14609 while (*implicit_add_list_p
14610 && OMP_CLAUSE_CODE (*implicit_add_list_p) != OMP_CLAUSE_MAP)
14611 implicit_add_list_p = &OMP_CLAUSE_CHAIN (*implicit_add_list_p);
14612 data.list_p = implicit_add_list_p;
14613 }
14614 else
14615 /* OpenACC. */
14616 data.list_p = list_p;
14617 data.pre_p = pre_p;
14618 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
14619
14620 if (has_inscan_reductions)
14621 for (c = *orig_list_p; c; c = OMP_CLAUSE_CHAIN (c))
14622 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
14623 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
14624 {
14625 error_at (OMP_CLAUSE_LOCATION (c),
14626 "%<inscan%> %<reduction%> clause used together with "
14627 "%<linear%> clause for a variable other than loop "
14628 "iterator");
14629 break;
14630 }
14631
14632 gimplify_omp_ctxp = ctx->outer_context;
14633	delete_omp_context (ctx);
14634}
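
/* A sketch of this function's net effect on an OpenMP target region:

     int x, y;
     #pragma omp target map(tofrom: x) map(tofrom: y)
     x++;

   Y is never referenced in the region and its map carries no "always"
   modifier, so (assuming it is not "declare target link") its map clause is
   pruned by the GOVD_SEEN test above; X's clause is kept, its size is
   gimplified, and implicit data sharing is then appended from the context's
   splay tree.  */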
14635
14636/* Return 0 if CONSTRUCTS selectors don't match the OpenMP context,
14637 -1 if unknown yet (simd is involved, won't be known until vectorization)
14638 and 1 if they do. If SCORES is non-NULL, it should point to an array
14639 of at least 2*NCONSTRUCTS+2 ints, and will be filled with the positions
14640 of the CONSTRUCTS (position -1 if it will never match) followed by
14641 number of constructs in the OpenMP context construct trait. If the
14642 score depends on whether it will be in a declare simd clone or not,
14643 the function returns 2 and there will be two sets of the scores, the first
14644 one for the case that it is not in a declare simd clone, the other
14645 that it is in a declare simd clone. */
14646
14647int
14648omp_construct_selector_matches (enum tree_code *constructs, int nconstructs,
14649 int *scores)
14650{
14651 int matched = 0, cnt = 0;
14652 bool simd_seen = false;
14653 bool target_seen = false;
14654 int declare_simd_cnt = -1;
14655 auto_vec<enum tree_code, 16> codes;
14656 for (struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; ctx;)
14657 {
14658 if (((ctx->region_type & ORT_PARALLEL) && ctx->code == OMP_PARALLEL)
14659 || ((ctx->region_type & (ORT_TARGET | ORT_IMPLICIT_TARGET | ORT_ACC))
14660 == ORT_TARGET && ctx->code == OMP_TARGET)
14661 || ((ctx->region_type & ORT_TEAMS) && ctx->code == OMP_TEAMS)
14662 || (ctx->region_type == ORT_WORKSHARE && ctx->code == OMP_FOR)
14663 || (ctx->region_type == ORT_SIMD
14664 && ctx->code == OMP_SIMD
14665	       && !omp_find_clause (ctx->clauses, OMP_CLAUSE_BIND)))
14666 {
14667 ++cnt;
14668 if (scores)
14669	    codes.safe_push (ctx->code);
14670 else if (matched < nconstructs && ctx->code == constructs[matched])
14671 {
14672 if (ctx->code == OMP_SIMD)
14673 {
14674 if (matched)
14675 return 0;
14676 simd_seen = true;
14677 }
14678 ++matched;
14679 }
14680 if (ctx->code == OMP_TARGET)
14681 {
14682 if (scores == NULL)
14683 return matched < nconstructs ? 0 : simd_seen ? -1 : 1;
14684 target_seen = true;
14685 break;
14686 }
14687 }
14688 else if (ctx->region_type == ORT_WORKSHARE
14689 && ctx->code == OMP_LOOP
14690 && ctx->outer_context
14691 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL
14692 && ctx->outer_context->outer_context
14693 && ctx->outer_context->outer_context->code == OMP_LOOP
14694 && ctx->outer_context->outer_context->distribute)
14695 ctx = ctx->outer_context->outer_context;
14696 ctx = ctx->outer_context;
14697 }
14698 if (!target_seen
14699	 && lookup_attribute ("omp declare simd",
14700 DECL_ATTRIBUTES (current_function_decl)))
14701 {
14702	/* Declare simd is a "maybe" case: it is supposed to be added only to
14703	   the clones created by omp-simd-clone.cc, not to the base function.  */
14704 declare_simd_cnt = cnt++;
14705 if (scores)
14706	  codes.safe_push (OMP_SIMD);
14707 else if (cnt == 0
14708 && constructs[0] == OMP_SIMD)
14709 {
14710 gcc_assert (matched == 0);
14711 simd_seen = true;
14712 if (++matched == nconstructs)
14713 return -1;
14714 }
14715 }
14716	if (tree attr = lookup_attribute ("omp declare variant variant",
14717 DECL_ATTRIBUTES (current_function_decl)))
14718 {
14719 tree selectors = TREE_VALUE (attr);
14720 int variant_nconstructs = list_length (selectors);
14721 enum tree_code *variant_constructs = NULL;
14722 if (!target_seen && variant_nconstructs)
14723 {
14724 variant_constructs
14725 = (enum tree_code *) alloca (variant_nconstructs
14726 * sizeof (enum tree_code));
14727 omp_construct_traits_to_codes (selectors, variant_nconstructs,
14728 variant_constructs);
14729 }
14730 for (int i = 0; i < variant_nconstructs; i++)
14731 {
14732 ++cnt;
14733 if (scores)
14734	  codes.safe_push (variant_constructs[i]);
14735 else if (matched < nconstructs
14736 && variant_constructs[i] == constructs[matched])
14737 {
14738 if (variant_constructs[i] == OMP_SIMD)
14739 {
14740 if (matched)
14741 return 0;
14742 simd_seen = true;
14743 }
14744 ++matched;
14745 }
14746 }
14747 }
14748 if (!target_seen
14749	 && lookup_attribute ("omp declare target block",
14750 DECL_ATTRIBUTES (current_function_decl)))
14751 {
14752 if (scores)
14753 codes.safe_push (obj: OMP_TARGET);
14754 else if (matched < nconstructs && constructs[matched] == OMP_TARGET)
14755 ++matched;
14756 }
14757 if (scores)
14758 {
14759 for (int pass = 0; pass < (declare_simd_cnt == -1 ? 1 : 2); pass++)
14760 {
14761 int j = codes.length () - 1;
14762 for (int i = nconstructs - 1; i >= 0; i--)
14763 {
14764 while (j >= 0
14765 && (pass != 0 || declare_simd_cnt != j)
14766 && constructs[i] != codes[j])
14767 --j;
14768 if (pass == 0 && declare_simd_cnt != -1 && j > declare_simd_cnt)
14769 *scores++ = j - 1;
14770 else
14771 *scores++ = j;
14772 }
14773 *scores++ = ((pass == 0 && declare_simd_cnt != -1)
14774 ? codes.length () - 1 : codes.length ());
14775 }
14776 return declare_simd_cnt == -1 ? 1 : 2;
14777 }
14778 if (matched == nconstructs)
14779 return simd_seen ? -1 : 1;
14780 return 0;
14781}
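
/* Usage sketch: for a call site nested as

     #pragma omp target
     #pragma omp teams
     #pragma omp parallel
     f ();

   a variant of f () declared with match (construct={target, parallel}) can
   match, since the CONSTRUCTS selector only has to be a subsequence of the
   context's construct trait {target, teams, parallel}; with SCORES non-null
   the selector positions within that trait are recorded as described in the
   function comment.  */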
14782
14783/* Gimplify OACC_CACHE. */
14784
14785static void
14786gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
14787{
14788 tree expr = *expr_p;
14789
14790	  gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
14791				     OACC_CACHE);
14792	  gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
14793				       OACC_CACHE);
14794
14795 /* TODO: Do something sensible with this information. */
14796
14797 *expr_p = NULL_TREE;
14798}
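
/* E.g. "#pragma acc cache (a[0:n])" in a loop body reaches this point; its
   clauses are scanned and adjusted for their side effects, but the directive
   itself is then dropped (see the TODO above).  */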
14799
14800/* Helper function of gimplify_oacc_declare.  If required, it translates
14801   the 'kind' in CLAUSE into an 'entry' kind and an 'exit' kind.  The
14802   entry kind replaces the one in CLAUSE, while the exit kind is used in
14803   a new omp_clause and returned to the caller.  */
14804
14805static tree
14806gimplify_oacc_declare_1 (tree clause)
14807{
14808 HOST_WIDE_INT kind, new_op;
14809 bool ret = false;
14810 tree c = NULL;
14811
14812 kind = OMP_CLAUSE_MAP_KIND (clause);
14813
14814 switch (kind)
14815 {
14816 case GOMP_MAP_ALLOC:
14817 new_op = GOMP_MAP_RELEASE;
14818 ret = true;
14819 break;
14820
14821 case GOMP_MAP_FROM:
14822 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
14823 new_op = GOMP_MAP_FROM;
14824 ret = true;
14825 break;
14826
14827 case GOMP_MAP_TOFROM:
14828 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
14829 new_op = GOMP_MAP_FROM;
14830 ret = true;
14831 break;
14832
14833 case GOMP_MAP_DEVICE_RESIDENT:
14834 case GOMP_MAP_FORCE_DEVICEPTR:
14835 case GOMP_MAP_FORCE_PRESENT:
14836 case GOMP_MAP_LINK:
14837 case GOMP_MAP_POINTER:
14838 case GOMP_MAP_TO:
14839 break;
14840
14841 default:
14842 gcc_unreachable ();
14843 break;
14844 }
14845
14846 if (ret)
14847 {
14848 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
14849 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
14850 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
14851 }
14852
14853 return c;
14854}
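
/* A sketch of the entry/exit split performed above: for

     #pragma acc declare copy (x)

   the clause arrives as GOMP_MAP_TOFROM; its entry kind is rewritten to
   GOMP_MAP_TO, and the returned GOMP_MAP_FROM clause is saved so that the
   copy-out can be issued when the enclosing scope is left.  */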
14855
14856/* Gimplify OACC_DECLARE. */
14857
14858static void
14859gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
14860{
14861 tree expr = *expr_p;
14862 gomp_target *stmt;
14863 tree clauses, t, decl;
14864
14865 clauses = OACC_DECLARE_CLAUSES (expr);
14866
14867	  gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
14868	  gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
14869
14870 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
14871 {
14872 decl = OMP_CLAUSE_DECL (t);
14873
14874 if (TREE_CODE (decl) == MEM_REF)
14875 decl = TREE_OPERAND (decl, 0);
14876
14877 if (VAR_P (decl) && !is_oacc_declared (decl))
14878 {
14879 tree attr = get_identifier ("oacc declare target");
14880 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
14881 DECL_ATTRIBUTES (decl));
14882 }
14883
14884 if (VAR_P (decl)
14885	  && !is_global_var (decl)
14886 && DECL_CONTEXT (decl) == current_function_decl)
14887 {
14888	  tree c = gimplify_oacc_declare_1 (t);
14889 if (c)
14890 {
14891 if (oacc_declare_returns == NULL)
14892 oacc_declare_returns = new hash_map<tree, tree>;
14893
14894	      oacc_declare_returns->put (decl, c);
14895 }
14896 }
14897
14898 if (gimplify_omp_ctxp)
14899	omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
14900 }
14901
14902 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
14903 clauses);
14904
14905	  gimplify_seq_add_stmt (pre_p, stmt);
14906
14907 *expr_p = NULL_TREE;
14908}
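
/* Usage sketch: for a function-local

     float a[N];
     #pragma acc declare create (a)

   this emits a GF_OMP_TARGET_KIND_OACC_DECLARE statement carrying the
   GOMP_MAP_ALLOC clause, tags A with the "oacc declare target" attribute,
   and records the matching GOMP_MAP_RELEASE in oacc_declare_returns so it
   can be emitted when A's scope ends.  */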
14909
14910/* Gimplify the contents of an OMP_PARALLEL statement. This involves
14911 gimplification of the body, as well as scanning the body for used
14912 variables. We need to do this scan now, because variable-sized
14913 decls will be decomposed during gimplification. */
14914
14915static void
14916gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
14917{
14918 tree expr = *expr_p;
14919 gimple *g;
14920 gimple_seq body = NULL;
14921
14922	  gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
14923 OMP_PARALLEL_COMBINED (expr)
14924 ? ORT_COMBINED_PARALLEL
14925			     : ORT_PARALLEL, OMP_PARALLEL);
14926
14927 push_gimplify_context ();
14928
14929	  g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
14930 if (gimple_code (g) == GIMPLE_BIND)
14931	    pop_gimplify_context (g);
14932 else
14933 pop_gimplify_context (NULL);
14934
14935	  gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
14936				       OMP_PARALLEL);
14937
14938 g = gimple_build_omp_parallel (body,
14939 OMP_PARALLEL_CLAUSES (expr),
14940 NULL_TREE, NULL_TREE);
14941 if (OMP_PARALLEL_COMBINED (expr))
14942	    gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
14943	  gimplify_seq_add_stmt (pre_p, g);
14944 *expr_p = NULL_TREE;
14945}
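
/* A sketch of the transformation: the GENERIC tree for

     #pragma omp parallel num_threads (4)
     { body }

   becomes a GIMPLE_OMP_PARALLEL statement whose clauses have been scanned
   and adjusted and whose body has been gimplified; the child function and
   data argument are still NULL_TREE here and are filled in by the later OMP
   lowering/expansion passes.  */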
14946
14947/* Gimplify the contents of an OMP_TASK statement. This involves
14948 gimplification of the body, as well as scanning the body for used
14949 variables. We need to do this scan now, because variable-sized
14950 decls will be decomposed during gimplification. */
14951
14952static void
14953gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
14954{
14955 tree expr = *expr_p;
14956 gimple *g;
14957 gimple_seq body = NULL;
14958 bool nowait = false;
14959 bool has_depend = false;
14960
14961 if (OMP_TASK_BODY (expr) == NULL_TREE)
14962 {
14963 for (tree c = OMP_TASK_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
14964 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
14965 {
14966 has_depend = true;
14967 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET)
14968 {
14969 error_at (OMP_CLAUSE_LOCATION (c),
14970 "%<mutexinoutset%> kind in %<depend%> clause on a "
14971 "%<taskwait%> construct");
14972 break;
14973 }
14974 }
14975 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NOWAIT)
14976 nowait = true;
14977 if (nowait && !has_depend)
14978 {
14979 error_at (EXPR_LOCATION (expr),
14980 "%<taskwait%> construct with %<nowait%> clause but no "
14981 "%<depend%> clauses");
14982 *expr_p = NULL_TREE;
14983 return;
14984 }
14985 }
14986
14987	  gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
14988				     omp_find_clause (OMP_TASK_CLAUSES (expr),
14989						      OMP_CLAUSE_UNTIED)
14990				     ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
14991
14992 if (OMP_TASK_BODY (expr))
14993 {
14994 push_gimplify_context ();
14995
14996	      g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
14997 if (gimple_code (g) == GIMPLE_BIND)
14998	pop_gimplify_context (g);
14999 else
15000 pop_gimplify_context (NULL);
15001 }
15002
15003	  gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
15004				       OMP_TASK);
15005
15006 g = gimple_build_omp_task (body,
15007 OMP_TASK_CLAUSES (expr),
15008 NULL_TREE, NULL_TREE,
15009 NULL_TREE, NULL_TREE, NULL_TREE);
15010 if (OMP_TASK_BODY (expr) == NULL_TREE)
15011	    gimple_omp_task_set_taskwait_p (g, true);
15012	  gimplify_seq_add_stmt (pre_p, g);
15013 *expr_p = NULL_TREE;
15014}
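
/* Note the body-less case above: "#pragma omp taskwait depend (in: x)
   nowait" is represented as an OMP_TASK with no body and becomes a
   GIMPLE_OMP_TASK with taskwait_p set, whereas a plain "#pragma omp
   taskwait" without such clauses is handled by the front ends as a runtime
   call and never reaches this function.  */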
15015
15016/* Helper function for gimplify_omp_for. If *TP is not a gimple constant,
15017 force it into a temporary initialized in PRE_P and add firstprivate clause
15018 to ORIG_FOR_STMT. */
15019
15020static void
15021gimplify_omp_taskloop_expr (tree type, tree *tp, gimple_seq *pre_p,
15022 tree orig_for_stmt)
15023{
15024	  if (*tp == NULL || is_gimple_constant (*tp))
15025 return;
15026
15027	  *tp = get_initialized_tmp_var (*tp, pre_p, NULL, false);
15028	  /* A reference-to-pointer conversion is considered useless, but it
15029	     is significant for the firstprivate clause.  Force it
15030	     here.  */
15031 if (type
15032 && TREE_CODE (type) == POINTER_TYPE
15033 && TREE_CODE (TREE_TYPE (*tp)) == REFERENCE_TYPE)
15034 {
15035 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
15036 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v, *tp);
15037	      gimplify_and_add (m, pre_p);
15038 *tp = v;
15039 }
15040
15041 tree c = build_omp_clause (input_location, OMP_CLAUSE_FIRSTPRIVATE);
15042 OMP_CLAUSE_DECL (c) = *tp;
15043 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
15044 OMP_FOR_CLAUSES (orig_for_stmt) = c;
15045}
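
/* For instance, for "#pragma omp taskloop" with a bound such as "n * 2",
   the bound is evaluated once into a temporary ahead of the construct and
   that temporary is added as an implicit firstprivate clause, so every task
   generated by the taskloop sees the same value.  */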
15046
15047/* Helper function of gimplify_omp_for, find OMP_ORDERED with
15048 null OMP_ORDERED_BODY inside of OMP_FOR's body. */
15049
15050static tree
15051find_standalone_omp_ordered (tree *tp, int *walk_subtrees, void *)
15052{
15053 switch (TREE_CODE (*tp))
15054 {
15055 case OMP_ORDERED:
15056 if (OMP_ORDERED_BODY (*tp) == NULL_TREE)
15057 return *tp;
15058 break;
15059 case OMP_SIMD:
15060 case OMP_PARALLEL:
15061 case OMP_TARGET:
15062 *walk_subtrees = 0;
15063 break;
15064 default:
15065 break;
15066 }
15067 return NULL_TREE;
15068}
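
/* A standalone ordered directive here is e.g.

     #pragma omp ordered depend (sink: i - 1)

   inside a loop carrying an ordered(n) clause; it has no attached body.
   OMP_SIMD, OMP_PARALLEL and OMP_TARGET subtrees are skipped because an
   ordered directive within them binds to that nested region rather than to
   this loop.  */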
15069
15070/* Gimplify the gross structure of an OMP_FOR statement. */
15071
15072static enum gimplify_status
15073gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
15074{
15075 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
15076 enum gimplify_status ret = GS_ALL_DONE;
15077 enum gimplify_status tret;
15078 gomp_for *gfor;
15079 gimple_seq for_body, for_pre_body;
15080 int i;
15081 bitmap has_decl_expr = NULL;
15082 enum omp_region_type ort = ORT_WORKSHARE;
15083 bool openacc = TREE_CODE (*expr_p) == OACC_LOOP;
15084
15085 orig_for_stmt = for_stmt = *expr_p;
15086
15087	  bool loop_p = (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_BIND)
15088 != NULL_TREE);
15089 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
15090 {
15091 tree *data[4] = { NULL, NULL, NULL, NULL };
15092 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
15093 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
15094 find_combined_omp_for, data, NULL);
15095 if (inner_for_stmt == NULL_TREE)
15096 {
15097 gcc_assert (seen_error ());
15098 *expr_p = NULL_TREE;
15099 return GS_ERROR;
15100 }
15101 if (data[2] && OMP_FOR_PRE_BODY (*data[2]))
15102 {
15103 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data[2]),
15104 &OMP_FOR_PRE_BODY (for_stmt));
15105 OMP_FOR_PRE_BODY (*data[2]) = NULL_TREE;
15106 }
15107 if (OMP_FOR_PRE_BODY (inner_for_stmt))
15108 {
15109 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt),
15110 &OMP_FOR_PRE_BODY (for_stmt));
15111 OMP_FOR_PRE_BODY (inner_for_stmt) = NULL_TREE;
15112 }
15113
15114 if (data[0])
15115 {
15116 /* We have some statements or variable declarations in between
15117 the composite construct directives. Move them around the
15118 inner_for_stmt. */
15119 data[0] = expr_p;
15120 for (i = 0; i < 3; i++)
15121 if (data[i])
15122 {
15123 tree t = *data[i];
15124 if (i < 2 && data[i + 1] == &OMP_BODY (t))
15125 data[i + 1] = data[i];
15126 *data[i] = OMP_BODY (t);
15127 tree body = build3 (BIND_EXPR, void_type_node, NULL_TREE,
15128 NULL_TREE, make_node (BLOCK));
15129 OMP_BODY (t) = body;
15130 append_to_statement_list_force (inner_for_stmt,
15131 &BIND_EXPR_BODY (body));
15132 *data[3] = t;
15133	    data[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body)));
15134 gcc_assert (*data[3] == inner_for_stmt);
15135 }
15136 return GS_OK;
15137 }
15138
15139 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
15140 if (!loop_p
15141 && OMP_FOR_ORIG_DECLS (inner_for_stmt)
15142 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
15143 i)) == TREE_LIST
15144 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
15145 i)))
15146 {
15147 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
15148 /* Class iterators aren't allowed on OMP_SIMD, so the only
15149 case we need to solve is distribute parallel for. They are
15150 allowed on the loop construct, but that is already handled
15151 in gimplify_omp_loop. */
15152 gcc_assert (TREE_CODE (inner_for_stmt) == OMP_FOR
15153 && TREE_CODE (for_stmt) == OMP_DISTRIBUTE
15154 && data[1]);
15155 tree orig_decl = TREE_PURPOSE (orig);
15156 tree last = TREE_VALUE (orig);
15157 tree *pc;
15158 for (pc = &OMP_FOR_CLAUSES (inner_for_stmt);
15159 *pc; pc = &OMP_CLAUSE_CHAIN (*pc))
15160 if ((OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE
15161 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LASTPRIVATE)
15162 && OMP_CLAUSE_DECL (*pc) == orig_decl)
15163 break;
15164 if (*pc == NULL_TREE)
15165 {
15166 tree *spc;
15167 for (spc = &OMP_PARALLEL_CLAUSES (*data[1]);
15168 *spc; spc = &OMP_CLAUSE_CHAIN (*spc))
15169 if (OMP_CLAUSE_CODE (*spc) == OMP_CLAUSE_PRIVATE
15170 && OMP_CLAUSE_DECL (*spc) == orig_decl)
15171 break;
15172 if (*spc)
15173 {
15174 tree c = *spc;
15175 *spc = OMP_CLAUSE_CHAIN (c);
15176 OMP_CLAUSE_CHAIN (c) = NULL_TREE;
15177 *pc = c;
15178 }
15179 }
15180 if (*pc == NULL_TREE)
15181 ;
15182 else if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE)
15183 {
15184 /* private clause will appear only on inner_for_stmt.
15185 Change it into firstprivate, and add private clause
15186 on for_stmt. */
15187 tree c = copy_node (*pc);
15188 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
15189 OMP_FOR_CLAUSES (for_stmt) = c;
15190 OMP_CLAUSE_CODE (*pc) = OMP_CLAUSE_FIRSTPRIVATE;
15191 lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
15192 }
15193 else
15194 {
15195 /* lastprivate clause will appear on both inner_for_stmt
15196 and for_stmt. Add firstprivate clause to
15197 inner_for_stmt. */
15198 tree c = build_omp_clause (OMP_CLAUSE_LOCATION (*pc),
15199 OMP_CLAUSE_FIRSTPRIVATE);
15200 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (*pc);
15201 OMP_CLAUSE_CHAIN (c) = *pc;
15202 *pc = c;
15203 lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
15204 }
15205 tree c = build_omp_clause (UNKNOWN_LOCATION,
15206 OMP_CLAUSE_FIRSTPRIVATE);
15207 OMP_CLAUSE_DECL (c) = last;
15208 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
15209 OMP_PARALLEL_CLAUSES (*data[1]) = c;
15210 c = build_omp_clause (UNKNOWN_LOCATION,
15211 *pc ? OMP_CLAUSE_SHARED
15212 : OMP_CLAUSE_FIRSTPRIVATE);
15213 OMP_CLAUSE_DECL (c) = orig_decl;
15214 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
15215 OMP_PARALLEL_CLAUSES (*data[1]) = c;
15216 }
15217 /* Similarly, take care of C++ range for temporaries, those should
15218 be firstprivate on OMP_PARALLEL if any. */
15219 if (data[1])
15220 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
15221 if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
15222 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
15223 i)) == TREE_LIST
15224 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
15225 i)))
15226 {
15227 tree orig
15228 = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
15229 tree v = TREE_CHAIN (orig);
15230 tree c = build_omp_clause (UNKNOWN_LOCATION,
15231 OMP_CLAUSE_FIRSTPRIVATE);
15232 /* First add firstprivate clause for the __for_end artificial
15233 decl. */
15234 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 1);
15235 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
15236 == REFERENCE_TYPE)
15237 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
15238 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
15239 OMP_PARALLEL_CLAUSES (*data[1]) = c;
15240 if (TREE_VEC_ELT (v, 0))
15241 {
15242 /* And now the same for __for_range artificial decl if it
15243 exists. */
15244 c = build_omp_clause (UNKNOWN_LOCATION,
15245 OMP_CLAUSE_FIRSTPRIVATE);
15246 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 0);
15247 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
15248 == REFERENCE_TYPE)
15249 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
15250 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
15251 OMP_PARALLEL_CLAUSES (*data[1]) = c;
15252 }
15253 }
15254 }
15255
15256 switch (TREE_CODE (for_stmt))
15257 {
15258 case OMP_FOR:
15259 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
15260 {
15261 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
15262				 OMP_CLAUSE_SCHEDULE))
15263 error_at (EXPR_LOCATION (for_stmt),
15264 "%qs clause may not appear on non-rectangular %qs",
15265 "schedule", lang_GNU_Fortran () ? "do" : "for");
15266	  if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED))
15267 error_at (EXPR_LOCATION (for_stmt),
15268 "%qs clause may not appear on non-rectangular %qs",
15269 "ordered", lang_GNU_Fortran () ? "do" : "for");
15270 }
15271 break;
15272 case OMP_DISTRIBUTE:
15273 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt)
15274 && omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
15275			      OMP_CLAUSE_DIST_SCHEDULE))
15276 error_at (EXPR_LOCATION (for_stmt),
15277 "%qs clause may not appear on non-rectangular %qs",
15278 "dist_schedule", "distribute");
15279 break;
15280 case OACC_LOOP:
15281 ort = ORT_ACC;
15282 break;
15283 case OMP_TASKLOOP:
15284 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
15285 {
15286 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
15287				 OMP_CLAUSE_GRAINSIZE))
15288 error_at (EXPR_LOCATION (for_stmt),
15289 "%qs clause may not appear on non-rectangular %qs",
15290 "grainsize", "taskloop");
15291 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
15292				 OMP_CLAUSE_NUM_TASKS))
15293 error_at (EXPR_LOCATION (for_stmt),
15294 "%qs clause may not appear on non-rectangular %qs",
15295 "num_tasks", "taskloop");
15296 }
15297	      if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
15298 ort = ORT_UNTIED_TASKLOOP;
15299 else
15300 ort = ORT_TASKLOOP;
15301 break;
15302 case OMP_SIMD:
15303 ort = ORT_SIMD;
15304 break;
15305 default:
15306 gcc_unreachable ();
15307 }
15308
15309 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
15310 clause for the IV. */
15311 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
15312 {
15313 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
15314 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
15315 decl = TREE_OPERAND (t, 0);
15316 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
15317 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
15318 && OMP_CLAUSE_DECL (c) == decl)
15319 {
15320 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
15321 break;
15322 }
15323 }
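
  /* E.g. for "#pragma omp simd linear (i)" on "for (i = 0; i < n; i++)",
     the initial value of I comes from the loop's own initialization, so
     copying I in from the enclosing context would be useless; the flag set
     above records that.  */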
15324
15325 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
15326	    gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
15327			       loop_p && TREE_CODE (for_stmt) != OMP_SIMD
15328 ? OMP_LOOP : TREE_CODE (for_stmt));
15329
15330 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
15331 gimplify_omp_ctxp->distribute = true;
15332
15333 /* Handle OMP_FOR_INIT. */
15334 for_pre_body = NULL;
15335 if ((ort == ORT_SIMD
15336 || (inner_for_stmt && TREE_CODE (inner_for_stmt) == OMP_SIMD))
15337 && OMP_FOR_PRE_BODY (for_stmt))
15338 {
15339 has_decl_expr = BITMAP_ALLOC (NULL);
15340 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
15341 && VAR_P (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt))))
15342 {
15343 t = OMP_FOR_PRE_BODY (for_stmt);
15344 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
15345 }
15346 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
15347 {
15348 tree_stmt_iterator si;
15349	  for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
15350	       tsi_next (&si))
15351 {
15352	      t = tsi_stmt (si);
15353 if (TREE_CODE (t) == DECL_EXPR
15354 && VAR_P (DECL_EXPR_DECL (t)))
15355 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
15356 }
15357 }
15358 }
15359 if (OMP_FOR_PRE_BODY (for_stmt))
15360 {
15361 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
15362 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), seq_p: &for_pre_body);
15363 else
15364 {
15365 struct gimplify_omp_ctx ctx;
15366 memset (s: &ctx, c: 0, n: sizeof (ctx));
15367 ctx.region_type = ORT_NONE;
15368 gimplify_omp_ctxp = &ctx;
15369 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), seq_p: &for_pre_body);
15370 gimplify_omp_ctxp = NULL;
15371 }
15372 }
15373 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
15374
15375 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
15376 for_stmt = inner_for_stmt;
15377
15378 /* For taskloop, need to gimplify the start, end and step before the
15379 taskloop, outside of the taskloop omp context. */
15380 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
15381 {
15382 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
15383 {
15384 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
15385 gimple_seq *for_pre_p = (gimple_seq_empty_p (s: for_pre_body)
15386 ? pre_p : &for_pre_body);
15387 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
15388 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
15389 {
15390 tree v = TREE_OPERAND (t, 1);
15391 gimplify_omp_taskloop_expr (type, tp: &TREE_VEC_ELT (v, 1),
15392 pre_p: for_pre_p, orig_for_stmt);
15393 gimplify_omp_taskloop_expr (type, tp: &TREE_VEC_ELT (v, 2),
15394 pre_p: for_pre_p, orig_for_stmt);
15395 }
15396 else
15397 gimplify_omp_taskloop_expr (type, tp: &TREE_OPERAND (t, 1), pre_p: for_pre_p,
15398 orig_for_stmt);
15399
15400 /* Handle OMP_FOR_COND. */
15401 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
15402 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
15403 {
15404 tree v = TREE_OPERAND (t, 1);
15405 gimplify_omp_taskloop_expr (type, tp: &TREE_VEC_ELT (v, 1),
15406 pre_p: for_pre_p, orig_for_stmt);
15407 gimplify_omp_taskloop_expr (type, tp: &TREE_VEC_ELT (v, 2),
15408 pre_p: for_pre_p, orig_for_stmt);
15409 }
15410 else
15411 gimplify_omp_taskloop_expr (type, tp: &TREE_OPERAND (t, 1), pre_p: for_pre_p,
15412 orig_for_stmt);
15413
15414 /* Handle OMP_FOR_INCR. */
15415 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
15416 if (TREE_CODE (t) == MODIFY_EXPR)
15417 {
15418 decl = TREE_OPERAND (t, 0);
15419 t = TREE_OPERAND (t, 1);
15420 tree *tp = &TREE_OPERAND (t, 1);
15421 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
15422 tp = &TREE_OPERAND (t, 0);
15423
15424 gimplify_omp_taskloop_expr (NULL_TREE, tp, pre_p: for_pre_p,
15425 orig_for_stmt);
15426 }
15427 }
15428
15429 gimplify_scan_omp_clauses (list_p: &OMP_FOR_CLAUSES (orig_for_stmt), pre_p, region_type: ort,
15430 code: OMP_TASKLOOP);
15431 }
15432
15433 if (orig_for_stmt != for_stmt)
15434 gimplify_omp_ctxp->combined_loop = true;
15435
15436 for_body = NULL;
15437 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
15438 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
15439 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
15440 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
15441
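  /* An ordered clause together with a stand-alone ordered construct in the
     loop body makes this a doacross loop; note that on the clause and
     collect the loop iterator decls for the doacross expansion.  */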
  tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
  bool is_doacross = false;
  if (c && walk_tree_without_duplicates (&OMP_FOR_BODY (for_stmt),
					 find_standalone_omp_ordered, NULL))
    {
      OMP_CLAUSE_ORDERED_DOACROSS (c) = 1;
      is_doacross = true;
      int len = TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt));
      gimplify_omp_ctxp->loop_iter_var.create (len * 2);
      for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
	if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LINEAR)
	  {
	    error_at (OMP_CLAUSE_LOCATION (*pc),
		      "%<linear%> clause may not be specified together "
		      "with %<ordered%> clause if stand-alone %<ordered%> "
		      "construct is nested in it");
	    *pc = OMP_CLAUSE_CHAIN (*pc);
	  }
	else
	  pc = &OMP_CLAUSE_CHAIN (*pc);
    }
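  /* Pick up the collapse depth and tile size, and record the DECLs used in
     allocate clauses so that matching iteration variables get a temporary
     below.  */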
  int collapse = 1, tile = 0;
  c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
  if (c)
    collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
  c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
  if (c)
    tile = list_length (OMP_CLAUSE_TILE_LIST (c));
  c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ALLOCATE);
  hash_set<tree> *allocate_uids = NULL;
  if (c)
    {
      allocate_uids = new hash_set<tree>;
      for (; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE)
	  allocate_uids->add (OMP_CLAUSE_DECL (c));
    }
  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
    {
      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
      gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
      decl = TREE_OPERAND (t, 0);
      gcc_assert (DECL_P (decl));
      gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
		  || POINTER_TYPE_P (TREE_TYPE (decl)));
      if (is_doacross)
	{
	  if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
	    {
	      tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
	      if (TREE_CODE (orig_decl) == TREE_LIST)
		{
		  orig_decl = TREE_PURPOSE (orig_decl);
		  if (!orig_decl)
		    orig_decl = decl;
		}
	      gimplify_omp_ctxp->loop_iter_var.quick_push (orig_decl);
	    }
	  else
	    gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
	  gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
	}

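      /* Diagnose a threadprivate iteration variable, preferring the
	 original user-visible decl from OMP_FOR_ORIG_DECLS for the
	 diagnostic when it is available.  */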
      if (for_stmt == orig_for_stmt)
	{
	  tree orig_decl = decl;
	  if (OMP_FOR_ORIG_DECLS (for_stmt))
	    {
	      orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
	      if (TREE_CODE (orig_decl) == TREE_LIST)
		{
		  orig_decl = TREE_PURPOSE (orig_decl);
		  if (!orig_decl)
		    orig_decl = decl;
		}
	    }
	  if (is_global_var (orig_decl) && DECL_THREAD_LOCAL_P (orig_decl))
	    error_at (EXPR_LOCATION (for_stmt),
		      "threadprivate iteration variable %qD", orig_decl);
	}

      /* Make sure the iteration variable is private.  */
      tree c = NULL_TREE;
      tree c2 = NULL_TREE;
      if (orig_for_stmt != for_stmt)
	{
	  /* Preserve this information until we gimplify the inner simd.  */
	  if (has_decl_expr
	      && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
	    TREE_PRIVATE (t) = 1;
	}
      else if (ort == ORT_SIMD)
	{
	  splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
						 (splay_tree_key) decl);
	  omp_is_private (gimplify_omp_ctxp, decl,
			  1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
			       != 1));
	  if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
	    {
	      omp_notice_variable (gimplify_omp_ctxp, decl, true);
	      if (n->value & GOVD_LASTPRIVATE_CONDITIONAL)
		for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
						OMP_CLAUSE_LASTPRIVATE);
		     c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
					       OMP_CLAUSE_LASTPRIVATE))
		  if (OMP_CLAUSE_DECL (c3) == decl)
		    {
		      warning_at (OMP_CLAUSE_LOCATION (c3), OPT_Wopenmp,
				  "conditional %<lastprivate%> on loop "
				  "iterator %qD ignored", decl);
		      OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
		      n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
		    }
	    }
	  else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1 && !loop_p)
	    {
	      c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
	      OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
	      unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
	      if ((has_decl_expr
		   && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
		  || TREE_PRIVATE (t))
		{
		  OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
		  flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
		}
	      struct gimplify_omp_ctx *outer
		= gimplify_omp_ctxp->outer_context;
	      if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
		{
		  if (outer->region_type == ORT_WORKSHARE
		      && outer->combined_loop)
		    {
		      n = splay_tree_lookup (outer->variables,
					     (splay_tree_key) decl);
		      if (n != NULL && (n->value & GOVD_LOCAL) != 0)
			{
			  OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
			  flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
			}
		      else
			{
			  struct gimplify_omp_ctx *octx = outer->outer_context;
			  if (octx
			      && octx->region_type == ORT_COMBINED_PARALLEL
			      && octx->outer_context
			      && (octx->outer_context->region_type
				  == ORT_WORKSHARE)
			      && octx->outer_context->combined_loop)
			    {
			      octx = octx->outer_context;
			      n = splay_tree_lookup (octx->variables,
						     (splay_tree_key) decl);
			      if (n != NULL && (n->value & GOVD_LOCAL) != 0)
				{
				  OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
				  flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
				}
			    }
			}
		    }
		}

	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
	      OMP_FOR_CLAUSES (for_stmt) = c;
	      omp_add_variable (gimplify_omp_ctxp, decl, flags);
	      if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
		omp_lastprivate_for_combined_outer_constructs (outer, decl,
							       true);
	    }
	  else
	    {
	      bool lastprivate
		= (!has_decl_expr
		   || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
	      if (TREE_PRIVATE (t))
		lastprivate = false;
	      if (loop_p && OMP_FOR_ORIG_DECLS (for_stmt))
		{
		  tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
		  if (TREE_CODE (elt) == TREE_LIST && TREE_PURPOSE (elt))
		    lastprivate = false;
		}

	      struct gimplify_omp_ctx *outer
		= gimplify_omp_ctxp->outer_context;
	      if (outer && lastprivate)
		omp_lastprivate_for_combined_outer_constructs (outer, decl,
							       true);

	      c = build_omp_clause (input_location,
				    lastprivate ? OMP_CLAUSE_LASTPRIVATE
						: OMP_CLAUSE_PRIVATE);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
	      OMP_FOR_CLAUSES (for_stmt) = c;
	      omp_add_variable (gimplify_omp_ctxp, decl,
				(lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
				| GOVD_EXPLICIT | GOVD_SEEN);
	      c = NULL_TREE;
	    }
	}
      else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
	{
	  omp_notice_variable (gimplify_omp_ctxp, decl, true);
	  splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
						 (splay_tree_key) decl);
	  if (n && (n->value & GOVD_LASTPRIVATE_CONDITIONAL))
	    for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
					    OMP_CLAUSE_LASTPRIVATE);
		 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
					   OMP_CLAUSE_LASTPRIVATE))
	      if (OMP_CLAUSE_DECL (c3) == decl)
		{
		  warning_at (OMP_CLAUSE_LOCATION (c3), OPT_Wopenmp,
			      "conditional %<lastprivate%> on loop "
			      "iterator %qD ignored", decl);
		  OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
		  n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
		}
	}
      else
	omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);

      /* If DECL is not a gimple register, create a temporary variable to act
	 as an iteration counter.  This is valid, since DECL cannot be
	 modified in the body of the loop.  Similarly for any iteration vars
	 in simd with collapse > 1 where the iterator vars must be
	 lastprivate.  And similarly for vars mentioned in allocate
	 clauses.  */
      if (orig_for_stmt != for_stmt)
	var = decl;
      else if (!is_gimple_reg (decl)
	       || (ort == ORT_SIMD
		   && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
	       || (allocate_uids && allocate_uids->contains (decl)))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  /* Make sure omp_add_variable is not called on it prematurely.
	     We call it ourselves a few lines later.  */
	  gimplify_omp_ctxp = NULL;
	  var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
	  gimplify_omp_ctxp = ctx;
	  TREE_OPERAND (t, 0) = var;

	  gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));

	  if (ort == ORT_SIMD
	      && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
	    {
	      c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
	      OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
	      OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
	      OMP_CLAUSE_DECL (c2) = var;
	      OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
	      OMP_FOR_CLAUSES (for_stmt) = c2;
	      omp_add_variable (gimplify_omp_ctxp, var,
				GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
	      if (c == NULL_TREE)
		{
		  c = c2;
		  c2 = NULL_TREE;
		}
	    }
	  else
	    omp_add_variable (gimplify_omp_ctxp, var,
			      GOVD_PRIVATE | GOVD_SEEN);
	}
      else
	var = decl;

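      /* Gimplify the initial value.  For a non-rectangular loop the bound is
	 a TREE_VEC whose element 0 is the outer iterator; only the factor
	 and offset in elements 1 and 2 are gimplified here.  */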
      gimplify_omp_ctxp->in_for_exprs = true;
      if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
	{
	  tree lb = TREE_OPERAND (t, 1);
	  tret = gimplify_expr (&TREE_VEC_ELT (lb, 1), &for_pre_body, NULL,
				is_gimple_val, fb_rvalue, false);
	  ret = MIN (ret, tret);
	  tret = gimplify_expr (&TREE_VEC_ELT (lb, 2), &for_pre_body, NULL,
				is_gimple_val, fb_rvalue, false);
	}
      else
	tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
			      is_gimple_val, fb_rvalue, false);
      gimplify_omp_ctxp->in_for_exprs = false;
      ret = MIN (ret, tret);
      if (ret == GS_ERROR)
	return ret;

      /* Handle OMP_FOR_COND.  */
      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
      gcc_assert (COMPARISON_CLASS_P (t));
      gcc_assert (TREE_OPERAND (t, 0) == decl);

      gimplify_omp_ctxp->in_for_exprs = true;
      if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
	{
	  tree ub = TREE_OPERAND (t, 1);
	  tret = gimplify_expr (&TREE_VEC_ELT (ub, 1), &for_pre_body, NULL,
				is_gimple_val, fb_rvalue, false);
	  ret = MIN (ret, tret);
	  tret = gimplify_expr (&TREE_VEC_ELT (ub, 2), &for_pre_body, NULL,
				is_gimple_val, fb_rvalue, false);
	}
      else
	tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
			      is_gimple_val, fb_rvalue, false);
      gimplify_omp_ctxp->in_for_exprs = false;
      ret = MIN (ret, tret);

      /* Handle OMP_FOR_INCR.  */
      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
      switch (TREE_CODE (t))
	{
	case PREINCREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  {
	    tree decl = TREE_OPERAND (t, 0);
	    /* c_omp_for_incr_canonicalize_ptr() should have been
	       called to massage things appropriately.  */
	    gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));

	    if (orig_for_stmt != for_stmt)
	      break;
	    t = build_int_cst (TREE_TYPE (decl), 1);
	    if (c)
	      OMP_CLAUSE_LINEAR_STEP (c) = t;
	    t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
	    t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
	    TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
	    break;
	  }

	case PREDECREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	  /* c_omp_for_incr_canonicalize_ptr() should have been
	     called to massage things appropriately.  */
	  gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
	  if (orig_for_stmt != for_stmt)
	    break;
	  t = build_int_cst (TREE_TYPE (decl), -1);
	  if (c)
	    OMP_CLAUSE_LINEAR_STEP (c) = t;
	  t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
	  TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
	  break;

	case MODIFY_EXPR:
	  gcc_assert (TREE_OPERAND (t, 0) == decl);
	  TREE_OPERAND (t, 0) = var;

	  t = TREE_OPERAND (t, 1);
	  switch (TREE_CODE (t))
	    {
	    case PLUS_EXPR:
	      if (TREE_OPERAND (t, 1) == decl)
		{
		  TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
		  TREE_OPERAND (t, 0) = var;
		  break;
		}

	      /* Fallthru.  */
	    case MINUS_EXPR:
	    case POINTER_PLUS_EXPR:
	      gcc_assert (TREE_OPERAND (t, 0) == decl);
	      TREE_OPERAND (t, 0) = var;
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  gimplify_omp_ctxp->in_for_exprs = true;
	  tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
				is_gimple_val, fb_rvalue, false);
	  ret = MIN (ret, tret);
	  if (c)
	    {
	      tree step = TREE_OPERAND (t, 1);
	      tree stept = TREE_TYPE (decl);
	      if (POINTER_TYPE_P (stept))
		stept = sizetype;
	      step = fold_convert (stept, step);
	      if (TREE_CODE (t) == MINUS_EXPR)
		step = fold_build1 (NEGATE_EXPR, stept, step);
	      OMP_CLAUSE_LINEAR_STEP (c) = step;
	      if (step != TREE_OPERAND (t, 1))
		{
		  tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
					&for_pre_body, NULL,
					is_gimple_val, fb_rvalue, false);
		  ret = MIN (ret, tret);
		}
	    }
	  gimplify_omp_ctxp->in_for_exprs = false;
	  break;

	default:
	  gcc_unreachable ();
	}

      if (c2)
	{
	  gcc_assert (c);
	  OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
	}

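      /* When the IV has been replaced by a temporary (or for collapsed or
	 tiled loops), store the final value back into DECL through the
	 lastprivate/linear gimple sequence of its clause.  */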
      if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
	{
	  for (c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
	    if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		  && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
		 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		     && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
		     && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
		&& OMP_CLAUSE_DECL (c) == decl)
	      {
		if (is_doacross && (collapse == 1 || i >= collapse))
		  t = var;
		else
		  {
		    t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
		    gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
		    gcc_assert (TREE_OPERAND (t, 0) == var);
		    t = TREE_OPERAND (t, 1);
		    gcc_assert (TREE_CODE (t) == PLUS_EXPR
				|| TREE_CODE (t) == MINUS_EXPR
				|| TREE_CODE (t) == POINTER_PLUS_EXPR);
		    gcc_assert (TREE_OPERAND (t, 0) == var);
		    t = build2 (TREE_CODE (t), TREE_TYPE (decl),
				is_doacross ? var : decl,
				TREE_OPERAND (t, 1));
		  }
		gimple_seq *seq;
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
		  seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
		else
		  seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
		push_gimplify_context ();
		gimplify_assign (decl, t, seq);
		gimple *bind = NULL;
		if (gimplify_ctxp->temps)
		  {
		    bind = gimple_build_bind (NULL_TREE, *seq, NULL_TREE);
		    *seq = NULL;
		    gimplify_seq_add_stmt (seq, bind);
		  }
		pop_gimplify_context (bind);
	      }
	}
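      /* For non-rectangular loops, references to DECL as the outer iterator
	 in the bounds of later loops in the nest must now refer to VAR.  */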
      if (OMP_FOR_NON_RECTANGULAR (for_stmt) && var != decl)
	for (int j = i + 1; j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
	  {
	    t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
	    gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
	    if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
		&& TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
	      TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
	    t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
	    gcc_assert (COMPARISON_CLASS_P (t));
	    if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
		&& TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
	      TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
	  }
    }

  BITMAP_FREE (has_decl_expr);
  delete allocate_uids;

  if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
      || (loop_p && orig_for_stmt == for_stmt))
    {
      push_gimplify_context ();
      if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
	{
	  OMP_FOR_BODY (orig_for_stmt)
	    = build3 (BIND_EXPR, void_type_node, NULL,
		      OMP_FOR_BODY (orig_for_stmt), NULL);
	  TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
	}
    }

  gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
					 &for_body);

  if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
      || (loop_p && orig_for_stmt == for_stmt))
    {
      if (gimple_code (g) == GIMPLE_BIND)
	pop_gimplify_context (g);
      else
	pop_gimplify_context (NULL);
    }

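  /* For a combined construct, give the inner construct's iteration
     variables fresh private temporaries and redirect the increments (and
     any non-rectangular bounds that refer to them) to those.  */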
  if (orig_for_stmt != for_stmt)
    for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
      {
	t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
	decl = TREE_OPERAND (t, 0);
	struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
	  gimplify_omp_ctxp = ctx->outer_context;
	var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
	gimplify_omp_ctxp = ctx;
	omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
	TREE_OPERAND (t, 0) = var;
	t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
	TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
	TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
	if (OMP_FOR_NON_RECTANGULAR (for_stmt))
	  for (int j = i + 1;
	       j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
	    {
	      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
	      gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
	      if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
		  && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
		{
		  TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
		  TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
		}
	      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
	      gcc_assert (COMPARISON_CLASS_P (t));
	      if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
		  && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
		{
		  TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
		  TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
		}
	    }
      }

  gimplify_adjust_omp_clauses (pre_p, for_body,
			       &OMP_FOR_CLAUSES (orig_for_stmt),
			       TREE_CODE (orig_for_stmt));

  int kind;
  switch (TREE_CODE (orig_for_stmt))
    {
    case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
    case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
    case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
    case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
    case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
    default:
      gcc_unreachable ();
    }
  if (loop_p && kind == GF_OMP_FOR_KIND_SIMD)
    {
      gimplify_seq_add_seq (pre_p, for_pre_body);
      for_pre_body = NULL;
    }
  gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
			       TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
			       for_pre_body);
  if (orig_for_stmt != for_stmt)
    gimple_omp_for_set_combined_p (gfor, true);
  if (gimplify_omp_ctxp
      && (gimplify_omp_ctxp->combined_loop
	  || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
	      && gimplify_omp_ctxp->outer_context
	      && gimplify_omp_ctxp->outer_context->combined_loop)))
    {
      gimple_omp_for_set_combined_into_p (gfor, true);
      if (gimplify_omp_ctxp->combined_loop)
	gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
      else
	gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
    }

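  /* Copy the iteration variables, bounds, comparison codes and increments
     from the trees into the GIMPLE_OMP_FOR statement.  */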
  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
    {
      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
      gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
      gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
      gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
      gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
      gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
    }

  /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop constructs
     with a GIMPLE_OMP_TASK sandwiched in between them.  The outer taskloop
     is responsible for computing the number of iterations and the counts
     for collapsed loops, and holds the taskloop-specific clauses.  The
     task construct stands for the effect of data sharing on the explicit
     task it creates, and the inner taskloop stands for the expansion of
     the static loop inside of the explicit task construct.  */
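  /* As an illustrative sketch (clause placement simplified), a loop such as
       #pragma omp taskloop grainsize(g) firstprivate(x) lastprivate(y)
     ends up roughly as
       GIMPLE_OMP_FOR (taskloop, grainsize(g))	      <- outer
	 GIMPLE_OMP_TASK (firstprivate(x), shared(y))
	   GIMPLE_OMP_FOR (taskloop, lastprivate(y))  <- inner
     matching the clause splitting done below.  */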
  if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
    {
      tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
      tree task_clauses = NULL_TREE;
      tree c = *gfor_clauses_ptr;
      tree *gtask_clauses_ptr = &task_clauses;
      tree outer_for_clauses = NULL_TREE;
      tree *gforo_clauses_ptr = &outer_for_clauses;
      bitmap lastprivate_uids = NULL;
      if (omp_find_clause (c, OMP_CLAUSE_ALLOCATE))
	{
	  c = omp_find_clause (c, OMP_CLAUSE_LASTPRIVATE);
	  if (c)
	    {
	      lastprivate_uids = BITMAP_ALLOC (NULL);
	      for (; c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
					     OMP_CLAUSE_LASTPRIVATE))
		bitmap_set_bit (lastprivate_uids,
				DECL_UID (OMP_CLAUSE_DECL (c)));
	    }
	  c = *gfor_clauses_ptr;
	}
      for (; c; c = OMP_CLAUSE_CHAIN (c))
	switch (OMP_CLAUSE_CODE (c))
	  {
	  /* These clauses are allowed on task, move them there.  */
	  case OMP_CLAUSE_SHARED:
	  case OMP_CLAUSE_FIRSTPRIVATE:
	  case OMP_CLAUSE_DEFAULT:
	  case OMP_CLAUSE_IF:
	  case OMP_CLAUSE_UNTIED:
	  case OMP_CLAUSE_FINAL:
	  case OMP_CLAUSE_MERGEABLE:
	  case OMP_CLAUSE_PRIORITY:
	  case OMP_CLAUSE_REDUCTION:
	  case OMP_CLAUSE_IN_REDUCTION:
	    *gtask_clauses_ptr = c;
	    gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
	    break;
	  case OMP_CLAUSE_PRIVATE:
	    if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
	      {
		/* We want private on outer for and firstprivate
		   on task.  */
		*gtask_clauses_ptr
		  = build_omp_clause (OMP_CLAUSE_LOCATION (c),
				      OMP_CLAUSE_FIRSTPRIVATE);
		OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
		lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
						    openacc);
		gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
		*gforo_clauses_ptr = c;
		gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
	      }
	    else
	      {
		*gtask_clauses_ptr = c;
		gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
	      }
	    break;
	  /* These clauses go into the outer taskloop clauses.  */
	  case OMP_CLAUSE_GRAINSIZE:
	  case OMP_CLAUSE_NUM_TASKS:
	  case OMP_CLAUSE_NOGROUP:
	    *gforo_clauses_ptr = c;
	    gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
	    break;
	  /* The collapse clause is duplicated on both taskloops.  */
	  case OMP_CLAUSE_COLLAPSE:
	    *gfor_clauses_ptr = c;
	    gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
	    *gforo_clauses_ptr = copy_node (c);
	    gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
	    break;
	  /* For lastprivate, keep the clause on the inner taskloop, and
	     add a shared clause on the task.  If the same decl is also
	     firstprivate, add a firstprivate clause on the inner taskloop
	     as well.  */
	  case OMP_CLAUSE_LASTPRIVATE:
	    if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
	      {
		/* For taskloop C++ lastprivate IVs, we want:
		   1) private on outer taskloop
		   2) firstprivate and shared on task
		   3) lastprivate on inner taskloop  */
		*gtask_clauses_ptr
		  = build_omp_clause (OMP_CLAUSE_LOCATION (c),
				      OMP_CLAUSE_FIRSTPRIVATE);
		OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
		lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
						    openacc);
		gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
		OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
		*gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
						       OMP_CLAUSE_PRIVATE);
		OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
		OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
		TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
		gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
	      }
	    *gfor_clauses_ptr = c;
	    gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
	    *gtask_clauses_ptr
	      = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
	    OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
	    if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	      OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
	    gtask_clauses_ptr
	      = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
	    break;
	  /* An allocate clause is duplicated on the task and the inner
	     taskloop if the decl is lastprivate, otherwise it just goes
	     on the task.  */
	  case OMP_CLAUSE_ALLOCATE:
	    if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
		&& DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
	      {
		/* Additionally, put a firstprivate clause on the task
		   for the allocator if it is not constant.  */
		*gtask_clauses_ptr
		  = build_omp_clause (OMP_CLAUSE_LOCATION (c),
				      OMP_CLAUSE_FIRSTPRIVATE);
		OMP_CLAUSE_DECL (*gtask_clauses_ptr)
		  = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
		gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
	      }
	    if (lastprivate_uids
		&& bitmap_bit_p (lastprivate_uids,
				 DECL_UID (OMP_CLAUSE_DECL (c))))
	      {
		*gfor_clauses_ptr = c;
		gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
		*gtask_clauses_ptr = copy_node (c);
		gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
	      }
	    else
	      {
		*gtask_clauses_ptr = c;
		gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
	      }
	    break;
	  default:
	    gcc_unreachable ();
	  }
      *gfor_clauses_ptr = NULL_TREE;
      *gtask_clauses_ptr = NULL_TREE;
      *gforo_clauses_ptr = NULL_TREE;
      BITMAP_FREE (lastprivate_uids);
      gimple_set_location (gfor, input_location);
      g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
      g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
				 NULL_TREE, NULL_TREE, NULL_TREE);
      gimple_set_location (g, input_location);
      gimple_omp_task_set_taskloop_p (g, true);
      g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
      gomp_for *gforo
	= gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
				gimple_omp_for_collapse (gfor),
				gimple_omp_for_pre_body (gfor));
      gimple_omp_for_set_pre_body (gfor, NULL);
      gimple_omp_for_set_combined_p (gforo, true);
      gimple_omp_for_set_combined_into_p (gfor, true);
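      /* Give the outer taskloop fresh private iteration variables and
	 unshared copies of the inner taskloop's bounds, with its increment
	 redirected to the new variables.  */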
      for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
	{
	  tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
	  tree v = create_tmp_var (type);
	  gimple_omp_for_set_index (gforo, i, v);
	  t = unshare_expr (gimple_omp_for_initial (gfor, i));
	  gimple_omp_for_set_initial (gforo, i, t);
	  gimple_omp_for_set_cond (gforo, i,
				   gimple_omp_for_cond (gfor, i));
	  t = unshare_expr (gimple_omp_for_final (gfor, i));
	  gimple_omp_for_set_final (gforo, i, t);
	  t = unshare_expr (gimple_omp_for_incr (gfor, i));
	  gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
	  TREE_OPERAND (t, 0) = v;
	  gimple_omp_for_set_incr (gforo, i, t);
	  t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
	  OMP_CLAUSE_DECL (t) = v;
	  OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
	  gimple_omp_for_set_clauses (gforo, t);
	  if (OMP_FOR_NON_RECTANGULAR (for_stmt))
	    {
	      tree *p1 = NULL, *p2 = NULL;
	      t = gimple_omp_for_initial (gforo, i);
	      if (TREE_CODE (t) == TREE_VEC)
		p1 = &TREE_VEC_ELT (t, 0);
	      t = gimple_omp_for_final (gforo, i);
	      if (TREE_CODE (t) == TREE_VEC)
		{
		  if (p1)
		    p2 = &TREE_VEC_ELT (t, 0);
		  else
		    p1 = &TREE_VEC_ELT (t, 0);
		}
	      if (p1)
		{
		  int j;
		  for (j = 0; j < i; j++)
		    if (*p1 == gimple_omp_for_index (gfor, j))
		      {
			*p1 = gimple_omp_for_index (gforo, j);
			if (p2)
			  *p2 = *p1;
			break;
		      }
		  gcc_assert (j < i);
		}
	    }
	}
      gimplify_seq_add_stmt (pre_p, gforo);
    }
  else
    gimplify_seq_add_stmt (pre_p, gfor);

  if (TREE_CODE (orig_for_stmt) == OMP_FOR)
    {
      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
      unsigned lastprivate_conditional = 0;
      while (ctx
	     && (ctx->region_type == ORT_TARGET_DATA
		 || ctx->region_type == ORT_TASKGROUP))
	ctx = ctx->outer_context;
      if (ctx && (ctx->region_type & ORT_PARALLEL) != 0)
	for (tree c = gimple_omp_for_clauses (gfor);
	     c; c = OMP_CLAUSE_CHAIN (c))
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
	    ++lastprivate_conditional;
      if (lastprivate_conditional)
	{
	  struct omp_for_data fd;
	  omp_extract_for_data (gfor, &fd, NULL);
	  tree type = build_array_type_nelts (unsigned_type_for (fd.iter_type),
					      lastprivate_conditional);
	  tree var = create_tmp_var_raw (type);
	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
	  OMP_CLAUSE_DECL (c) = var;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
	  gimple_omp_for_set_clauses (gfor, c);
	  omp_add_variable (ctx, var, GOVD_CONDTEMP | GOVD_SEEN);
	}
    }
  else if (TREE_CODE (orig_for_stmt) == OMP_SIMD)
    {
      unsigned lastprivate_conditional = 0;
      for (tree c = gimple_omp_for_clauses (gfor); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	    && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
	  ++lastprivate_conditional;
      if (lastprivate_conditional)
	{
	  struct omp_for_data fd;
	  omp_extract_for_data (gfor, &fd, NULL);
	  tree type = unsigned_type_for (fd.iter_type);
	  while (lastprivate_conditional--)
	    {
	      tree c = build_omp_clause (UNKNOWN_LOCATION,
					 OMP_CLAUSE__CONDTEMP_);
	      OMP_CLAUSE_DECL (c) = create_tmp_var (type);
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
	      gimple_omp_for_set_clauses (gfor, c);
	    }
	}
    }

  if (ret != GS_ALL_DONE)
    return GS_ERROR;
  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}

/* Helper for gimplify_omp_loop, called through walk_tree.  */

static tree
note_no_context_vars (tree *tp, int *, void *data)
{
  if (VAR_P (*tp)
      && DECL_CONTEXT (*tp) == NULL_TREE
      && !is_global_var (*tp))
    {
      vec<tree> *d = (vec<tree> *) data;
      d->safe_push (*tp);
      DECL_CONTEXT (*tp) = current_function_decl;
    }
  return NULL_TREE;
}

/* Gimplify the gross structure of an OMP_LOOP statement.  */

static enum gimplify_status
gimplify_omp_loop (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  tree clauses = OMP_FOR_CLAUSES (for_stmt);
  struct gimplify_omp_ctx *octx = gimplify_omp_ctxp;
  enum omp_clause_bind_kind kind = OMP_CLAUSE_BIND_THREAD;
  int i;

  /* If order is not present, the behavior is as if order(concurrent)
     appeared.  */
  tree order = omp_find_clause (clauses, OMP_CLAUSE_ORDER);
  if (order == NULL_TREE)
    {
      order = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_ORDER);
      OMP_CLAUSE_CHAIN (order) = clauses;
      OMP_FOR_CLAUSES (for_stmt) = clauses = order;
    }

  tree bind = omp_find_clause (clauses, OMP_CLAUSE_BIND);
  if (bind == NULL_TREE)
    {
      if (!flag_openmp) /* flag_openmp_simd */
	;
      else if (octx && (octx->region_type & ORT_TEAMS) != 0)
	kind = OMP_CLAUSE_BIND_TEAMS;
      else if (octx && (octx->region_type & ORT_PARALLEL) != 0)
	kind = OMP_CLAUSE_BIND_PARALLEL;
      else
	{
	  for (; octx; octx = octx->outer_context)
	    {
	      if ((octx->region_type & ORT_ACC) != 0
		  || octx->region_type == ORT_NONE
		  || octx->region_type == ORT_IMPLICIT_TARGET)
		continue;
	      break;
	    }
	  if (octx == NULL && !in_omp_construct)
	    error_at (EXPR_LOCATION (for_stmt),
		      "%<bind%> clause not specified on a %<loop%> "
		      "construct not nested inside another OpenMP construct");
	}
      bind = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_BIND);
      OMP_CLAUSE_CHAIN (bind) = clauses;
      OMP_CLAUSE_BIND_KIND (bind) = kind;
      OMP_FOR_CLAUSES (for_stmt) = bind;
    }
  else
    switch (OMP_CLAUSE_BIND_KIND (bind))
      {
      case OMP_CLAUSE_BIND_THREAD:
	break;
      case OMP_CLAUSE_BIND_PARALLEL:
	if (!flag_openmp) /* flag_openmp_simd */
	  {
	    OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
	    break;
	  }
	for (; octx; octx = octx->outer_context)
	  if (octx->region_type == ORT_SIMD
	      && omp_find_clause (octx->clauses, OMP_CLAUSE_BIND) == NULL_TREE)
	    {
	      error_at (EXPR_LOCATION (for_stmt),
			"%<bind(parallel)%> on a %<loop%> construct nested "
			"inside %<simd%> construct");
	      OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
	      break;
	    }
	kind = OMP_CLAUSE_BIND_PARALLEL;
	break;
      case OMP_CLAUSE_BIND_TEAMS:
	if (!flag_openmp) /* flag_openmp_simd */
	  {
	    OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
	    break;
	  }
	if ((octx
	     && octx->region_type != ORT_IMPLICIT_TARGET
	     && octx->region_type != ORT_NONE
	     && (octx->region_type & ORT_TEAMS) == 0)
	    || in_omp_construct)
	  {
	    error_at (EXPR_LOCATION (for_stmt),
		      "%<bind(teams)%> on a %<loop%> region not strictly "
		      "nested inside of a %<teams%> region");
	    OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
	    break;
	  }
	kind = OMP_CLAUSE_BIND_TEAMS;
	break;
      default:
	gcc_unreachable ();
      }

  for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
    switch (OMP_CLAUSE_CODE (*pc))
      {
      case OMP_CLAUSE_REDUCTION:
	if (OMP_CLAUSE_REDUCTION_INSCAN (*pc))
	  {
	    error_at (OMP_CLAUSE_LOCATION (*pc),
		      "%<inscan%> %<reduction%> clause on "
		      "%qs construct", "loop");
	    OMP_CLAUSE_REDUCTION_INSCAN (*pc) = 0;
	  }
	if (OMP_CLAUSE_REDUCTION_TASK (*pc))
	  {
	    error_at (OMP_CLAUSE_LOCATION (*pc),
		      "invalid %<task%> reduction modifier on construct "
		      "other than %<parallel%>, %qs or %<sections%>",
		      lang_GNU_Fortran () ? "do" : "for");
	    OMP_CLAUSE_REDUCTION_TASK (*pc) = 0;
	  }
	pc = &OMP_CLAUSE_CHAIN (*pc);
	break;
      case OMP_CLAUSE_LASTPRIVATE:
	for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
	  {
	    tree t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
	    gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
	    if (OMP_CLAUSE_DECL (*pc) == TREE_OPERAND (t, 0))
	      break;
	    if (OMP_FOR_ORIG_DECLS (for_stmt)
		&& TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
					    i)) == TREE_LIST
		&& TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
					       i)))
	      {
		tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
		if (OMP_CLAUSE_DECL (*pc) == TREE_PURPOSE (orig))
		  break;
	      }
	  }
	if (i == TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)))
	  {
	    error_at (OMP_CLAUSE_LOCATION (*pc),
		      "%<lastprivate%> clause on a %<loop%> construct refers "
		      "to a variable %qD which is not the loop iterator",
		      OMP_CLAUSE_DECL (*pc));
	    *pc = OMP_CLAUSE_CHAIN (*pc);
	    break;
	  }
	pc = &OMP_CLAUSE_CHAIN (*pc);
	break;
      default:
	pc = &OMP_CLAUSE_CHAIN (*pc);
	break;
      }

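  /* A loop construct is expanded into the equivalent worksharing
     constructs for its binding region; the innermost of those is always
     simd:
       loop bind(thread)   -> simd
       loop bind(parallel) -> for simd
       loop bind(teams)    -> distribute parallel for simd
     The passes below build the enclosing constructs from the inside
     out.  */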
  TREE_SET_CODE (for_stmt, OMP_SIMD);

  int last;
  switch (kind)
    {
    case OMP_CLAUSE_BIND_THREAD: last = 0; break;
    case OMP_CLAUSE_BIND_PARALLEL: last = 1; break;
    case OMP_CLAUSE_BIND_TEAMS: last = 2; break;
    }
  for (int pass = 1; pass <= last; pass++)
    {
      if (pass == 2)
	{
	  tree bind = build3 (BIND_EXPR, void_type_node, NULL, NULL,
			      make_node (BLOCK));
	  append_to_statement_list (*expr_p, &BIND_EXPR_BODY (bind));
	  *expr_p = make_node (OMP_PARALLEL);
	  TREE_TYPE (*expr_p) = void_type_node;
	  OMP_PARALLEL_BODY (*expr_p) = bind;
	  OMP_PARALLEL_COMBINED (*expr_p) = 1;
	  SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (for_stmt));
	  tree *pc = &OMP_PARALLEL_CLAUSES (*expr_p);
	  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
	    if (OMP_FOR_ORIG_DECLS (for_stmt)
		&& (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i))
		    == TREE_LIST))
	      {
		tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
		if (TREE_PURPOSE (elt) && TREE_VALUE (elt))
		  {
		    *pc = build_omp_clause (UNKNOWN_LOCATION,
					    OMP_CLAUSE_FIRSTPRIVATE);
		    OMP_CLAUSE_DECL (*pc) = TREE_VALUE (elt);
		    pc = &OMP_CLAUSE_CHAIN (*pc);
		  }
	      }
	}
      tree t = make_node (pass == 2 ? OMP_DISTRIBUTE : OMP_FOR);
      tree *pc = &OMP_FOR_CLAUSES (t);
      TREE_TYPE (t) = void_type_node;
      OMP_FOR_BODY (t) = *expr_p;
      SET_EXPR_LOCATION (t, EXPR_LOCATION (for_stmt));
      for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
	switch (OMP_CLAUSE_CODE (c))
	  {
	  case OMP_CLAUSE_BIND:
	  case OMP_CLAUSE_ORDER:
	  case OMP_CLAUSE_COLLAPSE:
	    *pc = copy_node (c);
	    pc = &OMP_CLAUSE_CHAIN (*pc);
	    break;
	  case OMP_CLAUSE_PRIVATE:
	  case OMP_CLAUSE_FIRSTPRIVATE:
	    /* Only needed on the innermost construct.  */
	    break;
	  case OMP_CLAUSE_LASTPRIVATE:
	    if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c) && pass != last)
	      {
		*pc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
					OMP_CLAUSE_FIRSTPRIVATE);
		OMP_CLAUSE_DECL (*pc) = OMP_CLAUSE_DECL (c);
		lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
		pc = &OMP_CLAUSE_CHAIN (*pc);
	      }
	    *pc = copy_node (c);
	    OMP_CLAUSE_LASTPRIVATE_STMT (*pc) = NULL_TREE;
	    TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
	    if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
	      {
		if (pass != last)
		  OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (*pc) = 1;
		else
		  lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
		OMP_CLAUSE_LASTPRIVATE_LOOP_IV (*pc) = 0;
	      }
	    pc = &OMP_CLAUSE_CHAIN (*pc);
	    break;
	  case OMP_CLAUSE_REDUCTION:
	    *pc = copy_node (c);
	    OMP_CLAUSE_DECL (*pc) = unshare_expr (OMP_CLAUSE_DECL (c));
	    TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
	    if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc))
	      {
		auto_vec<tree> no_context_vars;
		int walk_subtrees = 0;
		note_no_context_vars (&OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
				      &walk_subtrees, &no_context_vars);
		if (tree p = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c))
		  note_no_context_vars (&p, &walk_subtrees, &no_context_vars);
		walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_INIT (c),
					      note_no_context_vars,
					      &no_context_vars);
		walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_MERGE (c),
					      note_no_context_vars,
					      &no_context_vars);

		OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc)
		  = copy_node (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c));
		if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
		  OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc)
		    = copy_node (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c));

		hash_map<tree, tree> decl_map;
		decl_map.put (OMP_CLAUSE_DECL (c), OMP_CLAUSE_DECL (c));
		decl_map.put (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
			      OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc));
		if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
		  decl_map.put (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
				OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc));

		copy_body_data id;
		memset (&id, 0, sizeof (id));
		id.src_fn = current_function_decl;
		id.dst_fn = current_function_decl;
		id.src_cfun = cfun;
		id.decl_map = &decl_map;
		id.copy_decl = copy_decl_no_change;
		id.transform_call_graph_edges = CB_CGE_DUPLICATE;
		id.transform_new_cfg = true;
		id.transform_return_to_modify = false;
		id.eh_lp_nr = 0;
		walk_tree (&OMP_CLAUSE_REDUCTION_INIT (*pc), copy_tree_body_r,
			   &id, NULL);
		walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (*pc), copy_tree_body_r,
			   &id, NULL);

		for (tree d : no_context_vars)
		  {
		    DECL_CONTEXT (d) = NULL_TREE;
		    DECL_CONTEXT (*decl_map.get (d)) = NULL_TREE;
		  }
	      }
	    else
	      {
		OMP_CLAUSE_REDUCTION_INIT (*pc)
		  = unshare_expr (OMP_CLAUSE_REDUCTION_INIT (c));
		OMP_CLAUSE_REDUCTION_MERGE (*pc)
		  = unshare_expr (OMP_CLAUSE_REDUCTION_MERGE (c));
	      }
	    pc = &OMP_CLAUSE_CHAIN (*pc);
	    break;
	  default:
	    gcc_unreachable ();
	  }
      *pc = NULL_TREE;
      *expr_p = t;
    }
  return gimplify_expr (expr_p, pre_p, NULL, is_gimple_stmt, fb_none);
}


/* Helper function of optimize_target_teams, find OMP_TEAMS inside
   of OMP_TARGET's body.  */

static tree
find_omp_teams (tree *tp, int *walk_subtrees, void *)
{
  *walk_subtrees = 0;
  switch (TREE_CODE (*tp))
    {
    case OMP_TEAMS:
      return *tp;
    case BIND_EXPR:
    case STATEMENT_LIST:
      *walk_subtrees = 1;
      break;
    default:
      break;
    }
  return NULL_TREE;
}

/* Helper function of optimize_target_teams, determine if the expression
   can be computed safely before the target construct on the host.  */

static tree
computable_teams_clause (tree *tp, int *walk_subtrees, void *)
{
  splay_tree_node n;

  if (TYPE_P (*tp))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }
  switch (TREE_CODE (*tp))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      *walk_subtrees = 0;
      if (error_operand_p (*tp)
	  || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
	  || DECL_HAS_VALUE_EXPR_P (*tp)
	  || DECL_THREAD_LOCAL_P (*tp)
	  || TREE_SIDE_EFFECTS (*tp)
	  || TREE_THIS_VOLATILE (*tp))
	return *tp;
      if (is_global_var (*tp)
	  && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
	      || lookup_attribute ("omp declare target link",
				   DECL_ATTRIBUTES (*tp))))
	return *tp;
      if (VAR_P (*tp)
	  && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
	  && !is_global_var (*tp)
	  && decl_function_context (*tp) == current_function_decl)
	return *tp;
      n = splay_tree_lookup (gimplify_omp_ctxp->variables,
			     (splay_tree_key) *tp);
      if (n == NULL)
	{
	  if (gimplify_omp_ctxp->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
	    return NULL_TREE;
	  return *tp;
	}
      else if (n->value & GOVD_LOCAL)
	return *tp;
      else if (n->value & GOVD_FIRSTPRIVATE)
	return NULL_TREE;
      else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
	       == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
	return NULL_TREE;
      return *tp;
    case INTEGER_CST:
      if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
	return *tp;
      return NULL_TREE;
    case TARGET_EXPR:
      if (TARGET_EXPR_INITIAL (*tp)
	  || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
	return *tp;
      return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
				      walk_subtrees, NULL);
    /* Allow some reasonable subset of integral arithmetics.  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case NON_LVALUE_EXPR:
    CASE_CONVERT:
      if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
	return *tp;
      return NULL_TREE;
    /* And disallow anything else, except for comparisons.  */
    default:
      if (COMPARISON_CLASS_P (*tp))
	return NULL_TREE;
      return *tp;
    }
}

/* Try to determine whether the num_teams and/or thread_limit expressions
   can have their values determined already before entering the
   target construct.
   INTEGER_CSTs trivially can; so can integral decls that are firstprivate
   (explicitly or implicitly) or explicitly map(always, to:) or
   map(always, tofrom:) on the target region, and expressions involving
   simple arithmetic on those too.  Function calls are not ok,
   dereferencing something is not either, etc.
   Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
   EXPR based on what we find:
   0 stands for clause not specified at all, use implementation default
   -1 stands for value that can't be determined easily before entering
      the target construct
   -2 means that no explicit teams construct was specified.
   If the teams construct is not present at all, use 1 for num_teams
   and 0 for thread_limit (only one team is involved, and the thread
   limit is implementation defined).  */

static void
optimize_target_teams (tree target, gimple_seq *pre_p)
{
  tree body = OMP_BODY (target);
  tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
  tree num_teams_lower = NULL_TREE;
  tree num_teams_upper = integer_zero_node;
  tree thread_limit = integer_zero_node;
  location_t num_teams_loc = EXPR_LOCATION (target);
  location_t thread_limit_loc = EXPR_LOCATION (target);
  tree c, *p, expr;
  struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;

  if (teams == NULL_TREE)
    num_teams_upper = build_int_cst (integer_type_node, -2);
  else
    for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
      {
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
	  {
	    p = &num_teams_upper;
	    num_teams_loc = OMP_CLAUSE_LOCATION (c);
	    if (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c))
	      {
		expr = OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c);
		if (TREE_CODE (expr) == INTEGER_CST)
		  num_teams_lower = expr;
		else if (walk_tree (&expr, computable_teams_clause,
				    NULL, NULL))
		  num_teams_lower = integer_minus_one_node;
		else
		  {
		    num_teams_lower = expr;
		    gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
		    if (gimplify_expr (&num_teams_lower, pre_p, NULL,
				       is_gimple_val, fb_rvalue, false)
			== GS_ERROR)
		      {
			gimplify_omp_ctxp = target_ctx;
			num_teams_lower = integer_minus_one_node;
		      }
		    else
		      {
			gimplify_omp_ctxp = target_ctx;
			if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
			  OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
			    = num_teams_lower;
		      }
		  }
	      }
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
	  {
	    p = &thread_limit;
	    thread_limit_loc = OMP_CLAUSE_LOCATION (c);
	  }
	else
	  continue;
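	/* Pre-evaluate the clause operand: constants are used as-is,
	   anything not computable on the host becomes -1, and everything
	   else is gimplified in the outer (host) context.  */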
	expr = OMP_CLAUSE_OPERAND (c, 0);
	if (TREE_CODE (expr) == INTEGER_CST)
	  {
	    *p = expr;
	    continue;
	  }
	if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
	  {
	    *p = integer_minus_one_node;
	    continue;
	  }
	*p = expr;
	gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
	if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
	    == GS_ERROR)
	  {
	    gimplify_omp_ctxp = target_ctx;
	    *p = integer_minus_one_node;
	    continue;
	  }
	gimplify_omp_ctxp = target_ctx;
	if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
	  OMP_CLAUSE_OPERAND (c, 0) = *p;
      }
  if (!omp_find_clause (OMP_TARGET_CLAUSES (target), OMP_CLAUSE_THREAD_LIMIT))
    {
      c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
      OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
      OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
      OMP_TARGET_CLAUSES (target) = c;
    }
  c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
  OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (c) = num_teams_upper;
  OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c) = num_teams_lower;
  OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
  OMP_TARGET_CLAUSES (target) = c;
}

/* Gimplify the gross structure of several OMP constructs.  */

static void
gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gimple *stmt;
  gimple_seq body = NULL;
  enum omp_region_type ort;

  switch (TREE_CODE (expr))
    {
    case OMP_SECTIONS:
    case OMP_SINGLE:
      ort = ORT_WORKSHARE;
      break;
    case OMP_SCOPE:
      ort = ORT_TASKGROUP;
      break;
    case OMP_TARGET:
      ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
      break;
    case OACC_KERNELS:
      ort = ORT_ACC_KERNELS;
      break;
    case OACC_PARALLEL:
      ort = ORT_ACC_PARALLEL;
      break;
    case OACC_SERIAL:
      ort = ORT_ACC_SERIAL;
      break;
    case OACC_DATA:
      ort = ORT_ACC_DATA;
      break;
    case OMP_TARGET_DATA:
      ort = ORT_TARGET_DATA;
      break;
    case OMP_TEAMS:
      ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
      if (gimplify_omp_ctxp == NULL
	  || gimplify_omp_ctxp->region_type == ORT_IMPLICIT_TARGET)
	ort = (enum omp_region_type) (ort | ORT_HOST_TEAMS);
      break;
    case OACC_HOST_DATA:
      ort = ORT_ACC_HOST_DATA;
      break;
    default:
      gcc_unreachable ();
    }

  bool save_in_omp_construct = in_omp_construct;
  if ((ort & ORT_ACC) == 0)
    in_omp_construct = false;
  gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
			     TREE_CODE (expr));
  if (TREE_CODE (expr) == OMP_TARGET)
    optimize_target_teams (expr, pre_p);
  if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0
      || (ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
    {
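      /* Gimplify the body in its own gimplification context, so that any
	 temporaries it needs end up in the region's own bind rather than
	 escaping to the enclosing function.  */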
16916 push_gimplify_context ();
16917 gimple *g = gimplify_and_return_first (OMP_BODY (expr), seq_p: &body);
16918 if (gimple_code (g) == GIMPLE_BIND)
16919 pop_gimplify_context (body: g);
16920 else
16921 pop_gimplify_context (NULL);
16922 if ((ort & ORT_TARGET_DATA) != 0)
16923 {
16924 enum built_in_function end_ix;
16925 switch (TREE_CODE (expr))
16926 {
16927 case OACC_DATA:
16928 case OACC_HOST_DATA:
16929 end_ix = BUILT_IN_GOACC_DATA_END;
16930 break;
16931 case OMP_TARGET_DATA:
16932 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
16933 break;
16934 default:
16935 gcc_unreachable ();
16936 }
16937 tree fn = builtin_decl_explicit (fncode: end_ix);
16938 g = gimple_build_call (fn, 0);
16939 gimple_seq cleanup = NULL;
16940 gimple_seq_add_stmt (&cleanup, g);
16941 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
16942 body = NULL;
16943 gimple_seq_add_stmt (&body, g);
16944 }
16945 }
16946 else
16947 gimplify_and_add (OMP_BODY (expr), seq_p: &body);
16948 gimplify_adjust_omp_clauses (pre_p, body, list_p: &OMP_CLAUSES (expr),
16949 TREE_CODE (expr));
16950 in_omp_construct = save_in_omp_construct;
16951
16952 switch (TREE_CODE (expr))
16953 {
16954 case OACC_DATA:
16955 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
16956 OMP_CLAUSES (expr));
16957 break;
16958 case OACC_HOST_DATA:
16959 if (omp_find_clause (OMP_CLAUSES (expr), kind: OMP_CLAUSE_IF_PRESENT))
16960 {
16961 for (tree c = OMP_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
16962 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
16963 OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c) = 1;
16964 }
16965
16966 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
16967 OMP_CLAUSES (expr));
16968 break;
16969 case OACC_KERNELS:
16970 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
16971 OMP_CLAUSES (expr));
16972 break;
16973 case OACC_PARALLEL:
16974 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
16975 OMP_CLAUSES (expr));
16976 break;
16977 case OACC_SERIAL:
16978 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_SERIAL,
16979 OMP_CLAUSES (expr));
16980 break;
16981 case OMP_SECTIONS:
16982 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
16983 break;
16984 case OMP_SINGLE:
16985 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
16986 break;
16987 case OMP_SCOPE:
16988 stmt = gimple_build_omp_scope (body, OMP_CLAUSES (expr));
16989 break;
16990 case OMP_TARGET:
16991 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
16992 OMP_CLAUSES (expr));
16993 break;
16994 case OMP_TARGET_DATA:
16995 /* Put use_device_{ptr,addr} clauses last, as map clauses are supposed
16996 to be evaluated before the use_device_{ptr,addr} clauses if they
16997 refer to the same variables. */
16998 {
16999 tree use_device_clauses;
17000 tree *pc, *uc = &use_device_clauses;
17001 for (pc = &OMP_CLAUSES (expr); *pc; )
17002 if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_PTR
17003 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_ADDR)
17004 {
17005 *uc = *pc;
17006 *pc = OMP_CLAUSE_CHAIN (*pc);
17007 uc = &OMP_CLAUSE_CHAIN (*uc);
17008 }
17009 else
17010 pc = &OMP_CLAUSE_CHAIN (*pc);
17011 *uc = NULL_TREE;
17012 *pc = use_device_clauses;
17013 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
17014 OMP_CLAUSES (expr));
17015 }
17016 break;
17017 case OMP_TEAMS:
17018 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
17019 if ((ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
17020 gimple_omp_teams_set_host (omp_teams_stmt: as_a <gomp_teams *> (p: stmt), value: true);
17021 break;
17022 default:
17023 gcc_unreachable ();
17024 }
17025
17026 gimplify_seq_add_stmt (seq_p: pre_p, gs: stmt);
17027 *expr_p = NULL_TREE;
17028}
17029
17030/* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
17031 target update constructs. */
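/* Illustration only: a standalone directive such as

     #pragma omp target update to(x)

   has no body, so it is lowered below to a single GIMPLE_OMP_TARGET
   statement with a NULL body, whose kind (GF_OMP_TARGET_KIND_UPDATE in
   this case) identifies the standalone construct, with the gimplified
   clauses attached to it.  */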

static void
gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  int kind;
  gomp_target *stmt;
  enum omp_region_type ort = ORT_WORKSHARE;

  switch (TREE_CODE (expr))
    {
    case OACC_ENTER_DATA:
      kind = GF_OMP_TARGET_KIND_OACC_ENTER_DATA;
      ort = ORT_ACC;
      break;
    case OACC_EXIT_DATA:
      kind = GF_OMP_TARGET_KIND_OACC_EXIT_DATA;
      ort = ORT_ACC;
      break;
    case OACC_UPDATE:
      kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
      ort = ORT_ACC;
      break;
    case OMP_TARGET_UPDATE:
      kind = GF_OMP_TARGET_KIND_UPDATE;
      break;
    case OMP_TARGET_ENTER_DATA:
      kind = GF_OMP_TARGET_KIND_ENTER_DATA;
      break;
    case OMP_TARGET_EXIT_DATA:
      kind = GF_OMP_TARGET_KIND_EXIT_DATA;
      break;
    default:
      gcc_unreachable ();
    }
  gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
			     ort, TREE_CODE (expr));
  gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
			       TREE_CODE (expr));
  if (TREE_CODE (expr) == OACC_UPDATE
      && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
			  OMP_CLAUSE_IF_PRESENT))
    {
      /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
	 clause.  */
      for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
	  switch (OMP_CLAUSE_MAP_KIND (c))
	    {
	    case GOMP_MAP_FORCE_TO:
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
	      break;
	    case GOMP_MAP_FORCE_FROM:
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FROM);
	      break;
	    default:
	      break;
	    }
    }
  else if (TREE_CODE (expr) == OACC_EXIT_DATA
	   && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
			       OMP_CLAUSE_FINALIZE))
    {
      /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote "finalize"
	 semantics.  */
      bool have_clause = false;
      for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
	  switch (OMP_CLAUSE_MAP_KIND (c))
	    {
	    case GOMP_MAP_FROM:
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_FROM);
	      have_clause = true;
	      break;
	    case GOMP_MAP_RELEASE:
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DELETE);
	      have_clause = true;
	      break;
	    case GOMP_MAP_TO_PSET:
	      /* Fortran arrays with descriptors must map that descriptor when
		 doing standalone "attach" operations (in OpenACC).  In that
		 case GOMP_MAP_TO_PSET appears by itself with no preceding
		 clause (see trans-openmp.cc:gfc_trans_omp_clauses).  */
	      break;
	    case GOMP_MAP_POINTER:
	      /* TODO PR92929: we may see these here, but they'll always follow
		 one of the clauses above, and will be handled by libgomp as
		 one group, so no handling required here.  */
	      gcc_assert (have_clause);
	      break;
	    case GOMP_MAP_DETACH:
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_DETACH);
	      have_clause = false;
	      break;
	    case GOMP_MAP_STRUCT:
	    case GOMP_MAP_STRUCT_UNORD:
	      have_clause = false;
	      break;
	    default:
	      gcc_unreachable ();
	    }
    }
  stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));

  gimplify_seq_add_stmt (pre_p, stmt);
  *expr_p = NULL_TREE;
}

/* A subroutine of gimplify_omp_atomic.  The front end is supposed to have
   stabilized the lhs of the atomic operation as *ADDR.  Return true if
   EXPR is this stabilized form.  */
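/* For example (illustrative only): for an atomic operation on a variable
   X, the front end presents the lhs as *&X; with ADDR == &X, both *&X
   and plain X are accepted here as the stabilized lhs, looking through
   useless type conversions such as casts to volatile variants.  */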

static bool
goa_lhs_expr_p (tree expr, tree addr)
{
  /* Also include casts to other type variants.  The C front end is fond
     of adding these for e.g. volatile variables.  This is like
     STRIP_TYPE_NOPS but includes the main variant lookup.  */
  STRIP_USELESS_TYPE_CONVERSION (expr);

  if (INDIRECT_REF_P (expr))
    {
      expr = TREE_OPERAND (expr, 0);
      while (expr != addr
	     && (CONVERT_EXPR_P (expr)
		 || TREE_CODE (expr) == NON_LVALUE_EXPR)
	     && TREE_CODE (expr) == TREE_CODE (addr)
	     && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
	{
	  expr = TREE_OPERAND (expr, 0);
	  addr = TREE_OPERAND (addr, 0);
	}
      if (expr == addr)
	return true;
      return (TREE_CODE (addr) == ADDR_EXPR
	      && TREE_CODE (expr) == ADDR_EXPR
	      && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
    }
  if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
    return true;
  return false;
}

/* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR.  If an
   expression does not involve the lhs, evaluate it into a temporary.
   Return 1 if the lhs appeared as a subexpression, 0 if it did not,
   or -1 if an error was encountered.  */
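/* Illustrative sketch only: for an atomic update  x = x + y  with
   LHS_ADDR == &x and LHS_VAR == tmp_load, the rhs form  *&x + y  is
   rewritten to  tmp_load + y', where y' stands for y pre-evaluated
   into a temporary if it is not already a GIMPLE value.  */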

static int
goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
		    tree lhs_var, tree &target_expr, bool rhs, int depth)
{
  tree expr = *expr_p;
  int saw_lhs = 0;

  if (goa_lhs_expr_p (expr, lhs_addr))
    {
      if (pre_p)
	*expr_p = lhs_var;
      return 1;
    }
  if (is_gimple_val (expr))
    return 0;

  /* The maximum depth at which the lhs can appear is for the
     __builtin_clear_padding (...), __builtin_clear_padding (...),
     __builtin_memcmp (&TARGET_EXPR <lhs, >, ...) == 0 ? ... : lhs;
     form.  */
  if (++depth > 7)
    goto finish;

  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_binary:
    case tcc_comparison:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
				     lhs_var, target_expr, true, depth);
      /* FALLTHRU */
    case tcc_unary:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
				     lhs_var, target_expr, true, depth);
      break;
    case tcc_expression:
      switch (TREE_CODE (expr))
	{
	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	case BIT_INSERT_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
					 lhs_addr, lhs_var, target_expr, true,
					 depth);
	  /* FALLTHRU */
	case TRUTH_NOT_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var, target_expr, true,
					 depth);
	  break;
	case MODIFY_EXPR:
	  if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr, lhs_var,
					    target_expr, true, depth))
	    break;
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
					 lhs_addr, lhs_var, target_expr, true,
					 depth);
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var, target_expr, false,
					 depth);
	  break;
	case ADDR_EXPR:
	  if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr, lhs_var,
					    target_expr, true, depth))
	    break;
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var, target_expr, false,
					 depth);
	  break;
	case COMPOUND_EXPR:
	  /* Break out any preevaluations from cp_build_modify_expr.  */
	  for (; TREE_CODE (expr) == COMPOUND_EXPR;
	       expr = TREE_OPERAND (expr, 1))
	    {
	      /* Special-case a __builtin_clear_padding call before
		 __builtin_memcmp.  */
	      if (TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR)
		{
		  tree fndecl = get_callee_fndecl (TREE_OPERAND (expr, 0));
		  if (fndecl
		      && fndecl_built_in_p (fndecl, BUILT_IN_CLEAR_PADDING)
		      && VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
		      && (!pre_p
			  || goa_stabilize_expr (&TREE_OPERAND (expr, 0), NULL,
						 lhs_addr, lhs_var,
						 target_expr, true, depth)))
		    {
		      if (pre_p)
			*expr_p = expr;
		      saw_lhs = goa_stabilize_expr (&TREE_OPERAND (expr, 0),
						    pre_p, lhs_addr, lhs_var,
						    target_expr, true, depth);
		      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1),
						     pre_p, lhs_addr, lhs_var,
						     target_expr, rhs, depth);
		      return saw_lhs;
		    }
		}

	      if (pre_p)
		gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
	    }
	  if (!pre_p)
	    return goa_stabilize_expr (&expr, pre_p, lhs_addr, lhs_var,
				       target_expr, rhs, depth);
	  *expr_p = expr;
	  return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var,
				     target_expr, rhs, depth);
	case COND_EXPR:
	  if (!goa_stabilize_expr (&TREE_OPERAND (expr, 0), NULL, lhs_addr,
				   lhs_var, target_expr, true, depth))
	    break;
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var, target_expr, true,
					 depth);
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
					 lhs_addr, lhs_var, target_expr, true,
					 depth);
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 2), pre_p,
					 lhs_addr, lhs_var, target_expr, true,
					 depth);
	  break;
	case TARGET_EXPR:
	  if (TARGET_EXPR_INITIAL (expr))
	    {
	      if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr,
						lhs_var, target_expr, true,
						depth))
		break;
	      if (expr == target_expr)
		saw_lhs = 1;
	      else
		{
		  saw_lhs = goa_stabilize_expr (&TARGET_EXPR_INITIAL (expr),
						pre_p, lhs_addr, lhs_var,
						target_expr, true, depth);
		  if (saw_lhs && target_expr == NULL_TREE && pre_p)
		    target_expr = expr;
		}
	    }
	  break;
	default:
	  break;
	}
      break;
    case tcc_reference:
      if (TREE_CODE (expr) == BIT_FIELD_REF
	  || TREE_CODE (expr) == VIEW_CONVERT_EXPR)
	saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
				       lhs_addr, lhs_var, target_expr, true,
				       depth);
      break;
    case tcc_vl_exp:
      if (TREE_CODE (expr) == CALL_EXPR)
	{
	  if (tree fndecl = get_callee_fndecl (expr))
	    if (fndecl_built_in_p (fndecl, BUILT_IN_CLEAR_PADDING,
				   BUILT_IN_MEMCMP))
	      {
		int nargs = call_expr_nargs (expr);
		for (int i = 0; i < nargs; i++)
		  saw_lhs |= goa_stabilize_expr (&CALL_EXPR_ARG (expr, i),
						 pre_p, lhs_addr, lhs_var,
						 target_expr, true, depth);
	      }
	}
      break;
    default:
      break;
    }

 finish:
  if (saw_lhs == 0 && pre_p)
    {
      enum gimplify_status gs;
      if (TREE_CODE (expr) == CALL_EXPR && VOID_TYPE_P (TREE_TYPE (expr)))
	{
	  gimplify_stmt (&expr, pre_p);
	  return saw_lhs;
	}
      else if (rhs)
	gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
      else
	gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_lvalue, fb_lvalue);
      if (gs != GS_ALL_DONE)
	saw_lhs = -1;
    }

  return saw_lhs;
}

/* Gimplify an OMP_ATOMIC statement.  */
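/* An illustrative sketch (temporary names invented): for

     #pragma omp atomic capture
     v = x += y;

   gimplify_omp_atomic below emits roughly

     tmp.1 = GIMPLE_OMP_ATOMIC_LOAD (&x)
     tmp.2 = tmp.1 + y
     GIMPLE_OMP_ATOMIC_STORE (tmp.2)

   and, since this is OMP_ATOMIC_CAPTURE_NEW, marks the store as
   "needs value" and returns tmp.2 in *EXPR_P so the captured result
   can be assigned to v afterwards.  */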

static enum gimplify_status
gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
{
  tree addr = TREE_OPERAND (*expr_p, 0);
  tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
	     ? NULL : TREE_OPERAND (*expr_p, 1);
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
  tree tmp_load;
  gomp_atomic_load *loadstmt;
  gomp_atomic_store *storestmt;
  tree target_expr = NULL_TREE;

  tmp_load = create_tmp_reg (type);
  if (rhs
      && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load, target_expr,
			     true, 0) < 0)
    return GS_ERROR;

  if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
      != GS_ALL_DONE)
    return GS_ERROR;

  loadstmt = gimple_build_omp_atomic_load (tmp_load, addr,
					   OMP_ATOMIC_MEMORY_ORDER (*expr_p));
  gimplify_seq_add_stmt (pre_p, loadstmt);
  if (rhs)
    {
      /* BIT_INSERT_EXPR is not valid for non-integral bitfield
	 representatives.  Use BIT_FIELD_REF on the lhs instead.  */
      tree rhsarg = rhs;
      if (TREE_CODE (rhs) == COND_EXPR)
	rhsarg = TREE_OPERAND (rhs, 1);
      if (TREE_CODE (rhsarg) == BIT_INSERT_EXPR
	  && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load)))
	{
	  tree bitpos = TREE_OPERAND (rhsarg, 2);
	  tree op1 = TREE_OPERAND (rhsarg, 1);
	  tree bitsize;
	  tree tmp_store = tmp_load;
	  if (TREE_CODE (*expr_p) == OMP_ATOMIC_CAPTURE_OLD)
	    tmp_store = get_initialized_tmp_var (tmp_load, pre_p);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (op1)))
	    bitsize = bitsize_int (TYPE_PRECISION (TREE_TYPE (op1)));
	  else
	    bitsize = TYPE_SIZE (TREE_TYPE (op1));
	  gcc_assert (TREE_OPERAND (rhsarg, 0) == tmp_load);
	  tree t = build2_loc (EXPR_LOCATION (rhsarg),
			       MODIFY_EXPR, void_type_node,
			       build3_loc (EXPR_LOCATION (rhsarg),
					   BIT_FIELD_REF, TREE_TYPE (op1),
					   tmp_store, bitsize, bitpos), op1);
	  if (TREE_CODE (rhs) == COND_EXPR)
	    t = build3_loc (EXPR_LOCATION (rhs), COND_EXPR, void_type_node,
			    TREE_OPERAND (rhs, 0), t, void_node);
	  gimplify_and_add (t, pre_p);
	  rhs = tmp_store;
	}
      bool save_allow_rhs_cond_expr = gimplify_ctxp->allow_rhs_cond_expr;
      if (TREE_CODE (rhs) == COND_EXPR)
	gimplify_ctxp->allow_rhs_cond_expr = true;
      enum gimplify_status gs = gimplify_expr (&rhs, pre_p, NULL,
					       is_gimple_val, fb_rvalue);
      gimplify_ctxp->allow_rhs_cond_expr = save_allow_rhs_cond_expr;
      if (gs != GS_ALL_DONE)
	return GS_ERROR;
    }

  if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
    rhs = tmp_load;
  storestmt
    = gimple_build_omp_atomic_store (rhs, OMP_ATOMIC_MEMORY_ORDER (*expr_p));
  if (TREE_CODE (*expr_p) != OMP_ATOMIC_READ && OMP_ATOMIC_WEAK (*expr_p))
    {
      gimple_omp_atomic_set_weak (loadstmt);
      gimple_omp_atomic_set_weak (storestmt);
    }
  gimplify_seq_add_stmt (pre_p, storestmt);
  switch (TREE_CODE (*expr_p))
    {
    case OMP_ATOMIC_READ:
    case OMP_ATOMIC_CAPTURE_OLD:
      *expr_p = tmp_load;
      gimple_omp_atomic_set_need_value (loadstmt);
      break;
    case OMP_ATOMIC_CAPTURE_NEW:
      *expr_p = rhs;
      gimple_omp_atomic_set_need_value (storestmt);
      break;
    default:
      *expr_p = NULL;
      break;
    }

  return GS_ALL_DONE;
}

/* Gimplify a TRANSACTION_EXPR.  This involves gimplification of the
   body, and adding some EH bits.  */
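/* For example (illustrative only):

     __transaction_atomic { x++; }

   becomes a GIMPLE_TRANSACTION statement wrapping the gimplified body;
   the subcode distinguishes plain, outer and relaxed transactions.  */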

static enum gimplify_status
gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
  gimple *body_stmt;
  gtransaction *trans_stmt;
  gimple_seq body = NULL;
  int subcode = 0;

  /* Wrap the transaction body in a BIND_EXPR so we have a context
     where to put decls for OMP.  */
  if (TREE_CODE (tbody) != BIND_EXPR)
    {
      tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
      TREE_SIDE_EFFECTS (bind) = 1;
      SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
      TRANSACTION_EXPR_BODY (expr) = bind;
    }

  push_gimplify_context ();
  temp = voidify_wrapper_expr (*expr_p, NULL);

  body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
  pop_gimplify_context (body_stmt);

  trans_stmt = gimple_build_transaction (body);
  if (TRANSACTION_EXPR_OUTER (expr))
    subcode = GTMA_IS_OUTER;
  else if (TRANSACTION_EXPR_RELAXED (expr))
    subcode = GTMA_IS_RELAXED;
  gimple_transaction_set_subcode (trans_stmt, subcode);

  gimplify_seq_add_stmt (pre_p, trans_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}

/* Gimplify an OMP_ORDERED construct.  EXPR is the tree version.  BODY
   is the OMP_BODY of the original EXPR, which has already been
   gimplified and so is no longer present in EXPR.

   Return the gimplified GIMPLE_OMP_ORDERED tuple.  */
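/* For example (illustrative only), in a doacross loop

     #pragma omp for ordered(1)
     for (i = 1; i < n; i++)
       {
	 #pragma omp ordered depend(sink: i - 1)
	 ...
	 #pragma omp ordered depend(source)
       }

   the sink/source clauses are validated here against the iteration
   variables recorded in gimplify_omp_ctxp before the
   GIMPLE_OMP_ORDERED statement is built.  */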

static gimple *
gimplify_omp_ordered (tree expr, gimple_seq body)
{
  tree c, decls;
  int failures = 0;
  unsigned int i;
  tree source_c = NULL_TREE;
  tree sink_c = NULL_TREE;

  if (gimplify_omp_ctxp)
    {
      for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
	    && gimplify_omp_ctxp->loop_iter_var.is_empty ())
	  {
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<ordered%> construct with %qs clause must be "
		      "closely nested inside a loop with %<ordered%> clause",
		      OMP_CLAUSE_DOACROSS_DEPEND (c) ? "depend" : "doacross");
	    failures++;
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
		 && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
	  {
	    bool fail = false;
	    sink_c = c;
	    if (OMP_CLAUSE_DECL (c) == NULL_TREE)
	      continue;  /* omp_cur_iteration - 1 */
	    for (decls = OMP_CLAUSE_DECL (c), i = 0;
		 decls && TREE_CODE (decls) == TREE_LIST;
		 decls = TREE_CHAIN (decls), ++i)
	      if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
		continue;
	      else if (TREE_VALUE (decls)
		       != gimplify_omp_ctxp->loop_iter_var[2 * i])
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "variable %qE is not an iteration "
			    "of outermost loop %d, expected %qE",
			    TREE_VALUE (decls), i + 1,
			    gimplify_omp_ctxp->loop_iter_var[2 * i]);
		  fail = true;
		  failures++;
		}
	      else
		TREE_VALUE (decls)
		  = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
	    if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
	      {
		error_at (OMP_CLAUSE_LOCATION (c),
			  "number of variables in %qs clause with "
			  "%<sink%> modifier does not match number of "
			  "iteration variables",
			  OMP_CLAUSE_DOACROSS_DEPEND (c)
			  ? "depend" : "doacross");
		failures++;
	      }
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
		 && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SOURCE)
	  {
	    if (source_c)
	      {
		error_at (OMP_CLAUSE_LOCATION (c),
			  "more than one %qs clause with %<source%> "
			  "modifier on an %<ordered%> construct",
			  OMP_CLAUSE_DOACROSS_DEPEND (source_c)
			  ? "depend" : "doacross");
		failures++;
	      }
	    else
	      source_c = c;
	  }
    }
  if (source_c && sink_c)
    {
      error_at (OMP_CLAUSE_LOCATION (source_c),
		"%qs clause with %<source%> modifier specified "
		"together with %qs clauses with %<sink%> modifier "
		"on the same construct",
		OMP_CLAUSE_DOACROSS_DEPEND (source_c) ? "depend" : "doacross",
		OMP_CLAUSE_DOACROSS_DEPEND (sink_c) ? "depend" : "doacross");
      failures++;
    }

  if (failures)
    return gimple_build_nop ();
  return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
}

/* Convert the GENERIC expression tree *EXPR_P to GIMPLE.  If the
   expression produces a value to be used as an operand inside a GIMPLE
   statement, the value will be stored back in *EXPR_P.  This value will
   be a tree of class tcc_declaration, tcc_constant, tcc_reference or
   an SSA_NAME.  The corresponding sequence of GIMPLE statements is
   emitted in PRE_P and POST_P.

   Additionally, this process may overwrite parts of the input
   expression during gimplification.  Ideally, it should be
   possible to do non-destructive gimplification.

   EXPR_P points to the GENERIC expression to convert to GIMPLE.  If
   the expression needs to evaluate to a value to be used as
   an operand in a GIMPLE statement, this value will be stored in
   *EXPR_P on exit.  This happens when the caller specifies one
   of fb_lvalue or fb_rvalue fallback flags.

   PRE_P will contain the sequence of GIMPLE statements corresponding
   to the evaluation of EXPR and all the side-effects that must
   be executed before the main expression.  On exit, the last
   statement of PRE_P is the core statement being gimplified.  For
   instance, when gimplifying 'if (++a)' the last statement in
   PRE_P will be 'if (t.1)' where t.1 is the result of
   pre-incrementing 'a'.

   POST_P will contain the sequence of GIMPLE statements corresponding
   to the evaluation of all the side-effects that must be executed
   after the main expression.  If this is NULL, the post
   side-effects are stored at the end of PRE_P.

   The reason why the output is split in two is to handle post
   side-effects explicitly.  In some cases, an expression may have
   inner and outer post side-effects which need to be emitted in
   an order different from the one given by the recursive
   traversal.  For instance, for the expression (*p--)++ the post
   side-effects of '--' must actually occur *after* the post
   side-effects of '++'.  However, gimplification will first visit
   the inner expression, so if a separate POST sequence was not
   used, the resulting sequence would be:

	1	t.1 = *p
	2	p = p - 1
	3	t.2 = t.1 + 1
	4	*p = t.2

   However, the post-decrement operation in line #2 must not be
   evaluated until after the store to *p at line #4, so the
   correct sequence should be:

	1	t.1 = *p
	2	t.2 = t.1 + 1
	3	*p = t.2
	4	p = p - 1

   So, by specifying a separate post queue, it is possible
   to emit the post side-effects in the correct order.
   If POST_P is NULL, an internal queue will be used.  Before
   returning to the caller, the sequence POST_P is appended to
   the main output sequence PRE_P.

   GIMPLE_TEST_F points to a function that takes a tree T and
   returns nonzero if T is in the GIMPLE form requested by the
   caller.  The GIMPLE predicates are in gimple.cc.

   FALLBACK tells the function what sort of a temporary we want if
   gimplification cannot produce an expression that complies with
   GIMPLE_TEST_F.

   fb_none means that no temporary should be generated
   fb_rvalue means that an rvalue is OK to generate
   fb_lvalue means that an lvalue is OK to generate
   fb_either means that either is OK, but an lvalue is preferable.
   fb_mayfail means that gimplification may fail (in which case
   GS_ERROR will be returned)

   The return value is either GS_ERROR or GS_ALL_DONE, since this
   function iterates until EXPR is completely gimplified or an error
   occurs.  */
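/* A typical use (illustrative only) is forcing an operand OP into a
   GIMPLE value, with any needed statements emitted into PRE_P:

     if (gimplify_expr (&op, pre_p, post_p, is_gimple_val, fb_rvalue)
	 == GS_ERROR)
       return GS_ERROR;

   On success OP satisfies is_gimple_val and can be used directly as a
   GIMPLE operand.  */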

enum gimplify_status
gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
	       bool (*gimple_test_f) (tree), fallback_t fallback)
{
  tree tmp;
  gimple_seq internal_pre = NULL;
  gimple_seq internal_post = NULL;
  tree save_expr;
  bool is_statement;
  location_t saved_location;
  enum gimplify_status ret;
  gimple_stmt_iterator pre_last_gsi, post_last_gsi;
  tree label;

  save_expr = *expr_p;
  if (save_expr == NULL_TREE)
    return GS_ALL_DONE;

  /* If we are gimplifying a top-level statement, PRE_P must be valid.  */
  is_statement = gimple_test_f == is_gimple_stmt;
  if (is_statement)
    gcc_assert (pre_p);

  /* Consistency checks.  */
  if (gimple_test_f == is_gimple_reg)
    gcc_assert (fallback & (fb_rvalue | fb_lvalue));
  else if (gimple_test_f == is_gimple_val
	   || gimple_test_f == is_gimple_call_addr
	   || gimple_test_f == is_gimple_condexpr_for_cond
	   || gimple_test_f == is_gimple_mem_rhs
	   || gimple_test_f == is_gimple_mem_rhs_or_call
	   || gimple_test_f == is_gimple_reg_rhs
	   || gimple_test_f == is_gimple_reg_rhs_or_call
	   || gimple_test_f == is_gimple_asm_val
	   || gimple_test_f == is_gimple_mem_ref_addr)
    gcc_assert (fallback & fb_rvalue);
  else if (gimple_test_f == is_gimple_min_lval
	   || gimple_test_f == is_gimple_lvalue)
    gcc_assert (fallback & fb_lvalue);
  else if (gimple_test_f == is_gimple_addressable)
    gcc_assert (fallback & fb_either);
  else if (gimple_test_f == is_gimple_stmt)
    gcc_assert (fallback == fb_none);
  else
    {
      /* We should have recognized the GIMPLE_TEST_F predicate to
	 know what kind of fallback to use in case a temporary is
	 needed to hold the value or address of *EXPR_P.  */
      gcc_unreachable ();
    }

  /* We used to check the predicate here and return immediately if it
     succeeds.  This is wrong; the design is for gimplification to be
     idempotent, and for the predicates to only test for valid forms, not
     whether they are fully simplified.  */
  if (pre_p == NULL)
    pre_p = &internal_pre;

  if (post_p == NULL)
    post_p = &internal_post;

  /* Remember the last statements added to PRE_P and POST_P.  Every
     new statement added by the gimplification helpers needs to be
     annotated with location information.  To centralize the
     responsibility, we remember the last statement that had been
     added to both queues before gimplifying *EXPR_P.  If
     gimplification produces new statements in PRE_P and POST_P, those
     statements will be annotated with the same location information
     as *EXPR_P.  */
  pre_last_gsi = gsi_last (*pre_p);
  post_last_gsi = gsi_last (*post_p);

  saved_location = input_location;
  if (save_expr != error_mark_node
      && EXPR_HAS_LOCATION (*expr_p))
    input_location = EXPR_LOCATION (*expr_p);

  /* Loop over the specific gimplifiers until the toplevel node
     remains the same.  */
  do
    {
      /* Strip away as many useless type conversions as possible
	 at the toplevel.  */
      STRIP_USELESS_TYPE_CONVERSION (*expr_p);

      /* Remember the expr.  */
      save_expr = *expr_p;

      /* Die, die, die, my darling.  */
      if (error_operand_p (save_expr))
	{
	  ret = GS_ERROR;
	  break;
	}

      /* Do any language-specific gimplification.  */
      ret = ((enum gimplify_status)
	     lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
      if (ret == GS_OK)
	{
	  if (*expr_p == NULL_TREE)
	    break;
	  if (*expr_p != save_expr)
	    continue;
	}
      else if (ret != GS_UNHANDLED)
	break;

      /* Make sure that all the cases set 'ret' appropriately.  */
      ret = GS_UNHANDLED;
      switch (TREE_CODE (*expr_p))
	{
	  /* First deal with the special cases.  */

	case POSTINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case PREDECREMENT_EXPR:
	  ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
					fallback != fb_none,
					TREE_TYPE (*expr_p));
	  break;

	case VIEW_CONVERT_EXPR:
	  if ((fallback & fb_rvalue)
	      && is_gimple_reg_type (TREE_TYPE (*expr_p))
	      && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
	    {
	      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
				   post_p, is_gimple_val, fb_rvalue);
	      recalculate_side_effects (*expr_p);
	      break;
	    }
	  /* Fallthru.  */

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	case REALPART_EXPR:
	case IMAGPART_EXPR:
	case COMPONENT_REF:
	  ret = gimplify_compound_lval (expr_p, pre_p, post_p,
					fallback ? fallback : fb_rvalue);
	  break;

	case COND_EXPR:
	  ret = gimplify_cond_expr (expr_p, pre_p, fallback);

	  /* C99 code may assign to an array in a structure value of a
	     conditional expression, and this has undefined behavior
	     only on execution, so create a temporary if an lvalue is
	     required.  */
	  if (fallback == fb_lvalue)
	    {
	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
	      mark_addressable (*expr_p);
	      ret = GS_OK;
	    }
	  break;

	case CALL_EXPR:
	  ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);

	  /* C99 code may assign to an array in a structure returned
	     from a function, and this has undefined behavior only on
	     execution, so create a temporary if an lvalue is
	     required.  */
	  if (fallback == fb_lvalue)
	    {
	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
	      mark_addressable (*expr_p);
	      ret = GS_OK;
	    }
	  break;

	case TREE_LIST:
	  gcc_unreachable ();

	case OMP_ARRAY_SECTION:
	  gcc_unreachable ();

	case COMPOUND_EXPR:
	  ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
	  break;

	case COMPOUND_LITERAL_EXPR:
	  ret = gimplify_compound_literal_expr (expr_p, pre_p,
						gimple_test_f, fallback);
	  break;

	case MODIFY_EXPR:
	case INIT_EXPR:
	  ret = gimplify_modify_expr (expr_p, pre_p, post_p,
				      fallback != fb_none);
	  break;

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  {
	    /* Preserve the original type of the expression and the
	       source location of the outer expression.  */
	    tree org_type = TREE_TYPE (*expr_p);
	    *expr_p = gimple_boolify (*expr_p);
	    *expr_p = build3_loc (input_location, COND_EXPR,
				  org_type, *expr_p,
				  fold_convert_loc
				    (input_location,
				     org_type, boolean_true_node),
				  fold_convert_loc
				    (input_location,
				     org_type, boolean_false_node));
	    ret = GS_OK;
	    break;
	  }

	case TRUTH_NOT_EXPR:
	  {
	    tree type = TREE_TYPE (*expr_p);
	    /* The parsers are careful to generate TRUTH_NOT_EXPR
	       only with operands that are always zero or one.
	       We do not fold here but handle the only interesting case
	       manually, as fold may re-introduce the TRUTH_NOT_EXPR.  */
	    *expr_p = gimple_boolify (*expr_p);
	    if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
	      *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
				    TREE_TYPE (*expr_p),
				    TREE_OPERAND (*expr_p, 0));
	    else
	      *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
				    TREE_TYPE (*expr_p),
				    TREE_OPERAND (*expr_p, 0),
				    build_int_cst (TREE_TYPE (*expr_p), 1));
	    if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
	      *expr_p = fold_convert_loc (input_location, type, *expr_p);
	    ret = GS_OK;
	    break;
	  }

	case ADDR_EXPR:
	  ret = gimplify_addr_expr (expr_p, pre_p, post_p);
	  break;

	case ANNOTATE_EXPR:
	  {
	    tree cond = TREE_OPERAND (*expr_p, 0);
	    tree kind = TREE_OPERAND (*expr_p, 1);
	    tree data = TREE_OPERAND (*expr_p, 2);
	    tree type = TREE_TYPE (cond);
	    if (!INTEGRAL_TYPE_P (type))
	      {
		*expr_p = cond;
		ret = GS_OK;
		break;
	      }
	    tree tmp = create_tmp_var (type);
	    gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
	    gcall *call
	      = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
	    gimple_call_set_lhs (call, tmp);
	    gimplify_seq_add_stmt (pre_p, call);
	    *expr_p = tmp;
	    ret = GS_ALL_DONE;
	    break;
	  }

	case VA_ARG_EXPR:
	  ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
	  break;

	CASE_CONVERT:
	  if (IS_EMPTY_STMT (*expr_p))
	    {
	      ret = GS_ALL_DONE;
	      break;
	    }

	  if (VOID_TYPE_P (TREE_TYPE (*expr_p))
	      || fallback == fb_none)
	    {
	      /* Just strip a conversion to void (or in void context) and
		 try again.  */
	      *expr_p = TREE_OPERAND (*expr_p, 0);
	      ret = GS_OK;
	      break;
	    }

	  ret = gimplify_conversion (expr_p);
	  if (ret == GS_ERROR)
	    break;
	  if (*expr_p != save_expr)
	    break;
	  /* FALLTHRU */

	case FIX_TRUNC_EXPR:
	  /* unary_expr: ... | '(' cast ')' val | ...  */
	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			       is_gimple_val, fb_rvalue);
	  recalculate_side_effects (*expr_p);
	  break;

	case INDIRECT_REF:
	  {
	    bool volatilep = TREE_THIS_VOLATILE (*expr_p);
	    bool notrap = TREE_THIS_NOTRAP (*expr_p);
	    tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));

	    *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
	    if (*expr_p != save_expr)
	      {
		ret = GS_OK;
		break;
	      }

	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
				 is_gimple_reg, fb_rvalue);
	    if (ret == GS_ERROR)
	      break;

	    recalculate_side_effects (*expr_p);
	    *expr_p = fold_build2_loc (input_location, MEM_REF,
				       TREE_TYPE (*expr_p),
				       TREE_OPERAND (*expr_p, 0),
				       build_int_cst (saved_ptr_type, 0));
	    TREE_THIS_VOLATILE (*expr_p) = volatilep;
	    TREE_THIS_NOTRAP (*expr_p) = notrap;
	    ret = GS_OK;
	    break;
	  }

	  /* We arrive here through the various re-gimplification paths.  */
	case MEM_REF:
	  /* First try re-folding the whole thing.  */
	  tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
			     TREE_OPERAND (*expr_p, 0),
			     TREE_OPERAND (*expr_p, 1));
	  if (tmp)
	    {
	      REF_REVERSE_STORAGE_ORDER (tmp)
		= REF_REVERSE_STORAGE_ORDER (*expr_p);
	      *expr_p = tmp;
	      recalculate_side_effects (*expr_p);
	      ret = GS_OK;
	      break;
	    }
	  /* Avoid re-gimplifying the address operand if it is already
	     in suitable form.  Re-gimplifying would mark the address
	     operand addressable.  Always gimplify when not in SSA form
	     as we still may have to gimplify decls with value-exprs.  */
	  if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
	      || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
	    {
	      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
				   is_gimple_mem_ref_addr, fb_rvalue);
	      if (ret == GS_ERROR)
		break;
	    }
	  recalculate_side_effects (*expr_p);
	  ret = GS_ALL_DONE;
	  break;

	  /* Constants need not be gimplified.  */
	case INTEGER_CST:
	case REAL_CST:
	case FIXED_CST:
	case STRING_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	  /* Drop the overflow flag on constants, we do not want
	     that in the GIMPLE IL.  */
	  if (TREE_OVERFLOW_P (*expr_p))
	    *expr_p = drop_tree_overflow (*expr_p);
	  ret = GS_ALL_DONE;
	  break;

	case CONST_DECL:
	  /* If we require an lvalue, such as for ADDR_EXPR, retain the
	     CONST_DECL node.  Otherwise the decl is replaceable by its
	     value.  */
	  /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either.  */
	  if (fallback & fb_lvalue)
	    ret = GS_ALL_DONE;
	  else
	    {
	      *expr_p = DECL_INITIAL (*expr_p);
	      ret = GS_OK;
	    }
	  break;

	case DECL_EXPR:
	  ret = gimplify_decl_expr (expr_p, pre_p);
	  break;

	case BIND_EXPR:
	  ret = gimplify_bind_expr (expr_p, pre_p);
	  break;

	case LOOP_EXPR:
	  ret = gimplify_loop_expr (expr_p, pre_p);
	  break;

	case SWITCH_EXPR:
	  ret = gimplify_switch_expr (expr_p, pre_p);
	  break;

	case EXIT_EXPR:
	  ret = gimplify_exit_expr (expr_p);
	  break;

	case GOTO_EXPR:
	  /* If the target is not a LABEL_DECL, then it is a computed jump
	     and the target needs to be gimplified.  */
	  if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
	    {
	      ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
				   NULL, is_gimple_val, fb_rvalue);
	      if (ret == GS_ERROR)
		break;
	    }
	  gimplify_seq_add_stmt (pre_p,
				 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
	  ret = GS_ALL_DONE;
	  break;

	case PREDICT_EXPR:
	  gimplify_seq_add_stmt (pre_p,
				 gimple_build_predict
				   (PREDICT_EXPR_PREDICTOR (*expr_p),
				    PREDICT_EXPR_OUTCOME (*expr_p)));
	  ret = GS_ALL_DONE;
	  break;

	case LABEL_EXPR:
	  ret = gimplify_label_expr (expr_p, pre_p);
	  label = LABEL_EXPR_LABEL (*expr_p);
	  gcc_assert (decl_function_context (label) == current_function_decl);

	  /* If the label is used in a goto statement, or the address of the
	     label is taken, we need to unpoison all variables that were seen
	     so far.  Doing so prevents us from reporting false positives.  */
	  if (asan_poisoned_variables
	      && asan_used_labels != NULL
	      && asan_used_labels->contains (label)
	      && !gimplify_omp_ctxp)
	    asan_poison_variables (asan_poisoned_variables, false, pre_p);
	  break;

	case CASE_LABEL_EXPR:
	  ret = gimplify_case_label_expr (expr_p, pre_p);

	  if (gimplify_ctxp->live_switch_vars)
	    asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
				   pre_p);
	  break;

	case RETURN_EXPR:
	  ret = gimplify_return_expr (*expr_p, pre_p);
	  break;

	case CONSTRUCTOR:
	  /* Don't reduce this in place; let gimplify_init_constructor work
	     its magic.  But if we're just elaborating this for side effects,
	     just gimplify any element that has side-effects.  */
	  if (fallback == fb_none)
	    {
	      unsigned HOST_WIDE_INT ix;
	      tree val;
	      tree temp = NULL_TREE;
	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
		if (TREE_SIDE_EFFECTS (val))
		  append_to_statement_list (val, &temp);

	      *expr_p = temp;
	      ret = temp ? GS_OK : GS_ALL_DONE;
	    }
	  /* C99 code may assign to an array in a constructed
	     structure or union, and this has undefined behavior only
	     on execution, so create a temporary if an lvalue is
	     required.  */
	  else if (fallback == fb_lvalue)
	    {
	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
	      mark_addressable (*expr_p);
	      ret = GS_OK;
	    }
	  else
	    ret = GS_ALL_DONE;
	  break;

	  /* The following are special cases that are not handled by the
	     original GIMPLE grammar.  */

	  /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
	     eliminated.  */
	case SAVE_EXPR:
	  ret = gimplify_save_expr (expr_p, pre_p, post_p);
	  break;

	case BIT_FIELD_REF:
	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
			       post_p, is_gimple_lvalue, fb_either);
	  recalculate_side_effects (*expr_p);
	  break;

	case TARGET_MEM_REF:
	  {
	    enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;

	    if (TMR_BASE (*expr_p))
	      r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
				  post_p, is_gimple_mem_ref_addr, fb_either);
	    if (TMR_INDEX (*expr_p))
	      r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
				  post_p, is_gimple_val, fb_rvalue);
	    if (TMR_INDEX2 (*expr_p))
	      r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
				  post_p, is_gimple_val, fb_rvalue);
	    /* TMR_STEP and TMR_OFFSET are always integer constants.  */
	    ret = MIN (r0, r1);
	  }
	  break;

	case NON_LVALUE_EXPR:
	  /* This should have been stripped above.  */
	  gcc_unreachable ();

	case ASM_EXPR:
	  ret = gimplify_asm_expr (expr_p, pre_p, post_p);
	  break;

	case TRY_FINALLY_EXPR:
	case TRY_CATCH_EXPR:
	  {
	    gimple_seq eval, cleanup;
	    gtry *try_;

	    /* Calls to destructors are generated automatically in FINALLY/CATCH
	       block.  They should have location as UNKNOWN_LOCATION.  However,
	       gimplify_call_expr will reset these call stmts to input_location
	       if it finds stmt's location is unknown.  To prevent resetting for
	       destructors, we set the input_location to unknown.
	       Note that this only affects the destructor calls in FINALLY/CATCH
	       block, and will automatically reset to its original value by the
	       end of gimplify_expr.  */
	    input_location = UNKNOWN_LOCATION;
	    eval = cleanup = NULL;
	    gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
	    if (TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
		&& TREE_CODE (TREE_OPERAND (*expr_p, 1)) == EH_ELSE_EXPR)
	      {
		gimple_seq n = NULL, e = NULL;
		gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
						0), &n);
		gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
						1), &e);
		if (!gimple_seq_empty_p (n) && !gimple_seq_empty_p (e))
		  {
		    geh_else *stmt = gimple_build_eh_else (n, e);
		    gimple_seq_add_stmt (&cleanup, stmt);
		  }
	      }
	    else
	      gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
	    /* Don't create bogus GIMPLE_TRY with empty cleanup.  */
	    if (gimple_seq_empty_p (cleanup))
	      {
		gimple_seq_add_seq (pre_p, eval);
		ret = GS_ALL_DONE;
		break;
	      }
	    try_ = gimple_build_try (eval, cleanup,
				     TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
				     ? GIMPLE_TRY_FINALLY
				     : GIMPLE_TRY_CATCH);
	    if (EXPR_HAS_LOCATION (save_expr))
	      gimple_set_location (try_, EXPR_LOCATION (save_expr));
	    else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
	      gimple_set_location (try_, saved_location);
	    if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
	      gimple_try_set_catch_is_cleanup (try_,
					       TRY_CATCH_IS_CLEANUP (*expr_p));
	    gimplify_seq_add_stmt (pre_p, try_);
	    ret = GS_ALL_DONE;
	    break;
	  }

	case CLEANUP_POINT_EXPR:
	  ret = gimplify_cleanup_point_expr (expr_p, pre_p);
	  break;

	case TARGET_EXPR:
	  ret = gimplify_target_expr (expr_p, pre_p, post_p);
	  break;

	case CATCH_EXPR:
	  {
	    gimple *c;
	    gimple_seq handler = NULL;
	    gimplify_and_add (CATCH_BODY (*expr_p), &handler);
	    c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
	    gimplify_seq_add_stmt (pre_p, c);
	    ret = GS_ALL_DONE;
	    break;
	  }

	case EH_FILTER_EXPR:
	  {
	    gimple *ehf;
	    gimple_seq failure = NULL;

	    gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
	    ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
	    copy_warning (ehf, *expr_p);
	    gimplify_seq_add_stmt (pre_p, ehf);
	    ret = GS_ALL_DONE;
	    break;
	  }

	case OBJ_TYPE_REF:
	  {
	    enum gimplify_status r0, r1;
	    r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
				post_p, is_gimple_val, fb_rvalue);
	    r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
				post_p, is_gimple_val, fb_rvalue);
	    TREE_SIDE_EFFECTS (*expr_p) = 0;
	    ret = MIN (r0, r1);
	  }
	  break;

	case LABEL_DECL:
	  /* We get here when taking the address of a label.  We mark
	     the label as "forced"; meaning it can never be removed and
	     it is a potential target for any computed goto.  */
	  FORCED_LABEL (*expr_p) = 1;
	  ret = GS_ALL_DONE;
	  break;

	case STATEMENT_LIST:
	  ret = gimplify_statement_list (expr_p, pre_p);
	  break;

	case WITH_SIZE_EXPR:
	  {
	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
			   post_p == &internal_post ? NULL : post_p,
			   gimple_test_f, fallback);
	    gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
			   is_gimple_val, fb_rvalue);
	    ret = GS_ALL_DONE;
	  }
	  break;

	case VAR_DECL:
	case PARM_DECL:
	  ret = gimplify_var_or_parm_decl (expr_p);
	  break;

	case RESULT_DECL:
	  /* When within an OMP context, notice uses of variables.  */
	  if (gimplify_omp_ctxp)
	    omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
	  ret = GS_ALL_DONE;
	  break;

	case DEBUG_EXPR_DECL:
	  gcc_unreachable ();

	case DEBUG_BEGIN_STMT:
	  gimplify_seq_add_stmt (pre_p,
				 gimple_build_debug_begin_stmt
				   (TREE_BLOCK (*expr_p),
				    EXPR_LOCATION (*expr_p)));
	  ret = GS_ALL_DONE;
	  *expr_p = NULL;
	  break;

	case SSA_NAME:
	  /* Allow callbacks into the gimplifier during optimization.  */
	  ret = GS_ALL_DONE;
	  break;

	case OMP_PARALLEL:
	  gimplify_omp_parallel (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OMP_TASK:
	  gimplify_omp_task (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OMP_SIMD:
	  {
	    /* Temporarily disable into_ssa, as scan_omp_simd
	       which calls copy_gimple_seq_and_replace_locals can't deal
	       with SSA_NAMEs defined outside of the body properly.  */
	    bool saved_into_ssa = gimplify_ctxp->into_ssa;
	    gimplify_ctxp->into_ssa = false;
	    ret = gimplify_omp_for (expr_p, pre_p);
	    gimplify_ctxp->into_ssa = saved_into_ssa;
	    break;
	  }

	case OMP_FOR:
	case OMP_DISTRIBUTE:
	case OMP_TASKLOOP:
	case OACC_LOOP:
	  ret = gimplify_omp_for (expr_p, pre_p);
	  break;

	case OMP_LOOP:
	  ret = gimplify_omp_loop (expr_p, pre_p);
	  break;

	case OACC_CACHE:
	  gimplify_oacc_cache (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OACC_DECLARE:
	  gimplify_oacc_declare (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OACC_HOST_DATA:
	case OACC_DATA:
	case OACC_KERNELS:
	case OACC_PARALLEL:
	case OACC_SERIAL:
	case OMP_SCOPE:
	case OMP_SECTIONS:
	case OMP_SINGLE:
	case OMP_TARGET:
	case OMP_TARGET_DATA:
	case OMP_TEAMS:
	  gimplify_omp_workshare (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OACC_ENTER_DATA:
	case OACC_EXIT_DATA:
	case OACC_UPDATE:
	case OMP_TARGET_UPDATE:
	case OMP_TARGET_ENTER_DATA:
	case OMP_TARGET_EXIT_DATA:
	  gimplify_omp_target_update (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OMP_SECTION:
	case OMP_STRUCTURED_BLOCK:
	case OMP_MASTER:
	case OMP_MASKED:
	case OMP_ORDERED:
	case OMP_CRITICAL:
	case OMP_SCAN:
	  {
	    gimple_seq body = NULL;
	    gimple *g;
	    bool saved_in_omp_construct = in_omp_construct;

	    in_omp_construct = true;
	    gimplify_and_add (OMP_BODY (*expr_p), &body);
	    in_omp_construct = saved_in_omp_construct;
	    switch (TREE_CODE (*expr_p))
	      {
	      case OMP_SECTION:
		g = gimple_build_omp_section (body);
		break;
	      case OMP_STRUCTURED_BLOCK:
		g = gimple_build_omp_structured_block (body);
		break;
	      case OMP_MASTER:
		g = gimple_build_omp_master (body);
		break;
	      case OMP_ORDERED:
		g = gimplify_omp_ordered (*expr_p, body);
		if (OMP_BODY (*expr_p) == NULL_TREE
		    && gimple_code (g) == GIMPLE_OMP_ORDERED)
		  gimple_omp_ordered_standalone (g);
		break;
	      case OMP_MASKED:
		gimplify_scan_omp_clauses (&OMP_MASKED_CLAUSES (*expr_p),
					   pre_p, ORT_WORKSHARE, OMP_MASKED);
		gimplify_adjust_omp_clauses (pre_p, body,
					     &OMP_MASKED_CLAUSES (*expr_p),
					     OMP_MASKED);
		g = gimple_build_omp_masked (body,
					     OMP_MASKED_CLAUSES (*expr_p));
		break;
	      case OMP_CRITICAL:
		gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
					   pre_p, ORT_WORKSHARE, OMP_CRITICAL);
		gimplify_adjust_omp_clauses (pre_p, body,
					     &OMP_CRITICAL_CLAUSES (*expr_p),
					     OMP_CRITICAL);
		g = gimple_build_omp_critical (body,
					       OMP_CRITICAL_NAME (*expr_p),
					       OMP_CRITICAL_CLAUSES (*expr_p));
		break;
	      case OMP_SCAN:
		gimplify_scan_omp_clauses (&OMP_SCAN_CLAUSES (*expr_p),
					   pre_p, ORT_WORKSHARE, OMP_SCAN);
		gimplify_adjust_omp_clauses (pre_p, body,
					     &OMP_SCAN_CLAUSES (*expr_p),
					     OMP_SCAN);
		g = gimple_build_omp_scan (body, OMP_SCAN_CLAUSES (*expr_p));
		break;
	      default:
		gcc_unreachable ();
	      }
	    gimplify_seq_add_stmt (pre_p, g);
	    ret = GS_ALL_DONE;
	    break;
	  }

	case OMP_TASKGROUP:
	  {
	    gimple_seq body = NULL;

	    tree *pclauses = &OMP_TASKGROUP_CLAUSES (*expr_p);
	    bool saved_in_omp_construct = in_omp_construct;
	    gimplify_scan_omp_clauses (pclauses, pre_p, ORT_TASKGROUP,
				       OMP_TASKGROUP);
	    gimplify_adjust_omp_clauses (pre_p, NULL, pclauses, OMP_TASKGROUP);

	    in_omp_construct = true;
	    gimplify_and_add (OMP_BODY (*expr_p), &body);
	    in_omp_construct = saved_in_omp_construct;
	    gimple_seq cleanup = NULL;
	    tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
	    gimple *g = gimple_build_call (fn, 0);
	    gimple_seq_add_stmt (&cleanup, g);
	    g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
	    body = NULL;
	    gimple_seq_add_stmt (&body, g);
	    g = gimple_build_omp_taskgroup (body, *pclauses);
	    gimplify_seq_add_stmt (pre_p, g);
	    ret = GS_ALL_DONE;
	    break;
	  }
18532
18533 case OMP_ATOMIC:
18534 case OMP_ATOMIC_READ:
18535 case OMP_ATOMIC_CAPTURE_OLD:
18536 case OMP_ATOMIC_CAPTURE_NEW:
18537 ret = gimplify_omp_atomic (expr_p, pre_p);
18538 break;
18539
18540 case TRANSACTION_EXPR:
18541 ret = gimplify_transaction (expr_p, pre_p);
18542 break;
18543
18544 case TRUTH_AND_EXPR:
18545 case TRUTH_OR_EXPR:
18546 case TRUTH_XOR_EXPR:
18547 {
18548 tree orig_type = TREE_TYPE (*expr_p);
18549 tree new_type, xop0, xop1;
18550 *expr_p = gimple_boolify (expr: *expr_p);
18551 new_type = TREE_TYPE (*expr_p);
18552 if (!useless_type_conversion_p (orig_type, new_type))
18553 {
18554 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
18555 ret = GS_OK;
18556 break;
18557 }
18558
18559 /* Boolified binary truth expressions are semantically equivalent
18560 to bitwise binary expressions. Canonicalize them to the
18561 bitwise variant. */
18562 switch (TREE_CODE (*expr_p))
18563 {
18564 case TRUTH_AND_EXPR:
18565 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
18566 break;
18567 case TRUTH_OR_EXPR:
18568 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
18569 break;
18570 case TRUTH_XOR_EXPR:
18571 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
18572 break;
18573 default:
18574 break;
18575 }
18576 /* Now make sure that operands have compatible type to
18577 expression's new_type. */
18578 xop0 = TREE_OPERAND (*expr_p, 0);
18579 xop1 = TREE_OPERAND (*expr_p, 1);
18580 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
18581 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
18582 new_type,
18583 xop0);
18584 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
18585 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
18586 new_type,
18587 xop1);
18588 /* Continue classified as tcc_binary. */
18589 goto expr_2;
18590 }
18591
18592 case VEC_COND_EXPR:
18593 goto expr_3;
18594
18595 case VEC_PERM_EXPR:
18596 /* Classified as tcc_expression. */
18597 goto expr_3;
18598
18599 case BIT_INSERT_EXPR:
18600 /* Argument 3 is a constant. */
18601 goto expr_2;
18602
18603 case POINTER_PLUS_EXPR:
18604 {
18605 enum gimplify_status r0, r1;
18606 r0 = gimplify_expr (expr_p: &TREE_OPERAND (*expr_p, 0), pre_p,
18607 post_p, gimple_test_f: is_gimple_val, fallback: fb_rvalue);
18608 r1 = gimplify_expr (expr_p: &TREE_OPERAND (*expr_p, 1), pre_p,
18609 post_p, gimple_test_f: is_gimple_val, fallback: fb_rvalue);
18610 recalculate_side_effects (t: *expr_p);
18611 ret = MIN (r0, r1);
18612 break;
18613 }
18614
18615 default:
18616 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
18617 {
18618 case tcc_comparison:
18619 /* Handle comparison of objects of non scalar mode aggregates
18620 with a call to memcmp. It would be nice to only have to do
18621 this for variable-sized objects, but then we'd have to allow
18622 the same nest of reference nodes we allow for MODIFY_EXPR and
18623 that's too complex.
18624
18625 Compare scalar mode aggregates as scalar mode values. Using
18626 memcmp for them would be very inefficient at best, and is
18627 plain wrong if bitfields are involved. */
18628 if (error_operand_p (TREE_OPERAND (*expr_p, 1)))
18629 ret = GS_ERROR;
18630 else
18631 {
18632 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
18633
18634 /* Vector comparisons need no boolification. */
18635 if (TREE_CODE (type) == VECTOR_TYPE)
18636 goto expr_2;
18637 else if (!AGGREGATE_TYPE_P (type))
18638 {
18639 tree org_type = TREE_TYPE (*expr_p);
18640 *expr_p = gimple_boolify (expr: *expr_p);
18641 if (!useless_type_conversion_p (org_type,
18642 TREE_TYPE (*expr_p)))
18643 {
18644 *expr_p = fold_convert_loc (input_location,
18645 org_type, *expr_p);
18646 ret = GS_OK;
18647 }
18648 else
18649 goto expr_2;
18650 }
18651 else if (TYPE_MODE (type) != BLKmode)
18652 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
18653 else
18654 ret = gimplify_variable_sized_compare (expr_p);
18655 }
18656 break;

	/* If *EXPR_P does not need to be special-cased, handle it
	   according to its class.  */
	case tcc_unary:
	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
			       post_p, is_gimple_val, fb_rvalue);
	  break;

	case tcc_binary:
	expr_2:
	  {
	    enum gimplify_status r0, r1;

	    r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
				post_p, is_gimple_val, fb_rvalue);
	    r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
				post_p, is_gimple_val, fb_rvalue);

	    ret = MIN (r0, r1);
	    break;
	  }

	expr_3:
	  {
	    enum gimplify_status r0, r1, r2;

	    r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
				post_p, is_gimple_val, fb_rvalue);
	    r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
				post_p, is_gimple_val, fb_rvalue);
	    r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
				post_p, is_gimple_val, fb_rvalue);

	    ret = MIN (MIN (r0, r1), r2);
	    break;
	  }

	case tcc_declaration:
	case tcc_constant:
	  ret = GS_ALL_DONE;
	  goto dont_recalculate;

	default:
	  gcc_unreachable ();
	}

      recalculate_side_effects (*expr_p);

    dont_recalculate:
      break;
    }

      gcc_assert (*expr_p || ret != GS_OK);
    }
  while (ret == GS_OK);

  /* If we encountered an error_mark somewhere nested inside, either
     stub out the statement or propagate the error back out.  */
  if (ret == GS_ERROR)
    {
      if (is_statement)
	*expr_p = NULL;
      goto out;
    }

  /* This was only valid as a return value from the langhook, which
     we handled.  Make sure it doesn't escape from any other context.  */
  gcc_assert (ret != GS_UNHANDLED);

  if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
    {
      /* We aren't looking for a value, and we don't have a valid
	 statement.  If it doesn't have side-effects, throw it away.
	 We can also get here with code such as "*&&L;", where L is
	 a LABEL_DECL that is marked as FORCED_LABEL.  */
      if (TREE_CODE (*expr_p) == LABEL_DECL
	  || !TREE_SIDE_EFFECTS (*expr_p))
	*expr_p = NULL;
      else if (!TREE_THIS_VOLATILE (*expr_p))
	{
	  /* This is probably a _REF that contains something nested that
	     has side effects.  Recurse through the operands to find it.  */
	  enum tree_code code = TREE_CODE (*expr_p);

	  switch (code)
	    {
	    case COMPONENT_REF:
	    case REALPART_EXPR:
	    case IMAGPART_EXPR:
	    case VIEW_CONVERT_EXPR:
	      gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			     gimple_test_f, fallback);
	      break;

	    case ARRAY_REF:
	    case ARRAY_RANGE_REF:
	      gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			     gimple_test_f, fallback);
	      gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
			     gimple_test_f, fallback);
	      break;

	    default:
	      /* Anything else with side-effects must be converted to
		 a valid statement before we get here.  */
	      gcc_unreachable ();
	    }

	  *expr_p = NULL;
	}
      else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
	       && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode
	       && !is_empty_type (TREE_TYPE (*expr_p)))
	{
	  /* Historically, the compiler has treated a bare reference
	     to a non-BLKmode volatile lvalue as forcing a load.  */
	  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));

	  /* Normally, we do not want to create a temporary for a
	     TREE_ADDRESSABLE type because such a type should not be
	     copied by bitwise-assignment.  However, we make an
	     exception here, as all we are doing here is ensuring that
	     we read the bytes that make up the type.  We use
	     create_tmp_var_raw because create_tmp_var will abort when
	     given a TREE_ADDRESSABLE type.  */
	  tree tmp = create_tmp_var_raw (type, "vol");
	  gimple_add_tmp_var (tmp);
	  gimplify_assign (tmp, *expr_p, pre_p);
	  *expr_p = NULL;
	}
      else
	/* We can't do anything useful with a volatile reference to
	   an incomplete type, so just throw it away.  Likewise for
	   a BLKmode type, since any implicit inner load should
	   already have been turned into an explicit one by the
	   gimplification process.  */
	*expr_p = NULL;
    }
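  /* For example (a sketch): given "volatile int v;", a statement consisting
     of the bare reference "v;" reaches the branch above and is rewritten as
     a load into an artificial temporary, roughly

       vol.0 = v;

     so the volatile access is preserved even though the value is unused.  */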

  /* If we are gimplifying at the statement level, we're done.  Tack
     everything together and return.  */
  if (fallback == fb_none || is_statement)
    {
      /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
	 it out for GC to reclaim it.  */
      *expr_p = NULL_TREE;

      if (!gimple_seq_empty_p (internal_pre)
	  || !gimple_seq_empty_p (internal_post))
	{
	  gimplify_seq_add_seq (&internal_pre, internal_post);
	  gimplify_seq_add_seq (pre_p, internal_pre);
	}

      /* The result of gimplifying *EXPR_P is going to be the last few
	 statements in *PRE_P and *POST_P.  Add location information
	 to all the statements that were added by the gimplification
	 helpers.  */
      if (!gimple_seq_empty_p (*pre_p))
	annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);

      if (!gimple_seq_empty_p (*post_p))
	annotate_all_with_location_after (*post_p, post_last_gsi,
					  input_location);

      goto out;
    }

#ifdef ENABLE_GIMPLE_CHECKING
  if (*expr_p)
    {
      enum tree_code code = TREE_CODE (*expr_p);
      /* These expressions should already be in gimple IR form.  */
      gcc_assert (code != MODIFY_EXPR
		  && code != ASM_EXPR
		  && code != BIND_EXPR
		  && code != CATCH_EXPR
		  && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
		  && code != EH_FILTER_EXPR
		  && code != GOTO_EXPR
		  && code != LABEL_EXPR
		  && code != LOOP_EXPR
		  && code != SWITCH_EXPR
		  && code != TRY_FINALLY_EXPR
		  && code != EH_ELSE_EXPR
		  && code != OACC_PARALLEL
		  && code != OACC_KERNELS
		  && code != OACC_SERIAL
		  && code != OACC_DATA
		  && code != OACC_HOST_DATA
		  && code != OACC_DECLARE
		  && code != OACC_UPDATE
		  && code != OACC_ENTER_DATA
		  && code != OACC_EXIT_DATA
		  && code != OACC_CACHE
		  && code != OMP_CRITICAL
		  && code != OMP_FOR
		  && code != OACC_LOOP
		  && code != OMP_MASTER
		  && code != OMP_MASKED
		  && code != OMP_TASKGROUP
		  && code != OMP_ORDERED
		  && code != OMP_PARALLEL
		  && code != OMP_SCAN
		  && code != OMP_SECTIONS
		  && code != OMP_SECTION
		  && code != OMP_STRUCTURED_BLOCK
		  && code != OMP_SINGLE
		  && code != OMP_SCOPE);
    }
#endif

  /* Otherwise we're gimplifying a subexpression, so the resulting
     value is interesting.  If it's a valid operand that matches
     GIMPLE_TEST_F, we're done.  Unless we are handling some
     post-effects internally; if that's the case, we need to copy into
     a temporary before adding the post-effects to POST_P.  */
  if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
    goto out;

  /* Otherwise, we need to create a new temporary for the gimplified
     expression.  */

  /* We can't return an lvalue if we have an internal postqueue.  The
     object the lvalue refers to would (probably) be modified by the
     postqueue; we need to copy the value out first, which means an
     rvalue.  */
  if ((fallback & fb_lvalue)
      && gimple_seq_empty_p (internal_post)
      && is_gimple_addressable (*expr_p))
    {
      /* An lvalue will do.  Take the address of the expression, store it
	 in a temporary, and replace the expression with an INDIRECT_REF of
	 that temporary.  */
      tree ref_alias_type = reference_alias_ptr_type (*expr_p);
      unsigned int ref_align = get_object_alignment (*expr_p);
      tree ref_type = TREE_TYPE (*expr_p);
      tmp = build_fold_addr_expr_loc (input_location, *expr_p);
      gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
      if (TYPE_ALIGN (ref_type) != ref_align)
	ref_type = build_aligned_type (ref_type, ref_align);
      *expr_p = build2 (MEM_REF, ref_type,
			tmp, build_zero_cst (ref_alias_type));
    }
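  /* A sketch of this lvalue fallback (names hypothetical): for an
     addressable expression such as "a[i]" it builds

       tmp = &a[i];
       ... MEM_REF <tmp, 0> ...

     so the caller receives a valid GIMPLE lvalue that still denotes the
     original object.  */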
  else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
    {
      /* An rvalue will do.  Assign the gimplified expression into a
	 new temporary TMP and replace the original expression with
	 TMP.  First, make sure that the expression has a type so that
	 it can be assigned into a temporary.  */
      gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
      *expr_p = get_formal_tmp_var (*expr_p, pre_p);
    }
  else
    {
#ifdef ENABLE_GIMPLE_CHECKING
      if (!(fallback & fb_mayfail))
	{
	  fprintf (stderr, "gimplification failed:\n");
	  print_generic_expr (stderr, *expr_p);
	  debug_tree (*expr_p);
	  internal_error ("gimplification failed");
	}
#endif
      gcc_assert (fallback & fb_mayfail);

      /* If this is an asm statement, and the user asked for the
	 impossible, don't die.  Fail and let gimplify_asm_expr
	 issue an error.  */
      ret = GS_ERROR;
      goto out;
    }

  /* Make sure the temporary matches our predicate.  */
  gcc_assert ((*gimple_test_f) (*expr_p));

  if (!gimple_seq_empty_p (internal_post))
    {
      annotate_all_with_location (internal_post, input_location);
      gimplify_seq_add_seq (pre_p, internal_post);
    }

 out:
  input_location = saved_location;
  return ret;
}

/* Like gimplify_expr but make sure the gimplified result is not itself
   an SSA name (a decl is used in its place).  Temporaries required by
   evaluating *EXPR_P may still be SSA names.  */

static enum gimplify_status
gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
	       bool (*gimple_test_f) (tree), fallback_t fallback,
	       bool allow_ssa)
{
  enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
					    gimple_test_f, fallback);
  if (! allow_ssa
      && TREE_CODE (*expr_p) == SSA_NAME)
    *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
  return ret;
}

/* Look through TYPE for variable-sized objects and gimplify each such
   size that we find.  Add to LIST_P any statements generated.  */

void
gimplify_type_sizes (tree type, gimple_seq *list_p)
{
  if (type == NULL || type == error_mark_node)
    return;

  const bool ignored_p
    = TYPE_NAME (type)
      && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
      && DECL_IGNORED_P (TYPE_NAME (type));
  tree t;

  /* We first do the main variant, then copy into any other variants.  */
  type = TYPE_MAIN_VARIANT (type);

  /* Avoid infinite recursion.  */
  if (TYPE_SIZES_GIMPLIFIED (type))
    return;

  TYPE_SIZES_GIMPLIFIED (type) = 1;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
      gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);

      for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
	{
	  TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
	  TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
	}
      break;

    case ARRAY_TYPE:
      /* These types may not have declarations, so handle them here.  */
      gimplify_type_sizes (TREE_TYPE (type), list_p);
      gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
      /* Ensure VLA bounds aren't removed: at -O0 they should be variables
	 with assigned stack slots, and at -O1+ with -g they should be
	 tracked by VTA.  */
      if (!ignored_p
	  && TYPE_DOMAIN (type)
	  && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
	{
	  t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	  if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	  t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	  if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	}
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
	    /* Likewise, ensure variable offsets aren't removed.  */
	    if (!ignored_p
		&& (t = DECL_FIELD_OFFSET (field))
		&& VAR_P (t)
		&& DECL_ARTIFICIAL (t))
	      DECL_IGNORED_P (t) = 0;
	    gimplify_one_sizepos (&DECL_SIZE (field), list_p);
	    gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
	    gimplify_type_sizes (TREE_TYPE (field), list_p);
	  }
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* We used to recurse on the pointed-to type here, which turned out to
	 be incorrect because its definition might refer to variables not
	 yet initialized at this point if a forward declaration is involved.

	 It was actually useful for anonymous pointed-to types to ensure
	 that the sizes evaluation dominates every possible later use of the
	 values.  Restricting to such types here would be safe since there
	 is no possible forward declaration around, but would introduce an
	 undesirable middle-end semantic to anonymity.  We then defer to
	 front-ends the responsibility of ensuring that the sizes are
	 evaluated both early and late enough, e.g. by attaching artificial
	 type declarations to the tree.  */
      break;

    default:
      break;
    }

  gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
  gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);

  for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
    {
      TYPE_SIZE (t) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
      TYPE_SIZES_GIMPLIFIED (t) = 1;
    }
}
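
/* For instance (a sketch): for a VLA declaration "char a[n];" the array
   type's TYPE_SIZE and TYPE_SIZE_UNIT are variable expressions, and
   gimplify_one_sizepos evaluates them into temporaries on LIST_P, roughly

     D.1 = (sizetype) n;
     D.2 = D.1 * 8;

   where D.2 is the size in bits and D.1 the size in units (names
   hypothetical).  */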

/* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
   a size or position, has had all of its SAVE_EXPRs evaluated.
   We add any required statements to *STMT_P.  */

void
gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
{
  tree expr = *expr_p;

  /* We don't do anything if the value isn't there, is constant, or contains
     a PLACEHOLDER_EXPR.  We also don't want to do anything if it's already
     a VAR_DECL.  If it's a VAR_DECL from another function, the gimplifier
     will want to replace it with a new variable, but that will cause problems
     if this type is from outside the function.  It's OK to have that here.  */
  if (expr == NULL_TREE
      || is_gimple_constant (expr)
      || VAR_P (expr)
      || CONTAINS_PLACEHOLDER_P (expr))
    return;

  *expr_p = unshare_expr (expr);

  /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
     if the def vanishes.  */
  gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);

  /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
     FE, ensure that it is a VAR_DECL, otherwise we might handle some decls
     as gimplify_vla_decl even when they would have all sizes INTEGER_CSTs.  */
  if (is_gimple_constant (*expr_p))
    *expr_p = get_initialized_tmp_var (*expr_p, stmt_p, NULL, false);
}

/* Gimplify the statements in the body of FNDECL and return a GIMPLE_BIND
   node containing the sequence of corresponding GIMPLE statements.  If
   DO_PARMS is true, also gimplify the parameters.  */

gbind *
gimplify_body (tree fndecl, bool do_parms)
{
  location_t saved_location = input_location;
  gimple_seq parm_stmts, parm_cleanup = NULL, seq;
  gimple *outer_stmt;
  gbind *outer_bind;

  timevar_push (TV_TREE_GIMPLIFY);

  init_tree_ssa (cfun);

  /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
     gimplification.  */
  default_rtl_profile ();

  gcc_assert (gimplify_ctxp == NULL);
  push_gimplify_context (true);

  if (flag_openacc || flag_openmp)
    {
      gcc_assert (gimplify_omp_ctxp == NULL);
      if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
	gimplify_omp_ctxp = new_omp_context (ORT_IMPLICIT_TARGET);
    }

  /* Unshare most shared trees in the body and in that of any nested functions.
     It would seem we don't have to do this for nested functions because
     they are supposed to be output and then the outer function gimplified
     first, but the g++ front end doesn't always do it that way.  */
  unshare_body (fndecl);
  unvisit_body (fndecl);

  /* Make sure input_location isn't set to something weird.  */
  input_location = DECL_SOURCE_LOCATION (fndecl);

  /* Resolve callee-copies.  This has to be done before processing
     the body so that DECL_VALUE_EXPR gets processed correctly.  */
  parm_stmts = do_parms ? gimplify_parameters (&parm_cleanup) : NULL;

  /* Gimplify the function's body.  */
  seq = NULL;
  gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
  outer_stmt = gimple_seq_first_nondebug_stmt (seq);
  if (!outer_stmt)
    {
      outer_stmt = gimple_build_nop ();
      gimplify_seq_add_stmt (&seq, outer_stmt);
    }

  /* The body must contain exactly one statement, a GIMPLE_BIND.  If this is
     not the case, wrap everything in a GIMPLE_BIND to make it so.  */
  if (gimple_code (outer_stmt) == GIMPLE_BIND
      && (gimple_seq_first_nondebug_stmt (seq)
	  == gimple_seq_last_nondebug_stmt (seq)))
    {
      outer_bind = as_a <gbind *> (outer_stmt);
      if (gimple_seq_first_stmt (seq) != outer_stmt
	  || gimple_seq_last_stmt (seq) != outer_stmt)
	{
	  /* If there are debug stmts before or after outer_stmt, move them
	     inside of outer_bind body.  */
	  gimple_stmt_iterator gsi = gsi_for_stmt (outer_stmt, &seq);
	  gimple_seq second_seq = NULL;
	  if (gimple_seq_first_stmt (seq) != outer_stmt
	      && gimple_seq_last_stmt (seq) != outer_stmt)
	    {
	      second_seq = gsi_split_seq_after (gsi);
	      gsi_remove (&gsi, false);
	    }
	  else if (gimple_seq_first_stmt (seq) != outer_stmt)
	    gsi_remove (&gsi, false);
	  else
	    {
	      gsi_remove (&gsi, false);
	      second_seq = seq;
	      seq = NULL;
	    }
	  gimple_seq_add_seq_without_update (&seq,
					     gimple_bind_body (outer_bind));
	  gimple_seq_add_seq_without_update (&seq, second_seq);
	  gimple_bind_set_body (outer_bind, seq);
	}
    }
  else
    outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);

  DECL_SAVED_TREE (fndecl) = NULL_TREE;

  /* If we had callee-copies statements, insert them at the beginning
     of the function and clear DECL_VALUE_EXPR_P on the parameters.  */
  if (!gimple_seq_empty_p (parm_stmts))
    {
      tree parm;

      gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
      if (parm_cleanup)
	{
	  gtry *g = gimple_build_try (parm_stmts, parm_cleanup,
				      GIMPLE_TRY_FINALLY);
	  parm_stmts = NULL;
	  gimple_seq_add_stmt (&parm_stmts, g);
	}
      gimple_bind_set_body (outer_bind, parm_stmts);

      for (parm = DECL_ARGUMENTS (current_function_decl);
	   parm; parm = DECL_CHAIN (parm))
	if (DECL_HAS_VALUE_EXPR_P (parm))
	  {
	    DECL_HAS_VALUE_EXPR_P (parm) = 0;
	    DECL_IGNORED_P (parm) = 0;
	  }
    }

  if ((flag_openacc || flag_openmp || flag_openmp_simd)
      && gimplify_omp_ctxp)
    {
      delete_omp_context (gimplify_omp_ctxp);
      gimplify_omp_ctxp = NULL;
    }

  pop_gimplify_context (outer_bind);
  gcc_assert (gimplify_ctxp == NULL);

  if (flag_checking && !seen_error ())
    verify_gimple_in_seq (gimple_bind_body (outer_bind));

  timevar_pop (TV_TREE_GIMPLIFY);
  input_location = saved_location;

  return outer_bind;
}

typedef char *char_p; /* For DEF_VEC_P.  */

/* Return whether we should exclude FNDECL from instrumentation.  */

static bool
flag_instrument_functions_exclude_p (tree fndecl)
{
  vec<char_p> *v;

  v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
  if (v && v->length () > 0)
    {
      const char *name;
      int i;
      char *s;

      name = lang_hooks.decl_printable_name (fndecl, 1);
      FOR_EACH_VEC_ELT (*v, i, s)
	if (strstr (name, s) != NULL)
	  return true;
    }

  v = (vec<char_p> *) flag_instrument_functions_exclude_files;
  if (v && v->length () > 0)
    {
      const char *name;
      int i;
      char *s;

      name = DECL_SOURCE_FILE (fndecl);
      FOR_EACH_VEC_ELT (*v, i, s)
	if (strstr (name, s) != NULL)
	  return true;
    }

  return false;
}

/* Build a call to the instrumentation function FNCODE and add it to SEQ.
   If COND_VAR is not NULL, it is a boolean variable guarding the call to
   the instrumentation function.  If STMT is not NULL, it is a statement
   to be executed just before the call to the instrumentation function.  */

static void
build_instrumentation_call (gimple_seq *seq, enum built_in_function fncode,
			    tree cond_var, gimple *stmt)
{
  /* The instrumentation hooks aren't going to call the instrumented
     function and the address they receive is expected to be matchable
     against symbol addresses.  Make sure we don't create a trampoline,
     in case the current function is nested.  */
  tree this_fn_addr = build_fold_addr_expr (current_function_decl);
  TREE_NO_TRAMPOLINE (this_fn_addr) = 1;

  tree label_true, label_false;
  if (cond_var)
    {
      label_true = create_artificial_label (UNKNOWN_LOCATION);
      label_false = create_artificial_label (UNKNOWN_LOCATION);
      gcond *cond = gimple_build_cond (EQ_EXPR, cond_var, boolean_false_node,
				       label_true, label_false);
      gimplify_seq_add_stmt (seq, cond);
      gimplify_seq_add_stmt (seq, gimple_build_label (label_true));
      gimplify_seq_add_stmt (seq, gimple_build_predict (PRED_COLD_LABEL,
							NOT_TAKEN));
    }

  if (stmt)
    gimplify_seq_add_stmt (seq, stmt);

  tree x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
  gcall *call = gimple_build_call (x, 1, integer_zero_node);
  tree tmp_var = create_tmp_var (ptr_type_node, "return_addr");
  gimple_call_set_lhs (call, tmp_var);
  gimplify_seq_add_stmt (seq, call);
  x = builtin_decl_implicit (fncode);
  call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
  gimplify_seq_add_stmt (seq, call);

  if (cond_var)
    gimplify_seq_add_stmt (seq, gimple_build_label (label_false));
}
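
/* A sketch of the sequence built for the entry hook when COND_VAR is
   given (labels and temporaries hypothetical):

     if (tmp_called == 0) goto L.true; else goto L.false;
   L.true:
     C.0 = 1;
     return_addr = __builtin_return_address (0);
     __cyg_profile_func_enter (this_fn_addr, return_addr);
   L.false:

   where the path to L.true additionally carries a cold-label branch
   prediction.  */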

/* Entry point to the gimplification pass.  FNDECL is the FUNCTION_DECL
   node for the function we want to gimplify.

   The sequence of GIMPLE statements corresponding to the body of FNDECL
   is stored as the function's GIMPLE body via gimple_set_body.  */

void
gimplify_function_tree (tree fndecl)
{
  gimple_seq seq;
  gbind *bind;

  gcc_assert (!gimple_body (fndecl));

  if (DECL_STRUCT_FUNCTION (fndecl))
    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  else
    push_struct_function (fndecl);

  reset_cond_uid ();

  /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
     if necessary.  */
  cfun->curr_properties |= PROP_gimple_lva;

  if (asan_sanitize_use_after_scope ())
    asan_poisoned_variables = new hash_set<tree> ();
  bind = gimplify_body (fndecl, true);
  if (asan_poisoned_variables)
    {
      delete asan_poisoned_variables;
      asan_poisoned_variables = NULL;
    }

  /* The tree body of the function is no longer needed, replace it
     with the new GIMPLE body.  */
  seq = NULL;
  gimple_seq_add_stmt (&seq, bind);
  gimple_set_body (fndecl, seq);

  /* If we're instrumenting function entry/exit, then prepend the call to
     the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
     catch the exit hook.  */
  /* ??? Add some way to ignore exceptions for this TFE.  */
  if (flag_instrument_function_entry_exit
      && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
      /* Do not instrument extern inline functions.  */
      && !(DECL_DECLARED_INLINE_P (fndecl)
	   && DECL_EXTERNAL (fndecl)
	   && DECL_DISREGARD_INLINE_LIMITS (fndecl))
      && !flag_instrument_functions_exclude_p (fndecl))
    {
      gimple_seq body = NULL, cleanup = NULL;
      gassign *assign;
      tree cond_var;

      /* If -finstrument-functions-once is specified, generate:

	   static volatile bool C.0 = false;
	   bool tmp_called;

	   tmp_called = C.0;
	   if (!tmp_called)
	     {
	       C.0 = true;
	       [call profiling enter function]
	     }

	 without specific protection for data races.  */
      if (flag_instrument_function_entry_exit > 1)
	{
	  tree first_var
	    = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
			  VAR_DECL,
			  create_tmp_var_name ("C"),
			  boolean_type_node);
	  DECL_ARTIFICIAL (first_var) = 1;
	  DECL_IGNORED_P (first_var) = 1;
	  TREE_STATIC (first_var) = 1;
	  TREE_THIS_VOLATILE (first_var) = 1;
	  TREE_USED (first_var) = 1;
	  DECL_INITIAL (first_var) = boolean_false_node;
	  varpool_node::add (first_var);

	  cond_var = create_tmp_var (boolean_type_node, "tmp_called");
	  assign = gimple_build_assign (cond_var, first_var);
	  gimplify_seq_add_stmt (&body, assign);

	  assign = gimple_build_assign (first_var, boolean_true_node);
	}

      else
	{
	  cond_var = NULL_TREE;
	  assign = NULL;
	}

      build_instrumentation_call (&body, BUILT_IN_PROFILE_FUNC_ENTER,
				  cond_var, assign);

      /* If -finstrument-functions-once is specified, generate:

	   if (!tmp_called)
	     [call profiling exit function]

	 without specific protection for data races.  */
      build_instrumentation_call (&cleanup, BUILT_IN_PROFILE_FUNC_EXIT,
				  cond_var, NULL);

      gimple *tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
      gimplify_seq_add_stmt (&body, tf);
      gbind *new_bind = gimple_build_bind (NULL, body, NULL);

      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
      bind = new_bind;
    }

  if (sanitize_flags_p (SANITIZE_THREAD)
      && param_tsan_instrument_func_entry_exit)
    {
      gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
      gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
      gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
    }

  DECL_SAVED_TREE (fndecl) = NULL_TREE;
  cfun->curr_properties |= PROP_gimple_any;

  pop_cfun ();

  dump_function (TDI_gimple, fndecl);
}

/* Return a dummy expression of type TYPE in order to keep going after an
   error.  */

static tree
dummy_object (tree type)
{
  tree t = build_int_cst (build_pointer_type (type), 0);
  return build2 (MEM_REF, type, t, t);
}

/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
		      gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t, tag, aptag;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);
  if (have_va_type == NULL_TREE
      && POINTER_TYPE_P (TREE_TYPE (valist)))
    /* Handle 'Case 1: Not an array type' from c-common.cc/build_va_arg.  */
    have_va_type
      = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
  gcc_assert (have_va_type != NULL_TREE);

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      static bool gave_help;
      bool warned;
      /* Use the expansion point to handle cases such as passing bool (defined
	 in a system header) through `...'.  */
      location_t xloc
	= expansion_point_location_if_in_system_header (loc);

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      auto_diagnostic_group d;
      warned = warning_at (xloc, 0,
			   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (xloc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       builtin_decl_implicit (BUILT_IN_TRAP), 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }

  tag = build_int_cst (build_pointer_type (type), 0);
  aptag = build_int_cst (TREE_TYPE (valist), 0);

  *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
					  valist, tag, aptag);

  /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
     needs to be expanded.  */
  cfun->curr_properties &= ~PROP_gimple_lva;

  return GS_OK;
}
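
/* For example, "va_arg (ap, char)" triggers the promotion warning above,
   because char is promoted to int when passed through "...", so the
   request can never match what the caller actually passed.  A well-formed
   use such as "va_arg (ap, int)" is instead rewritten into the internal
   call (a sketch of the dump form)

     .VA_ARG (&ap, 0B, 0B);

   which a later lowering pass expands according to the target ABI.  */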

/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.

   DST/SRC are the destination and source respectively.  You can pass
   ungimplified trees in DST or SRC, in which case they will be
   converted to a gimple operand if necessary.

   This function returns the newly created GIMPLE_ASSIGN tuple.  */

gimple *
gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
{
  tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
  gimplify_and_add (t, seq_p);
  ggc_free (t);
  return gimple_seq_last_stmt (*seq_p);
}
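
/* Typical use (a sketch, names hypothetical): emitting "lhs = a + b" into
   a sequence SEQ, where the RHS may still be ungimplified GENERIC:

     tree sum = build2 (PLUS_EXPR, TREE_TYPE (a), a, b);
     gimple *g = gimplify_assign (lhs, sum, &seq);

   afterwards G is the resulting GIMPLE_ASSIGN and any statements needed
   to gimplify the RHS precede it in SEQ.  */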

inline hashval_t
gimplify_hasher::hash (const elt_t *p)
{
  tree t = p->val;
  return iterative_hash_expr (t, 0);
}

inline bool
gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
{
  tree t1 = p1->val;
  tree t2 = p2->val;
  enum tree_code code = TREE_CODE (t1);

  if (TREE_CODE (t2) != code
      || TREE_TYPE (t1) != TREE_TYPE (t2))
    return false;

  if (!operand_equal_p (t1, t2, 0))
    return false;

  /* Only allow them to compare equal if they also hash equal; otherwise
     results are nondeterministic, and we fail bootstrap comparison.  */
  gcc_checking_assert (hash (p1) == hash (p2));

  return true;
}