1 | /* Perform the semantic phase of parsing, i.e., the process of |
2 | building tree structure, checking semantic consistency, and |
3 | building RTL. These routines are used both during actual parsing |
4 | and during the instantiation of template functions. |
5 | |
6 | Copyright (C) 1998-2024 Free Software Foundation, Inc. |
7 | Written by Mark Mitchell (mmitchell@usa.net) based on code found |
8 | formerly in parse.y and pt.cc. |
9 | |
10 | This file is part of GCC. |
11 | |
12 | GCC is free software; you can redistribute it and/or modify it |
13 | under the terms of the GNU General Public License as published by |
14 | the Free Software Foundation; either version 3, or (at your option) |
15 | any later version. |
16 | |
17 | GCC is distributed in the hope that it will be useful, but |
18 | WITHOUT ANY WARRANTY; without even the implied warranty of |
19 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU |
20 | General Public License for more details. |
21 | |
22 | You should have received a copy of the GNU General Public License |
23 | along with GCC; see the file COPYING3. If not see |
24 | <http://www.gnu.org/licenses/>. */ |
25 | |
26 | #include "config.h" |
27 | #include "system.h" |
28 | #include "coretypes.h" |
29 | #include "target.h" |
30 | #include "bitmap.h" |
31 | #include "cp-tree.h" |
32 | #include "stringpool.h" |
33 | #include "cgraph.h" |
34 | #include "stmt.h" |
35 | #include "varasm.h" |
36 | #include "stor-layout.h" |
37 | #include "c-family/c-objc.h" |
38 | #include "tree-inline.h" |
39 | #include "intl.h" |
40 | #include "tree-iterator.h" |
41 | #include "omp-general.h" |
42 | #include "convert.h" |
43 | #include "stringpool.h" |
44 | #include "attribs.h" |
45 | #include "gomp-constants.h" |
46 | #include "predict.h" |
47 | #include "memmodel.h" |
48 | |
/* These routines provide a modular interface to perform many parsing
   operations.  They may therefore be used during actual parsing, or
   during template instantiation, which may be regarded as a
   degenerate form of parsing.  */
53 | |
/* Forward declarations for helpers defined later in this file.  */
static tree maybe_convert_cond (tree);
static tree finalize_nrv_r (tree *, int *, void *);

/* Used for OpenMP non-static data member privatization.  */

/* NOTE(review): exact roles assumed from names — confirm against the
   OpenMP clause-handling code later in this file.  */
static hash_map<tree, tree> *omp_private_member_map;
static vec<tree> omp_private_member_vec;
static bool omp_private_member_ignore_next;
62 | |
63 | |
64 | /* Deferred Access Checking Overview |
65 | --------------------------------- |
66 | |
67 | Most C++ expressions and declarations require access checking |
68 | to be performed during parsing. However, in several cases, |
69 | this has to be treated differently. |
70 | |
71 | For member declarations, access checking has to be deferred |
72 | until more information about the declaration is known. For |
73 | example: |
74 | |
75 | class A { |
76 | typedef int X; |
77 | public: |
78 | X f(); |
79 | }; |
80 | |
81 | A::X A::f(); |
82 | A::X g(); |
83 | |
84 | When we are parsing the function return type `A::X', we don't |
85 | really know if this is allowed until we parse the function name. |
86 | |
87 | Furthermore, some contexts require that access checking is |
88 | never performed at all. These include class heads, and template |
89 | instantiations. |
90 | |
91 | Typical use of access checking functions is described here: |
92 | |
93 | 1. When we enter a context that requires certain access checking |
94 | mode, the function `push_deferring_access_checks' is called with |
95 | DEFERRING argument specifying the desired mode. Access checking |
96 | may be performed immediately (dk_no_deferred), deferred |
97 | (dk_deferred), or not performed (dk_no_check). |
98 | |
99 | 2. When a declaration such as a type, or a variable, is encountered, |
100 | the function `perform_or_defer_access_check' is called. It |
101 | maintains a vector of all deferred checks. |
102 | |
  3. The global `current_class_type' or `current_function_decl' is then
  set up by the parser.  `enforce_access' relies on this information
  to check access.
106 | |
  4. Upon exiting the context mentioned in step 1,
  `perform_deferred_access_checks' is called to check all declarations
  stored in the vector.  `pop_deferring_access_checks' is then
  called to restore the previous access checking mode.
111 | |
112 | In case of parsing error, we simply call `pop_deferring_access_checks' |
113 | without `perform_deferred_access_checks'. */ |
114 | |
/* One entry of the deferred-access-check stack; pushed/popped by
   push_deferring_access_checks and pop_deferring_access_checks.  */
struct GTY(()) deferred_access {
  /* A vector representing name-lookups for which we have deferred
     checking access controls.  We cannot check the accessibility of
     names used in a decl-specifier-seq until we know what is being
     declared because code like:

       class A {
	 class B {};
	 B* f();
       }

       A::B* A::f() { return 0; }

     is valid, even though `A::B' is not generally accessible.  */
  vec<deferred_access_check, va_gc> *deferred_access_checks;

  /* The current mode of access checks.  */
  enum deferring_kind deferring_access_checks_kind;
};

/* Data for deferred access checking.  */
static GTY(()) vec<deferred_access, va_gc> *deferred_access_stack;
/* Depth of enclosing contexts in which access checking is disabled
   entirely (dk_no_check); while nonzero, the stack is not used.  */
static GTY(()) unsigned deferred_access_no_check;
138 | |
139 | /* Save the current deferred access states and start deferred |
140 | access checking iff DEFER_P is true. */ |
141 | |
142 | void |
143 | push_deferring_access_checks (deferring_kind deferring) |
144 | { |
145 | /* For context like template instantiation, access checking |
146 | disabling applies to all nested context. */ |
147 | if (deferred_access_no_check || deferring == dk_no_check) |
148 | deferred_access_no_check++; |
149 | else |
150 | { |
151 | deferred_access e = {NULL, .deferring_access_checks_kind: deferring}; |
152 | vec_safe_push (v&: deferred_access_stack, obj: e); |
153 | } |
154 | } |
155 | |
156 | /* Save the current deferred access states and start deferred access |
157 | checking, continuing the set of deferred checks in CHECKS. */ |
158 | |
159 | void |
160 | reopen_deferring_access_checks (vec<deferred_access_check, va_gc> * checks) |
161 | { |
162 | push_deferring_access_checks (deferring: dk_deferred); |
163 | if (!deferred_access_no_check) |
164 | deferred_access_stack->last().deferred_access_checks = checks; |
165 | } |
166 | |
167 | /* Resume deferring access checks again after we stopped doing |
168 | this previously. */ |
169 | |
170 | void |
171 | resume_deferring_access_checks (void) |
172 | { |
173 | if (!deferred_access_no_check) |
174 | deferred_access_stack->last().deferring_access_checks_kind = dk_deferred; |
175 | } |
176 | |
177 | /* Stop deferring access checks. */ |
178 | |
179 | void |
180 | stop_deferring_access_checks (void) |
181 | { |
182 | if (!deferred_access_no_check) |
183 | deferred_access_stack->last().deferring_access_checks_kind = dk_no_deferred; |
184 | } |
185 | |
186 | /* Discard the current deferred access checks and restore the |
187 | previous states. */ |
188 | |
189 | void |
190 | pop_deferring_access_checks (void) |
191 | { |
192 | if (deferred_access_no_check) |
193 | deferred_access_no_check--; |
194 | else |
195 | deferred_access_stack->pop (); |
196 | } |
197 | |
198 | /* Returns a TREE_LIST representing the deferred checks. |
199 | The TREE_PURPOSE of each node is the type through which the |
200 | access occurred; the TREE_VALUE is the declaration named. |
201 | */ |
202 | |
203 | vec<deferred_access_check, va_gc> * |
204 | get_deferred_access_checks (void) |
205 | { |
206 | if (deferred_access_no_check) |
207 | return NULL; |
208 | else |
209 | return (deferred_access_stack->last().deferred_access_checks); |
210 | } |
211 | |
212 | /* Take current deferred checks and combine with the |
213 | previous states if we also defer checks previously. |
214 | Otherwise perform checks now. */ |
215 | |
216 | void |
217 | pop_to_parent_deferring_access_checks (void) |
218 | { |
219 | if (deferred_access_no_check) |
220 | deferred_access_no_check--; |
221 | else |
222 | { |
223 | vec<deferred_access_check, va_gc> *checks; |
224 | deferred_access *ptr; |
225 | |
226 | checks = (deferred_access_stack->last ().deferred_access_checks); |
227 | |
228 | deferred_access_stack->pop (); |
229 | ptr = &deferred_access_stack->last (); |
230 | if (ptr->deferring_access_checks_kind == dk_no_deferred) |
231 | { |
232 | /* Check access. */ |
233 | perform_access_checks (checks, tf_warning_or_error); |
234 | } |
235 | else |
236 | { |
237 | /* Merge with parent. */ |
238 | int i, j; |
239 | deferred_access_check *chk, *probe; |
240 | |
241 | FOR_EACH_VEC_SAFE_ELT (checks, i, chk) |
242 | { |
243 | FOR_EACH_VEC_SAFE_ELT (ptr->deferred_access_checks, j, probe) |
244 | { |
245 | if (probe->binfo == chk->binfo && |
246 | probe->decl == chk->decl && |
247 | probe->diag_decl == chk->diag_decl) |
248 | goto found; |
249 | } |
250 | /* Insert into parent's checks. */ |
251 | vec_safe_push (v&: ptr->deferred_access_checks, obj: *chk); |
252 | found:; |
253 | } |
254 | } |
255 | } |
256 | } |
257 | |
258 | /* Called from enforce_access. A class has attempted (but failed) to access |
259 | DECL. It is already established that a baseclass of that class, |
260 | PARENT_BINFO, has private access to DECL. Examine certain special cases |
261 | to find a decl that accurately describes the source of the problem. If |
262 | none of the special cases apply, simply return DECL as the source of the |
263 | problem. */ |
264 | |
265 | static tree |
266 | get_class_access_diagnostic_decl (tree parent_binfo, tree decl) |
267 | { |
268 | /* When a class is denied access to a decl in a baseclass, most of the |
269 | time it is because the decl itself was declared as private at the point |
270 | of declaration. |
271 | |
272 | However, in C++, there are (at least) two situations in which a decl |
273 | can be private even though it was not originally defined as such. |
274 | These two situations only apply if a baseclass had private access to |
275 | DECL (this function is only called if that is the case). */ |
276 | |
277 | /* We should first check whether the reason the parent had private access |
278 | to DECL was simply because DECL was created and declared as private in |
279 | the parent. If it was, then DECL is definitively the source of the |
280 | problem. */ |
281 | if (SAME_BINFO_TYPE_P (context_for_name_lookup (decl), |
282 | BINFO_TYPE (parent_binfo))) |
283 | return decl; |
284 | |
285 | /* 1. If the "using" keyword is used to inherit DECL within the parent, |
286 | this may cause DECL to be private, so we should return the using |
287 | statement as the source of the problem. |
288 | |
289 | Scan the fields of PARENT_BINFO and see if there are any using decls. If |
290 | there are, see if they inherit DECL. If they do, that's where DECL must |
291 | have been declared private. */ |
292 | |
293 | for (tree parent_field = TYPE_FIELDS (BINFO_TYPE (parent_binfo)); |
294 | parent_field; |
295 | parent_field = DECL_CHAIN (parent_field)) |
296 | /* Not necessary, but also check TREE_PRIVATE for the sake of |
297 | eliminating obviously non-relevant using decls. */ |
298 | if (TREE_CODE (parent_field) == USING_DECL |
299 | && TREE_PRIVATE (parent_field)) |
300 | { |
301 | tree decl_stripped = strip_using_decl (parent_field); |
302 | |
303 | /* The using statement might be overloaded. If so, we need to |
304 | check all of the overloads. */ |
305 | for (ovl_iterator iter (decl_stripped); iter; ++iter) |
306 | /* If equal, the using statement inherits DECL, and so is the |
307 | source of the access failure, so return it. */ |
308 | if (*iter == decl) |
309 | return parent_field; |
310 | } |
311 | |
312 | /* 2. If DECL was privately inherited by the parent class, then DECL will |
313 | be inaccessible, even though it may originally have been accessible to |
314 | deriving classes. In that case, the fault lies with the parent, since it |
315 | used a private inheritance, so we return the parent as the source of the |
316 | problem. |
317 | |
318 | Since this is the last check, we just assume it's true. At worst, it |
319 | will simply point to the class that failed to give access, which is |
320 | technically true. */ |
321 | return TYPE_NAME (BINFO_TYPE (parent_binfo)); |
322 | } |
323 | |
324 | /* If the current scope isn't allowed to access DECL along |
325 | BASETYPE_PATH, give an error, or if we're parsing a function or class |
326 | template, defer the access check to be performed at instantiation time. |
327 | The most derived class in BASETYPE_PATH is the one used to qualify DECL. |
328 | DIAG_DECL is the declaration to use in the error diagnostic. */ |
329 | |
static bool
enforce_access (tree basetype_path, tree decl, tree diag_decl,
		tsubst_flags_t complain, access_failure_info *afi = NULL)
{
  /* Returns true if access is permitted (or the check was deferred to
     instantiation time), false on an access failure.  */
  gcc_assert (TREE_CODE (basetype_path) == TREE_BINFO);

  if (flag_new_inheriting_ctors
      && DECL_INHERITED_CTOR (decl))
    {
      /* 7.3.3/18: The additional constructors are accessible if they would be
	 accessible when used to construct an object of the corresponding base
	 class.  So check the original constructor in the base instead.  */
      decl = strip_inheriting_ctors (decl);
      basetype_path = lookup_base (basetype_path, DECL_CONTEXT (decl),
				   ba_any, NULL, complain);
    }

  tree cs = current_scope ();
  if (in_template_context
      && (CLASS_TYPE_P (cs) || TREE_CODE (cs) == FUNCTION_DECL))
    if (tree template_info = get_template_info (cs))
      {
	/* When parsing a function or class template, we in general need to
	   defer access checks until template instantiation time, since a friend
	   declaration may grant access only to a particular specialization of
	   the template.  */

	if (accessible_p (basetype_path, decl, /*consider_local_p=*/true))
	  /* But if the member is deemed accessible at parse time, then we can
	     assume it'll be accessible at instantiation time.  */
	  return true;

	/* Access of a dependent decl should be rechecked after tsubst'ing
	   into the user of the decl, rather than explicitly deferring the
	   check here.  */
	gcc_assert (!uses_template_parms (decl));
	if (TREE_CODE (decl) == FIELD_DECL)
	  gcc_assert (!uses_template_parms (DECL_CONTEXT (decl)));

	/* Defer this access check until instantiation time, recording
	   it on the enclosing template's info.  */
	deferred_access_check access_check;
	access_check.binfo = basetype_path;
	access_check.decl = decl;
	access_check.diag_decl = diag_decl;
	access_check.loc = input_location;
	vec_safe_push (TI_DEFERRED_ACCESS_CHECKS (template_info), obj: access_check);
	return true;
      }

  if (!accessible_p (basetype_path, decl, /*consider_local_p=*/true))
    {
      if (flag_new_inheriting_ctors)
	diag_decl = strip_inheriting_ctors (diag_decl);
      if (complain & tf_error)
	{
	  access_kind access_failure_reason = ak_none;

	  /* By default, using the decl as the source of the problem will
	     usually give correct results.  */
	  tree diag_location = diag_decl;

	  /* However, if a parent of BASETYPE_PATH had private access to decl,
	     then it actually might be the case that the source of the problem
	     is not DECL.  */
	  tree parent_binfo = get_parent_with_private_access (decl,
							      binfo: basetype_path);

	  /* So if a parent did have private access, then we need to do
	     special checks to obtain the best diagnostic location decl.  */
	  if (parent_binfo != NULL_TREE)
	    {
	      diag_location = get_class_access_diagnostic_decl (parent_binfo,
								decl: diag_decl);

	      /* We also at this point know that the reason access failed was
		 because decl was private.  */
	      access_failure_reason = ak_private;
	    }

	  /* Finally, generate an error message.  */
	  complain_about_access (decl, diag_decl, diag_location, true,
				 access_failure_reason);
	}
      if (afi)
	afi->record_access_failure (basetype_path, decl, diag_decl);
      return false;
    }

  return true;
}
420 | |
421 | /* Perform the access checks in CHECKS. The TREE_PURPOSE of each node |
422 | is the BINFO indicating the qualifying scope used to access the |
423 | DECL node stored in the TREE_VALUE of the node. If CHECKS is empty |
424 | or we aren't in SFINAE context or all the checks succeed return TRUE, |
425 | otherwise FALSE. */ |
426 | |
427 | bool |
428 | perform_access_checks (vec<deferred_access_check, va_gc> *checks, |
429 | tsubst_flags_t complain) |
430 | { |
431 | int i; |
432 | deferred_access_check *chk; |
433 | location_t loc = input_location; |
434 | bool ok = true; |
435 | |
436 | if (!checks) |
437 | return true; |
438 | |
439 | FOR_EACH_VEC_SAFE_ELT (checks, i, chk) |
440 | { |
441 | input_location = chk->loc; |
442 | ok &= enforce_access (basetype_path: chk->binfo, decl: chk->decl, diag_decl: chk->diag_decl, complain); |
443 | } |
444 | |
445 | input_location = loc; |
446 | return (complain & tf_error) ? true : ok; |
447 | } |
448 | |
449 | /* Perform the deferred access checks. |
450 | |
451 | After performing the checks, we still have to keep the list |
452 | `deferred_access_stack->deferred_access_checks' since we may want |
453 | to check access for them again later in a different context. |
454 | For example: |
455 | |
456 | class A { |
457 | typedef int X; |
458 | static X a; |
459 | }; |
460 | A::X A::a, x; // No error for `A::a', error for `x' |
461 | |
462 | We have to perform deferred access of `A::X', first with `A::a', |
463 | next with `x'. Return value like perform_access_checks above. */ |
464 | |
465 | bool |
466 | perform_deferred_access_checks (tsubst_flags_t complain) |
467 | { |
468 | return perform_access_checks (checks: get_deferred_access_checks (), complain); |
469 | } |
470 | |
471 | /* Defer checking the accessibility of DECL, when looked up in |
472 | BINFO. DIAG_DECL is the declaration to use to print diagnostics. |
473 | Return value like perform_access_checks above. |
474 | If non-NULL, report failures to AFI. */ |
475 | |
476 | bool |
477 | perform_or_defer_access_check (tree binfo, tree decl, tree diag_decl, |
478 | tsubst_flags_t complain, |
479 | access_failure_info *afi) |
480 | { |
481 | int i; |
482 | deferred_access *ptr; |
483 | deferred_access_check *chk; |
484 | |
485 | /* Exit if we are in a context that no access checking is performed. */ |
486 | if (deferred_access_no_check) |
487 | return true; |
488 | |
489 | gcc_assert (TREE_CODE (binfo) == TREE_BINFO); |
490 | |
491 | ptr = &deferred_access_stack->last (); |
492 | |
493 | /* If we are not supposed to defer access checks, just check now. */ |
494 | if (ptr->deferring_access_checks_kind == dk_no_deferred) |
495 | { |
496 | bool ok = enforce_access (basetype_path: binfo, decl, diag_decl, complain, afi); |
497 | return (complain & tf_error) ? true : ok; |
498 | } |
499 | |
500 | /* See if we are already going to perform this check. */ |
501 | FOR_EACH_VEC_SAFE_ELT (ptr->deferred_access_checks, i, chk) |
502 | { |
503 | if (chk->decl == decl && chk->binfo == binfo && |
504 | chk->diag_decl == diag_decl) |
505 | { |
506 | return true; |
507 | } |
508 | } |
509 | /* If not, record the check. */ |
510 | deferred_access_check new_access = {.binfo: binfo, .decl: decl, .diag_decl: diag_decl, .loc: input_location}; |
511 | vec_safe_push (v&: ptr->deferred_access_checks, obj: new_access); |
512 | |
513 | return true; |
514 | } |
515 | |
516 | /* Returns nonzero if the current statement is a full expression, |
517 | i.e. temporaries created during that statement should be destroyed |
518 | at the end of the statement. */ |
519 | |
520 | int |
521 | stmts_are_full_exprs_p (void) |
522 | { |
523 | return current_stmt_tree ()->stmts_are_full_exprs_p; |
524 | } |
525 | |
526 | /* T is a statement. Add it to the statement-tree. This is the C++ |
527 | version. The C/ObjC frontends have a slightly different version of |
528 | this function. */ |
529 | |
530 | tree |
531 | add_stmt (tree t) |
532 | { |
533 | enum tree_code code = TREE_CODE (t); |
534 | |
535 | if (EXPR_P (t) && code != LABEL_EXPR) |
536 | { |
537 | if (!EXPR_HAS_LOCATION (t)) |
538 | SET_EXPR_LOCATION (t, input_location); |
539 | |
540 | /* When we expand a statement-tree, we must know whether or not the |
541 | statements are full-expressions. We record that fact here. */ |
542 | if (STATEMENT_CODE_P (TREE_CODE (t))) |
543 | STMT_IS_FULL_EXPR_P (t) = stmts_are_full_exprs_p (); |
544 | } |
545 | |
546 | if (code == LABEL_EXPR || code == CASE_LABEL_EXPR) |
547 | STATEMENT_LIST_HAS_LABEL (cur_stmt_list) = 1; |
548 | |
549 | /* Add T to the statement-tree. Non-side-effect statements need to be |
550 | recorded during statement expressions. */ |
551 | gcc_checking_assert (!stmt_list_stack->is_empty ()); |
552 | append_to_statement_list_force (t, &cur_stmt_list); |
553 | |
554 | return t; |
555 | } |
556 | |
557 | /* Returns the stmt_tree to which statements are currently being added. */ |
558 | |
559 | stmt_tree |
560 | current_stmt_tree (void) |
561 | { |
562 | return (cfun |
563 | ? &cfun->language->base.x_stmt_tree |
564 | : &scope_chain->x_stmt_tree); |
565 | } |
566 | |
567 | /* If statements are full expressions, wrap STMT in a CLEANUP_POINT_EXPR. */ |
568 | |
569 | static tree |
570 | maybe_cleanup_point_expr (tree expr) |
571 | { |
572 | if (!processing_template_decl && stmts_are_full_exprs_p ()) |
573 | expr = fold_build_cleanup_point_expr (TREE_TYPE (expr), expr); |
574 | return expr; |
575 | } |
576 | |
577 | /* Like maybe_cleanup_point_expr except have the type of the new expression be |
578 | void so we don't need to create a temporary variable to hold the inner |
579 | expression. The reason why we do this is because the original type might be |
580 | an aggregate and we cannot create a temporary variable for that type. */ |
581 | |
582 | tree |
583 | maybe_cleanup_point_expr_void (tree expr) |
584 | { |
585 | if (!processing_template_decl && stmts_are_full_exprs_p ()) |
586 | expr = fold_build_cleanup_point_expr (void_type_node, expr); |
587 | return expr; |
588 | } |
589 | |
590 | |
591 | |
592 | /* Create a declaration statement for the declaration given by the DECL. */ |
593 | |
594 | void |
595 | add_decl_expr (tree decl) |
596 | { |
597 | tree r = build_stmt (DECL_SOURCE_LOCATION (decl), DECL_EXPR, decl); |
598 | if (DECL_INITIAL (decl) |
599 | || (DECL_SIZE (decl) && TREE_SIDE_EFFECTS (DECL_SIZE (decl)))) |
600 | r = maybe_cleanup_point_expr_void (expr: r); |
601 | add_stmt (t: r); |
602 | } |
603 | |
604 | /* Set EXPR_LOCATION of the cleanups of any CLEANUP_STMT in STMTS to LOC. */ |
605 | |
606 | static void |
607 | set_cleanup_locs (tree stmts, location_t loc) |
608 | { |
609 | if (TREE_CODE (stmts) == CLEANUP_STMT) |
610 | { |
611 | tree t = CLEANUP_EXPR (stmts); |
612 | if (t && TREE_CODE (t) != POSTCONDITION_STMT) |
613 | protected_set_expr_location (t, loc); |
614 | /* Avoid locus differences for C++ cdtor calls depending on whether |
615 | cdtor_returns_this: a conversion to void is added to discard the return |
616 | value, and this conversion ends up carrying the location, and when it |
617 | gets discarded, the location is lost. So hold it in the call as |
618 | well. */ |
619 | if (TREE_CODE (t) == NOP_EXPR |
620 | && TREE_TYPE (t) == void_type_node |
621 | && TREE_CODE (TREE_OPERAND (t, 0)) == CALL_EXPR) |
622 | protected_set_expr_location (TREE_OPERAND (t, 0), loc); |
623 | set_cleanup_locs (CLEANUP_BODY (stmts), loc); |
624 | } |
625 | else if (TREE_CODE (stmts) == STATEMENT_LIST) |
626 | for (tree stmt : tsi_range (stmts)) |
627 | set_cleanup_locs (stmts: stmt, loc); |
628 | } |
629 | |
630 | /* True iff the innermost block scope is a try block. */ |
631 | |
632 | static bool |
633 | at_try_scope () |
634 | { |
635 | cp_binding_level *b = current_binding_level; |
636 | while (b && b->kind == sk_cleanup) |
637 | b = b->level_chain; |
638 | return b && b->kind == sk_try; |
639 | } |
640 | |
641 | /* Finish a scope. */ |
642 | |
tree
do_poplevel (tree stmt_list)
{
  tree block = NULL;

  /* Query this before poplevel discards the binding level.  */
  bool was_try = at_try_scope ();

  if (stmts_are_full_exprs_p ())
    block = poplevel (kept_level_p (), 1, 0);

  /* This needs to come after poplevel merges sk_cleanup statement_lists.  */
  maybe_splice_retval_cleanup (stmt_list, was_try);

  stmt_list = pop_stmt_list (stmt_list);

  /* input_location is the last token of the scope, usually a }.  */
  set_cleanup_locs (stmts: stmt_list, loc: input_location);

  if (!processing_template_decl)
    {
      stmt_list = c_build_bind_expr (input_location, block, stmt_list);
      /* ??? See c_end_compound_stmt re statement expressions.  */
    }

  return stmt_list;
}
669 | |
670 | /* Begin a new scope. */ |
671 | |
672 | static tree |
673 | do_pushlevel (scope_kind sk) |
674 | { |
675 | tree ret = push_stmt_list (); |
676 | if (stmts_are_full_exprs_p ()) |
677 | begin_scope (sk, NULL); |
678 | return ret; |
679 | } |
680 | |
681 | /* Queue a cleanup. CLEANUP is an expression/statement to be executed |
682 | when the current scope is exited. EH_ONLY is true when this is not |
683 | meant to apply to normal control flow transfer. DECL is the VAR_DECL |
684 | being cleaned up, if any, or null for temporaries or subobjects. */ |
685 | |
686 | void |
687 | push_cleanup (tree decl, tree cleanup, bool eh_only) |
688 | { |
689 | tree stmt = build_stmt (input_location, CLEANUP_STMT, NULL, cleanup, decl); |
690 | CLEANUP_EH_ONLY (stmt) = eh_only; |
691 | add_stmt (t: stmt); |
692 | CLEANUP_BODY (stmt) = push_stmt_list (); |
693 | } |
694 | |
695 | /* Simple infinite loop tracking for -Wreturn-type. We keep a stack of all |
696 | the current loops, represented by 'NULL_TREE' if we've seen a possible |
697 | exit, and 'error_mark_node' if not. This is currently used only to |
698 | suppress the warning about a function with no return statements, and |
699 | therefore we don't bother noting returns as possible exits. We also |
700 | don't bother with gotos. */ |
701 | |
702 | static void |
703 | begin_maybe_infinite_loop (tree cond) |
704 | { |
705 | /* Only track this while parsing a function, not during instantiation. */ |
706 | if (!cfun || (DECL_TEMPLATE_INSTANTIATION (current_function_decl) |
707 | && !processing_template_decl)) |
708 | return; |
709 | bool maybe_infinite = true; |
710 | if (cond) |
711 | { |
712 | cond = fold_non_dependent_expr (cond); |
713 | maybe_infinite = integer_nonzerop (cond); |
714 | } |
715 | vec_safe_push (cp_function_chain->infinite_loops, |
716 | obj: maybe_infinite ? error_mark_node : NULL_TREE); |
717 | |
718 | } |
719 | |
720 | /* A break is a possible exit for the current loop. */ |
721 | |
722 | void |
723 | break_maybe_infinite_loop (void) |
724 | { |
725 | if (!cfun) |
726 | return; |
727 | cp_function_chain->infinite_loops->last() = NULL_TREE; |
728 | } |
729 | |
730 | /* If we reach the end of the loop without seeing a possible exit, we have |
731 | an infinite loop. */ |
732 | |
733 | static void |
734 | end_maybe_infinite_loop (tree cond) |
735 | { |
736 | if (!cfun || (DECL_TEMPLATE_INSTANTIATION (current_function_decl) |
737 | && !processing_template_decl)) |
738 | return; |
739 | tree current = cp_function_chain->infinite_loops->pop(); |
740 | if (current != NULL_TREE) |
741 | { |
742 | cond = fold_non_dependent_expr (cond); |
743 | if (integer_nonzerop (cond)) |
744 | current_function_infinite_loop = 1; |
745 | } |
746 | } |
747 | |
748 | /* Begin a conditional that might contain a declaration. When generating |
749 | normal code, we want the declaration to appear before the statement |
750 | containing the conditional. When generating template code, we want the |
751 | conditional to be rendered as the raw DECL_EXPR. */ |
752 | |
753 | static void |
754 | begin_cond (tree *cond_p) |
755 | { |
756 | if (processing_template_decl) |
757 | *cond_p = push_stmt_list (); |
758 | } |
759 | |
760 | /* Finish such a conditional. */ |
761 | |
762 | static void |
763 | finish_cond (tree *cond_p, tree expr) |
764 | { |
765 | if (processing_template_decl) |
766 | { |
767 | tree cond = pop_stmt_list (*cond_p); |
768 | |
769 | if (expr == NULL_TREE) |
770 | /* Empty condition in 'for'. */ |
771 | gcc_assert (empty_expr_stmt_p (cond)); |
772 | else if (check_for_bare_parameter_packs (expr)) |
773 | expr = error_mark_node; |
774 | else if (!empty_expr_stmt_p (cond)) |
775 | expr = build2 (COMPOUND_EXPR, TREE_TYPE (expr), cond, expr); |
776 | } |
777 | *cond_p = expr; |
778 | } |
779 | |
780 | /* If *COND_P specifies a conditional with a declaration, transform the |
781 | loop such that |
782 | while (A x = 42) { } |
783 | for (; A x = 42;) { } |
784 | becomes |
785 | while (true) { A x = 42; if (!x) break; } |
786 | for (;;) { A x = 42; if (!x) break; } |
787 | The statement list for BODY will be empty if the conditional did |
788 | not declare anything. */ |
789 | |
static void
simplify_loop_decl_cond (tree *cond_p, tree body)
{
  tree cond, if_stmt;

  /* An empty BODY means the condition declared nothing; leave the
     loop untouched.  */
  if (!TREE_SIDE_EFFECTS (body))
    return;

  /* The loop itself now iterates unconditionally ...  */
  cond = *cond_p;
  *cond_p = boolean_true_node;

  /* ... and the original condition becomes 'if (!cond) break;' inside
     the body.  Rebind cond_p to walk beneath any ANNOTATE_EXPR
     wrappers so only the innermost condition is negated.  */
  if_stmt = begin_if_stmt ();
  cond_p = &cond;
  while (TREE_CODE (*cond_p) == ANNOTATE_EXPR)
    cond_p = &TREE_OPERAND (*cond_p, 0);
  *cond_p = cp_build_unary_op (TRUTH_NOT_EXPR, *cond_p, false,
			       tf_warning_or_error);
  finish_if_stmt_cond (cond, if_stmt);
  finish_break_stmt ();
  finish_then_clause (if_stmt);
  finish_if_stmt (if_stmt);
}
812 | |
813 | /* Finish a goto-statement. */ |
814 | |
815 | tree |
816 | finish_goto_stmt (tree destination) |
817 | { |
818 | if (identifier_p (t: destination)) |
819 | destination = lookup_label (destination); |
820 | |
821 | /* We warn about unused labels with -Wunused. That means we have to |
822 | mark the used labels as used. */ |
823 | if (TREE_CODE (destination) == LABEL_DECL) |
824 | TREE_USED (destination) = 1; |
825 | else |
826 | { |
827 | destination = mark_rvalue_use (destination); |
828 | if (!processing_template_decl) |
829 | { |
830 | destination = cp_convert (ptr_type_node, destination, |
831 | tf_warning_or_error); |
832 | if (error_operand_p (t: destination)) |
833 | return NULL_TREE; |
834 | destination |
835 | = fold_build_cleanup_point_expr (TREE_TYPE (destination), |
836 | expr: destination); |
837 | } |
838 | } |
839 | |
840 | check_goto (destination); |
841 | |
842 | add_stmt (t: build_predict_expr (PRED_GOTO, NOT_TAKEN)); |
843 | return add_stmt (t: build_stmt (input_location, GOTO_EXPR, destination)); |
844 | } |
845 | |
846 | /* Returns true if T corresponds to an assignment operator expression. */ |
847 | |
848 | static bool |
849 | is_assignment_op_expr_p (tree t) |
850 | { |
851 | if (t == NULL_TREE) |
852 | return false; |
853 | |
854 | if (TREE_CODE (t) == MODIFY_EXPR |
855 | || (TREE_CODE (t) == MODOP_EXPR |
856 | && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)) |
857 | return true; |
858 | |
859 | tree call = extract_call_expr (t); |
860 | if (call == NULL_TREE |
861 | || call == error_mark_node |
862 | || !CALL_EXPR_OPERATOR_SYNTAX (call)) |
863 | return false; |
864 | |
865 | tree fndecl = cp_get_callee_fndecl_nofold (call); |
866 | return fndecl != NULL_TREE |
867 | && DECL_ASSIGNMENT_OPERATOR_P (fndecl) |
868 | && DECL_OVERLOADED_OPERATOR_IS (fndecl, NOP_EXPR); |
869 | } |
870 | |
/* Return true if TYPE is a class type that is convertible to
   and assignable from bool.  */

/* Cache mapping class type to the boolish_class_type_p result.
   GTY((deletable)) lets the GC discard it; it can be recomputed.  */
static GTY((deletable)) hash_map<tree, bool> *boolish_class_type_p_cache;

static bool
boolish_class_type_p (tree type)
{
  /* Only complete class types can have the members we look for.  */
  type = TYPE_MAIN_VARIANT (type);
  if (!CLASS_TYPE_P (type) || !COMPLETE_TYPE_P (type))
    return false;

  if (bool *r = hash_map_safe_get (h: boolish_class_type_p_cache, k: type))
    return *r;

  tree ops;
  bool has_bool_assignment = false;
  bool has_bool_conversion = false;

  /* Look for an operator= whose parameter is bool (possibly by
     reference).  */
  ops = lookup_fnfields (type, assign_op_identifier, /*protect=*/0, tf_none);
  for (tree op : ovl_range (BASELINK_FUNCTIONS (ops)))
    {
      op = STRIP_TEMPLATE (op);
      if (TREE_CODE (op) != FUNCTION_DECL)
	continue;
      /* Skip the implicit object parameter to reach the real one.  */
      tree parm = DECL_CHAIN (DECL_ARGUMENTS (op));
      tree parm_type = non_reference (TREE_TYPE (parm));
      if (TREE_CODE (parm_type) == BOOLEAN_TYPE)
	{
	  has_bool_assignment = true;
	  break;
	}
    }

  /* Only bother looking for a conversion to bool if an assignment
     from bool exists.  */
  if (has_bool_assignment)
    {
      ops = lookup_conversions (type);
      for (; ops; ops = TREE_CHAIN (ops))
	{
	  tree op = TREE_VALUE (ops);
	  /* Require a non-explicit conversion function to bool.  */
	  if (!DECL_NONCONVERTING_P (op)
	      && TREE_CODE (DECL_CONV_FN_TYPE (op)) == BOOLEAN_TYPE)
	    {
	      has_bool_conversion = true;
	      break;
	    }
	}
    }

  bool boolish = has_bool_assignment && has_bool_conversion;
  hash_map_safe_put<true> (h&: boolish_class_type_p_cache, k: type, v: boolish);
  return boolish;
}
924 | |
925 | |
926 | /* Maybe warn about an unparenthesized 'a = b' (appearing in a |
927 | boolean context where 'a == b' might have been intended). |
928 | NESTED_P is true if T is the RHS of another assignment. */ |
929 | |
930 | void |
931 | maybe_warn_unparenthesized_assignment (tree t, bool nested_p, |
932 | tsubst_flags_t complain) |
933 | { |
934 | tree type = TREE_TYPE (t); |
935 | t = STRIP_REFERENCE_REF (t); |
936 | |
937 | if ((complain & tf_warning) |
938 | && warn_parentheses |
939 | && is_assignment_op_expr_p (t) |
940 | /* A parenthesized expression would've had this warning |
941 | suppressed by finish_parenthesized_expr. */ |
942 | && !warning_suppressed_p (t, OPT_Wparentheses) |
943 | /* In c = a = b, don't warn if a has type bool or bool-like class. */ |
944 | && (!nested_p |
945 | || (TREE_CODE (type) != BOOLEAN_TYPE |
946 | && !boolish_class_type_p (type)))) |
947 | { |
948 | warning_at (cp_expr_loc_or_input_loc (t), OPT_Wparentheses, |
949 | "suggest parentheses around assignment used as truth value" ); |
950 | suppress_warning (t, OPT_Wparentheses); |
951 | } |
952 | } |
953 | |
954 | /* COND is the condition-expression for an if, while, etc., |
955 | statement. Convert it to a boolean value, if appropriate. |
956 | In addition, verify sequence points if -Wsequence-point is enabled. */ |
957 | |
958 | static tree |
959 | maybe_convert_cond (tree cond) |
960 | { |
961 | /* Empty conditions remain empty. */ |
962 | if (!cond) |
963 | return NULL_TREE; |
964 | |
965 | /* Wait until we instantiate templates before doing conversion. */ |
966 | if (type_dependent_expression_p (cond)) |
967 | return cond; |
968 | |
969 | if (warn_sequence_point && !processing_template_decl) |
970 | verify_sequence_points (cond); |
971 | |
972 | maybe_warn_unparenthesized_assignment (t: cond, /*nested_p=*/false, |
973 | complain: tf_warning_or_error); |
974 | |
975 | /* Do the conversion. */ |
976 | cond = convert_from_reference (cond); |
977 | return condition_conversion (cond); |
978 | } |
979 | |
/* Finish an expression-statement, whose EXPRESSION is as indicated.
   Returns the statement added to the statement list, or NULL_TREE
   when EXPR is NULL_TREE.  */

tree
finish_expr_stmt (tree expr)
{
  tree r = NULL_TREE;
  location_t loc = EXPR_LOCATION (expr);

  if (expr != NULL_TREE)
    {
      /* If we ran into a problem, make sure we complained.  */
      gcc_assert (expr != error_mark_node || seen_error ());

      if (!processing_template_decl)
	{
	  if (warn_sequence_point)
	    verify_sequence_points (expr);
	  expr = convert_to_void (expr, ICV_STATEMENT, tf_warning_or_error);
	}
      else if (!type_dependent_expression_p (expr))
	/* In a template, run the conversion for its diagnostics only;
	   the result is discarded and redone at instantiation.  */
	convert_to_void (expr, ICV_STATEMENT, tf_warning_or_error);

      if (check_for_bare_parameter_packs (expr))
	expr = error_mark_node;

      /* Simplification of inner statement expressions, compound exprs,
	 etc can result in us already having an EXPR_STMT.  */
      if (TREE_CODE (expr) != CLEANUP_POINT_EXPR)
	{
	  if (TREE_CODE (expr) != EXPR_STMT)
	    expr = build_stmt (loc, EXPR_STMT, expr);
	  expr = maybe_cleanup_point_expr_void (expr);
	}

      r = add_stmt (t: expr);
    }

  return r;
}
1019 | |
1020 | |
1021 | /* Begin an if-statement. Returns a newly created IF_STMT if |
1022 | appropriate. */ |
1023 | |
1024 | tree |
1025 | begin_if_stmt (void) |
1026 | { |
1027 | tree r, scope; |
1028 | scope = do_pushlevel (sk: sk_cond); |
1029 | r = build_stmt (input_location, IF_STMT, NULL_TREE, |
1030 | NULL_TREE, NULL_TREE, scope); |
1031 | current_binding_level->this_entity = r; |
1032 | begin_cond (cond_p: &IF_COND (r)); |
1033 | return r; |
1034 | } |
1035 | |
1036 | /* Returns true if FN, a CALL_EXPR, is a call to |
1037 | std::is_constant_evaluated or __builtin_is_constant_evaluated. */ |
1038 | |
1039 | static bool |
1040 | is_std_constant_evaluated_p (tree fn) |
1041 | { |
1042 | /* std::is_constant_evaluated takes no arguments. */ |
1043 | if (call_expr_nargs (fn) != 0) |
1044 | return false; |
1045 | |
1046 | tree fndecl = cp_get_callee_fndecl_nofold (fn); |
1047 | if (fndecl == NULL_TREE) |
1048 | return false; |
1049 | |
1050 | if (fndecl_built_in_p (node: fndecl, name: CP_BUILT_IN_IS_CONSTANT_EVALUATED, |
1051 | klass: BUILT_IN_FRONTEND)) |
1052 | return true; |
1053 | |
1054 | if (!decl_in_std_namespace_p (fndecl)) |
1055 | return false; |
1056 | |
1057 | tree name = DECL_NAME (fndecl); |
1058 | return name && id_equal (id: name, str: "is_constant_evaluated" ); |
1059 | } |
1060 | |
1061 | /* Callback function for maybe_warn_for_constant_evaluated that looks |
1062 | for calls to std::is_constant_evaluated in TP. */ |
1063 | |
1064 | static tree |
1065 | find_std_constant_evaluated_r (tree *tp, int *walk_subtrees, void *) |
1066 | { |
1067 | tree t = *tp; |
1068 | |
1069 | if (TYPE_P (t) || TREE_CONSTANT (t)) |
1070 | { |
1071 | *walk_subtrees = false; |
1072 | return NULL_TREE; |
1073 | } |
1074 | |
1075 | switch (TREE_CODE (t)) |
1076 | { |
1077 | case CALL_EXPR: |
1078 | if (is_std_constant_evaluated_p (fn: t)) |
1079 | return t; |
1080 | break; |
1081 | case EXPR_STMT: |
1082 | /* Don't warn in statement expressions. */ |
1083 | *walk_subtrees = false; |
1084 | return NULL_TREE; |
1085 | default: |
1086 | break; |
1087 | } |
1088 | |
1089 | return NULL_TREE; |
1090 | } |
1091 | |
/* In certain contexts, std::is_constant_evaluated() is always true (for
   instance, in a consteval function or in a constexpr if), or always false
   (e.g., in a non-constexpr non-consteval function) so give the user a clue.
   COND is the condition to search.  CONSTEXPR_IF is true when COND belongs
   to an 'if constexpr'.  TRIVIAL_INFINITE is true when COND guards a
   trivially empty iteration statement checked for being a trivial
   infinite loop.  Emits -Wtautological-compare diagnostics only.  */

static void
maybe_warn_for_constant_evaluated (tree cond, bool constexpr_if,
				   bool trivial_infinite)
{
  if (!warn_tautological_compare)
    return;

  /* Suppress warning for std::is_constant_evaluated if the conditional
     comes from a macro.  */
  if (from_macro_expansion_at (EXPR_LOCATION (cond)))
    return;

  /* Find a call to std::is_constant_evaluated anywhere inside COND;
     non-null means one was found.  */
  cond = cp_walk_tree_without_duplicates (&cond, find_std_constant_evaluated_r,
					  NULL);
  if (cond)
    {
      if (constexpr_if)
	warning_at (EXPR_LOCATION (cond), OPT_Wtautological_compare,
		    "%<std::is_constant_evaluated%> always evaluates to "
		    "true in %<if constexpr%>" );
      else if (trivial_infinite)
	{
	  auto_diagnostic_group d;
	  if (warning_at (EXPR_LOCATION (cond), OPT_Wtautological_compare,
			  "%<std::is_constant_evaluated%> evaluates to "
			  "true when checking if trivially empty iteration "
			  "statement is trivial infinite loop" )
	      && !maybe_constexpr_fn (current_function_decl))
	    inform (EXPR_LOCATION (cond),
		    "and evaluates to false when actually evaluating "
		    "the condition in non-%<constexpr%> function" );
	}
      /* Note the ordering: an immediate (consteval) function is also
	 maybe-constexpr, so the non-constexpr branch is checked first.  */
      else if (!maybe_constexpr_fn (current_function_decl))
	warning_at (EXPR_LOCATION (cond), OPT_Wtautological_compare,
		    "%<std::is_constant_evaluated%> always evaluates to "
		    "false in a non-%<constexpr%> function" );
      else if (DECL_IMMEDIATE_FUNCTION_P (current_function_decl))
	warning_at (EXPR_LOCATION (cond), OPT_Wtautological_compare,
		    "%<std::is_constant_evaluated%> always evaluates to "
		    "true in a %<consteval%> function" );
    }
}
1138 | |
/* Process the COND of an if-statement, which may be given by
   IF_STMT.  Returns the converted condition; for 'if constexpr'
   with a non-dependent bool condition, the condition folded to a
   constant.  */

tree
finish_if_stmt_cond (tree orig_cond, tree if_stmt)
{
  tree cond = maybe_convert_cond (cond: orig_cond);
  maybe_warn_for_constant_evaluated (cond, IF_STMT_CONSTEXPR_P (if_stmt),
				     /*trivial_infinite=*/false);
  if (IF_STMT_CONSTEXPR_P (if_stmt)
      && !type_dependent_expression_p (cond)
      && require_constant_expression (cond)
      && !instantiation_dependent_expression_p (cond)
      /* Wait until instantiation time, since only then COND has been
	 converted to bool.  */
      && TYPE_MAIN_VARIANT (TREE_TYPE (cond)) == boolean_type_node)
    {
      /* 'if constexpr': evaluate the condition to a constant now.  */
      cond = instantiate_non_dependent_expr (cond);
      cond = cxx_constant_value (cond);
    }
  else if (processing_template_decl)
    /* In a template, keep the unconverted condition; the conversion
       is redone at instantiation time.  */
    cond = orig_cond;
  finish_cond (cond_p: &IF_COND (if_stmt), expr: cond);
  add_stmt (t: if_stmt);
  THEN_CLAUSE (if_stmt) = push_stmt_list ();
  return cond;
}
1166 | |
1167 | /* Finish the then-clause of an if-statement, which may be given by |
1168 | IF_STMT. */ |
1169 | |
1170 | tree |
1171 | finish_then_clause (tree if_stmt) |
1172 | { |
1173 | THEN_CLAUSE (if_stmt) = pop_stmt_list (THEN_CLAUSE (if_stmt)); |
1174 | return if_stmt; |
1175 | } |
1176 | |
1177 | /* Begin the else-clause of an if-statement. */ |
1178 | |
1179 | void |
1180 | begin_else_clause (tree if_stmt) |
1181 | { |
1182 | ELSE_CLAUSE (if_stmt) = push_stmt_list (); |
1183 | } |
1184 | |
1185 | /* Finish the else-clause of an if-statement, which may be given by |
1186 | IF_STMT. */ |
1187 | |
1188 | void |
1189 | finish_else_clause (tree if_stmt) |
1190 | { |
1191 | ELSE_CLAUSE (if_stmt) = pop_stmt_list (ELSE_CLAUSE (if_stmt)); |
1192 | } |
1193 | |
1194 | /* Callback for cp_walk_tree to mark all {VAR,PARM}_DECLs in a tree as |
1195 | read. */ |
1196 | |
1197 | static tree |
1198 | maybe_mark_exp_read_r (tree *tp, int *, void *) |
1199 | { |
1200 | tree t = *tp; |
1201 | if (VAR_P (t) || TREE_CODE (t) == PARM_DECL) |
1202 | mark_exp_read (t); |
1203 | return NULL_TREE; |
1204 | } |
1205 | |
/* Finish an if-statement.  Pops the condition scope and adds it to
   the enclosing statement list.  */

void
finish_if_stmt (tree if_stmt)
{
  tree scope = IF_SCOPE (if_stmt);
  IF_SCOPE (if_stmt) = NULL;
  if (IF_STMT_CONSTEXPR_P (if_stmt))
    {
      /* Prevent various -Wunused warnings.  We might not instantiate
	 either of these branches, so we would not mark the variables
	 used in that branch as read.  */
      cp_walk_tree_without_duplicates (&THEN_CLAUSE (if_stmt),
				       maybe_mark_exp_read_r, NULL);
      cp_walk_tree_without_duplicates (&ELSE_CLAUSE (if_stmt),
				       maybe_mark_exp_read_r, NULL);
    }
  add_stmt (t: do_poplevel (stmt_list: scope));
}
1225 | |
/* Determine if iteration statement with *CONDP condition and
   loop BODY is trivially empty iteration statement or even
   trivial infinite loop.  In the latter case for -ffinite-loops
   add ANNOTATE_EXPR to mark the loop as maybe validly infinite.
   Also, emit -Wtautological-compare warning for std::is_constant_evaluated ()
   calls in the condition when needed.  */

static void
finish_loop_cond (tree *condp, tree body)
{
  /* A literal constant condition needs neither warning nor annotation.  */
  if (TREE_CODE (*condp) == INTEGER_CST)
    return;
  bool trivially_empty = expr_first (body) == NULL_TREE;
  bool trivial_infinite = false;
  if (trivially_empty)
    {
      /* Fold as manifestly constant-evaluated, so that a
	 std::is_constant_evaluated () call folds to true here.  */
      tree c = fold_non_dependent_expr (*condp, tf_none,
					/*manifestly_const_eval=*/true);
      trivial_infinite = c && integer_nonzerop (c);
    }
  if (warn_tautological_compare)
    {
      /* Look through annotations added for loop pragmas.  */
      tree cond = *condp;
      while (TREE_CODE (cond) == ANNOTATE_EXPR)
	cond = TREE_OPERAND (cond, 0);
      if (trivial_infinite
	  && !DECL_IMMEDIATE_FUNCTION_P (current_function_decl))
	maybe_warn_for_constant_evaluated (cond, /*constexpr_if=*/false,
					   /*trivial_infinite=*/true);
      else if (!trivially_empty
	       || !processing_template_decl
	       || DECL_IMMEDIATE_FUNCTION_P (current_function_decl))
	maybe_warn_for_constant_evaluated (cond, /*constexpr_if=*/false,
					   /*trivial_infinite=*/false);
    }
  /* For -ffinite-loops, mark the loop as one that may validly be
     infinite despite that flag.  */
  if (trivial_infinite && flag_finite_loops && !processing_template_decl)
    *condp = build3 (ANNOTATE_EXPR, TREE_TYPE (*condp), *condp,
		     build_int_cst (integer_type_node,
				    annot_expr_maybe_infinite_kind),
		     integer_zero_node);
}
1267 | |
1268 | /* Begin a while-statement. Returns a newly created WHILE_STMT if |
1269 | appropriate. */ |
1270 | |
1271 | tree |
1272 | begin_while_stmt (void) |
1273 | { |
1274 | tree r; |
1275 | r = build_stmt (input_location, WHILE_STMT, NULL_TREE, NULL_TREE); |
1276 | add_stmt (t: r); |
1277 | WHILE_BODY (r) = do_pushlevel (sk: sk_block); |
1278 | begin_cond (cond_p: &WHILE_COND (r)); |
1279 | return r; |
1280 | } |
1281 | |
/* Process the COND of a while-statement, which may be given by
   WHILE_STMT.  IVDEP, UNROLL and NOVECTOR record the corresponding
   loop pragmas by wrapping the condition in ANNOTATE_EXPRs.  */

void
finish_while_stmt_cond (tree cond, tree while_stmt, bool ivdep,
			tree unroll, bool novector)
{
  cond = maybe_convert_cond (cond);
  finish_cond (cond_p: &WHILE_COND (while_stmt), expr: cond);
  begin_maybe_infinite_loop (cond);
  if (ivdep && cond != error_mark_node)
    WHILE_COND (while_stmt) = build3 (ANNOTATE_EXPR,
				      TREE_TYPE (WHILE_COND (while_stmt)),
				      WHILE_COND (while_stmt),
				      build_int_cst (integer_type_node,
						     annot_expr_ivdep_kind),
				      integer_zero_node);
  if (unroll && cond != error_mark_node)
    WHILE_COND (while_stmt) = build3 (ANNOTATE_EXPR,
				      TREE_TYPE (WHILE_COND (while_stmt)),
				      WHILE_COND (while_stmt),
				      build_int_cst (integer_type_node,
						     annot_expr_unroll_kind),
				      unroll);
  if (novector && cond != error_mark_node)
    WHILE_COND (while_stmt) = build3 (ANNOTATE_EXPR,
				      TREE_TYPE (WHILE_COND (while_stmt)),
				      WHILE_COND (while_stmt),
				      build_int_cst (integer_type_node,
						     annot_expr_no_vector_kind),
				      integer_zero_node);
  /* If the condition declares a variable, move the test into the body.  */
  simplify_loop_decl_cond (cond_p: &WHILE_COND (while_stmt), WHILE_BODY (while_stmt));
}
1315 | |
1316 | /* Finish a while-statement, which may be given by WHILE_STMT. */ |
1317 | |
1318 | void |
1319 | finish_while_stmt (tree while_stmt) |
1320 | { |
1321 | end_maybe_infinite_loop (boolean_true_node); |
1322 | WHILE_BODY (while_stmt) = do_poplevel (WHILE_BODY (while_stmt)); |
1323 | finish_loop_cond (condp: &WHILE_COND (while_stmt), WHILE_BODY (while_stmt)); |
1324 | } |
1325 | |
1326 | /* Begin a do-statement. Returns a newly created DO_STMT if |
1327 | appropriate. */ |
1328 | |
1329 | tree |
1330 | begin_do_stmt (void) |
1331 | { |
1332 | tree r = build_stmt (input_location, DO_STMT, NULL_TREE, NULL_TREE); |
1333 | begin_maybe_infinite_loop (boolean_true_node); |
1334 | add_stmt (t: r); |
1335 | DO_BODY (r) = push_stmt_list (); |
1336 | return r; |
1337 | } |
1338 | |
/* Finish the body of a do-statement, which may be given by DO_STMT.
   Emits -Wempty-body if the body ends in an empty statement.  */

void
finish_do_body (tree do_stmt)
{
  tree body = DO_BODY (do_stmt) = pop_stmt_list (DO_BODY (do_stmt));

  /* Inspect the last statement of the list for the emptiness check.  */
  if (TREE_CODE (body) == STATEMENT_LIST && STATEMENT_LIST_TAIL (body))
    body = STATEMENT_LIST_TAIL (body)->stmt;

  if (IS_EMPTY_STMT (body))
    warning (OPT_Wempty_body,
	     "suggest explicit braces around empty body in %<do%> statement" );
}
1353 | |
/* Finish a do-statement, which may be given by DO_STMT, and whose
   COND is as indicated.  IVDEP, UNROLL and NOVECTOR record the
   corresponding loop pragmas as ANNOTATE_EXPRs on the condition.  */

void
finish_do_stmt (tree cond, tree do_stmt, bool ivdep, tree unroll,
		bool novector)
{
  cond = maybe_convert_cond (cond);
  end_maybe_infinite_loop (cond);
  /* Unlike other iteration statements, the condition may not contain
     a declaration, so we don't call finish_cond which checks for
     unexpanded parameter packs.  */
  if (check_for_bare_parameter_packs (cond))
    cond = error_mark_node;
  if (ivdep && cond != error_mark_node)
    cond = build3 (ANNOTATE_EXPR, TREE_TYPE (cond), cond,
		   build_int_cst (integer_type_node, annot_expr_ivdep_kind),
		   integer_zero_node);
  if (unroll && cond != error_mark_node)
    cond = build3 (ANNOTATE_EXPR, TREE_TYPE (cond), cond,
		   build_int_cst (integer_type_node, annot_expr_unroll_kind),
		   unroll);
  if (novector && cond != error_mark_node)
    cond = build3 (ANNOTATE_EXPR, TREE_TYPE (cond), cond,
		   build_int_cst (integer_type_node, annot_expr_no_vector_kind),
		   integer_zero_node);
  DO_COND (do_stmt) = cond;
  tree do_body = DO_BODY (do_stmt);
  /* A body lowered to a void cast of zero counts as no body for the
     trivial-infinite-loop check below.  */
  if (CONVERT_EXPR_P (do_body)
      && integer_zerop (TREE_OPERAND (do_body, 0))
      && VOID_TYPE_P (TREE_TYPE (do_body)))
    do_body = NULL_TREE;
  finish_loop_cond (condp: &DO_COND (do_stmt), body: do_body);
}
1388 | |
/* Finish a return-statement.  The EXPRESSION returned, if any, is as
   indicated.  Returns the statement added to the statement list, or
   error_mark_node.  */

tree
finish_return_stmt (tree expr)
{
  tree r;
  bool no_warning;
  bool dangling;

  /* Check and convert EXPR against the function's return type; also
     sets NO_WARNING and DANGLING (returning address of a local).  */
  expr = check_return_expr (expr, &no_warning, &dangling);

  if (error_operand_p (t: expr)
      || (flag_openmp && !check_omp_return ()))
    {
      /* Suppress -Wreturn-type for this function.  */
      if (warn_return_type)
	suppress_warning (current_function_decl, OPT_Wreturn_type);
      return error_mark_node;
    }

  if (!processing_template_decl)
    {
      if (warn_sequence_point)
	verify_sequence_points (expr);
    }

  r = build_stmt (input_location, RETURN_EXPR, expr);
  RETURN_EXPR_LOCAL_ADDR_P (r) = dangling;
  if (no_warning)
    suppress_warning (r, OPT_Wreturn_type);
  r = maybe_cleanup_point_expr_void (expr: r);
  r = add_stmt (t: r);

  return r;
}
1425 | |
1426 | /* Begin the scope of a for-statement or a range-for-statement. |
1427 | Both the returned trees are to be used in a call to |
1428 | begin_for_stmt or begin_range_for_stmt. */ |
1429 | |
1430 | tree |
1431 | begin_for_scope (tree *init) |
1432 | { |
1433 | tree scope = do_pushlevel (sk: sk_for); |
1434 | |
1435 | if (processing_template_decl) |
1436 | *init = push_stmt_list (); |
1437 | else |
1438 | *init = NULL_TREE; |
1439 | |
1440 | return scope; |
1441 | } |
1442 | |
1443 | /* Begin a for-statement. Returns a new FOR_STMT. |
1444 | SCOPE and INIT should be the return of begin_for_scope, |
1445 | or both NULL_TREE */ |
1446 | |
1447 | tree |
1448 | begin_for_stmt (tree scope, tree init) |
1449 | { |
1450 | tree r; |
1451 | |
1452 | r = build_stmt (input_location, FOR_STMT, NULL_TREE, NULL_TREE, |
1453 | NULL_TREE, NULL_TREE, NULL_TREE); |
1454 | |
1455 | if (scope == NULL_TREE) |
1456 | { |
1457 | gcc_assert (!init); |
1458 | scope = begin_for_scope (init: &init); |
1459 | } |
1460 | |
1461 | FOR_INIT_STMT (r) = init; |
1462 | FOR_SCOPE (r) = scope; |
1463 | |
1464 | return r; |
1465 | } |
1466 | |
1467 | /* Finish the init-statement of a for-statement, which may be |
1468 | given by FOR_STMT. */ |
1469 | |
1470 | void |
1471 | finish_init_stmt (tree for_stmt) |
1472 | { |
1473 | if (processing_template_decl) |
1474 | FOR_INIT_STMT (for_stmt) = pop_stmt_list (FOR_INIT_STMT (for_stmt)); |
1475 | add_stmt (t: for_stmt); |
1476 | FOR_BODY (for_stmt) = do_pushlevel (sk: sk_block); |
1477 | begin_cond (cond_p: &FOR_COND (for_stmt)); |
1478 | } |
1479 | |
/* Finish the COND of a for-statement, which may be given by
   FOR_STMT.  IVDEP, UNROLL and NOVECTOR record the corresponding
   loop pragmas by wrapping the condition in ANNOTATE_EXPRs.  */

void
finish_for_cond (tree cond, tree for_stmt, bool ivdep, tree unroll,
		 bool novector)
{
  cond = maybe_convert_cond (cond);
  finish_cond (cond_p: &FOR_COND (for_stmt), expr: cond);
  begin_maybe_infinite_loop (cond);
  if (ivdep && cond != error_mark_node)
    FOR_COND (for_stmt) = build3 (ANNOTATE_EXPR,
				  TREE_TYPE (FOR_COND (for_stmt)),
				  FOR_COND (for_stmt),
				  build_int_cst (integer_type_node,
						 annot_expr_ivdep_kind),
				  integer_zero_node);
  if (unroll && cond != error_mark_node)
    FOR_COND (for_stmt) = build3 (ANNOTATE_EXPR,
				  TREE_TYPE (FOR_COND (for_stmt)),
				  FOR_COND (for_stmt),
				  build_int_cst (integer_type_node,
						 annot_expr_unroll_kind),
				  unroll);
  if (novector && cond != error_mark_node)
    FOR_COND (for_stmt) = build3 (ANNOTATE_EXPR,
				  TREE_TYPE (FOR_COND (for_stmt)),
				  FOR_COND (for_stmt),
				  build_int_cst (integer_type_node,
						 annot_expr_no_vector_kind),
				  integer_zero_node);
  /* If the condition declares a variable, move the test into the body.  */
  simplify_loop_decl_cond (cond_p: &FOR_COND (for_stmt), FOR_BODY (for_stmt));
}
1513 | |
/* Finish the increment-EXPRESSION in a for-statement, which may be
   given by FOR_STMT.  EXPR may be NULL_TREE ("for (;;)").  */

void
finish_for_expr (tree expr, tree for_stmt)
{
  if (!expr)
    return;
  /* If EXPR is an overloaded function, issue an error; there is no
     context available to use to perform overload resolution.  */
  if (type_unknown_p (expr))
    {
      cxx_incomplete_type_error (value: expr, TREE_TYPE (expr));
      expr = error_mark_node;
    }
  if (!processing_template_decl)
    {
      if (warn_sequence_point)
	verify_sequence_points (expr);
      /* The increment is evaluated only for its side effects.  */
      expr = convert_to_void (expr, ICV_THIRD_IN_FOR,
                              tf_warning_or_error);
    }
  else if (!type_dependent_expression_p (expr))
    convert_to_void (expr, ICV_THIRD_IN_FOR, tf_warning_or_error);
  expr = maybe_cleanup_point_expr_void (expr);
  if (check_for_bare_parameter_packs (expr))
    expr = error_mark_node;
  FOR_EXPR (for_stmt) = expr;
}
1543 | |
/* Finish the body of a for-statement, which may be given by
   FOR_STMT.  The increment-EXPR for the loop must be
   provided.
   It can also finish RANGE_FOR_STMT.  */

void
finish_for_stmt (tree for_stmt)
{
  end_maybe_infinite_loop (boolean_true_node);

  if (TREE_CODE (for_stmt) == RANGE_FOR_STMT)
    RANGE_FOR_BODY (for_stmt) = do_poplevel (RANGE_FOR_BODY (for_stmt));
  else
    {
      FOR_BODY (for_stmt) = do_poplevel (FOR_BODY (for_stmt));
      if (FOR_COND (for_stmt))
	/* A loop with an increment expression is never trivially
	   empty, so pass a non-empty stand-in in that case.  */
	finish_loop_cond (condp: &FOR_COND (for_stmt),
			  FOR_EXPR (for_stmt) ? integer_one_node
			  : FOR_BODY (for_stmt));
    }

  /* Pop the scope for the body of the loop.  */
  tree *scope_ptr = (TREE_CODE (for_stmt) == RANGE_FOR_STMT
		     ? &RANGE_FOR_SCOPE (for_stmt)
		     : &FOR_SCOPE (for_stmt));
  tree scope = *scope_ptr;
  *scope_ptr = NULL;

  /* During parsing of the body, range for uses "__for_{range,begin,end} "
     decl names to make those unaccessible by code in the body.
     Change it to ones with underscore instead of space, so that it can
     be inspected in the debugger.  */
  tree range_for_decl[3] = { NULL_TREE, NULL_TREE, NULL_TREE };
  /* The renaming below relies on this layout of the identifier slots.  */
  gcc_assert (CPTI_FOR_BEGIN__IDENTIFIER == CPTI_FOR_RANGE__IDENTIFIER + 1
	      && CPTI_FOR_END__IDENTIFIER == CPTI_FOR_RANGE__IDENTIFIER + 2
	      && CPTI_FOR_RANGE_IDENTIFIER == CPTI_FOR_RANGE__IDENTIFIER + 3
	      && CPTI_FOR_BEGIN_IDENTIFIER == CPTI_FOR_BEGIN__IDENTIFIER + 3
	      && CPTI_FOR_END_IDENTIFIER == CPTI_FOR_END__IDENTIFIER + 3);
  for (int i = 0; i < 3; i++)
    {
      tree id = cp_global_trees[CPTI_FOR_RANGE__IDENTIFIER + i];
      /* Only touch decls bound in this loop's own scope.  */
      if (IDENTIFIER_BINDING (id)
	  && IDENTIFIER_BINDING (id)->scope == current_binding_level)
	{
	  range_for_decl[i] = IDENTIFIER_BINDING (id)->value;
	  gcc_assert (VAR_P (range_for_decl[i])
		      && DECL_ARTIFICIAL (range_for_decl[i]));
	}
    }

  add_stmt (t: do_poplevel (stmt_list: scope));

  /* If we're being called from build_vec_init, don't mess with the names of
     the variables for an enclosing range-for.  */
  if (!stmts_are_full_exprs_p ())
    return;

  for (int i = 0; i < 3; i++)
    if (range_for_decl[i])
      DECL_NAME (range_for_decl[i])
	= cp_global_trees[CPTI_FOR_RANGE_IDENTIFIER + i];
}
1606 | |
1607 | /* Begin a range-for-statement. Returns a new RANGE_FOR_STMT. |
1608 | SCOPE and INIT should be the return of begin_for_scope, |
1609 | or both NULL_TREE . |
1610 | To finish it call finish_for_stmt(). */ |
1611 | |
1612 | tree |
1613 | begin_range_for_stmt (tree scope, tree init) |
1614 | { |
1615 | begin_maybe_infinite_loop (boolean_false_node); |
1616 | |
1617 | tree r = build_stmt (input_location, RANGE_FOR_STMT, NULL_TREE, NULL_TREE, |
1618 | NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE); |
1619 | |
1620 | if (scope == NULL_TREE) |
1621 | { |
1622 | gcc_assert (!init); |
1623 | scope = begin_for_scope (init: &init); |
1624 | } |
1625 | |
1626 | /* Since C++20, RANGE_FOR_STMTs can use the init tree, so save it. */ |
1627 | RANGE_FOR_INIT_STMT (r) = init; |
1628 | RANGE_FOR_SCOPE (r) = scope; |
1629 | |
1630 | return r; |
1631 | } |
1632 | |
1633 | /* Finish the head of a range-based for statement, which may |
1634 | be given by RANGE_FOR_STMT. DECL must be the declaration |
1635 | and EXPR must be the loop expression. */ |
1636 | |
1637 | void |
1638 | finish_range_for_decl (tree range_for_stmt, tree decl, tree expr) |
1639 | { |
1640 | if (processing_template_decl) |
1641 | RANGE_FOR_INIT_STMT (range_for_stmt) |
1642 | = pop_stmt_list (RANGE_FOR_INIT_STMT (range_for_stmt)); |
1643 | RANGE_FOR_DECL (range_for_stmt) = decl; |
1644 | RANGE_FOR_EXPR (range_for_stmt) = expr; |
1645 | add_stmt (t: range_for_stmt); |
1646 | RANGE_FOR_BODY (range_for_stmt) = do_pushlevel (sk: sk_block); |
1647 | } |
1648 | |
1649 | /* Finish a break-statement. */ |
1650 | |
1651 | tree |
1652 | finish_break_stmt (void) |
1653 | { |
1654 | /* In switch statements break is sometimes stylistically used after |
1655 | a return statement. This can lead to spurious warnings about |
1656 | control reaching the end of a non-void function when it is |
1657 | inlined. Note that we are calling block_may_fallthru with |
1658 | language specific tree nodes; this works because |
1659 | block_may_fallthru returns true when given something it does not |
1660 | understand. */ |
1661 | if (!block_may_fallthru (cur_stmt_list)) |
1662 | return void_node; |
1663 | note_break_stmt (); |
1664 | return add_stmt (t: build_stmt (input_location, BREAK_STMT)); |
1665 | } |
1666 | |
1667 | /* Finish a continue-statement. */ |
1668 | |
1669 | tree |
1670 | finish_continue_stmt (void) |
1671 | { |
1672 | return add_stmt (t: build_stmt (input_location, CONTINUE_STMT)); |
1673 | } |
1674 | |
1675 | /* Begin a switch-statement. Returns a new SWITCH_STMT if |
1676 | appropriate. */ |
1677 | |
1678 | tree |
1679 | begin_switch_stmt (void) |
1680 | { |
1681 | tree r, scope; |
1682 | |
1683 | scope = do_pushlevel (sk: sk_cond); |
1684 | r = build_stmt (input_location, SWITCH_STMT, NULL_TREE, NULL_TREE, NULL_TREE, scope); |
1685 | |
1686 | begin_cond (cond_p: &SWITCH_STMT_COND (r)); |
1687 | |
1688 | return r; |
1689 | } |
1690 | |
/* Finish the cond of a switch-statement.  Converts COND to an
   integer or enumeration type, performs the integral promotions,
   records the pre-promotion type for case-label checking, and
   pushes the switch onto the switch stack.  */

void
finish_switch_cond (tree cond, tree switch_stmt)
{
  tree orig_type = NULL;

  if (!processing_template_decl)
    {
      /* Convert the condition to an integer or enumeration type.  */
      tree orig_cond = cond;
      cond = build_expr_type_conversion (WANT_INT | WANT_ENUM, cond, true);
      if (cond == NULL_TREE)
	{
	  error_at (cp_expr_loc_or_input_loc (t: orig_cond),
		    "switch quantity not an integer" );
	  cond = error_mark_node;
	}
      /* We want unlowered type here to handle enum bit-fields.  */
      orig_type = unlowered_expr_type (cond);
      if (TREE_CODE (orig_type) != ENUMERAL_TYPE)
	orig_type = TREE_TYPE (cond);
      if (cond != error_mark_node)
	{
	  /* [stmt.switch]

	     Integral promotions are performed.  */
	  cond = perform_integral_promotions (cond);
	  cond = maybe_cleanup_point_expr (expr: cond);
	}
    }
  if (check_for_bare_parameter_packs (cond))
    cond = error_mark_node;
  else if (!processing_template_decl && warn_sequence_point)
    verify_sequence_points (cond);

  finish_cond (cond_p: &SWITCH_STMT_COND (switch_stmt), expr: cond);
  SWITCH_STMT_TYPE (switch_stmt) = orig_type;
  add_stmt (t: switch_stmt);
  push_switch (switch_stmt);
  SWITCH_STMT_BODY (switch_stmt) = push_stmt_list ();
}
1733 | |
1734 | /* Finish the body of a switch-statement, which may be given by |
1735 | SWITCH_STMT. The COND to switch on is indicated. */ |
1736 | |
1737 | void |
1738 | finish_switch_stmt (tree switch_stmt) |
1739 | { |
1740 | tree scope; |
1741 | |
1742 | SWITCH_STMT_BODY (switch_stmt) = |
1743 | pop_stmt_list (SWITCH_STMT_BODY (switch_stmt)); |
1744 | pop_switch (); |
1745 | |
1746 | scope = SWITCH_STMT_SCOPE (switch_stmt); |
1747 | SWITCH_STMT_SCOPE (switch_stmt) = NULL; |
1748 | add_stmt (t: do_poplevel (stmt_list: scope)); |
1749 | } |
1750 | |
1751 | /* Begin a try-block. Returns a newly-created TRY_BLOCK if |
1752 | appropriate. */ |
1753 | |
1754 | tree |
1755 | begin_try_block (void) |
1756 | { |
1757 | tree r = build_stmt (input_location, TRY_BLOCK, NULL_TREE, NULL_TREE); |
1758 | add_stmt (t: r); |
1759 | TRY_STMTS (r) = push_stmt_list (); |
1760 | return r; |
1761 | } |
1762 | |
1763 | /* Likewise, for a function-try-block. The block returned in |
1764 | *COMPOUND_STMT is an artificial outer scope, containing the |
1765 | function-try-block. */ |
1766 | |
1767 | tree |
1768 | begin_function_try_block (tree *compound_stmt) |
1769 | { |
1770 | tree r; |
1771 | /* This outer scope does not exist in the C++ standard, but we need |
1772 | a place to put __FUNCTION__ and similar variables. */ |
1773 | *compound_stmt = begin_compound_stmt (0); |
1774 | current_binding_level->artificial = 1; |
1775 | r = begin_try_block (); |
1776 | FN_TRY_BLOCK_P (r) = 1; |
1777 | return r; |
1778 | } |
1779 | |
1780 | /* Finish a try-block, which may be given by TRY_BLOCK. */ |
1781 | |
1782 | void |
1783 | finish_try_block (tree try_block) |
1784 | { |
1785 | TRY_STMTS (try_block) = pop_stmt_list (TRY_STMTS (try_block)); |
1786 | TRY_HANDLERS (try_block) = push_stmt_list (); |
1787 | } |
1788 | |
1789 | /* Finish the body of a cleanup try-block, which may be given by |
1790 | TRY_BLOCK. */ |
1791 | |
void
finish_cleanup_try_block (tree try_block)
{
  /* Close the statement list holding the protected statements; the
     cleanup itself is attached later, by finish_cleanup.  */
  TRY_STMTS (try_block) = pop_stmt_list (TRY_STMTS (try_block));
}
1797 | |
/* Finish an implicitly generated try-block, whose cleanup is given
   by CLEANUP. */
1800 | |
1801 | void |
1802 | finish_cleanup (tree cleanup, tree try_block) |
1803 | { |
1804 | TRY_HANDLERS (try_block) = cleanup; |
1805 | CLEANUP_P (try_block) = 1; |
1806 | } |
1807 | |
1808 | /* Likewise, for a function-try-block. */ |
1809 | |
void
finish_function_try_block (tree try_block)
{
  /* Close the protected statements and begin the handler list.  */
  finish_try_block (try_block);
  /* FIXME : something queer about CTOR_INITIALIZER somehow following
     the try block, but moving it inside.  */
  /* Record globally that we are now parsing the handlers of a
     function-try-block; cleared in finish_function_handler_sequence.  */
  in_function_try_handler = 1;
}
1818 | |
1819 | /* Finish a handler-sequence for a try-block, which may be given by |
1820 | TRY_BLOCK. */ |
1821 | |
1822 | void |
1823 | finish_handler_sequence (tree try_block) |
1824 | { |
1825 | TRY_HANDLERS (try_block) = pop_stmt_list (TRY_HANDLERS (try_block)); |
1826 | check_handlers (TRY_HANDLERS (try_block)); |
1827 | } |
1828 | |
1829 | /* Finish the handler-seq for a function-try-block, given by |
1830 | TRY_BLOCK. COMPOUND_STMT is the outer block created by |
1831 | begin_function_try_block. */ |
1832 | |
void
finish_function_handler_sequence (tree try_block, tree compound_stmt)
{
  /* We are done parsing the handlers of the function-try-block; clear
     the flag set by finish_function_try_block.  */
  in_function_try_handler = 0;
  finish_handler_sequence (try_block);
  /* Also close the artificial outer scope opened by
     begin_function_try_block.  */
  finish_compound_stmt (compound_stmt);
}
1840 | |
1841 | /* Begin a handler. Returns a HANDLER if appropriate. */ |
1842 | |
1843 | tree |
1844 | begin_handler (void) |
1845 | { |
1846 | tree r; |
1847 | |
1848 | r = build_stmt (input_location, HANDLER, NULL_TREE, NULL_TREE); |
1849 | add_stmt (t: r); |
1850 | |
1851 | /* Create a binding level for the eh_info and the exception object |
1852 | cleanup. */ |
1853 | HANDLER_BODY (r) = do_pushlevel (sk: sk_catch); |
1854 | |
1855 | return r; |
1856 | } |
1857 | |
1858 | /* Finish the handler-parameters for a handler, which may be given by |
1859 | HANDLER. DECL is the declaration for the catch parameter, or NULL |
1860 | if this is a `catch (...)' clause. */ |
1861 | |
void
finish_handler_parms (tree decl, tree handler)
{
  tree type = NULL_TREE;
  if (processing_template_decl)
    {
      /* In a template, just record the declaration; the catch machinery
	 is expanded at instantiation time.  */
      if (decl)
	{
	  decl = pushdecl (decl);
	  decl = push_template_decl (decl);
	  HANDLER_PARMS (handler) = decl;
	  type = TREE_TYPE (decl);
	}
    }
  else
    {
      /* Expand the runtime catch machinery; TYPE is the caught type,
	 or NULL_TREE (e.g. for `catch (...)', which has no DECL).  */
      type = expand_start_catch_block (decl);
      /* -Wcatch-value: warn about catching by value.  Levels: 1 warns
	 for polymorphic class types, 2 for any class type, 3 for any
	 non-reference type.  */
      if (warn_catch_value
	  && type != NULL_TREE
	  && type != error_mark_node
	  && !TYPE_REF_P (TREE_TYPE (decl)))
	{
	  tree orig_type = TREE_TYPE (decl);
	  if (CLASS_TYPE_P (orig_type))
	    {
	      if (TYPE_POLYMORPHIC_P (orig_type))
		warning_at (DECL_SOURCE_LOCATION (decl),
			    OPT_Wcatch_value_,
			    "catching polymorphic type %q#T by value" ,
			    orig_type);
	      else if (warn_catch_value > 1)
		warning_at (DECL_SOURCE_LOCATION (decl),
			    OPT_Wcatch_value_,
			    "catching type %q#T by value" , orig_type);
	    }
	  else if (warn_catch_value > 2)
	    warning_at (DECL_SOURCE_LOCATION (decl),
			OPT_Wcatch_value_,
			"catching non-reference type %q#T" , orig_type);
	}
    }
  HANDLER_TYPE (handler) = type;
}
1905 | |
1906 | /* Finish a handler, which may be given by HANDLER. The BLOCKs are |
1907 | the return value from the matching call to finish_handler_parms. */ |
1908 | |
void
finish_handler (tree handler)
{
  /* Outside a template, close the runtime catch machinery matching the
     expand_start_catch_block call made in finish_handler_parms.  */
  if (!processing_template_decl)
    expand_end_catch_block ();
  /* Pop the sk_catch binding level opened in begin_handler.  */
  HANDLER_BODY (handler) = do_poplevel (HANDLER_BODY (handler));
}
1916 | |
1917 | /* Begin a compound statement. FLAGS contains some bits that control the |
1918 | behavior and context. If BCS_NO_SCOPE is set, the compound statement |
1919 | does not define a scope. If BCS_FN_BODY is set, this is the outermost |
1920 | block of a function. If BCS_TRY_BLOCK is set, this is the block |
1921 | created on behalf of a TRY statement. Returns a token to be passed to |
1922 | finish_compound_stmt. */ |
1923 | |
1924 | tree |
1925 | begin_compound_stmt (unsigned int flags) |
1926 | { |
1927 | tree r; |
1928 | |
1929 | if (flags & BCS_NO_SCOPE) |
1930 | { |
1931 | r = push_stmt_list (); |
1932 | STATEMENT_LIST_NO_SCOPE (r) = 1; |
1933 | |
1934 | /* Normally, we try hard to keep the BLOCK for a statement-expression. |
1935 | But, if it's a statement-expression with a scopeless block, there's |
1936 | nothing to keep, and we don't want to accidentally keep a block |
1937 | *inside* the scopeless block. */ |
1938 | keep_next_level (false); |
1939 | } |
1940 | else |
1941 | { |
1942 | scope_kind sk = sk_block; |
1943 | if (flags & BCS_TRY_BLOCK) |
1944 | sk = sk_try; |
1945 | else if (flags & BCS_TRANSACTION) |
1946 | sk = sk_transaction; |
1947 | else if (flags & BCS_STMT_EXPR) |
1948 | sk = sk_stmt_expr; |
1949 | r = do_pushlevel (sk); |
1950 | } |
1951 | |
1952 | /* When processing a template, we need to remember where the braces were, |
1953 | so that we can set up identical scopes when instantiating the template |
1954 | later. BIND_EXPR is a handy candidate for this. |
1955 | Note that do_poplevel won't create a BIND_EXPR itself here (and thus |
1956 | result in nested BIND_EXPRs), since we don't build BLOCK nodes when |
1957 | processing templates. */ |
1958 | if (processing_template_decl) |
1959 | { |
1960 | r = build3 (BIND_EXPR, NULL, NULL, r, NULL); |
1961 | BIND_EXPR_TRY_BLOCK (r) = (flags & BCS_TRY_BLOCK) != 0; |
1962 | BIND_EXPR_BODY_BLOCK (r) = (flags & BCS_FN_BODY) != 0; |
1963 | TREE_SIDE_EFFECTS (r) = 1; |
1964 | } |
1965 | |
1966 | return r; |
1967 | } |
1968 | |
1969 | /* Finish a compound-statement, which is given by STMT. */ |
1970 | |
1971 | void |
1972 | finish_compound_stmt (tree stmt) |
1973 | { |
1974 | if (TREE_CODE (stmt) == BIND_EXPR) |
1975 | { |
1976 | tree body = do_poplevel (BIND_EXPR_BODY (stmt)); |
1977 | /* If the STATEMENT_LIST is empty and this BIND_EXPR isn't special, |
1978 | discard the BIND_EXPR so it can be merged with the containing |
1979 | STATEMENT_LIST. */ |
1980 | if (TREE_CODE (body) == STATEMENT_LIST |
1981 | && STATEMENT_LIST_HEAD (body) == NULL |
1982 | && !BIND_EXPR_BODY_BLOCK (stmt) |
1983 | && !BIND_EXPR_TRY_BLOCK (stmt)) |
1984 | stmt = body; |
1985 | else |
1986 | BIND_EXPR_BODY (stmt) = body; |
1987 | } |
1988 | else if (STATEMENT_LIST_NO_SCOPE (stmt)) |
1989 | stmt = pop_stmt_list (stmt); |
1990 | else |
1991 | { |
1992 | /* Destroy any ObjC "super" receivers that may have been |
1993 | created. */ |
1994 | objc_clear_super_receiver (); |
1995 | |
1996 | stmt = do_poplevel (stmt_list: stmt); |
1997 | } |
1998 | |
1999 | /* ??? See c_end_compound_stmt wrt statement expressions. */ |
2000 | add_stmt (t: stmt); |
2001 | } |
2002 | |
2003 | /* Finish an asm-statement, whose components are a STRING, some |
2004 | OUTPUT_OPERANDS, some INPUT_OPERANDS, some CLOBBERS and some |
2005 | LABELS. Also note whether the asm-statement should be |
2006 | considered volatile, and whether it is asm inline. */ |
2007 | |
tree
finish_asm_stmt (location_t loc, int volatile_p, tree string,
		 tree output_operands, tree input_operands, tree clobbers,
		 tree labels, bool inline_p)
{
  tree r;
  tree t;
  int ninputs = list_length (input_operands);
  int noutputs = list_length (output_operands);

  /* In a template, operands are left unresolved; constraint parsing and
     operand checking happen only when not processing a template.  */
  if (!processing_template_decl)
    {
      const char *constraint;
      const char **oconstraints;
      bool allows_mem, allows_reg, is_inout;
      tree operand;
      int i;

      oconstraints = XALLOCAVEC (const char *, noutputs);

      /* Resolve symbolic operand names like %[name] against the
	 operand lists.  */
      string = resolve_asm_operand_names (string, output_operands,
					  input_operands, labels);

      /* Validate each output operand: it must be a modifiable lvalue,
	 and if its constraint forces it into memory it must be
	 addressable.  */
      for (i = 0, t = output_operands; t; t = TREE_CHAIN (t), ++i)
	{
	  operand = TREE_VALUE (t);

	  /* ??? Really, this should not be here.  Users should be using a
	     proper lvalue, dammit.  But there's a long history of using
	     casts in the output operands.  In cases like longlong.h, this
	     becomes a primitive form of typechecking -- if the cast can be
	     removed, then the output operand had a type of the proper width;
	     otherwise we'll get an error.  Gross, but ... */
	  STRIP_NOPS (operand);

	  operand = mark_lvalue_use (operand);

	  if (!lvalue_or_else (operand, lv_asm, tf_warning_or_error))
	    operand = error_mark_node;

	  if (operand != error_mark_node
	      && (TREE_READONLY (operand)
		  || CP_TYPE_CONST_P (TREE_TYPE (operand))
		  /* Functions are not modifiable, even though they are
		     lvalues.  */
		  || FUNC_OR_METHOD_TYPE_P (TREE_TYPE (operand))
		  /* If it's an aggregate and any field is const, then it is
		     effectively const.  */
		  || (CLASS_TYPE_P (TREE_TYPE (operand))
		      && C_TYPE_FIELDS_READONLY (TREE_TYPE (operand)))))
	    cxx_readonly_error (loc, operand, lv_asm);

	  /* Look through COMPOUND_EXPRs (and below, through compound
	     lvalues like pre-increment) to the operand that will
	     actually be stored to.  */
	  tree *op = &operand;
	  while (TREE_CODE (*op) == COMPOUND_EXPR)
	    op = &TREE_OPERAND (*op, 1);
	  switch (TREE_CODE (*op))
	    {
	    case PREINCREMENT_EXPR:
	    case PREDECREMENT_EXPR:
	    case MODIFY_EXPR:
	      *op = genericize_compound_lvalue (*op);
	      op = &TREE_OPERAND (*op, 1);
	      break;
	    default:
	      break;
	    }

	  constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
	  oconstraints[i] = constraint;

	  if (parse_output_constraint (&constraint, i, ninputs, noutputs,
				       &allows_mem, &allows_reg, &is_inout))
	    {
	      /* If the operand is going to end up in memory,
		 mark it addressable.  */
	      if (!allows_reg && !cxx_mark_addressable (*op))
		operand = error_mark_node;
	    }
	  else
	    operand = error_mark_node;

	  TREE_VALUE (t) = operand;
	}

      /* Validate each input operand against its constraint, converting
	 it as needed.  */
      for (i = 0, t = input_operands; t; ++i, t = TREE_CHAIN (t))
	{
	  constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
	  bool constraint_parsed
	    = parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
				      oconstraints, &allows_mem, &allows_reg);
	  /* If the operand is going to end up in memory, don't call
	     decay_conversion.  */
	  if (constraint_parsed && !allows_reg && allows_mem)
	    operand = mark_lvalue_use (TREE_VALUE (t));
	  else
	    operand = decay_conversion (TREE_VALUE (t), tf_warning_or_error);

	  /* If the type of the operand hasn't been determined (e.g.,
	     because it involves an overloaded function), then issue
	     an error message.  There's no context available to
	     resolve the overloading.  */
	  if (TREE_TYPE (operand) == unknown_type_node)
	    {
	      error_at (loc,
			"type of %<asm%> operand %qE could not be determined" ,
			TREE_VALUE (t));
	      operand = error_mark_node;
	    }

	  if (constraint_parsed)
	    {
	      /* If the operand is going to end up in memory,
		 mark it addressable.  */
	      if (!allows_reg && allows_mem)
		{
		  /* Strip the nops as we allow this case.  FIXME, this really
		     should be rejected or made deprecated.  */
		  STRIP_NOPS (operand);

		  tree *op = &operand;
		  while (TREE_CODE (*op) == COMPOUND_EXPR)
		    op = &TREE_OPERAND (*op, 1);
		  switch (TREE_CODE (*op))
		    {
		    case PREINCREMENT_EXPR:
		    case PREDECREMENT_EXPR:
		    case MODIFY_EXPR:
		      *op = genericize_compound_lvalue (*op);
		      op = &TREE_OPERAND (*op, 1);
		      break;
		    default:
		      break;
		    }

		  if (!cxx_mark_addressable (*op))
		    operand = error_mark_node;
		}
	      else if (!allows_reg && !allows_mem)
		{
		  /* If constraint allows neither register nor memory,
		     try harder to get a constant.  */
		  tree constop = maybe_constant_value (operand);
		  if (TREE_CONSTANT (constop))
		    operand = constop;
		}
	    }
	  else
	    operand = error_mark_node;

	  TREE_VALUE (t) = operand;
	}
    }

  r = build_stmt (loc, ASM_EXPR, string,
		  output_operands, input_operands,
		  clobbers, labels);
  /* An asm with no outputs is treated as volatile.  */
  ASM_VOLATILE_P (r) = volatile_p || noutputs == 0;
  ASM_INLINE_P (r) = inline_p;
  r = maybe_cleanup_point_expr_void (expr: r);
  return add_stmt (t: r);
}
2169 | |
2170 | /* Finish a label with the indicated NAME. Returns the new label. */ |
2171 | |
2172 | tree |
2173 | finish_label_stmt (tree name) |
2174 | { |
2175 | tree decl = define_label (input_location, name); |
2176 | |
2177 | if (decl == error_mark_node) |
2178 | return error_mark_node; |
2179 | |
2180 | add_stmt (t: build_stmt (input_location, LABEL_EXPR, decl)); |
2181 | |
2182 | return decl; |
2183 | } |
2184 | |
2185 | /* Finish a series of declarations for local labels. G++ allows users |
2186 | to declare "local" labels, i.e., labels with scope. This extension |
2187 | is useful when writing code involving statement-expressions. */ |
2188 | |
2189 | void |
2190 | finish_label_decl (tree name) |
2191 | { |
2192 | if (!at_function_scope_p ()) |
2193 | { |
2194 | error ("%<__label__%> declarations are only allowed in function scopes" ); |
2195 | return; |
2196 | } |
2197 | |
2198 | add_decl_expr (decl: declare_local_label (name)); |
2199 | } |
2200 | |
2201 | /* When DECL goes out of scope, make sure that CLEANUP is executed. */ |
2202 | |
void
finish_decl_cleanup (tree decl, tree cleanup)
{
  /* Run CLEANUP on every exit from the scope, not just on exception
     paths (eh_only is false).  */
  push_cleanup (decl, cleanup, eh_only: false);
}
2208 | |
2209 | /* If the current scope exits with an exception, run CLEANUP. */ |
2210 | |
void
finish_eh_cleanup (tree cleanup)
{
  /* No associated declaration; run CLEANUP only when the scope is
     exited by an exception (eh_only is true).  */
  push_cleanup (NULL, cleanup, eh_only: true);
}
2216 | |
2217 | /* The MEM_INITS is a list of mem-initializers, in reverse of the |
2218 | order they were written by the user. Each node is as for |
2219 | emit_mem_initializers. */ |
2220 | |
2221 | void |
2222 | finish_mem_initializers (tree mem_inits) |
2223 | { |
2224 | /* Reorder the MEM_INITS so that they are in the order they appeared |
2225 | in the source program. */ |
2226 | mem_inits = nreverse (mem_inits); |
2227 | |
2228 | if (processing_template_decl) |
2229 | { |
2230 | tree mem; |
2231 | |
2232 | for (mem = mem_inits; mem; mem = TREE_CHAIN (mem)) |
2233 | { |
2234 | /* If the TREE_PURPOSE is a TYPE_PACK_EXPANSION, skip the |
2235 | check for bare parameter packs in the TREE_VALUE, because |
2236 | any parameter packs in the TREE_VALUE have already been |
2237 | bound as part of the TREE_PURPOSE. See |
2238 | make_pack_expansion for more information. */ |
2239 | if (TREE_CODE (TREE_PURPOSE (mem)) != TYPE_PACK_EXPANSION |
2240 | && check_for_bare_parameter_packs (TREE_VALUE (mem))) |
2241 | TREE_VALUE (mem) = error_mark_node; |
2242 | } |
2243 | |
2244 | add_stmt (t: build_min_nt_loc (UNKNOWN_LOCATION, |
2245 | CTOR_INITIALIZER, mem_inits)); |
2246 | } |
2247 | else |
2248 | emit_mem_initializers (mem_inits); |
2249 | } |
2250 | |
2251 | /* Obfuscate EXPR if it looks like an id-expression or member access so |
2252 | that the call to finish_decltype in do_auto_deduction will give the |
2253 | right result. If EVEN_UNEVAL, do this even in unevaluated context. */ |
2254 | |
2255 | tree |
2256 | force_paren_expr (tree expr, bool even_uneval /* = false */) |
2257 | { |
2258 | /* This is only needed for decltype(auto) in C++14. */ |
2259 | if (cxx_dialect < cxx14) |
2260 | return expr; |
2261 | |
2262 | /* If we're in unevaluated context, we can't be deducing a |
2263 | return/initializer type, so we don't need to mess with this. */ |
2264 | if (cp_unevaluated_operand && !even_uneval) |
2265 | return expr; |
2266 | |
2267 | if (TREE_CODE (expr) == COMPONENT_REF |
2268 | || TREE_CODE (expr) == SCOPE_REF |
2269 | || REFERENCE_REF_P (expr)) |
2270 | REF_PARENTHESIZED_P (expr) = true; |
2271 | else if (DECL_P (tree_strip_any_location_wrapper (expr))) |
2272 | { |
2273 | location_t loc = cp_expr_location (t_: expr); |
2274 | const tree_code code = processing_template_decl ? PAREN_EXPR |
2275 | : VIEW_CONVERT_EXPR; |
2276 | expr = build1_loc (loc, code, TREE_TYPE (expr), arg1: expr); |
2277 | REF_PARENTHESIZED_P (expr) = true; |
2278 | } |
2279 | return expr; |
2280 | } |
2281 | |
2282 | /* If T is an id-expression obfuscated by force_paren_expr, undo the |
2283 | obfuscation and return the underlying id-expression. Otherwise |
2284 | return T. */ |
2285 | |
2286 | tree |
2287 | maybe_undo_parenthesized_ref (tree t) |
2288 | { |
2289 | if (cxx_dialect < cxx14) |
2290 | return t; |
2291 | |
2292 | if ((TREE_CODE (t) == PAREN_EXPR || TREE_CODE (t) == VIEW_CONVERT_EXPR) |
2293 | && REF_PARENTHESIZED_P (t)) |
2294 | t = TREE_OPERAND (t, 0); |
2295 | |
2296 | return t; |
2297 | } |
2298 | |
2299 | /* Finish a parenthesized expression EXPR. */ |
2300 | |
2301 | cp_expr |
2302 | finish_parenthesized_expr (cp_expr expr) |
2303 | { |
2304 | if (EXPR_P (expr)) |
2305 | { |
2306 | /* This inhibits warnings in maybe_warn_unparenthesized_assignment |
2307 | and c_common_truthvalue_conversion. */ |
2308 | suppress_warning (STRIP_REFERENCE_REF (*expr), OPT_Wparentheses); |
2309 | } |
2310 | |
2311 | if (TREE_CODE (expr) == OFFSET_REF |
2312 | || TREE_CODE (expr) == SCOPE_REF) |
2313 | /* [expr.unary.op]/3 The qualified id of a pointer-to-member must not be |
2314 | enclosed in parentheses. */ |
2315 | PTRMEM_OK_P (expr) = 0; |
2316 | |
2317 | tree stripped_expr = tree_strip_any_location_wrapper (exp: expr); |
2318 | if (TREE_CODE (stripped_expr) == STRING_CST) |
2319 | PAREN_STRING_LITERAL_P (stripped_expr) = 1; |
2320 | |
2321 | expr = cp_expr (force_paren_expr (expr), expr.get_location ()); |
2322 | |
2323 | return expr; |
2324 | } |
2325 | |
2326 | /* Finish a reference to a non-static data member (DECL) that is not |
2327 | preceded by `.' or `->'. */ |
2328 | |
tree
finish_non_static_data_member (tree decl, tree object, tree qualifying_scope,
			       tsubst_flags_t complain /* = tf_warning_or_error */)
{
  gcc_assert (TREE_CODE (decl) == FIELD_DECL);
  bool try_omp_private = !object && omp_private_member_map;
  tree ret;

  /* With no explicit object, synthesize one (possibly a dummy) from
     the qualifying scope or from DECL's own class context.  */
  if (!object)
    {
      tree scope = qualifying_scope;
      if (scope == NULL_TREE)
	{
	  scope = context_for_name_lookup (decl);
	  if (!TYPE_P (scope))
	    {
	      /* Can happen during error recovery (c++/85014).  */
	      gcc_assert (seen_error ());
	      return error_mark_node;
	    }
	}
      object = maybe_dummy_object (scope, NULL);
    }

  object = maybe_resolve_dummy (object, true);
  if (object == error_mark_node)
    return error_mark_node;

  /* DR 613/850: Can use non-static data members without an associated
     object in sizeof/decltype/alignof.  */
  if (is_dummy_object (object)
      && !cp_unevaluated_operand
      && (!processing_template_decl || !current_class_ref))
    {
      if (complain & tf_error)
	{
	  if (current_function_decl
	      && DECL_STATIC_FUNCTION_P (current_function_decl))
	    error ("invalid use of member %qD in static member function" , decl);
	  else if (current_function_decl
		   && processing_contract_condition
		   && DECL_CONSTRUCTOR_P (current_function_decl))
	    error ("invalid use of member %qD in constructor %<pre%> contract" , decl);
	  else if (current_function_decl
		   && processing_contract_condition
		   && DECL_DESTRUCTOR_P (current_function_decl))
	    error ("invalid use of member %qD in destructor %<post%> contract" , decl);
	  else
	    error ("invalid use of non-static data member %qD" , decl);
	  inform (DECL_SOURCE_LOCATION (decl), "declared here" );
	}

      return error_mark_node;
    }

  if (current_class_ptr)
    TREE_USED (current_class_ptr) = 1;
  if (processing_template_decl)
    {
      /* In a template, build a COMPONENT_REF (or SCOPE_REF) with the
	 best type we can compute now.  */
      tree type = TREE_TYPE (decl);

      if (TYPE_REF_P (type))
	/* Quals on the object don't matter.  */;
      else if (PACK_EXPANSION_P (type))
	/* Don't bother trying to represent this.  */
	type = NULL_TREE;
      else if (WILDCARD_TYPE_P (TREE_TYPE (object)))
	/* We don't know what the eventual quals will be, so punt until
	   instantiation time.

	   This can happen when called from build_capture_proxy for an explicit
	   object lambda.  It's a bit marginal to call this function in that
	   case, since this function is for references to members of 'this',
	   but the deduced type is required to be derived from the closure
	   type, so it works.  */
	type = NULL_TREE;
      else
	{
	  /* Set the cv qualifiers: the member inherits the object's
	     qualifiers, except that `mutable' strips const.  */
	  int quals = cp_type_quals (TREE_TYPE (object));

	  if (DECL_MUTABLE_P (decl))
	    quals &= ~TYPE_QUAL_CONST;

	  quals |= cp_type_quals (TREE_TYPE (decl));
	  type = cp_build_qualified_type (type, quals);
	}

      if (qualifying_scope)
	/* Wrap this in a SCOPE_REF for now.  */
	ret = build_qualified_name (type, qualifying_scope, decl,
				    /*template_p=*/false);
      else
	ret = (convert_from_reference
	       (build_min (COMPONENT_REF, type, object, decl, NULL_TREE)));
    }
  /* If PROCESSING_TEMPLATE_DECL is nonzero here, then
     QUALIFYING_SCOPE is also non-null.  */
  else
    {
      tree access_type = TREE_TYPE (object);

      if (!perform_or_defer_access_check (TYPE_BINFO (access_type), decl,
					  diag_decl: decl, complain))
	return error_mark_node;

      /* If the data member was named `C::M', convert `*this' to `C'
	 first.  */
      if (qualifying_scope)
	{
	  tree binfo = NULL_TREE;
	  object = build_scoped_ref (object, qualifying_scope,
				     &binfo);
	}

      ret = build_class_member_access_expr (object, decl,
					    /*access_path=*/NULL_TREE,
					    /*preserve_reference=*/false,
					    complain);
    }
  if (try_omp_private)
    {
      /* If DECL has an entry in omp_private_member_map, use that value
	 instead — presumably the privatized copy for the enclosing
	 OpenMP construct; see the map's uses elsewhere to confirm.  */
      tree *v = omp_private_member_map->get (k: decl);
      if (v)
	ret = convert_from_reference (*v);
    }
  return ret;
}
2457 | |
2458 | /* DECL was the declaration to which a qualified-id resolved. Issue |
2459 | an error message if it is not accessible. If OBJECT_TYPE is |
2460 | non-NULL, we have just seen `x->' or `x.' and OBJECT_TYPE is the |
2461 | type of `*x', or `x', respectively. If the DECL was named as |
2462 | `A::B' then NESTED_NAME_SPECIFIER is `A'. Return value is like |
2463 | perform_access_checks above. */ |
2464 | |
bool
check_accessibility_of_qualified_id (tree decl,
				     tree object_type,
				     tree nested_name_specifier,
				     tsubst_flags_t complain)
{
  /* If we're not checking, return immediately.  */
  if (deferred_access_no_check)
    return true;

  /* Determine the SCOPE of DECL.  */
  tree scope = context_for_name_lookup (decl);
  /* If the SCOPE is not a type, then DECL is not a member.  */
  if (!TYPE_P (scope)
      /* If SCOPE is dependent then we can't perform this access check now,
	 and since we'll perform this access check again after substitution
	 there's no need to explicitly defer it.  */
      || dependent_type_p (scope))
    return true;

  tree qualifying_type = NULL_TREE;
  /* Compute the scope through which DECL is being accessed.  */
  if (object_type
      /* OBJECT_TYPE might not be a class type; consider:

	 class A { typedef int I; };
	 I *p;
	 p->A::I::~I();

	 In this case, we will have "A::I" as the DECL, but "I" as the
	 OBJECT_TYPE.  */
      && CLASS_TYPE_P (object_type)
      && DERIVED_FROM_P (scope, object_type))
    /* If we are processing a `->' or `.' expression, use the type of the
       left-hand side.  */
    qualifying_type = object_type;
  else if (nested_name_specifier)
    {
      /* If the reference is to a non-static member of the
	 current class, treat it as if it were referenced through
	 `this'.  */
      if (DECL_NONSTATIC_MEMBER_P (decl)
	  && current_class_ptr)
	if (tree current = current_nonlambda_class_type ())
	  {
	    if (dependent_type_p (current))
	      /* In general we can't know whether this access goes through
		 `this' until instantiation time.  Punt now, or else we might
		 create a deferred access check that's not relative to `this'
		 when it ought to be.  We'll check this access again after
		 substitution, e.g. from tsubst_qualified_id.  */
	      return true;

	    if (DERIVED_FROM_P (scope, current))
	      qualifying_type = current;
	  }
      /* Otherwise, use the type indicated by the
	 nested-name-specifier.  */
      if (!qualifying_type)
	qualifying_type = nested_name_specifier;
    }
  else
    /* Otherwise, the name must be from the current class or one of
       its bases.  */
    qualifying_type = currently_open_derived_class (scope);

  /* With a usable qualifying class, perform (or defer) the actual
     access check; in all other cases the access is accepted here.  */
  if (qualifying_type
      /* It is possible for qualifying type to be a TEMPLATE_TYPE_PARM
	 or similar in a default argument value.  */
      && CLASS_TYPE_P (qualifying_type))
    return perform_or_defer_access_check (TYPE_BINFO (qualifying_type), decl,
					  diag_decl: decl, complain);

  return true;
}
2540 | |
2541 | /* EXPR is the result of a qualified-id. The QUALIFYING_CLASS was the |
2542 | class named to the left of the "::" operator. DONE is true if this |
2543 | expression is a complete postfix-expression; it is false if this |
2544 | expression is followed by '->', '[', '(', etc. ADDRESS_P is true |
2545 | iff this expression is the operand of '&'. TEMPLATE_P is true iff |
2546 | the qualified-id was of the form "A::template B". TEMPLATE_ARG_P |
2547 | is true iff this qualified name appears as a template argument. */ |
2548 | |
tree
finish_qualified_id_expr (tree qualifying_class,
			  tree expr,
			  bool done,
			  bool address_p,
			  bool template_p,
			  bool template_arg_p,
			  tsubst_flags_t complain)
{
  gcc_assert (TYPE_P (qualifying_class));

  if (error_operand_p (t: expr))
    return error_mark_node;

  if (DECL_P (expr)
      /* Functions are marked after overload resolution; avoid redundant
	 warnings.  */
      && TREE_CODE (expr) != FUNCTION_DECL
      && !mark_used (expr, complain))
    return error_mark_node;

  if (template_p)
    {
      if (TREE_CODE (expr) == UNBOUND_CLASS_TEMPLATE)
	{
	  /* cp_parser_lookup_name thought we were looking for a type,
	     but we're actually looking for a declaration.  */
	  qualifying_class = TYPE_CONTEXT (expr);
	  expr = TYPE_IDENTIFIER (expr);
	}
      else
	/* Diagnose a `template' keyword that is not needed/allowed.  */
	check_template_keyword (expr);
    }

  /* If EXPR occurs as the operand of '&', use special handling that
     permits a pointer-to-member.  */
  if (address_p && done
      && TREE_CODE (qualifying_class) != ENUMERAL_TYPE)
    {
      if (TREE_CODE (expr) == SCOPE_REF)
	expr = TREE_OPERAND (expr, 1);
      expr = build_offset_ref (qualifying_class, expr,
			       /*address_p=*/true, complain);
      return expr;
    }

  /* No need to check access within an enum.  */
  if (TREE_CODE (qualifying_class) == ENUMERAL_TYPE
      && TREE_CODE (expr) != IDENTIFIER_NODE)
    return expr;

  /* Within the scope of a class, turn references to non-static
     members into expression of the form "this->...".  */
  if (template_arg_p)
    /* But, within a template argument, we do not want make the
       transformation, as there is no "this" pointer.  */
    ;
  else if (TREE_CODE (expr) == FIELD_DECL)
    {
      /* Access was already checked when the qualified-id was looked up,
	 so don't check it again here.  */
      push_deferring_access_checks (deferring: dk_no_check);
      expr = finish_non_static_data_member (decl: expr, NULL_TREE,
					    qualifying_scope: qualifying_class, complain);
      pop_deferring_access_checks ();
    }
  else if (BASELINK_P (expr))
    {
      /* See if any of the functions are non-static members.  */
      /* If so, the expression may be relative to 'this'.  */
      if (!shared_member_p (expr)
	  && current_class_ptr
	  && DERIVED_FROM_P (qualifying_class,
			     current_nonlambda_class_type ()))
	expr = (build_class_member_access_expr
		(maybe_dummy_object (qualifying_class, NULL),
		 expr,
		 BASELINK_ACCESS_BINFO (expr),
		 /*preserve_reference=*/false,
		 complain));
      else if (done)
	/* The expression is a qualified name whose address is not
	   being taken.  */
	expr = build_offset_ref (qualifying_class, expr, /*address_p=*/false,
				 complain);
    }
  else if (!template_p
	   && TREE_CODE (expr) == TEMPLATE_DECL
	   && !DECL_FUNCTION_TEMPLATE_P (expr))
    {
      if (complain & tf_error)
	error ("%qE missing template arguments" , expr);
      return error_mark_node;
    }
  else
    {
      /* In a template, return a SCOPE_REF for most qualified-ids
	 so that we can check access at instantiation time.  But if
	 we're looking at a member of the current instantiation, we
	 know we have access and building up the SCOPE_REF confuses
	 non-type template argument handling.  */
      if (processing_template_decl
	  && (!currently_open_class (qualifying_class)
	      || TREE_CODE (expr) == IDENTIFIER_NODE
	      || TREE_CODE (expr) == TEMPLATE_ID_EXPR
	      || TREE_CODE (expr) == BIT_NOT_EXPR))
	expr = build_qualified_name (TREE_TYPE (expr),
				     qualifying_class, expr,
				     template_p);
      else if (tree wrap = maybe_get_tls_wrapper_call (expr))
	expr = wrap;

      expr = convert_from_reference (expr);
    }

  return expr;
}
2664 | |
2665 | /* Begin a statement-expression. The value returned must be passed to |
2666 | finish_stmt_expr. */ |
2667 | |
tree
begin_stmt_expr (void)
{
  /* Open a new statement list; all statements added until the matching
     finish_stmt_expr call are collected into it.  The returned list is
     the token that must be handed back to finish_stmt_expr.  */
  return push_stmt_list ();
}
2673 | |
2674 | /* Process the final expression of a statement expression. EXPR can be |
2675 | NULL, if the final expression is empty. Return a STATEMENT_LIST |
2676 | containing all the statements in the statement-expression, or |
2677 | ERROR_MARK_NODE if there was an error. */ |
2678 | |
tree
finish_stmt_expr_expr (tree expr, tree stmt_expr)
{
  if (error_operand_p (t: expr))
    {
      /* The type of the statement-expression is the type of the last
	 expression.  */
      TREE_TYPE (stmt_expr) = error_mark_node;
      return error_mark_node;
    }

  /* If the last statement does not have "void" type, then the value
     of the last statement is the value of the entire expression.  */
  if (expr)
    {
      tree type = TREE_TYPE (expr);

      /* E.g. the last expression names an overload set whose target
	 type cannot be determined here; reject it.  */
      if (type && type_unknown_p (expr: type))
	{
	  error ("a statement expression is an insufficient context"
		 " for overload resolution" );
	  TREE_TYPE (stmt_expr) = error_mark_node;
	  return error_mark_node;
	}
      else if (processing_template_decl)
	{
	  expr = build_stmt (input_location, EXPR_STMT, expr);
	  expr = add_stmt (t: expr);
	  /* Mark the last statement so that we can recognize it as such at
	     template-instantiation time.  */
	  EXPR_STMT_STMT_EXPR_RESULT (expr) = 1;
	}
      else if (VOID_TYPE_P (type))
	{
	  /* Just treat this like an ordinary statement.  */
	  expr = finish_expr_stmt (expr);
	}
      else
	{
	  /* It actually has a value we need to deal with.  First, force it
	     to be an rvalue so that we won't need to build up a copy
	     constructor call later when we try to assign it to something.  */
	  expr = force_rvalue (expr, tf_warning_or_error);
	  if (error_operand_p (t: expr))
	    return error_mark_node;

	  /* Update for array-to-pointer decay.  */
	  type = TREE_TYPE (expr);

	  /* This TARGET_EXPR will initialize the outer one added by
	     finish_stmt_expr.  */
	  set_target_expr_eliding (expr);

	  /* Wrap it in a CLEANUP_POINT_EXPR and add it to the list like a
	     normal statement, but don't convert to void or actually add
	     the EXPR_STMT.  */
	  if (TREE_CODE (expr) != CLEANUP_POINT_EXPR)
	    expr = maybe_cleanup_point_expr (expr);
	  add_stmt (t: expr);
	}

      /* The type of the statement-expression is the type of the last
	 expression.  Stash it on the STATEMENT_LIST so finish_stmt_expr
	 can pick it up.  */
      TREE_TYPE (stmt_expr) = type;
    }

  return stmt_expr;
}
2747 | |
2748 | /* Finish a statement-expression. EXPR should be the value returned |
2749 | by the previous begin_stmt_expr. Returns an expression |
2750 | representing the statement-expression. */ |
2751 | |
tree
finish_stmt_expr (tree stmt_expr, bool has_no_scope)
{
  tree type;
  tree result;

  if (error_operand_p (t: stmt_expr))
    {
      /* Still pop the statement list so the stmt-list stack stays
	 balanced before bailing out.  */
      pop_stmt_list (stmt_expr);
      return error_mark_node;
    }

  gcc_assert (TREE_CODE (stmt_expr) == STATEMENT_LIST);

  /* The type was stashed on the STATEMENT_LIST by finish_stmt_expr_expr;
     carry it over to whatever pop_stmt_list returns.  */
  type = TREE_TYPE (stmt_expr);
  result = pop_stmt_list (stmt_expr);
  TREE_TYPE (result) = type;

  if (processing_template_decl)
    {
      /* In a template, defer: wrap the body in a STMT_EXPR for
	 instantiation-time processing.  */
      result = build_min (STMT_EXPR, type, result);
      TREE_SIDE_EFFECTS (result) = 1;
      STMT_EXPR_NO_SCOPE (result) = has_no_scope;
    }
  else if (CLASS_TYPE_P (type))
    {
      /* Wrap the statement-expression in a TARGET_EXPR so that the
	 temporary object created by the final expression is destroyed at
	 the end of the full-expression containing the
	 statement-expression.  */
      result = force_target_expr (type, result, tf_warning_or_error);
    }

  return result;
}
2787 | |
2788 | /* Returns the expression which provides the value of STMT_EXPR. */ |
2789 | |
2790 | tree |
2791 | stmt_expr_value_expr (tree stmt_expr) |
2792 | { |
2793 | tree t = STMT_EXPR_STMT (stmt_expr); |
2794 | |
2795 | if (TREE_CODE (t) == BIND_EXPR) |
2796 | t = BIND_EXPR_BODY (t); |
2797 | |
2798 | if (TREE_CODE (t) == STATEMENT_LIST && STATEMENT_LIST_TAIL (t)) |
2799 | t = STATEMENT_LIST_TAIL (t)->stmt; |
2800 | |
2801 | if (TREE_CODE (t) == EXPR_STMT) |
2802 | t = EXPR_STMT_EXPR (t); |
2803 | |
2804 | return t; |
2805 | } |
2806 | |
2807 | /* Return TRUE iff EXPR_STMT is an empty list of |
2808 | expression statements. */ |
2809 | |
2810 | bool |
2811 | empty_expr_stmt_p (tree expr_stmt) |
2812 | { |
2813 | tree body = NULL_TREE; |
2814 | |
2815 | if (expr_stmt == void_node) |
2816 | return true; |
2817 | |
2818 | if (expr_stmt) |
2819 | { |
2820 | if (TREE_CODE (expr_stmt) == EXPR_STMT) |
2821 | body = EXPR_STMT_EXPR (expr_stmt); |
2822 | else if (TREE_CODE (expr_stmt) == STATEMENT_LIST) |
2823 | body = expr_stmt; |
2824 | } |
2825 | |
2826 | if (body) |
2827 | { |
2828 | if (TREE_CODE (body) == STATEMENT_LIST) |
2829 | return tsi_end_p (i: tsi_start (t: body)); |
2830 | else |
2831 | return empty_expr_stmt_p (expr_stmt: body); |
2832 | } |
2833 | return false; |
2834 | } |
2835 | |
2836 | /* Perform Koenig lookup. FN_EXPR is the postfix-expression representing |
2837 | the function (or functions) to call; ARGS are the arguments to the |
2838 | call. Returns the functions to be considered by overload resolution. */ |
2839 | |
cp_expr
perform_koenig_lookup (cp_expr fn_expr, vec<tree, va_gc> *args,
		       tsubst_flags_t complain)
{
  tree identifier = NULL_TREE;
  tree functions = NULL_TREE;
  tree tmpl_args = NULL_TREE;
  bool template_id = false;
  location_t loc = fn_expr.get_location ();
  tree fn = fn_expr.get_value ();

  STRIP_ANY_LOCATION_WRAPPER (fn);

  /* For f<T>(args), strip off the template arguments and remember to
     re-wrap the result at the end.  */
  if (TREE_CODE (fn) == TEMPLATE_ID_EXPR)
    {
      /* Use a separate flag to handle null args.  */
      template_id = true;
      tmpl_args = TREE_OPERAND (fn, 1);
      fn = TREE_OPERAND (fn, 0);
    }

  /* Find the name of the overloaded function.  */
  if (identifier_p (t: fn))
    identifier = fn;
  else
    {
      functions = fn;
      identifier = OVL_NAME (functions);
    }

  /* A call to a namespace-scope function using an unqualified name.

     Do Koenig lookup -- unless any of the arguments are
     type-dependent.  */
  if (!any_type_dependent_arguments_p (args)
      && !any_dependent_template_arguments_p (tmpl_args))
    {
      fn = lookup_arg_dependent (identifier, functions, args);
      if (!fn)
	{
	  /* The unqualified name could not be resolved.  */
	  if (complain & tf_error)
	    fn = unqualified_fn_lookup_error (cp_expr (identifier, loc));
	  else
	    fn = identifier;
	}
    }

  /* Restore the template-id wrapper stripped above.  */
  if (fn && template_id && fn != error_mark_node)
    fn = build2 (TEMPLATE_ID_EXPR, unknown_type_node, fn, tmpl_args);

  return cp_expr (fn, loc);
}
2893 | |
2894 | /* Generate an expression for `FN (ARGS)'. This may change the |
2895 | contents of ARGS. |
2896 | |
2897 | If DISALLOW_VIRTUAL is true, the call to FN will be not generated |
2898 | as a virtual call, even if FN is virtual. (This flag is set when |
2899 | encountering an expression where the function name is explicitly |
2900 | qualified. For example a call to `X::f' never generates a virtual |
2901 | call.) |
2902 | |
2903 | Returns code for the call. */ |
2904 | |
tree
finish_call_expr (tree fn, vec<tree, va_gc> **args, bool disallow_virtual,
		  bool koenig_p, tsubst_flags_t complain)
{
  tree result;
  tree orig_fn;
  vec<tree, va_gc> *orig_args = *args;

  if (fn == error_mark_node)
    return error_mark_node;

  gcc_assert (!TYPE_P (fn));

  /* If FN may be a FUNCTION_DECL obfuscated by force_paren_expr, undo
     it so that we can tell this is a call to a known function.  */
  fn = maybe_undo_parenthesized_ref (t: fn);

  STRIP_ANY_LOCATION_WRAPPER (fn);

  orig_fn = fn;

  if (processing_template_decl)
    {
      /* If FN is a local extern declaration (or set thereof) in a template,
	 look it up again at instantiation time.  */
      if (is_overloaded_fn (fn))
	{
	  tree ifn = get_first_fn (fn);
	  if (TREE_CODE (ifn) == FUNCTION_DECL
	      && dependent_local_decl_p (ifn))
	    orig_fn = DECL_NAME (ifn);
	}

      /* If the call expression is dependent, build a CALL_EXPR node
	 with no type; type_dependent_expression_p recognizes
	 expressions with no type as being dependent.  */
      if (type_dependent_expression_p (fn)
	  || any_type_dependent_arguments_p (*args))
	{
	  result = build_min_nt_call_vec (orig_fn, *args);
	  SET_EXPR_LOCATION (result, cp_expr_loc_or_input_loc (fn));
	  KOENIG_LOOKUP_P (result) = koenig_p;
	  /* Disable the std::move warnings since this call was dependent
	     (c++/89780, c++/107363).  This also suppresses the
	     -Wredundant-move warning.  */
	  suppress_warning (result, OPT_Wpessimizing_move);

	  if (cfun)
	    {
	      /* ABNORMAL becomes false if any candidate is not marked
		 noreturn (TREE_THIS_VOLATILE on a FUNCTION_DECL).  */
	      bool abnormal = true;
	      for (lkp_iterator iter (maybe_get_fns (fn)); iter; ++iter)
		{
		  tree fndecl = STRIP_TEMPLATE (*iter);
		  if (TREE_CODE (fndecl) != FUNCTION_DECL
		      || !TREE_THIS_VOLATILE (fndecl))
		    {
		      abnormal = false;
		      break;
		    }
		}
	      /* FIXME: Stop warning about falling off end of non-void
		 function.   But this is wrong.  Even if we only see
		 no-return fns at this point, we could select a
		 future-defined return fn during instantiation.  Or
		 vice-versa.  */
	      if (abnormal)
		current_function_returns_abnormally = 1;
	    }
	  if (TREE_CODE (fn) == COMPONENT_REF)
	    maybe_generic_this_capture (TREE_OPERAND (fn, 0),
					TREE_OPERAND (fn, 1));
	  return result;
	}
      /* Non-dependent call in a template: keep a copy of the argument
	 list so we can rebuild the call with the original operands.  */
      orig_args = make_tree_vector_copy (*args);
    }

  /* A member function call of the form obj.fn (...).  */
  if (TREE_CODE (fn) == COMPONENT_REF)
    {
      tree member = TREE_OPERAND (fn, 1);
      if (BASELINK_P (member))
	{
	  tree object = TREE_OPERAND (fn, 0);
	  return build_new_method_call (object, member,
					args, NULL_TREE,
					(disallow_virtual
					 ? LOOKUP_NORMAL | LOOKUP_NONVIRTUAL
					 : LOOKUP_NORMAL),
					/*fn_p=*/NULL,
					complain);
	}
    }

  /* Per 13.3.1.1, '(&f)(...)' is the same as '(f)(...)'.  */
  if (TREE_CODE (fn) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (fn, 0)) == OVERLOAD)
    fn = TREE_OPERAND (fn, 0);

  if (is_overloaded_fn (fn))
    fn = baselink_for_fns (fn);

  result = NULL_TREE;
  if (BASELINK_P (fn))
    {
      tree object;

      /* A call to a member function.  From [over.call.func]:

	   If the keyword this is in scope and refers to the class of
	   that member function, or a derived class thereof, then the
	   function call is transformed into a qualified function call
	   using (*this) as the postfix-expression to the left of the
	   . operator....  [Otherwise] a contrived object of type T
	   becomes the implied object argument.

	 In this situation:

	   struct A { void f(); };
	   struct B : public A {};
	   struct C : public A { void g() { B::f(); }};

	 "the class of that member function" refers to `A'.  But 11.2
	 [class.access.base] says that we need to convert 'this' to B* as
	 part of the access, so we pass 'B' to maybe_dummy_object.  */

      if (DECL_MAYBE_IN_CHARGE_CONSTRUCTOR_P (get_first_fn (fn)))
	{
	  /* A constructor call always uses a dummy object.  (This constructor
	     call which has the form A::A () is actually invalid and we are
	     going to reject it later in build_new_method_call.)  */
	  object = build_dummy_object (BINFO_TYPE (BASELINK_ACCESS_BINFO (fn)));
	}
      else
	object = maybe_dummy_object (BINFO_TYPE (BASELINK_ACCESS_BINFO (fn)),
				     NULL);

      result = build_new_method_call (object, fn, args, NULL_TREE,
				      (disallow_virtual
				       ? LOOKUP_NORMAL|LOOKUP_NONVIRTUAL
				       : LOOKUP_NORMAL),
				      /*fn_p=*/NULL,
				      complain);
    }
  else if (concept_check_p (t: fn))
    {
      /* FN is actually a template-id referring to a concept definition.  */
      tree id = unpack_concept_check (fn);
      tree tmpl = TREE_OPERAND (id, 0);
      tree args = TREE_OPERAND (id, 1);

      if (!function_concept_p (t: tmpl))
	{
	  error_at (EXPR_LOC_OR_LOC (fn, input_location),
		    "cannot call a concept as a function" );
	  return error_mark_node;
	}

      /* Ensure the result is wrapped as a call expression.  */
      result = build_concept_check (tmpl, args, tf_warning_or_error);
    }
  else if (is_overloaded_fn (fn))
    {
      /* If the function is an overloaded builtin, resolve it.  */
      if (TREE_CODE (fn) == FUNCTION_DECL
	  && (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
	      || DECL_BUILT_IN_CLASS (fn) == BUILT_IN_MD))
	result = resolve_overloaded_builtin (input_location, fn, *args);

      if (!result)
	{
	  /* -Wcalloc-transposed-args only applies to functions whose
	     alloc_size attribute names two size arguments.  */
	  tree alloc_size_attr = NULL_TREE;
	  if (warn_calloc_transposed_args
	      && TREE_CODE (fn) == FUNCTION_DECL
	      && (alloc_size_attr
		  = lookup_attribute (attr_name: "alloc_size" ,
				      TYPE_ATTRIBUTES (TREE_TYPE (fn)))))
	    if (TREE_VALUE (alloc_size_attr) == NULL_TREE
		|| TREE_CHAIN (TREE_VALUE (alloc_size_attr)) == NULL_TREE)
	      alloc_size_attr = NULL_TREE;
	  if ((warn_sizeof_pointer_memaccess || alloc_size_attr)
	      && (complain & tf_warning)
	      && !vec_safe_is_empty (v: *args)
	      && !processing_template_decl)
	    {
	      /* Record the locations/operands of any sizeof arguments
		 among the first few call arguments for the warnings
		 below.  */
	      location_t sizeof_arg_loc[6];
	      tree sizeof_arg[6];
	      unsigned int i;
	      for (i = 0; i < (alloc_size_attr ? 6 : 3); i++)
		{
		  tree t;

		  sizeof_arg_loc[i] = UNKNOWN_LOCATION;
		  sizeof_arg[i] = NULL_TREE;
		  if (i >= (*args)->length ())
		    continue;
		  t = (**args)[i];
		  if (TREE_CODE (t) != SIZEOF_EXPR)
		    continue;
		  if (SIZEOF_EXPR_TYPE_P (t))
		    sizeof_arg[i] = TREE_TYPE (TREE_OPERAND (t, 0));
		  else
		    sizeof_arg[i] = TREE_OPERAND (t, 0);
		  sizeof_arg_loc[i] = EXPR_LOCATION (t);
		}
	      if (warn_sizeof_pointer_memaccess)
		{
		  auto same_p = same_type_ignoring_top_level_qualifiers_p;
		  sizeof_pointer_memaccess_warning (sizeof_arg_loc, fn, *args,
						    sizeof_arg, same_p);
		}
	      if (alloc_size_attr)
		warn_for_calloc (sizeof_arg_loc, fn, *args, sizeof_arg,
				 alloc_size_attr);
	    }

	  /* Warn about suspicious memset calls, e.g. swapped fill/size
	     arguments.  */
	  if ((complain & tf_warning)
	      && TREE_CODE (fn) == FUNCTION_DECL
	      && fndecl_built_in_p (node: fn, name1: BUILT_IN_MEMSET)
	      && vec_safe_length (v: *args) == 3
	      && !any_type_dependent_arguments_p (*args))
	    {
	      tree arg0 = (*orig_args)[0];
	      tree arg1 = (*orig_args)[1];
	      tree arg2 = (*orig_args)[2];
	      int literal_mask = ((literal_integer_zerop (arg1) << 1)
				  | (literal_integer_zerop (arg2) << 2));
	      warn_for_memset (input_location, arg0, arg2, literal_mask);
	    }

	  /* A call to a namespace-scope function.  */
	  result = build_new_function_call (fn, args, complain);
	}
    }
  else if (TREE_CODE (fn) == PSEUDO_DTOR_EXPR)
    {
      if (!vec_safe_is_empty (v: *args))
	error ("arguments to destructor are not allowed" );
      /* C++20/DR: If the postfix-expression names a pseudo-destructor (in
	 which case the postfix-expression is a possibly-parenthesized class
	 member access), the function call destroys the object of scalar type
	 denoted by the object expression of the class member access.  */
      tree ob = TREE_OPERAND (fn, 0);
      if (obvalue_p (ob))
	result = build_trivial_dtor_call (ob, true);
      else
	/* No location to clobber.  */
	result = convert_to_void (ob, ICV_STATEMENT, complain);
    }
  else if (CLASS_TYPE_P (TREE_TYPE (fn)))
    /* If the "function" is really an object of class type, it might
       have an overloaded `operator ()'.  */
    result = build_op_call (fn, args, complain);

  if (!result)
    /* A call where the function is unknown.  */
    result = cp_build_function_call_vec (fn, args, complain);

  if (processing_template_decl && result != error_mark_node)
    {
      if (INDIRECT_REF_P (result))
	result = TREE_OPERAND (result, 0);

      /* Prune all but the selected function from the original overload
	 set so that we can avoid some duplicate work at instantiation time.  */
      if (TREE_CODE (result) == CALL_EXPR
	  && really_overloaded_fn (orig_fn))
	{
	  tree sel_fn = CALL_EXPR_FN (result);
	  if (TREE_CODE (sel_fn) == COMPONENT_REF)
	    {
	      /* The non-dependent result of build_new_method_call.  */
	      sel_fn = TREE_OPERAND (sel_fn, 1);
	      gcc_assert (BASELINK_P (sel_fn));
	    }
	  else if (TREE_CODE (sel_fn) == ADDR_EXPR)
	    /* Our original callee wasn't wrapped in an ADDR_EXPR,
	       so strip this ADDR_EXPR added by build_over_call.  */
	    sel_fn = TREE_OPERAND (sel_fn, 0);
	  orig_fn = sel_fn;
	}

      /* Rebuild the call with the original (copied) arguments so the
	 template retains the as-written operands.  */
      result = build_call_vec (TREE_TYPE (result), orig_fn, orig_args);
      SET_EXPR_LOCATION (result, input_location);
      KOENIG_LOOKUP_P (result) = koenig_p;
      release_tree_vector (orig_args);
      result = convert_from_reference (result);
    }

  return result;
}
3194 | |
3195 | /* Finish a call to a postfix increment or decrement or EXPR. (Which |
3196 | is indicated by CODE, which should be POSTINCREMENT_EXPR or |
3197 | POSTDECREMENT_EXPR.) */ |
3198 | |
3199 | cp_expr |
3200 | finish_increment_expr (cp_expr expr, enum tree_code code) |
3201 | { |
3202 | /* input_location holds the location of the trailing operator token. |
3203 | Build a location of the form: |
3204 | expr++ |
3205 | ~~~~^~ |
3206 | with the caret at the operator token, ranging from the start |
3207 | of EXPR to the end of the operator token. */ |
3208 | location_t combined_loc = make_location (caret: input_location, |
3209 | start: expr.get_start (), |
3210 | finish: get_finish (loc: input_location)); |
3211 | cp_expr result = build_x_unary_op (combined_loc, code, expr, |
3212 | NULL_TREE, tf_warning_or_error); |
3213 | /* TODO: build_x_unary_op doesn't honor the location, so set it here. */ |
3214 | result.set_location (combined_loc); |
3215 | return result; |
3216 | } |
3217 | |
3218 | /* Finish a use of `this'. Returns an expression for `this'. */ |
3219 | |
tree
finish_this_expr (void)
{
  tree result = NULL_TREE;

  if (current_class_ptr)
    {
      tree type = TREE_TYPE (current_class_ref);

      /* In a lambda expression, 'this' refers to the captured 'this'.  */
      if (LAMBDA_TYPE_P (type))
	result = lambda_expr_this_capture (CLASSTYPE_LAMBDA_EXPR (type), true);
      else
	result = current_class_ptr;
    }

  if (result)
    /* The keyword 'this' is a prvalue expression.  */
    return rvalue (result);

  /* 'this' is not available here; emit the most specific diagnostic
     we can for the current context.  */
  tree fn = current_nonlambda_function ();
  if (fn && DECL_XOBJ_MEMBER_FUNCTION_P (fn))
    {
      auto_diagnostic_group d;
      error ("%<this%> is unavailable for explicit object member "
	     "functions" );
      /* The explicit object parameter is the first parameter.  */
      tree xobj_parm = DECL_ARGUMENTS (fn);
      gcc_assert (xobj_parm);
      tree parm_name = DECL_NAME (xobj_parm);

      static tree remembered_fn = NULL_TREE;
      /* Only output this diagnostic once per function.  */
      if (remembered_fn == fn)
	/* Early escape.  */;
      else if (parm_name)
	inform (DECL_SOURCE_LOCATION (xobj_parm),
		"use explicit object parameter %qs instead" ,
		IDENTIFIER_POINTER (parm_name));
      else
	inform (DECL_SOURCE_LOCATION (xobj_parm),
		"name the explicit object parameter" );

      remembered_fn = fn;
    }
  else if (fn && DECL_STATIC_FUNCTION_P (fn))
    error ("%<this%> is unavailable for static member functions" );
  else if (fn && processing_contract_condition && DECL_CONSTRUCTOR_P (fn))
    error ("invalid use of %<this%> before it is valid" );
  else if (fn && processing_contract_condition && DECL_DESTRUCTOR_P (fn))
    error ("invalid use of %<this%> after it is valid" );
  else if (fn)
    error ("invalid use of %<this%> in non-member function" );
  else
    error ("invalid use of %<this%> at top level" );
  return error_mark_node;
}
3276 | |
3277 | /* Finish a pseudo-destructor expression. If SCOPE is NULL, the |
3278 | expression was of the form `OBJECT.~DESTRUCTOR' where DESTRUCTOR is |
3279 | the TYPE for the type given. If SCOPE is non-NULL, the expression |
3280 | was of the form `OBJECT.SCOPE::~DESTRUCTOR'. */ |
3281 | |
tree
finish_pseudo_destructor_expr (tree object, tree scope, tree destructor,
			       location_t loc)
{
  if (object == error_mark_node || destructor == error_mark_node)
    return error_mark_node;

  gcc_assert (TYPE_P (destructor));

  if (!processing_template_decl)
    {
      if (scope == error_mark_node)
	{
	  error_at (loc, "invalid qualifying scope in pseudo-destructor name" );
	  return error_mark_node;
	}
      /* ~auto resolves to the type of the object expression.  */
      if (is_auto (destructor))
	destructor = TREE_TYPE (object);
      if (scope && TYPE_P (scope) && !check_dtor_name (scope, destructor))
	{
	  error_at (loc,
		    "qualified type %qT does not match destructor name ~%qT" ,
		    scope, destructor);
	  return error_mark_node;
	}


      /* [expr.pseudo] says both:

	   The type designated by the pseudo-destructor-name shall be
	   the same as the object type.

	 and:

	   The cv-unqualified versions of the object type and of the
	   type designated by the pseudo-destructor-name shall be the
	   same type.

	 We implement the more generous second sentence, since that is
	 what most other compilers do.  */
      if (!same_type_ignoring_top_level_qualifiers_p (TREE_TYPE (object),
						      destructor))
	{
	  error_at (loc, "%qE is not of type %qT" , object, destructor);
	  return error_mark_node;
	}
    }

  /* A dependent pseudo-destructor expression has no type yet;
     type_dependent_expression_p with a NULL_TREE type marks it so.  */
  tree type = (type_dependent_expression_p (object)
	       ? NULL_TREE : void_type_node);

  return build3_loc (loc, code: PSEUDO_DTOR_EXPR, type, arg0: object,
		     arg1: scope, arg2: destructor);
}
3336 | |
3337 | /* Finish an expression of the form CODE EXPR. */ |
3338 | |
cp_expr
finish_unary_op_expr (location_t op_loc, enum tree_code code, cp_expr expr,
		      tsubst_flags_t complain)
{
  /* Build a location of the form:
       ++expr
       ^~~~~~
     with the caret at the operator token, ranging from the start
     of the operator token to the end of EXPR.  */
  location_t combined_loc = make_location (caret: op_loc,
					   start: op_loc, finish: expr.get_finish ());
  cp_expr result = build_x_unary_op (combined_loc, code, expr,
				     NULL_TREE, complain);
  /* TODO: build_x_unary_op doesn't always honor the location.  */
  result.set_location (combined_loc);

  if (result == error_mark_node)
    return result;

  if (!(complain & tf_warning))
    return result;

  /* The rest of this function warns when applying the operator to a
     constant operand overflows (e.g. negating INT_MIN).  */
  tree result_ovl = result;
  tree expr_ovl = expr;

  if (!processing_template_decl)
    expr_ovl = cp_fully_fold (expr_ovl);

  /* Only warn when the operand itself folded to a constant that did
     not already overflow; otherwise the overflow isn't this
     operator's doing.  */
  if (!CONSTANT_CLASS_P (expr_ovl)
      || TREE_OVERFLOW_P (expr_ovl))
    return result;

  if (!processing_template_decl)
    result_ovl = cp_fully_fold (result_ovl);

  if (CONSTANT_CLASS_P (result_ovl) && TREE_OVERFLOW_P (result_ovl))
    overflow_warning (combined_loc, result_ovl);

  return result;
}
3379 | |
3380 | /* Return true if CONSTRUCTOR EXPR after pack expansion could have no |
3381 | elements. */ |
3382 | |
3383 | static bool |
3384 | maybe_zero_constructor_nelts (tree expr) |
3385 | { |
3386 | if (CONSTRUCTOR_NELTS (expr) == 0) |
3387 | return true; |
3388 | if (!processing_template_decl) |
3389 | return false; |
3390 | for (constructor_elt &elt : CONSTRUCTOR_ELTS (expr)) |
3391 | if (!PACK_EXPANSION_P (elt.value)) |
3392 | return false; |
3393 | return true; |
3394 | } |
3395 | |
3396 | /* Finish a compound-literal expression or C++11 functional cast with aggregate |
3397 | initializer. TYPE is the type to which the CONSTRUCTOR in COMPOUND_LITERAL |
3398 | is being cast. */ |
3399 | |
3400 | tree |
3401 | finish_compound_literal (tree type, tree compound_literal, |
3402 | tsubst_flags_t complain, |
3403 | fcl_t fcl_context) |
3404 | { |
3405 | if (type == error_mark_node) |
3406 | return error_mark_node; |
3407 | |
3408 | if (TYPE_REF_P (type)) |
3409 | { |
3410 | compound_literal |
3411 | = finish_compound_literal (TREE_TYPE (type), compound_literal, |
3412 | complain, fcl_context); |
3413 | /* The prvalue is then used to direct-initialize the reference. */ |
3414 | tree r = (perform_implicit_conversion_flags |
3415 | (type, compound_literal, complain, LOOKUP_NORMAL)); |
3416 | return convert_from_reference (r); |
3417 | } |
3418 | |
3419 | if (!TYPE_OBJ_P (type)) |
3420 | { |
3421 | /* DR2351 */ |
3422 | if (VOID_TYPE_P (type) && CONSTRUCTOR_NELTS (compound_literal) == 0) |
3423 | { |
3424 | if (!processing_template_decl) |
3425 | return void_node; |
3426 | TREE_TYPE (compound_literal) = type; |
3427 | TREE_HAS_CONSTRUCTOR (compound_literal) = 1; |
3428 | CONSTRUCTOR_IS_DEPENDENT (compound_literal) = 0; |
3429 | return compound_literal; |
3430 | } |
3431 | else if (VOID_TYPE_P (type) |
3432 | && processing_template_decl |
3433 | && maybe_zero_constructor_nelts (expr: compound_literal)) |
3434 | /* If there are only packs in compound_literal, it could |
3435 | be void{} after pack expansion. */; |
3436 | else |
3437 | { |
3438 | if (complain & tf_error) |
3439 | error ("compound literal of non-object type %qT" , type); |
3440 | return error_mark_node; |
3441 | } |
3442 | } |
3443 | |
3444 | if (template_placeholder_p (type)) |
3445 | { |
3446 | type = do_auto_deduction (type, compound_literal, type, complain, |
3447 | adc_variable_type); |
3448 | if (type == error_mark_node) |
3449 | return error_mark_node; |
3450 | } |
3451 | /* C++23 auto{x}. */ |
3452 | else if (is_auto (type) |
3453 | && !AUTO_IS_DECLTYPE (type) |
3454 | && CONSTRUCTOR_NELTS (compound_literal) == 1) |
3455 | { |
3456 | if (is_constrained_auto (t: type)) |
3457 | { |
3458 | if (complain & tf_error) |
3459 | error ("%<auto{x}%> cannot be constrained" ); |
3460 | return error_mark_node; |
3461 | } |
3462 | else if (cxx_dialect < cxx23) |
3463 | pedwarn (input_location, OPT_Wc__23_extensions, |
3464 | "%<auto{x}%> only available with " |
3465 | "%<-std=c++2b%> or %<-std=gnu++2b%>" ); |
3466 | type = do_auto_deduction (type, compound_literal, type, complain, |
3467 | adc_variable_type); |
3468 | if (type == error_mark_node) |
3469 | return error_mark_node; |
3470 | } |
3471 | |
3472 | /* Used to hold a copy of the compound literal in a template. */ |
3473 | tree orig_cl = NULL_TREE; |
3474 | |
3475 | if (processing_template_decl) |
3476 | { |
3477 | const bool dependent_p |
3478 | = (instantiation_dependent_expression_p (compound_literal) |
3479 | || dependent_type_p (type)); |
3480 | if (dependent_p) |
3481 | /* We're about to return, no need to copy. */ |
3482 | orig_cl = compound_literal; |
3483 | else |
3484 | /* We're going to need a copy. */ |
3485 | orig_cl = unshare_constructor (compound_literal); |
3486 | TREE_TYPE (orig_cl) = type; |
3487 | /* Mark the expression as a compound literal. */ |
3488 | TREE_HAS_CONSTRUCTOR (orig_cl) = 1; |
3489 | /* And as instantiation-dependent. */ |
3490 | CONSTRUCTOR_IS_DEPENDENT (orig_cl) = dependent_p; |
3491 | if (fcl_context == fcl_c99) |
3492 | CONSTRUCTOR_C99_COMPOUND_LITERAL (orig_cl) = 1; |
3493 | /* If the compound literal is dependent, we're done for now. */ |
3494 | if (dependent_p) |
3495 | return orig_cl; |
3496 | /* Otherwise, do go on to e.g. check narrowing. */ |
3497 | } |
3498 | |
3499 | type = complete_type (type); |
3500 | |
3501 | if (TYPE_NON_AGGREGATE_CLASS (type)) |
3502 | { |
3503 | /* Trying to deal with a CONSTRUCTOR instead of a TREE_LIST |
3504 | everywhere that deals with function arguments would be a pain, so |
3505 | just wrap it in a TREE_LIST. The parser set a flag so we know |
3506 | that it came from T{} rather than T({}). */ |
3507 | CONSTRUCTOR_IS_DIRECT_INIT (compound_literal) = 1; |
3508 | compound_literal = build_tree_list (NULL_TREE, compound_literal); |
3509 | return build_functional_cast (input_location, type, |
3510 | compound_literal, complain); |
3511 | } |
3512 | |
3513 | if (TREE_CODE (type) == ARRAY_TYPE |
3514 | && check_array_initializer (NULL_TREE, type, compound_literal)) |
3515 | return error_mark_node; |
3516 | compound_literal = reshape_init (type, compound_literal, complain); |
3517 | if (SCALAR_TYPE_P (type) |
3518 | && !BRACE_ENCLOSED_INITIALIZER_P (compound_literal) |
3519 | && !check_narrowing (type, compound_literal, complain)) |
3520 | return error_mark_node; |
3521 | if (TREE_CODE (type) == ARRAY_TYPE |
3522 | && TYPE_DOMAIN (type) == NULL_TREE) |
3523 | { |
3524 | cp_complete_array_type_or_error (&type, compound_literal, |
3525 | false, complain); |
3526 | if (type == error_mark_node) |
3527 | return error_mark_node; |
3528 | } |
3529 | compound_literal = digest_init_flags (type, compound_literal, |
3530 | LOOKUP_NORMAL | LOOKUP_NO_NARROWING, |
3531 | complain); |
3532 | if (compound_literal == error_mark_node) |
3533 | return error_mark_node; |
3534 | |
3535 | /* If we're in a template, return the original compound literal. */ |
3536 | if (orig_cl) |
3537 | return orig_cl; |
3538 | |
3539 | if (TREE_CODE (compound_literal) == CONSTRUCTOR) |
3540 | { |
3541 | TREE_HAS_CONSTRUCTOR (compound_literal) = true; |
3542 | if (fcl_context == fcl_c99) |
3543 | CONSTRUCTOR_C99_COMPOUND_LITERAL (compound_literal) = 1; |
3544 | } |
3545 | |
3546 | /* Put static/constant array temporaries in static variables. */ |
3547 | /* FIXME all C99 compound literals should be variables rather than C++ |
3548 | temporaries, unless they are used as an aggregate initializer. */ |
3549 | if ((!at_function_scope_p () || CP_TYPE_CONST_P (type)) |
3550 | && fcl_context == fcl_c99 |
3551 | && TREE_CODE (type) == ARRAY_TYPE |
3552 | && !TYPE_HAS_NONTRIVIAL_DESTRUCTOR (type) |
3553 | && initializer_constant_valid_p (compound_literal, type)) |
3554 | { |
3555 | tree decl = create_temporary_var (type); |
3556 | DECL_CONTEXT (decl) = NULL_TREE; |
3557 | DECL_INITIAL (decl) = compound_literal; |
3558 | TREE_STATIC (decl) = 1; |
3559 | if (literal_type_p (type) && CP_TYPE_CONST_NON_VOLATILE_P (type)) |
3560 | { |
3561 | /* 5.19 says that a constant expression can include an |
3562 | lvalue-rvalue conversion applied to "a glvalue of literal type |
3563 | that refers to a non-volatile temporary object initialized |
3564 | with a constant expression". Rather than try to communicate |
3565 | that this VAR_DECL is a temporary, just mark it constexpr. */ |
3566 | DECL_DECLARED_CONSTEXPR_P (decl) = true; |
3567 | DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (decl) = true; |
3568 | TREE_CONSTANT (decl) = true; |
3569 | } |
3570 | cp_apply_type_quals_to_decl (cp_type_quals (type), decl); |
3571 | decl = pushdecl_top_level (decl); |
3572 | DECL_NAME (decl) = make_anon_name (); |
3573 | SET_DECL_ASSEMBLER_NAME (decl, DECL_NAME (decl)); |
3574 | /* Make sure the destructor is callable. */ |
3575 | tree clean = cxx_maybe_build_cleanup (decl, complain); |
3576 | if (clean == error_mark_node) |
3577 | return error_mark_node; |
3578 | return decl; |
3579 | } |
3580 | |
3581 | /* Represent other compound literals with TARGET_EXPR so we produce |
3582 | a prvalue, and can elide copies. */ |
3583 | if (!VECTOR_TYPE_P (type) |
3584 | && (TREE_CODE (compound_literal) == CONSTRUCTOR |
3585 | || TREE_CODE (compound_literal) == VEC_INIT_EXPR)) |
3586 | { |
3587 | /* The CONSTRUCTOR is now an initializer, not a compound literal. */ |
3588 | if (TREE_CODE (compound_literal) == CONSTRUCTOR) |
3589 | TREE_HAS_CONSTRUCTOR (compound_literal) = false; |
3590 | compound_literal = get_target_expr (compound_literal, complain); |
3591 | } |
3592 | else |
3593 | /* For e.g. int{42} just make sure it's a prvalue. */ |
3594 | compound_literal = rvalue (compound_literal); |
3595 | |
3596 | return compound_literal; |
3597 | } |
3598 | |
3599 | /* Return the declaration for the function-name variable indicated by |
3600 | ID. */ |
3601 | |
3602 | tree |
3603 | finish_fname (tree id) |
3604 | { |
3605 | tree decl; |
3606 | |
3607 | decl = fname_decl (input_location, C_RID_CODE (id), id); |
3608 | if (processing_template_decl && current_function_decl |
3609 | && decl != error_mark_node) |
3610 | decl = DECL_NAME (decl); |
3611 | return decl; |
3612 | } |
3613 | |
3614 | /* Finish a translation unit. */ |
3615 | |
3616 | void |
3617 | finish_translation_unit (void) |
3618 | { |
3619 | /* In case there were missing closebraces, |
3620 | get us back to the global binding level. */ |
3621 | pop_everything (); |
3622 | while (current_namespace != global_namespace) |
3623 | pop_namespace (); |
3624 | |
3625 | /* Do file scope __FUNCTION__ et al. */ |
3626 | finish_fname_decls (); |
3627 | |
3628 | if (vec_safe_length (v: scope_chain->omp_declare_target_attribute)) |
3629 | { |
3630 | cp_omp_declare_target_attr |
3631 | a = scope_chain->omp_declare_target_attribute->pop (); |
3632 | if (!errorcount) |
3633 | error ("%qs without corresponding %qs" , |
3634 | a.device_type >= 0 ? "#pragma omp begin declare target" |
3635 | : "#pragma omp declare target" , |
3636 | "#pragma omp end declare target" ); |
3637 | vec_safe_truncate (v: scope_chain->omp_declare_target_attribute, size: 0); |
3638 | } |
3639 | if (vec_safe_length (v: scope_chain->omp_begin_assumes)) |
3640 | { |
3641 | if (!errorcount) |
3642 | error ("%qs without corresponding %qs" , |
3643 | "#pragma omp begin assumes" , "#pragma omp end assumes" ); |
3644 | vec_safe_truncate (v: scope_chain->omp_begin_assumes, size: 0); |
3645 | } |
3646 | } |
3647 | |
3648 | /* Finish a template type parameter, specified as AGGR IDENTIFIER. |
3649 | Returns the parameter. */ |
3650 | |
3651 | tree |
3652 | finish_template_type_parm (tree aggr, tree identifier) |
3653 | { |
3654 | if (aggr != class_type_node) |
3655 | { |
3656 | permerror (input_location, "template type parameters must use the keyword %<class%> or %<typename%>" ); |
3657 | aggr = class_type_node; |
3658 | } |
3659 | |
3660 | return build_tree_list (aggr, identifier); |
3661 | } |
3662 | |
3663 | /* Finish a template template parameter, specified as AGGR IDENTIFIER. |
3664 | Returns the parameter. */ |
3665 | |
3666 | tree |
3667 | finish_template_template_parm (tree aggr, tree identifier) |
3668 | { |
3669 | tree decl = build_decl (input_location, |
3670 | TYPE_DECL, identifier, NULL_TREE); |
3671 | |
3672 | tree tmpl = build_lang_decl (TEMPLATE_DECL, identifier, NULL_TREE); |
3673 | DECL_TEMPLATE_PARMS (tmpl) = current_template_parms; |
3674 | DECL_TEMPLATE_RESULT (tmpl) = decl; |
3675 | DECL_ARTIFICIAL (decl) = 1; |
3676 | |
3677 | /* Associate the constraints with the underlying declaration, |
3678 | not the template. */ |
3679 | tree constr = current_template_constraints (); |
3680 | set_constraints (decl, constr); |
3681 | |
3682 | end_template_decl (); |
3683 | |
3684 | gcc_assert (DECL_TEMPLATE_PARMS (tmpl)); |
3685 | |
3686 | check_default_tmpl_args (decl, DECL_TEMPLATE_PARMS (tmpl), |
3687 | /*is_primary=*/true, /*is_partial=*/false, |
3688 | /*is_friend=*/0); |
3689 | |
3690 | return finish_template_type_parm (aggr, identifier: tmpl); |
3691 | } |
3692 | |
3693 | /* ARGUMENT is the default-argument value for a template template |
3694 | parameter. If ARGUMENT is invalid, issue error messages and return |
3695 | the ERROR_MARK_NODE. Otherwise, ARGUMENT itself is returned. */ |
3696 | |
3697 | tree |
3698 | check_template_template_default_arg (tree argument) |
3699 | { |
3700 | if (TREE_CODE (argument) != TEMPLATE_DECL |
3701 | && TREE_CODE (argument) != TEMPLATE_TEMPLATE_PARM |
3702 | && TREE_CODE (argument) != UNBOUND_CLASS_TEMPLATE) |
3703 | { |
3704 | if (TREE_CODE (argument) == TYPE_DECL) |
3705 | { |
3706 | if (tree t = maybe_get_template_decl_from_type_decl (argument)) |
3707 | if (TREE_CODE (t) == TEMPLATE_DECL) |
3708 | return t; |
3709 | error ("invalid use of type %qT as a default value for a template " |
3710 | "template-parameter" , TREE_TYPE (argument)); |
3711 | } |
3712 | else |
3713 | error ("invalid default argument for a template template parameter" ); |
3714 | return error_mark_node; |
3715 | } |
3716 | |
3717 | return argument; |
3718 | } |
3719 | |
3720 | /* Begin a class definition, as indicated by T. */ |
3721 | |
3722 | tree |
3723 | begin_class_definition (tree t) |
3724 | { |
3725 | if (error_operand_p (t) || error_operand_p (TYPE_MAIN_DECL (t))) |
3726 | return error_mark_node; |
3727 | |
3728 | if (processing_template_parmlist && !LAMBDA_TYPE_P (t)) |
3729 | { |
3730 | error ("definition of %q#T inside template parameter list" , t); |
3731 | return error_mark_node; |
3732 | } |
3733 | |
3734 | /* According to the C++ ABI, decimal classes defined in ISO/IEC TR 24733 |
3735 | are passed the same as decimal scalar types. */ |
3736 | if (TREE_CODE (t) == RECORD_TYPE |
3737 | && !processing_template_decl) |
3738 | { |
3739 | tree ns = TYPE_CONTEXT (t); |
3740 | if (ns && TREE_CODE (ns) == NAMESPACE_DECL |
3741 | && DECL_CONTEXT (ns) == std_node |
3742 | && DECL_NAME (ns) |
3743 | && id_equal (DECL_NAME (ns), str: "decimal" )) |
3744 | { |
3745 | const char *n = TYPE_NAME_STRING (t); |
3746 | if ((strcmp (s1: n, s2: "decimal32" ) == 0) |
3747 | || (strcmp (s1: n, s2: "decimal64" ) == 0) |
3748 | || (strcmp (s1: n, s2: "decimal128" ) == 0)) |
3749 | TYPE_TRANSPARENT_AGGR (t) = 1; |
3750 | } |
3751 | } |
3752 | |
3753 | /* A non-implicit typename comes from code like: |
3754 | |
3755 | template <typename T> struct A { |
3756 | template <typename U> struct A<T>::B ... |
3757 | |
3758 | This is erroneous. */ |
3759 | else if (TREE_CODE (t) == TYPENAME_TYPE) |
3760 | { |
3761 | error ("invalid definition of qualified type %qT" , t); |
3762 | t = error_mark_node; |
3763 | } |
3764 | |
3765 | if (t == error_mark_node || ! MAYBE_CLASS_TYPE_P (t)) |
3766 | { |
3767 | t = make_class_type (RECORD_TYPE); |
3768 | pushtag (make_anon_name (), t); |
3769 | } |
3770 | |
3771 | if (TYPE_BEING_DEFINED (t)) |
3772 | { |
3773 | t = make_class_type (TREE_CODE (t)); |
3774 | pushtag (TYPE_IDENTIFIER (t), t); |
3775 | } |
3776 | |
3777 | if (modules_p ()) |
3778 | { |
3779 | if (!module_may_redeclare (TYPE_NAME (t))) |
3780 | { |
3781 | error ("cannot declare %qD in a different module" , TYPE_NAME (t)); |
3782 | inform (DECL_SOURCE_LOCATION (TYPE_NAME (t)), "declared here" ); |
3783 | return error_mark_node; |
3784 | } |
3785 | set_instantiating_module (TYPE_NAME (t)); |
3786 | set_defining_module (TYPE_NAME (t)); |
3787 | } |
3788 | |
3789 | maybe_process_partial_specialization (t); |
3790 | pushclass (t); |
3791 | TYPE_BEING_DEFINED (t) = 1; |
3792 | class_binding_level->defining_class_p = 1; |
3793 | |
3794 | if (flag_pack_struct) |
3795 | { |
3796 | tree v; |
3797 | TYPE_PACKED (t) = 1; |
3798 | /* Even though the type is being defined for the first time |
3799 | here, there might have been a forward declaration, so there |
3800 | might be cv-qualified variants of T. */ |
3801 | for (v = TYPE_NEXT_VARIANT (t); v; v = TYPE_NEXT_VARIANT (v)) |
3802 | TYPE_PACKED (v) = 1; |
3803 | } |
3804 | /* Reset the interface data, at the earliest possible |
3805 | moment, as it might have been set via a class foo; |
3806 | before. */ |
3807 | if (! TYPE_UNNAMED_P (t)) |
3808 | { |
3809 | struct c_fileinfo *finfo = \ |
3810 | get_fileinfo (LOCATION_FILE (input_location)); |
3811 | CLASSTYPE_INTERFACE_ONLY (t) = finfo->interface_only; |
3812 | SET_CLASSTYPE_INTERFACE_UNKNOWN_X |
3813 | (t, finfo->interface_unknown); |
3814 | } |
3815 | reset_specialization (); |
3816 | |
3817 | /* Make a declaration for this class in its own scope. */ |
3818 | build_self_reference (); |
3819 | |
3820 | return t; |
3821 | } |
3822 | |
3823 | /* Finish the member declaration given by DECL. */ |
3824 | |
void
finish_member_declaration (tree decl)
{
  if (decl == error_mark_node || decl == NULL_TREE)
    return;

  if (decl == void_type_node)
    /* The COMPONENT was a friend, not a member, and so there's
       nothing for us to do.  */
    return;

  /* We should see only one DECL at a time.  */
  gcc_assert (DECL_CHAIN (decl) == NULL_TREE);

  /* Don't add decls after definition.  */
  gcc_assert (TYPE_BEING_DEFINED (current_class_type)
              /* We can add lambda types when late parsing default
                 arguments.  */
              || LAMBDA_TYPE_P (TREE_TYPE (decl)));

  /* Set up access control for DECL based on the access specifier in
     effect at this point in the class body.  */
  TREE_PRIVATE (decl)
    = (current_access_specifier == access_private_node);
  TREE_PROTECTED (decl)
    = (current_access_specifier == access_protected_node);
  /* For a member template, mirror the access bits onto the underlying
     declaration as well.  */
  if (TREE_CODE (decl) == TEMPLATE_DECL)
    {
      TREE_PRIVATE (DECL_TEMPLATE_RESULT (decl)) = TREE_PRIVATE (decl);
      TREE_PROTECTED (DECL_TEMPLATE_RESULT (decl)) = TREE_PROTECTED (decl);
    }

  /* Mark the DECL as a member of the current class, unless it's
     a member of an enumeration.  */
  if (TREE_CODE (decl) != CONST_DECL)
    DECL_CONTEXT (decl) = current_class_type;

  /* Remember the single FIELD_DECL an anonymous aggregate type is used for.  */
  if (TREE_CODE (decl) == FIELD_DECL
      && ANON_AGGR_TYPE_P (TREE_TYPE (decl)))
    {
      gcc_assert (!ANON_AGGR_TYPE_FIELD (TYPE_MAIN_VARIANT (TREE_TYPE (decl))));
      ANON_AGGR_TYPE_FIELD (TYPE_MAIN_VARIANT (TREE_TYPE (decl))) = decl;
    }

  if (TREE_CODE (decl) == USING_DECL)
    /* Avoid debug info for class-scope USING_DECLS for now, we'll
       call cp_emit_debug_info_for_using later.  */
    DECL_IGNORED_P (decl) = 1;

  /* Check for bare parameter packs in the non-static data member
     declaration.  On error, degrade the type/attributes rather than
     leaving the pack in place.  */
  if (TREE_CODE (decl) == FIELD_DECL)
    {
      if (check_for_bare_parameter_packs (TREE_TYPE (decl)))
        TREE_TYPE (decl) = error_mark_node;
      if (check_for_bare_parameter_packs (DECL_ATTRIBUTES (decl)))
        DECL_ATTRIBUTES (decl) = NULL_TREE;
    }

  /* [dcl.link]

     A C language linkage is ignored for the names of class members
     and the member function type of class member functions.  */
  if (DECL_LANG_SPECIFIC (decl))
    SET_DECL_LANGUAGE (decl, lang_cplusplus);

  /* Whether DECL was successfully entered into the class and should be
     chained onto TYPE_FIELDS below.  */
  bool add = false;

  /* Functions and non-functions are added differently.  */
  if (DECL_DECLARES_FUNCTION_P (decl))
    add = add_method (current_class_type, decl, false);
  /* Enter the DECL into the scope of the class, if the class
     isn't a closure (whose fields are supposed to be unnamed).  */
  else if (CLASSTYPE_LAMBDA_EXPR (current_class_type)
           || maybe_push_used_methods (decl)
           || pushdecl_class_level (decl))
    add = true;

  if (add)
    {
      /* All TYPE_DECLs go at the end of TYPE_FIELDS.  Ordinary fields
         go at the beginning.  The reason is that
         legacy_nonfn_member_lookup searches the list in order, and we
         want a field name to override a type name so that the "struct
         stat hack" will work.  In particular:

           struct S { enum E { }; static const int E = 5; int ary[S::E]; } s;

         is valid.  */

      if (TREE_CODE (decl) == TYPE_DECL)
        TYPE_FIELDS (current_class_type)
          = chainon (TYPE_FIELDS (current_class_type), decl);
      else
        {
          DECL_CHAIN (decl) = TYPE_FIELDS (current_class_type);
          TYPE_FIELDS (current_class_type) = decl;
        }

      maybe_add_class_template_decl_list (current_class_type, decl,
                                          /*friend_p=*/0);
    }
}
3928 | |
3929 | /* Finish processing a complete template declaration. The PARMS are |
3930 | the template parameters. */ |
3931 | |
3932 | void |
3933 | finish_template_decl (tree parms) |
3934 | { |
3935 | if (parms) |
3936 | end_template_decl (); |
3937 | else |
3938 | end_specialization (); |
3939 | } |
3940 | |
// Returns the template type of the class scope being entered.  If we're
// entering a constrained class scope, TYPE is the class template
// scope being entered and we may need to match the intended type with
// a constrained specialization.  For example:
3945 | // |
3946 | // template<Object T> |
3947 | // struct S { void f(); }; #1 |
3948 | // |
3949 | // template<Object T> |
3950 | // void S<T>::f() { } #2 |
3951 | // |
3952 | // We check, in #2, that S<T> refers precisely to the type declared by |
3953 | // #1 (i.e., that the constraints match). Note that the following should |
3954 | // be an error since there is no specialization of S<T> that is |
3955 | // unconstrained, but this is not diagnosed here. |
3956 | // |
3957 | // template<typename T> |
3958 | // void S<T>::f() { } |
3959 | // |
3960 | // We cannot diagnose this problem here since this function also matches |
3961 | // qualified template names that are not part of a definition. For example: |
3962 | // |
3963 | // template<Integral T, Floating_point U> |
3964 | // typename pair<T, U>::first_type void f(T, U); |
3965 | // |
3966 | // Here, it is unlikely that there is a partial specialization of |
3967 | // pair constrained for Integral and Floating_point arguments. |
3968 | // |
3969 | // The general rule is: if a constrained specialization with matching |
3970 | // constraints is found return that type. Also note that if TYPE is not a |
3971 | // class-type (e.g. a typename type), then no fixup is needed. |
3972 | |
3973 | static tree |
3974 | fixup_template_type (tree type) |
3975 | { |
3976 | // Find the template parameter list at the a depth appropriate to |
3977 | // the scope we're trying to enter. |
3978 | tree parms = current_template_parms; |
3979 | int depth = template_class_depth (type); |
3980 | for (int n = current_template_depth; n > depth && parms; --n) |
3981 | parms = TREE_CHAIN (parms); |
3982 | if (!parms) |
3983 | return type; |
3984 | tree cur_reqs = TEMPLATE_PARMS_CONSTRAINTS (parms); |
3985 | tree cur_constr = build_constraints (cur_reqs, NULL_TREE); |
3986 | |
3987 | // Search for a specialization whose type and constraints match. |
3988 | tree tmpl = CLASSTYPE_TI_TEMPLATE (type); |
3989 | tree specs = DECL_TEMPLATE_SPECIALIZATIONS (tmpl); |
3990 | while (specs) |
3991 | { |
3992 | tree spec_constr = get_constraints (TREE_VALUE (specs)); |
3993 | |
3994 | // If the type and constraints match a specialization, then we |
3995 | // are entering that type. |
3996 | if (same_type_p (type, TREE_TYPE (specs)) |
3997 | && equivalent_constraints (cur_constr, spec_constr)) |
3998 | return TREE_TYPE (specs); |
3999 | specs = TREE_CHAIN (specs); |
4000 | } |
4001 | |
4002 | // If no specialization matches, then must return the type |
4003 | // previously found. |
4004 | return type; |
4005 | } |
4006 | |
4007 | /* Finish processing a template-id (which names a type) of the form |
4008 | NAME < ARGS >. Return the TYPE_DECL for the type named by the |
4009 | template-id. If ENTERING_SCOPE is nonzero we are about to enter |
4010 | the scope of template-id indicated. */ |
4011 | |
4012 | tree |
4013 | finish_template_type (tree name, tree args, int entering_scope) |
4014 | { |
4015 | tree type; |
4016 | |
4017 | type = lookup_template_class (name, args, |
4018 | NULL_TREE, NULL_TREE, entering_scope, |
4019 | tf_warning_or_error | tf_user); |
4020 | |
4021 | /* If we might be entering the scope of a partial specialization, |
4022 | find the one with the right constraints. */ |
4023 | if (flag_concepts |
4024 | && entering_scope |
4025 | && CLASS_TYPE_P (type) |
4026 | && CLASSTYPE_TEMPLATE_INFO (type) |
4027 | && dependent_type_p (type) |
4028 | && PRIMARY_TEMPLATE_P (CLASSTYPE_TI_TEMPLATE (type))) |
4029 | type = fixup_template_type (type); |
4030 | |
4031 | if (type == error_mark_node) |
4032 | return type; |
4033 | else if (CLASS_TYPE_P (type) && !alias_type_or_template_p (type)) |
4034 | return TYPE_STUB_DECL (type); |
4035 | else |
4036 | return TYPE_NAME (type); |
4037 | } |
4038 | |
4039 | /* Finish processing a BASE_CLASS with the indicated ACCESS_SPECIFIER. |
4040 | Return a TREE_LIST containing the ACCESS_SPECIFIER and the |
4041 | BASE_CLASS, or NULL_TREE if an error occurred. The |
4042 | ACCESS_SPECIFIER is one of |
4043 | access_{default,public,protected_private}_node. For a virtual base |
4044 | we set TREE_TYPE. */ |
4045 | |
4046 | tree |
4047 | finish_base_specifier (tree base, tree access, bool virtual_p) |
4048 | { |
4049 | tree result; |
4050 | |
4051 | if (base == error_mark_node) |
4052 | { |
4053 | error ("invalid base-class specification" ); |
4054 | result = NULL_TREE; |
4055 | } |
4056 | else if (! MAYBE_CLASS_TYPE_P (base)) |
4057 | { |
4058 | error ("%qT is not a class type" , base); |
4059 | result = NULL_TREE; |
4060 | } |
4061 | else |
4062 | { |
4063 | if (cp_type_quals (base) != 0) |
4064 | { |
4065 | /* DR 484: Can a base-specifier name a cv-qualified |
4066 | class type? */ |
4067 | base = TYPE_MAIN_VARIANT (base); |
4068 | } |
4069 | result = build_tree_list (access, base); |
4070 | if (virtual_p) |
4071 | TREE_TYPE (result) = integer_type_node; |
4072 | } |
4073 | |
4074 | return result; |
4075 | } |
4076 | |
4077 | /* If FNS is a member function, a set of member functions, or a |
4078 | template-id referring to one or more member functions, return a |
4079 | BASELINK for FNS, incorporating the current access context. |
4080 | Otherwise, return FNS unchanged. */ |
4081 | |
4082 | tree |
4083 | baselink_for_fns (tree fns) |
4084 | { |
4085 | tree scope; |
4086 | tree cl; |
4087 | |
4088 | if (BASELINK_P (fns) |
4089 | || error_operand_p (t: fns)) |
4090 | return fns; |
4091 | |
4092 | scope = ovl_scope (fns); |
4093 | if (!CLASS_TYPE_P (scope)) |
4094 | return fns; |
4095 | |
4096 | cl = currently_open_derived_class (scope); |
4097 | if (!cl) |
4098 | cl = scope; |
4099 | tree access_path = TYPE_BINFO (cl); |
4100 | tree conv_path = (cl == scope ? access_path |
4101 | : lookup_base (cl, scope, ba_any, NULL, tf_none)); |
4102 | return build_baselink (conv_path, access_path, fns, /*optype=*/NULL_TREE); |
4103 | } |
4104 | |
4105 | /* Returns true iff DECL is a variable from a function outside |
4106 | the current one. */ |
4107 | |
4108 | static bool |
4109 | outer_var_p (tree decl) |
4110 | { |
4111 | /* These should have been stripped or otherwise handled by the caller. */ |
4112 | gcc_checking_assert (!REFERENCE_REF_P (decl)); |
4113 | |
4114 | return ((VAR_P (decl) || TREE_CODE (decl) == PARM_DECL) |
4115 | && DECL_FUNCTION_SCOPE_P (decl) |
4116 | /* Don't get confused by temporaries. */ |
4117 | && DECL_NAME (decl) |
4118 | && (DECL_CONTEXT (decl) != current_function_decl |
4119 | || parsing_nsdmi ())); |
4120 | } |
4121 | |
4122 | /* As above, but also checks that DECL is automatic. */ |
4123 | |
4124 | bool |
4125 | outer_automatic_var_p (tree decl) |
4126 | { |
4127 | return (outer_var_p (decl) |
4128 | && !TREE_STATIC (decl)); |
4129 | } |
4130 | |
4131 | /* DECL satisfies outer_automatic_var_p. Possibly complain about it or |
4132 | rewrite it for lambda capture. |
4133 | |
4134 | If ODR_USE is true, we're being called from mark_use, and we complain about |
4135 | use of constant variables. If ODR_USE is false, we're being called for the |
4136 | id-expression, and we do lambda capture. */ |
4137 | |
4138 | tree |
4139 | process_outer_var_ref (tree decl, tsubst_flags_t complain, bool odr_use) |
4140 | { |
4141 | if (cp_unevaluated_operand) |
4142 | { |
4143 | tree type = TREE_TYPE (decl); |
4144 | if (!dependent_type_p (type) |
4145 | && variably_modified_type_p (type, NULL_TREE)) |
4146 | /* VLAs are used even in unevaluated context. */; |
4147 | else |
4148 | /* It's not a use (3.2) if we're in an unevaluated context. */ |
4149 | return decl; |
4150 | } |
4151 | if (decl == error_mark_node) |
4152 | return decl; |
4153 | |
4154 | tree context = DECL_CONTEXT (decl); |
4155 | tree containing_function = current_function_decl; |
4156 | tree lambda_stack = NULL_TREE; |
4157 | tree lambda_expr = NULL_TREE; |
4158 | tree initializer = convert_from_reference (decl); |
4159 | |
4160 | /* Mark it as used now even if the use is ill-formed. */ |
4161 | if (!mark_used (decl, complain)) |
4162 | return error_mark_node; |
4163 | |
4164 | if (parsing_nsdmi ()) |
4165 | containing_function = NULL_TREE; |
4166 | |
4167 | if (containing_function && LAMBDA_FUNCTION_P (containing_function)) |
4168 | { |
4169 | /* Check whether we've already built a proxy. */ |
4170 | tree var = decl; |
4171 | while (is_normal_capture_proxy (var)) |
4172 | var = DECL_CAPTURED_VARIABLE (var); |
4173 | tree d = retrieve_local_specialization (var); |
4174 | |
4175 | if (d && d != decl && is_capture_proxy (d)) |
4176 | { |
4177 | if (DECL_CONTEXT (d) == containing_function) |
4178 | /* We already have an inner proxy. */ |
4179 | return d; |
4180 | else |
4181 | /* We need to capture an outer proxy. */ |
4182 | return process_outer_var_ref (decl: d, complain, odr_use); |
4183 | } |
4184 | } |
4185 | |
4186 | /* If we are in a lambda function, we can move out until we hit |
4187 | 1. the context, |
4188 | 2. a non-lambda function, or |
4189 | 3. a non-default capturing lambda function. */ |
4190 | while (context != containing_function |
4191 | /* containing_function can be null with invalid generic lambdas. */ |
4192 | && containing_function |
4193 | && LAMBDA_FUNCTION_P (containing_function)) |
4194 | { |
4195 | tree closure = DECL_CONTEXT (containing_function); |
4196 | lambda_expr = CLASSTYPE_LAMBDA_EXPR (closure); |
4197 | |
4198 | if (TYPE_CLASS_SCOPE_P (closure)) |
4199 | /* A lambda in an NSDMI (c++/64496). */ |
4200 | break; |
4201 | |
4202 | if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda_expr) == CPLD_NONE) |
4203 | break; |
4204 | |
4205 | lambda_stack = tree_cons (NULL_TREE, lambda_expr, lambda_stack); |
4206 | |
4207 | containing_function = decl_function_context (containing_function); |
4208 | } |
4209 | |
4210 | /* In a lambda within a template, wait until instantiation time to implicitly |
4211 | capture a parameter pack. We want to wait because we don't know if we're |
4212 | capturing the whole pack or a single element, and it's OK to wait because |
4213 | find_parameter_packs_r walks into the lambda body. */ |
4214 | if (context == containing_function |
4215 | && DECL_PACK_P (decl)) |
4216 | return decl; |
4217 | |
4218 | if (lambda_expr && VAR_P (decl) && DECL_ANON_UNION_VAR_P (decl)) |
4219 | { |
4220 | if (complain & tf_error) |
4221 | error ("cannot capture member %qD of anonymous union" , decl); |
4222 | return error_mark_node; |
4223 | } |
4224 | /* Do lambda capture when processing the id-expression, not when |
4225 | odr-using a variable. */ |
4226 | if (!odr_use && context == containing_function) |
4227 | decl = add_default_capture (lambda_stack, |
4228 | /*id=*/DECL_NAME (decl), initializer); |
4229 | /* Only an odr-use of an outer automatic variable causes an |
4230 | error, and a constant variable can decay to a prvalue |
4231 | constant without odr-use. So don't complain yet. */ |
4232 | else if (!odr_use && decl_constant_var_p (decl)) |
4233 | return decl; |
4234 | else if (lambda_expr) |
4235 | { |
4236 | if (complain & tf_error) |
4237 | { |
4238 | error ("%qD is not captured" , decl); |
4239 | tree closure = LAMBDA_EXPR_CLOSURE (lambda_expr); |
4240 | if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda_expr) == CPLD_NONE) |
4241 | inform (location_of (closure), |
4242 | "the lambda has no capture-default" ); |
4243 | else if (TYPE_CLASS_SCOPE_P (closure)) |
4244 | inform (UNKNOWN_LOCATION, "lambda in local class %q+T cannot " |
4245 | "capture variables from the enclosing context" , |
4246 | TYPE_CONTEXT (closure)); |
4247 | inform (DECL_SOURCE_LOCATION (decl), "%q#D declared here" , decl); |
4248 | } |
4249 | return error_mark_node; |
4250 | } |
4251 | else if (processing_contract_condition && (TREE_CODE (decl) == PARM_DECL)) |
4252 | /* Use of a parameter in a contract condition is fine. */ |
4253 | return decl; |
4254 | else |
4255 | { |
4256 | if (complain & tf_error) |
4257 | { |
4258 | error (VAR_P (decl) |
4259 | ? G_("use of local variable with automatic storage from " |
4260 | "containing function" ) |
4261 | : G_("use of parameter from containing function" )); |
4262 | inform (DECL_SOURCE_LOCATION (decl), "%q#D declared here" , decl); |
4263 | } |
4264 | return error_mark_node; |
4265 | } |
4266 | return decl; |
4267 | } |
4268 | |
4269 | /* ID_EXPRESSION is a representation of parsed, but unprocessed, |
4270 | id-expression. (See cp_parser_id_expression for details.) SCOPE, |
4271 | if non-NULL, is the type or namespace used to explicitly qualify |
4272 | ID_EXPRESSION. DECL is the entity to which that name has been |
4273 | resolved. |
4274 | |
4275 | *CONSTANT_EXPRESSION_P is true if we are presently parsing a |
4276 | constant-expression. In that case, *NON_CONSTANT_EXPRESSION_P will |
4277 | be set to true if this expression isn't permitted in a |
4278 | constant-expression, but it is otherwise not set by this function. |
4279 | *ALLOW_NON_CONSTANT_EXPRESSION_P is true if we are parsing a |
4280 | constant-expression, but a non-constant expression is also |
4281 | permissible. |
4282 | |
4283 | DONE is true if this expression is a complete postfix-expression; |
4284 | it is false if this expression is followed by '->', '[', '(', etc. |
4285 | ADDRESS_P is true iff this expression is the operand of '&'. |
4286 | TEMPLATE_P is true iff the qualified-id was of the form |
4287 | "A::template B". TEMPLATE_ARG_P is true iff this qualified name |
4288 | appears as a template argument. |
4289 | |
4290 | If an error occurs, and it is the kind of error that might cause |
4291 | the parser to abort a tentative parse, *ERROR_MSG is filled in. It |
4292 | is the caller's responsibility to issue the message. *ERROR_MSG |
4293 | will be a string with static storage duration, so the caller need |
4294 | not "free" it. |
4295 | |
4296 | Return an expression for the entity, after issuing appropriate |
4297 | diagnostics. This function is also responsible for transforming a |
4298 | reference to a non-static member into a COMPONENT_REF that makes |
4299 | the use of "this" explicit. |
4300 | |
4301 | Upon return, *IDK will be filled in appropriately. */ |
4302 | static cp_expr |
4303 | finish_id_expression_1 (tree id_expression, |
4304 | tree decl, |
4305 | tree scope, |
4306 | cp_id_kind *idk, |
4307 | bool integral_constant_expression_p, |
4308 | bool allow_non_integral_constant_expression_p, |
4309 | bool *non_integral_constant_expression_p, |
4310 | bool template_p, |
4311 | bool done, |
4312 | bool address_p, |
4313 | bool template_arg_p, |
4314 | const char **error_msg, |
4315 | location_t location) |
4316 | { |
4317 | decl = strip_using_decl (decl); |
4318 | |
4319 | /* Initialize the output parameters. */ |
4320 | *idk = CP_ID_KIND_NONE; |
4321 | *error_msg = NULL; |
4322 | |
4323 | if (id_expression == error_mark_node) |
4324 | return error_mark_node; |
4325 | /* If we have a template-id, then no further lookup is |
4326 | required. If the template-id was for a template-class, we |
4327 | will sometimes have a TYPE_DECL at this point. */ |
4328 | else if (TREE_CODE (decl) == TEMPLATE_ID_EXPR |
4329 | || TREE_CODE (decl) == TYPE_DECL) |
4330 | ; |
4331 | /* Look up the name. */ |
4332 | else |
4333 | { |
4334 | if (decl == error_mark_node) |
4335 | { |
4336 | /* Name lookup failed. */ |
4337 | if (scope |
4338 | && (!TYPE_P (scope) |
4339 | || (!dependent_type_p (scope) |
4340 | && !(identifier_p (t: id_expression) |
4341 | && IDENTIFIER_CONV_OP_P (id_expression) |
4342 | && dependent_type_p (TREE_TYPE (id_expression)))))) |
4343 | { |
4344 | /* If the qualifying type is non-dependent (and the name |
4345 | does not name a conversion operator to a dependent |
4346 | type), issue an error. */ |
4347 | qualified_name_lookup_error (scope, id_expression, decl, location); |
4348 | return error_mark_node; |
4349 | } |
4350 | else if (!scope) |
4351 | { |
4352 | /* It may be resolved via Koenig lookup. */ |
4353 | *idk = CP_ID_KIND_UNQUALIFIED; |
4354 | return id_expression; |
4355 | } |
4356 | else |
4357 | decl = id_expression; |
4358 | } |
4359 | |
4360 | /* Remember that the name was used in the definition of |
4361 | the current class so that we can check later to see if |
4362 | the meaning would have been different after the class |
4363 | was entirely defined. */ |
4364 | if (!scope && decl != error_mark_node && identifier_p (t: id_expression)) |
4365 | maybe_note_name_used_in_class (id_expression, decl); |
4366 | |
4367 | /* A use in unevaluated operand might not be instantiated appropriately |
4368 | if tsubst_copy builds a dummy parm, or if we never instantiate a |
4369 | generic lambda, so mark it now. */ |
4370 | if (processing_template_decl && cp_unevaluated_operand) |
4371 | mark_type_use (decl); |
4372 | |
4373 | /* Disallow uses of local variables from containing functions, except |
4374 | within lambda-expressions. */ |
4375 | if (outer_automatic_var_p (decl)) |
4376 | { |
4377 | decl = process_outer_var_ref (decl, complain: tf_warning_or_error); |
4378 | if (decl == error_mark_node) |
4379 | return error_mark_node; |
4380 | } |
4381 | |
4382 | /* Also disallow uses of function parameters outside the function |
4383 | body, except inside an unevaluated context (i.e. decltype). */ |
4384 | if (TREE_CODE (decl) == PARM_DECL |
4385 | && DECL_CONTEXT (decl) == NULL_TREE |
4386 | && !cp_unevaluated_operand |
4387 | && !processing_contract_condition) |
4388 | { |
4389 | *error_msg = G_("use of parameter outside function body" ); |
4390 | return error_mark_node; |
4391 | } |
4392 | } |
4393 | |
4394 | /* If we didn't find anything, or what we found was a type, |
4395 | then this wasn't really an id-expression. */ |
4396 | if (TREE_CODE (decl) == TEMPLATE_DECL |
4397 | && !DECL_FUNCTION_TEMPLATE_P (decl)) |
4398 | { |
4399 | *error_msg = G_("missing template arguments" ); |
4400 | return error_mark_node; |
4401 | } |
4402 | else if (TREE_CODE (decl) == TYPE_DECL |
4403 | || TREE_CODE (decl) == NAMESPACE_DECL) |
4404 | { |
4405 | *error_msg = G_("expected primary-expression" ); |
4406 | return error_mark_node; |
4407 | } |
4408 | |
4409 | /* If the name resolved to a template parameter, there is no |
4410 | need to look it up again later. */ |
4411 | if ((TREE_CODE (decl) == CONST_DECL && DECL_TEMPLATE_PARM_P (decl)) |
4412 | || TREE_CODE (decl) == TEMPLATE_PARM_INDEX) |
4413 | { |
4414 | tree r; |
4415 | |
4416 | *idk = CP_ID_KIND_NONE; |
4417 | if (TREE_CODE (decl) == TEMPLATE_PARM_INDEX) |
4418 | decl = TEMPLATE_PARM_DECL (decl); |
4419 | r = DECL_INITIAL (decl); |
4420 | if (CLASS_TYPE_P (TREE_TYPE (r)) && !CP_TYPE_CONST_P (TREE_TYPE (r))) |
4421 | { |
4422 | /* If the entity is a template parameter object for a template |
4423 | parameter of type T, the type of the expression is const T. */ |
4424 | tree ctype = TREE_TYPE (r); |
4425 | ctype = cp_build_qualified_type (ctype, (cp_type_quals (ctype) |
4426 | | TYPE_QUAL_CONST)); |
4427 | r = build1 (VIEW_CONVERT_EXPR, ctype, r); |
4428 | } |
4429 | r = convert_from_reference (r); |
4430 | if (integral_constant_expression_p |
4431 | && !dependent_type_p (TREE_TYPE (decl)) |
4432 | && !(INTEGRAL_OR_ENUMERATION_TYPE_P (TREE_TYPE (r)))) |
4433 | { |
4434 | if (!allow_non_integral_constant_expression_p) |
4435 | error ("template parameter %qD of type %qT is not allowed in " |
4436 | "an integral constant expression because it is not of " |
4437 | "integral or enumeration type" , decl, TREE_TYPE (decl)); |
4438 | *non_integral_constant_expression_p = true; |
4439 | } |
4440 | return r; |
4441 | } |
4442 | else if (TREE_CODE (decl) == UNBOUND_CLASS_TEMPLATE) |
4443 | { |
4444 | gcc_checking_assert (scope); |
4445 | *idk = CP_ID_KIND_QUALIFIED; |
4446 | cp_warn_deprecated_use_scopes (scope); |
4447 | decl = finish_qualified_id_expr (qualifying_class: scope, expr: decl, done, address_p, |
4448 | template_p, template_arg_p, |
4449 | complain: tf_warning_or_error); |
4450 | } |
4451 | else |
4452 | { |
4453 | if (TREE_CODE (decl) == TEMPLATE_ID_EXPR |
4454 | && variable_template_p (TREE_OPERAND (decl, 0)) |
4455 | && !concept_check_p (t: decl)) |
4456 | /* Try resolving this variable TEMPLATE_ID_EXPR (which is always |
4457 | considered type-dependent) now, so that the dependence test that |
4458 | follows gives us the right answer: if it represents a non-dependent |
4459 | variable template-id then finish_template_variable will yield the |
4460 | corresponding non-dependent VAR_DECL. */ |
4461 | decl = finish_template_variable (decl); |
4462 | |
4463 | bool dependent_p = type_dependent_expression_p (decl); |
4464 | |
4465 | /* If the declaration was explicitly qualified indicate |
4466 | that. The semantics of `A::f(3)' are different than |
4467 | `f(3)' if `f' is virtual. */ |
4468 | *idk = (scope |
4469 | ? CP_ID_KIND_QUALIFIED |
4470 | : (TREE_CODE (decl) == TEMPLATE_ID_EXPR |
4471 | ? CP_ID_KIND_TEMPLATE_ID |
4472 | : (dependent_p |
4473 | ? CP_ID_KIND_UNQUALIFIED_DEPENDENT |
4474 | : CP_ID_KIND_UNQUALIFIED))); |
4475 | |
4476 | if (dependent_p |
4477 | && !scope |
4478 | && DECL_P (decl) |
4479 | && any_dependent_type_attributes_p (DECL_ATTRIBUTES (decl))) |
4480 | /* Dependent type attributes on the decl mean that the TREE_TYPE is |
4481 | wrong, so just return the identifier. */ |
4482 | return id_expression; |
4483 | |
4484 | if (DECL_CLASS_TEMPLATE_P (decl)) |
4485 | { |
4486 | error ("use of class template %qT as expression" , decl); |
4487 | return error_mark_node; |
4488 | } |
4489 | |
4490 | if (TREE_CODE (decl) == TREE_LIST) |
4491 | { |
4492 | /* Ambiguous reference to base members. */ |
4493 | error ("request for member %qD is ambiguous in " |
4494 | "multiple inheritance lattice" , id_expression); |
4495 | print_candidates (decl); |
4496 | return error_mark_node; |
4497 | } |
4498 | |
4499 | /* Mark variable-like entities as used. Functions are similarly |
4500 | marked either below or after overload resolution. */ |
4501 | if ((VAR_P (decl) |
4502 | || TREE_CODE (decl) == PARM_DECL |
4503 | || TREE_CODE (decl) == CONST_DECL |
4504 | || TREE_CODE (decl) == RESULT_DECL) |
4505 | && !mark_used (decl)) |
4506 | return error_mark_node; |
4507 | |
4508 | /* Only certain kinds of names are allowed in constant |
4509 | expression. Template parameters have already |
4510 | been handled above. */ |
4511 | if (! error_operand_p (t: decl) |
4512 | && !dependent_p |
4513 | && integral_constant_expression_p |
4514 | && !decl_constant_var_p (decl) |
4515 | && TREE_CODE (decl) != CONST_DECL |
4516 | && !builtin_valid_in_constant_expr_p (decl) |
4517 | && !concept_check_p (t: decl)) |
4518 | { |
4519 | if (!allow_non_integral_constant_expression_p) |
4520 | { |
4521 | error ("%qD cannot appear in a constant-expression" , decl); |
4522 | return error_mark_node; |
4523 | } |
4524 | *non_integral_constant_expression_p = true; |
4525 | } |
4526 | |
4527 | if (tree wrap = maybe_get_tls_wrapper_call (decl)) |
4528 | /* Replace an evaluated use of the thread_local variable with |
4529 | a call to its wrapper. */ |
4530 | decl = wrap; |
4531 | else if (concept_check_p (t: decl)) |
4532 | { |
	  /* Nothing more to do.  All of the analysis for concept checks
	     is done by build_concept_id, called from the parser.  */
4535 | } |
4536 | else if (scope) |
4537 | { |
4538 | if (TREE_CODE (decl) == SCOPE_REF) |
4539 | { |
4540 | gcc_assert (same_type_p (scope, TREE_OPERAND (decl, 0))); |
4541 | decl = TREE_OPERAND (decl, 1); |
4542 | } |
4543 | |
4544 | decl = (adjust_result_of_qualified_name_lookup |
4545 | (decl, scope, current_nonlambda_class_type())); |
4546 | |
4547 | cp_warn_deprecated_use_scopes (scope); |
4548 | |
4549 | if (TYPE_P (scope)) |
4550 | decl = finish_qualified_id_expr (qualifying_class: scope, |
4551 | expr: decl, |
4552 | done, |
4553 | address_p, |
4554 | template_p, |
4555 | template_arg_p, |
4556 | complain: tf_warning_or_error); |
4557 | else |
4558 | decl = convert_from_reference (decl); |
4559 | } |
4560 | else if (TREE_CODE (decl) == FIELD_DECL) |
4561 | { |
4562 | /* Since SCOPE is NULL here, this is an unqualified name. |
4563 | Access checking has been performed during name lookup |
4564 | already. Turn off checking to avoid duplicate errors. */ |
4565 | push_deferring_access_checks (deferring: dk_no_check); |
4566 | decl = finish_non_static_data_member (decl, NULL_TREE, |
4567 | /*qualifying_scope=*/NULL_TREE); |
4568 | pop_deferring_access_checks (); |
4569 | } |
4570 | else if (is_overloaded_fn (decl)) |
4571 | { |
4572 | /* We only need to look at the first function, |
4573 | because all the fns share the attribute we're |
4574 | concerned with (all member fns or all non-members). */ |
4575 | tree first_fn = get_first_fn (decl); |
4576 | first_fn = STRIP_TEMPLATE (first_fn); |
4577 | |
4578 | if (!template_arg_p |
4579 | && (TREE_CODE (first_fn) == USING_DECL |
4580 | || (TREE_CODE (first_fn) == FUNCTION_DECL |
4581 | && DECL_FUNCTION_MEMBER_P (first_fn) |
4582 | && !shared_member_p (decl)))) |
4583 | { |
4584 | /* A set of member functions. */ |
4585 | decl = maybe_dummy_object (DECL_CONTEXT (first_fn), 0); |
4586 | return finish_class_member_access_expr (decl, id_expression, |
4587 | /*template_p=*/false, |
4588 | tf_warning_or_error); |
4589 | } |
4590 | |
4591 | decl = baselink_for_fns (fns: decl); |
4592 | } |
4593 | else |
4594 | { |
4595 | if (DECL_P (decl) && DECL_NONLOCAL (decl) |
4596 | && DECL_CLASS_SCOPE_P (decl)) |
4597 | { |
4598 | tree context = context_for_name_lookup (decl); |
4599 | if (context != current_class_type) |
4600 | { |
4601 | tree path = currently_open_derived_class (context); |
4602 | if (!path) |
4603 | /* PATH can be null for using an enum of an unrelated |
4604 | class; we checked its access in lookup_using_decl. |
4605 | |
4606 | ??? Should this case make a clone instead, like |
4607 | handle_using_decl? */ |
4608 | gcc_assert (TREE_CODE (decl) == CONST_DECL); |
4609 | else |
4610 | perform_or_defer_access_check (TYPE_BINFO (path), |
4611 | decl, diag_decl: decl, |
4612 | complain: tf_warning_or_error); |
4613 | } |
4614 | } |
4615 | |
4616 | decl = convert_from_reference (decl); |
4617 | } |
4618 | } |
4619 | |
4620 | return cp_expr (decl, location); |
4621 | } |
4622 | |
4623 | /* As per finish_id_expression_1, but adding a wrapper node |
4624 | around the result if needed to express LOCATION. */ |
4625 | |
4626 | cp_expr |
4627 | finish_id_expression (tree id_expression, |
4628 | tree decl, |
4629 | tree scope, |
4630 | cp_id_kind *idk, |
4631 | bool integral_constant_expression_p, |
4632 | bool allow_non_integral_constant_expression_p, |
4633 | bool *non_integral_constant_expression_p, |
4634 | bool template_p, |
4635 | bool done, |
4636 | bool address_p, |
4637 | bool template_arg_p, |
4638 | const char **error_msg, |
4639 | location_t location) |
4640 | { |
4641 | cp_expr result |
4642 | = finish_id_expression_1 (id_expression, decl, scope, idk, |
4643 | integral_constant_expression_p, |
4644 | allow_non_integral_constant_expression_p, |
4645 | non_integral_constant_expression_p, |
4646 | template_p, done, address_p, template_arg_p, |
4647 | error_msg, location); |
4648 | return result.maybe_add_location_wrapper (); |
4649 | } |
4650 | |
/* Implement the __typeof keyword: Return the type of EXPR, suitable for
   use as a type-specifier.  Returns error_mark_node if the type cannot
   be determined.  */

tree
finish_typeof (tree expr)
{
  tree type;

  /* If EXPR is type-dependent we cannot compute its type until
     instantiation: build a TYPEOF_TYPE placeholder that records EXPR
     for later substitution.  */
  if (type_dependent_expression_p (expr))
    {
      type = cxx_make_type (TYPEOF_TYPE);
      TYPEOF_TYPE_EXPR (type) = expr;
      /* TYPEOF_TYPEs can only be compared structurally, not by
	 canonical type pointer.  */
      SET_TYPE_STRUCTURAL_EQUALITY (type);

      return type;
    }

  /* __typeof is an unevaluated context; record only a type use of
     EXPR, not an odr-use.  */
  expr = mark_type_use (expr);

  type = unlowered_expr_type (expr);

  /* E.g. an unresolved overload set has unknown_type_node.  */
  if (!type || type == unknown_type_node)
    {
      error ("type of %qE is unknown", expr);
      return error_mark_node;
    }

  return type;
}
4680 | |
/* Implement the __underlying_type keyword: Return the underlying
   type of TYPE, suitable for use as a type-specifier.  TYPE must be
   a complete enumeration type; otherwise error_mark_node is
   returned.  */

tree
finish_underlying_type (tree type)
{
  /* The enumeration must be complete before its underlying type is
     known.  */
  if (!complete_type_or_else (type, NULL_TREE))
    return error_mark_node;

  if (TREE_CODE (type) != ENUMERAL_TYPE)
    {
      error ("%qT is not an enumeration type", type);
      return error_mark_node;
    }

  tree underlying_type = ENUM_UNDERLYING_TYPE (type);

  /* Fixup necessary in this case because ENUM_UNDERLYING_TYPE
     includes TYPE_MIN_VALUE and TYPE_MAX_VALUE information.
     See finish_enum_value_list for details.  */
  if (!ENUM_FIXED_UNDERLYING_TYPE_P (type))
    underlying_type
      = c_common_type_for_mode (TYPE_MODE (underlying_type),
				TYPE_UNSIGNED (underlying_type));

  return underlying_type;
}
4708 | |
/* Implement the __type_pack_element keyword: Return the type
   at index IDX within TYPES (a TREE_VEC).  IDX must be a
   non-negative integral constant less than the length of TYPES;
   otherwise, diagnose per COMPLAIN and return error_mark_node.  */

static tree
finish_type_pack_element (tree idx, tree types, tsubst_flags_t complain)
{
  /* Reduce IDX to a constant if possible.  */
  idx = maybe_constant_value (idx);
  /* Diagnose, in this order: not an integral constant, negative,
     out of range.  */
  if (TREE_CODE (idx) != INTEGER_CST || !INTEGRAL_TYPE_P (TREE_TYPE (idx)))
    {
      if (complain & tf_error)
	error ("%<__type_pack_element%> index is not an integral constant");
      return error_mark_node;
    }
  if (tree_int_cst_sgn (idx) < 0)
    {
      if (complain & tf_error)
	error ("%<__type_pack_element%> index is negative");
      return error_mark_node;
    }
  /* Compare in widest precision so an IDX wider than HOST_WIDE_INT is
     still range-checked correctly before the narrowing tree_to_shwi.  */
  if (wi::to_widest (t: idx) >= TREE_VEC_LENGTH (types))
    {
      if (complain & tf_error)
	error ("%<__type_pack_element%> index is out of range");
      return error_mark_node;
    }
  return TREE_VEC_ELT (types, tree_to_shwi (idx));
}
4736 | |
/* Implement the __direct_bases keyword: Return the direct base classes
   of TYPE as a TREE_VEC of types, virtual bases first (matching
   initialization order), then non-virtual bases.  */

tree
calculate_direct_bases (tree type, tsubst_flags_t complain)
{
  /* Incomplete types and non-class (or union) types have no bases to
     report; return an empty vector.  */
  if (!complete_type_or_maybe_complain (type, NULL_TREE, complain)
      || !NON_UNION_CLASS_TYPE_P (type))
    return make_tree_vec (0);

  releasing_vec vector;
  vec<tree, va_gc> *base_binfos = BINFO_BASE_BINFOS (TYPE_BINFO (type));
  tree binfo;
  unsigned i;

  /* Virtual bases are initialized first */
  for (i = 0; base_binfos->iterate (ix: i, ptr: &binfo); i++)
    if (BINFO_VIRTUAL_P (binfo))
      vec_safe_push (r&: vector, t: binfo);

  /* Now non-virtuals */
  for (i = 0; base_binfos->iterate (ix: i, ptr: &binfo); i++)
    if (!BINFO_VIRTUAL_P (binfo))
      vec_safe_push (r&: vector, t: binfo);

  /* Repackage the collected binfos as a TREE_VEC of their types.  */
  tree bases_vec = make_tree_vec (vector->length ());

  for (i = 0; i < vector->length (); ++i)
    TREE_VEC_ELT (bases_vec, i) = BINFO_TYPE ((*vector)[i]);

  return bases_vec;
}
4769 | |
4770 | /* Implement the __bases keyword: Return the base classes |
4771 | of type */ |
4772 | |
4773 | /* Find morally non-virtual base classes by walking binfo hierarchy */ |
4774 | /* Virtual base classes are handled separately in finish_bases */ |
4775 | |
4776 | static tree |
4777 | dfs_calculate_bases_pre (tree binfo, void * /*data_*/) |
4778 | { |
4779 | /* Don't walk bases of virtual bases */ |
4780 | return BINFO_VIRTUAL_P (binfo) ? dfs_skip_bases : NULL_TREE; |
4781 | } |
4782 | |
4783 | static tree |
4784 | dfs_calculate_bases_post (tree binfo, void *data_) |
4785 | { |
4786 | vec<tree, va_gc> **data = ((vec<tree, va_gc> **) data_); |
4787 | if (!BINFO_VIRTUAL_P (binfo)) |
4788 | vec_safe_push (v&: *data, BINFO_TYPE (binfo)); |
4789 | return NULL_TREE; |
4790 | } |
4791 | |
/* Calculates the morally non-virtual base classes of a class */
/* Returns a vector obtained from make_tree_vector; the callers here
   take ownership by wrapping it in a releasing_vec.  */
static vec<tree, va_gc> *
calculate_bases_helper (tree type)
{
  vec<tree, va_gc> *vector = make_tree_vector ();

  /* Now add non-virtual base classes in order of construction */
  /* TYPE_BINFO can be null during error recovery.  */
  if (TYPE_BINFO (type))
    dfs_walk_all (TYPE_BINFO (type),
		  dfs_calculate_bases_pre, dfs_calculate_bases_post, &vector);
  return vector;
}
4804 | |
/* Implement the __bases keyword for TYPE: return a TREE_VEC of all its
   base classes, each virtual base preceded by that base's own morally
   non-virtual bases, followed by the non-virtual bases of TYPE.  */

tree
calculate_bases (tree type, tsubst_flags_t complain)
{
  /* Incomplete types and non-class (or union) types have no bases.  */
  if (!complete_type_or_maybe_complain (type, NULL_TREE, complain)
      || !NON_UNION_CLASS_TYPE_P (type))
    return make_tree_vec (0);

  releasing_vec vector;
  tree bases_vec = NULL_TREE;
  unsigned i;
  vec<tree, va_gc> *vbases;
  tree binfo;

  /* First go through virtual base classes */
  for (vbases = CLASSTYPE_VBASECLASSES (type), i = 0;
       vec_safe_iterate (v: vbases, ix: i, ptr: &binfo); i++)
    {
      /* For each virtual base, splice in its own morally non-virtual
	 bases (the helper's post-order ends with the base itself).  */
      releasing_vec vbase_bases
	= calculate_bases_helper (BINFO_TYPE (binfo));
      vec_safe_splice (r&: vector, p: vbase_bases);
    }

  /* Now for the non-virtual bases */
  releasing_vec nonvbases = calculate_bases_helper (type);
  vec_safe_splice (r&: vector, p: nonvbases);

  /* Note that during error recovery vector->length can even be zero.  */
  if (vector->length () > 1)
    {
      /* Last element is entire class, so don't copy */
      bases_vec = make_tree_vec (vector->length () - 1);

      for (i = 0; i < vector->length () - 1; ++i)
	TREE_VEC_ELT (bases_vec, i) = (*vector)[i];
    }
  else
    bases_vec = make_tree_vec (0);

  return bases_vec;
}
4845 | |
/* Build a BASES type node recording a __bases/__direct_bases request
   for TYPE; DIRECT is true for __direct_bases.  Only valid inside a
   template; the node records the request for later expansion.  */

tree
finish_bases (tree type, bool direct)
{
  tree bases = NULL_TREE;

  if (!processing_template_decl)
    {
      /* Parameter packs can only be used in templates */
      error ("parameter pack %<__bases%> only valid in template declaration");
      return error_mark_node;
    }

  /* Record the queried type and whether only direct bases were
     requested.  The actual base list is not computed here.  */
  bases = cxx_make_type (BASES);
  BASES_TYPE (bases) = type;
  BASES_DIRECT (bases) = direct;
  /* BASES nodes can only be compared structurally.  */
  SET_TYPE_STRUCTURAL_EQUALITY (bases);

  return bases;
}
4865 | |
/* Perform C++-specific checks for __builtin_offsetof before calling
   fold_offsetof.  OBJECT_PTR is a pointer to the enclosing object,
   EXPR the member access, LOC the location of the __builtin_offsetof
   expression.  Returns the folded offset, or error_mark_node.  */

tree
finish_offsetof (tree object_ptr, tree expr, location_t loc)
{
  /* If we're processing a template, we can't finish the semantics yet.
     Otherwise we can fold the entire expression now.  */
  if (processing_template_decl)
    {
      expr = build2 (OFFSETOF_EXPR, size_type_node, expr, object_ptr);
      SET_EXPR_LOCATION (expr, loc);
      return expr;
    }

  if (expr == error_mark_node)
    return error_mark_node;

  /* offsetof of a pseudo-destructor is meaningless; reject it.  */
  if (TREE_CODE (expr) == PSEUDO_DTOR_EXPR)
    {
      error ("cannot apply %<offsetof%> to destructor %<~%T%>",
	     TREE_OPERAND (expr, 2));
      return error_mark_node;
    }
  /* Likewise member functions, including unresolved overload sets
     (whose type is unknown_type_node).  */
  if (FUNC_OR_METHOD_TYPE_P (TREE_TYPE (expr))
      || TREE_TYPE (expr) == unknown_type_node)
    {
      /* Dig through the access expression looking for a DECL we can
	 name in the diagnostic.  */
      while (TREE_CODE (expr) == COMPONENT_REF
	     || TREE_CODE (expr) == COMPOUND_EXPR)
	expr = TREE_OPERAND (expr, 1);

      if (DECL_P (expr))
	{
	  error ("cannot apply %<offsetof%> to member function %qD", expr);
	  inform (DECL_SOURCE_LOCATION (expr), "declared here");
	}
      else
	error ("cannot apply %<offsetof%> to member function");
      return error_mark_node;
    }
  if (TREE_CODE (expr) == CONST_DECL)
    {
      error ("cannot apply %<offsetof%> to an enumerator %qD", expr);
      return error_mark_node;
    }
  /* Strip the implicit dereference of a reference.  */
  if (REFERENCE_REF_P (expr))
    expr = TREE_OPERAND (expr, 0);
  /* The enclosing type must be complete before we can lay it out.  */
  if (!complete_type_or_else (TREE_TYPE (TREE_TYPE (object_ptr)), object_ptr))
    return error_mark_node;
  /* offsetof on a non-standard-layout class is only
     conditionally-supported; warn, except in unevaluated operands.  */
  if (warn_invalid_offsetof
      && CLASS_TYPE_P (TREE_TYPE (TREE_TYPE (object_ptr)))
      && CLASSTYPE_NON_STD_LAYOUT (TREE_TYPE (TREE_TYPE (object_ptr)))
      && cp_unevaluated_operand == 0)
    warning_at (loc, OPT_Winvalid_offsetof, "%<offsetof%> within "
		"non-standard-layout type %qT is conditionally-supported",
		TREE_TYPE (TREE_TYPE (object_ptr)));
  return fold_offsetof (expr);
}
4924 | |
/* Replace the AGGR_INIT_EXPR at *TP with an equivalent CALL_EXPR.  This
   function is broken out from the above for the benefit of the tree-ssa
   project.  */

void
simplify_aggr_init_expr (tree *tp)
{
  tree aggr_init_expr = *tp;

  /* Form an appropriate CALL_EXPR.  */
  tree fn = AGGR_INIT_EXPR_FN (aggr_init_expr);
  tree slot = AGGR_INIT_EXPR_SLOT (aggr_init_expr);
  tree type = TREE_TYPE (slot);

  tree call_expr;
  /* How the initialization is performed: via a constructor taking the
     address of SLOT (ctor), via the return-slot optimization (arg), or
     via the non-reentrant PCC static-buffer convention (pcc).  */
  enum style_t { ctor, arg, pcc } style;

  if (AGGR_INIT_VIA_CTOR_P (aggr_init_expr))
    style = ctor;
#ifdef PCC_STATIC_STRUCT_RETURN
  /* When the target defines PCC_STATIC_STRUCT_RETURN, every non-ctor
     aggregate init takes the pcc path.  */
  else if (1)
    style = pcc;
#endif
  else
    {
      gcc_assert (TREE_ADDRESSABLE (type));
      style = arg;
    }

  /* Build the CALL_EXPR sharing the AGGR_INIT_EXPR's argument vector,
     then copy over the flags that matter for call lowering.  */
  call_expr = build_call_array_loc (input_location,
				    TREE_TYPE (TREE_TYPE (TREE_TYPE (fn))),
				    fn,
				    aggr_init_expr_nargs (aggr_init_expr),
				    AGGR_INIT_EXPR_ARGP (aggr_init_expr));
  TREE_NOTHROW (call_expr) = TREE_NOTHROW (aggr_init_expr);
  CALL_FROM_THUNK_P (call_expr) = AGGR_INIT_FROM_THUNK_P (aggr_init_expr);
  CALL_EXPR_OPERATOR_SYNTAX (call_expr)
    = CALL_EXPR_OPERATOR_SYNTAX (aggr_init_expr);
  CALL_EXPR_ORDERED_ARGS (call_expr) = CALL_EXPR_ORDERED_ARGS (aggr_init_expr);
  CALL_EXPR_REVERSE_ARGS (call_expr) = CALL_EXPR_REVERSE_ARGS (aggr_init_expr);

  if (style == ctor)
    {
      /* Replace the first argument to the ctor with the address of the
	 slot.  */
      cxx_mark_addressable (slot);
      CALL_EXPR_ARG (call_expr, 0) =
	build1 (ADDR_EXPR, build_pointer_type (type), slot);
    }
  else if (style == arg)
    {
      /* Just mark it addressable here, and leave the rest to
	 expand_call{,_inline}.  */
      cxx_mark_addressable (slot);
      CALL_EXPR_RETURN_SLOT_OPT (call_expr) = true;
      call_expr = cp_build_init_expr (t: slot, i: call_expr);
    }
  else if (style == pcc)
    {
      /* If we're using the non-reentrant PCC calling convention, then we
	 need to copy the returned value out of the static buffer into the
	 SLOT.  */
      push_deferring_access_checks (deferring: dk_no_check);
      call_expr = build_aggr_init (slot, call_expr,
				   DIRECT_BIND | LOOKUP_ONLYCONVERTING,
				   tf_warning_or_error);
      pop_deferring_access_checks ();
      call_expr = build2 (COMPOUND_EXPR, TREE_TYPE (slot), call_expr, slot);
    }

  if (AGGR_INIT_ZERO_FIRST (aggr_init_expr))
    {
      /* The AGGR_INIT_EXPR asked for SLOT to be zeroed before the
	 call; prepend the zero-initialization.  */
      tree init = build_zero_init (type, NULL_TREE,
				   /*static_storage_p=*/false);
      init = cp_build_init_expr (t: slot, i: init);
      call_expr = build2 (COMPOUND_EXPR, TREE_TYPE (call_expr),
			  init, call_expr);
    }

  *tp = call_expr;
}
5006 | |
/* Emit all thunks to FN that should be emitted when FN is emitted.  */

void
emit_associated_thunks (tree fn)
{
  /* When we use vcall offsets, we emit thunks with the virtual
     functions to which they thunk.  The whole point of vcall offsets
     is so that you can know statically the entire set of thunks that
     will ever be needed for a given virtual function, thereby
     enabling you to output all the thunks with the function itself.  */
  if (DECL_VIRTUAL_P (fn)
      /* Do not emit thunks for extern template instantiations.  */
      && ! DECL_REALLY_EXTERN (fn))
    {
      tree thunk;

      for (thunk = DECL_THUNKS (fn); thunk; thunk = DECL_CHAIN (thunk))
	{
	  if (!THUNK_ALIAS (thunk))
	    {
	      use_thunk (thunk, /*emit_p=*/1);
	      /* A result-adjusting thunk may itself have thunks; emit
		 those too.  */
	      if (DECL_RESULT_THUNK_P (thunk))
		{
		  tree probe;

		  for (probe = DECL_THUNKS (thunk);
		       probe; probe = DECL_CHAIN (probe))
		    use_thunk (probe, /*emit_p=*/1);
		}
	    }
	  else
	    /* An aliased thunk should carry no thunks of its own.  */
	    gcc_assert (!DECL_THUNKS (thunk));
	}
    }
}
5042 | |
/* Generate RTL for FN.  Returns true when the caller should hand FN to
   the callgraph for output (see expand_or_defer_fn); returns false
   when FN needs no further processing here.  */

bool
expand_or_defer_fn_1 (tree fn)
{
  /* When the parser calls us after finishing the body of a template
     function, we don't really want to expand the body.  */
  if (processing_template_decl)
    {
      /* Normally, collection only occurs in rest_of_compilation.  So,
	 if we don't collect here, we never collect junk generated
	 during the processing of templates until we hit a
	 non-template function.  It's not safe to do this inside a
	 nested class, though, as the parser may have local state that
	 is not a GC root.  */
      if (!function_depth)
	ggc_collect ();
      return false;
    }

  gcc_assert (DECL_SAVED_TREE (fn));

  /* We make a decision about linkage for these functions at the end
     of the compilation.  Until that point, we do not want the back
     end to output them -- but we do want it to see the bodies of
     these functions so that it can inline them as appropriate.  */
  if (DECL_DECLARED_INLINE_P (fn) || DECL_IMPLICIT_INSTANTIATION (fn))
    {
      if (DECL_INTERFACE_KNOWN (fn))
	/* We've already made a decision as to how this function will
	   be handled.  */;
      else if (!at_eof
	       || DECL_IMMEDIATE_FUNCTION_P (fn)
	       || DECL_OMP_DECLARE_REDUCTION_P (fn))
	/* Not at end of file (or a function we never emit normally);
	   only make a tentative linkage decision for now.  */
	tentative_decl_linkage (fn);
      else
	import_export_decl (fn);

      /* If the user wants us to keep all inline functions, then mark
	 this function as needed so that finish_file will make sure to
	 output it later.  Similarly, all dllexport'd functions must
	 be emitted; there may be callers in other DLLs.  */
      if (DECL_DECLARED_INLINE_P (fn)
	  && !DECL_REALLY_EXTERN (fn)
	  && !DECL_IMMEDIATE_FUNCTION_P (fn)
	  && !DECL_OMP_DECLARE_REDUCTION_P (fn)
	  && (flag_keep_inline_functions
	      || (flag_keep_inline_dllexport
		  && lookup_attribute (attr_name: "dllexport", DECL_ATTRIBUTES (fn)))))
	{
	  mark_needed (fn);
	  DECL_EXTERNAL (fn) = 0;
	}
    }

  /* If this is a constructor or destructor body, we have to clone
     it.  */
  if (maybe_clone_body (fn))
    {
      /* We don't want to process FN again, so pretend we've written
	 it out, even though we haven't.  */
      TREE_ASM_WRITTEN (fn) = 1;
      /* If this is a constexpr function we still need the body to be
	 able to evaluate it.  Similarly, with modules we only stream
	 the maybe-in-charge cdtor and regenerate the clones from it on
	 demand, so we also need to keep the body.  Otherwise we don't
	 need it anymore.  */
      if (!DECL_DECLARED_CONSTEXPR_P (fn)
	  && !(modules_p () && vague_linkage_p (fn)))
	DECL_SAVED_TREE (fn) = NULL_TREE;
      return false;
    }

  /* There's no reason to do any of the work here if we're only doing
     semantic analysis; this code just generates RTL.  */
  if (flag_syntax_only)
    {
      /* Pretend that this function has been written out so that we don't try
	 to expand it again.  */
      TREE_ASM_WRITTEN (fn) = 1;
      return false;
    }

  /* Don't pass OpenMP declare reduction functions to the callgraph.  */
  if (DECL_OMP_DECLARE_REDUCTION_P (fn))
    return false;

  return true;
}
5131 | |
/* Expand FN now, or defer it to end-of-compilation, by handing it to
   the callgraph once expand_or_defer_fn_1 says it is ready.  */

void
expand_or_defer_fn (tree fn)
{
  if (expand_or_defer_fn_1 (fn))
    {
      /* Track nesting so finalize_function knows whether this
	 expansion happens inside another one.  */
      function_depth++;

      /* Expand or defer, at the whim of the compilation unit manager.  */
      cgraph_node::finalize_function (fn, function_depth > 1);
      emit_associated_thunks (fn);

      function_depth--;

      /* Immediate (consteval) functions must not be emitted as code;
	 strip the body-related state from the cgraph node.  */
      if (DECL_IMMEDIATE_FUNCTION_P (fn))
	{
	  if (cgraph_node *node = cgraph_node::get (decl: fn))
	    {
	      node->body_removed = true;
	      node->analyzed = false;
	      node->definition = false;
	      node->force_output = false;
	    }
	}
    }
}
5157 | |
/* State shared across the finalize_nrv_r walk that implements the
   named return value optimization.  */

class nrv_data
{
public:
  nrv_data () : visited (37) {}

  /* The local variable being optimized away.  */
  tree var;
  /* The RESULT_DECL that replaces every use of VAR.  */
  tree result;
  /* Trees already seen, so shared subtrees are walked only once.  */
  hash_set<tree> visited;
  /* True while the NRV is "simple": finalize_nrv_r clears this upon
     seeing a LABEL_EXPR outside the NRV cleanup, since a goto might
     require destroying the NRV (PR 92407).  Initial value is
     presumably set by the caller (finalize_nrv) -- confirm there.  */
  bool simple;
  /* True while walking inside the CLEANUP_STMT for VAR itself.  */
  bool in_nrv_cleanup;
};
5169 | |
/* Helper function for walk_tree, used by finalize_nrv below.  DATA
   points at an nrv_data giving the variable to replace (var) and the
   RESULT_DECL replacing it (result).  Rewrites one node at a time:
   uses of var, RETURN_EXPRs, CLEANUP_STMTs and var's DECL_EXPR.  */

static tree
finalize_nrv_r (tree* tp, int* walk_subtrees, void* data)
{
  class nrv_data *dp = (class nrv_data *)data;

  /* No need to walk into types.  There wouldn't be any need to walk into
     non-statements, except that we have to consider STMT_EXPRs.  */
  if (TYPE_P (*tp))
    *walk_subtrees = 0;

  /* Replace all uses of the NRV with the RESULT_DECL.  */
  else if (*tp == dp->var)
    *tp = dp->result;

  /* Avoid walking into the same tree more than once.  Unfortunately, we
     can't just use walk_tree_without duplicates because it would only call
     us for the first occurrence of dp->var in the function body.  */
  else if (dp->visited.add (k: *tp))
    *walk_subtrees = 0;

  /* If there's a label, we might need to destroy the NRV on goto (92407).  */
  else if (TREE_CODE (*tp) == LABEL_EXPR && !dp->in_nrv_cleanup)
    dp->simple = false;
  /* Change NRV returns to just refer to the RESULT_DECL; this is a nop,
     but differs from using NULL_TREE in that it indicates that we care
     about the value of the RESULT_DECL.  But preserve anything appended
     by check_return_expr.  */
  else if (TREE_CODE (*tp) == RETURN_EXPR)
    {
      /* Skip over any COMPOUND_EXPRs to find the INIT_EXPR that
	 initializes the return value.  */
      tree *p = &TREE_OPERAND (*tp, 0);
      while (TREE_CODE (*p) == COMPOUND_EXPR)
	p = &TREE_OPERAND (*p, 0);
      if (TREE_CODE (*p) == INIT_EXPR
	  && INIT_EXPR_NRV_P (*p))
	*p = dp->result;
    }
  /* Change all cleanups for the NRV to only run when not returning.  */
  else if (TREE_CODE (*tp) == CLEANUP_STMT
	   && CLEANUP_DECL (*tp) == dp->var)
    {
      /* Walk the two halves ourselves so in_nrv_cleanup is accurate
	 for the nested walk.  */
      dp->in_nrv_cleanup = true;
      cp_walk_tree (&CLEANUP_BODY (*tp), finalize_nrv_r, data, 0);
      dp->in_nrv_cleanup = false;
      cp_walk_tree (&CLEANUP_EXPR (*tp), finalize_nrv_r, data, 0);
      *walk_subtrees = 0;

      if (dp->simple)
	/* For a simple NRV, just run it on the EH path.  */
	CLEANUP_EH_ONLY (*tp) = true;
      else
	{
	  /* Not simple, we need to check current_retval_sentinel to decide
	     whether to run it.  If it's set, we're returning normally and
	     don't want to destroy the NRV.  If the sentinel is not set, we're
	     leaving scope some other way, either by flowing off the end of its
	     scope or throwing an exception.  */
	  tree cond = build3 (COND_EXPR, void_type_node,
			      current_retval_sentinel,
			      void_node, CLEANUP_EXPR (*tp));
	  CLEANUP_EXPR (*tp) = cond;
	}

      /* If a cleanup might throw, we need to clear current_retval_sentinel on
	 the exception path, both so the check above succeeds and so an outer
	 cleanup added by maybe_splice_retval_cleanup doesn't run.  */
      if (cp_function_chain->throwing_cleanup)
	{
	  tree clear = build2 (MODIFY_EXPR, boolean_type_node,
			       current_retval_sentinel,
			       boolean_false_node);
	  if (dp->simple)
	    {
	      /* We're already only on the EH path, just prepend it.  */
	      tree &exp = CLEANUP_EXPR (*tp);
	      exp = build2 (COMPOUND_EXPR, void_type_node, clear, exp);
	    }
	  else
	    {
	      /* The cleanup runs on both normal and EH paths, we need another
		 CLEANUP_STMT to clear the flag only on the EH path.  */
	      tree &bod = CLEANUP_BODY (*tp);
	      bod = build_stmt (EXPR_LOCATION (*tp), CLEANUP_STMT,
				bod, clear, current_retval_sentinel);
	      CLEANUP_EH_ONLY (bod) = true;
	    }
	}
    }
  /* Disable maybe_splice_retval_cleanup within the NRV cleanup scope, we don't
     want to destroy the retval before the variable goes out of scope.  */
  else if (TREE_CODE (*tp) == CLEANUP_STMT
	   && dp->in_nrv_cleanup
	   && CLEANUP_DECL (*tp) == dp->result)
    CLEANUP_EXPR (*tp) = void_node;
  /* Replace the DECL_EXPR for the NRV with an initialization of the
     RESULT_DECL, if needed.  */
  else if (TREE_CODE (*tp) == DECL_EXPR
	   && DECL_EXPR_DECL (*tp) == dp->var)
    {
      tree init;
      if (DECL_INITIAL (dp->var)
	  && DECL_INITIAL (dp->var) != error_mark_node)
	init = cp_build_init_expr (t: dp->result,
				   DECL_INITIAL (dp->var));
      else
	init = build_empty_stmt (EXPR_LOCATION (*tp));
      /* The initializer now lives on the RESULT_DECL's INIT_EXPR.  */
      DECL_INITIAL (dp->var) = NULL_TREE;
      SET_EXPR_LOCATION (init, EXPR_LOCATION (*tp));
      *tp = init;
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
5285 | |
5286 | /* Called from finish_function to implement the named return value |
5287 | optimization by overriding all the RETURN_EXPRs and pertinent |
5288 | CLEANUP_STMTs and replacing all occurrences of VAR with RESULT, the |
5289 | RESULT_DECL for the function. */ |
5290 | |
5291 | void |
5292 | finalize_nrv (tree fndecl, tree var) |
5293 | { |
5294 | class nrv_data data; |
5295 | tree result = DECL_RESULT (fndecl); |
5296 | |
5297 | /* Copy name from VAR to RESULT. */ |
5298 | DECL_NAME (result) = DECL_NAME (var); |
5299 | /* Don't forget that we take its address. */ |
5300 | TREE_ADDRESSABLE (result) = TREE_ADDRESSABLE (var); |
5301 | /* Finally set DECL_VALUE_EXPR to avoid assigning |
5302 | a stack slot at -O0 for the original var and debug info |
5303 | uses RESULT location for VAR. */ |
5304 | SET_DECL_VALUE_EXPR (var, result); |
5305 | DECL_HAS_VALUE_EXPR_P (var) = 1; |
5306 | |
5307 | data.var = var; |
5308 | data.result = result; |
5309 | data.in_nrv_cleanup = false; |
5310 | |
5311 | /* This is simpler for variables declared in the outer scope of |
5312 | the function so we know that their lifetime always ends with a |
5313 | return; see g++.dg/opt/nrv6.C. */ |
5314 | tree outer = outer_curly_brace_block (fndecl); |
5315 | data.simple = chain_member (var, BLOCK_VARS (outer)); |
5316 | |
5317 | cp_walk_tree (&DECL_SAVED_TREE (fndecl), finalize_nrv_r, &data, 0); |
5318 | } |
5319 | |
5320 | /* Create CP_OMP_CLAUSE_INFO for clause C. Returns true if it is invalid. */ |
5321 | |
5322 | bool |
5323 | cxx_omp_create_clause_info (tree c, tree type, bool need_default_ctor, |
5324 | bool need_copy_ctor, bool need_copy_assignment, |
5325 | bool need_dtor) |
5326 | { |
5327 | int save_errorcount = errorcount; |
5328 | tree info, t; |
5329 | |
5330 | /* Always allocate 3 elements for simplicity. These are the |
5331 | function decls for the ctor, dtor, and assignment op. |
5332 | This layout is known to the three lang hooks, |
5333 | cxx_omp_clause_default_init, cxx_omp_clause_copy_init, |
5334 | and cxx_omp_clause_assign_op. */ |
5335 | info = make_tree_vec (3); |
5336 | CP_OMP_CLAUSE_INFO (c) = info; |
5337 | |
5338 | if (need_default_ctor || need_copy_ctor) |
5339 | { |
5340 | if (need_default_ctor) |
5341 | t = get_default_ctor (type); |
5342 | else |
5343 | t = get_copy_ctor (type, tf_warning_or_error); |
5344 | |
5345 | if (t && !trivial_fn_p (t)) |
5346 | TREE_VEC_ELT (info, 0) = t; |
5347 | } |
5348 | |
5349 | if (need_dtor && TYPE_HAS_NONTRIVIAL_DESTRUCTOR (type)) |
5350 | TREE_VEC_ELT (info, 1) = get_dtor (type, tf_warning_or_error); |
5351 | |
5352 | if (need_copy_assignment) |
5353 | { |
5354 | t = get_copy_assign (type); |
5355 | |
5356 | if (t && !trivial_fn_p (t)) |
5357 | TREE_VEC_ELT (info, 2) = t; |
5358 | } |
5359 | |
5360 | return errorcount != save_errorcount; |
5361 | } |
5362 | |
5363 | /* If DECL is DECL_OMP_PRIVATIZED_MEMBER, return corresponding |
5364 | FIELD_DECL, otherwise return DECL itself. */ |
5365 | |
5366 | static tree |
5367 | omp_clause_decl_field (tree decl) |
5368 | { |
5369 | if (VAR_P (decl) |
5370 | && DECL_HAS_VALUE_EXPR_P (decl) |
5371 | && DECL_ARTIFICIAL (decl) |
5372 | && DECL_LANG_SPECIFIC (decl) |
5373 | && DECL_OMP_PRIVATIZED_MEMBER (decl)) |
5374 | { |
5375 | tree f = DECL_VALUE_EXPR (decl); |
5376 | if (INDIRECT_REF_P (f)) |
5377 | f = TREE_OPERAND (f, 0); |
5378 | if (TREE_CODE (f) == COMPONENT_REF) |
5379 | { |
5380 | f = TREE_OPERAND (f, 1); |
5381 | gcc_assert (TREE_CODE (f) == FIELD_DECL); |
5382 | return f; |
5383 | } |
5384 | } |
5385 | return NULL_TREE; |
5386 | } |
5387 | |
5388 | /* Adjust DECL if needed for printing using %qE. */ |
5389 | |
5390 | static tree |
5391 | omp_clause_printable_decl (tree decl) |
5392 | { |
5393 | tree t = omp_clause_decl_field (decl); |
5394 | if (t) |
5395 | return t; |
5396 | return decl; |
5397 | } |
5398 | |
5399 | /* For a FIELD_DECL F and corresponding DECL_OMP_PRIVATIZED_MEMBER |
5400 | VAR_DECL T that doesn't need a DECL_EXPR added, record it for |
5401 | privatization. */ |
5402 | |
5403 | static void |
5404 | omp_note_field_privatization (tree f, tree t) |
5405 | { |
5406 | if (!omp_private_member_map) |
5407 | omp_private_member_map = new hash_map<tree, tree>; |
5408 | tree &v = omp_private_member_map->get_or_insert (k: f); |
5409 | if (v == NULL_TREE) |
5410 | { |
5411 | v = t; |
5412 | omp_private_member_vec.safe_push (obj: f); |
5413 | /* Signal that we don't want to create DECL_EXPR for this dummy var. */ |
5414 | omp_private_member_vec.safe_push (integer_zero_node); |
5415 | } |
5416 | } |
5417 | |
5418 | /* Privatize FIELD_DECL T, return corresponding DECL_OMP_PRIVATIZED_MEMBER |
5419 | dummy VAR_DECL. */ |
5420 | |
5421 | tree |
5422 | omp_privatize_field (tree t, bool shared) |
5423 | { |
5424 | tree m = finish_non_static_data_member (decl: t, NULL_TREE, NULL_TREE); |
5425 | if (m == error_mark_node) |
5426 | return error_mark_node; |
5427 | if (!omp_private_member_map && !shared) |
5428 | omp_private_member_map = new hash_map<tree, tree>; |
5429 | if (TYPE_REF_P (TREE_TYPE (t))) |
5430 | { |
5431 | gcc_assert (INDIRECT_REF_P (m)); |
5432 | m = TREE_OPERAND (m, 0); |
5433 | } |
5434 | tree vb = NULL_TREE; |
5435 | tree &v = shared ? vb : omp_private_member_map->get_or_insert (k: t); |
5436 | if (v == NULL_TREE) |
5437 | { |
5438 | v = create_temporary_var (TREE_TYPE (m)); |
5439 | retrofit_lang_decl (v); |
5440 | DECL_OMP_PRIVATIZED_MEMBER (v) = 1; |
5441 | SET_DECL_VALUE_EXPR (v, m); |
5442 | DECL_HAS_VALUE_EXPR_P (v) = 1; |
5443 | if (!shared) |
5444 | omp_private_member_vec.safe_push (obj: t); |
5445 | } |
5446 | return v; |
5447 | } |
5448 | |
5449 | /* C++ specialisation of the c_omp_address_inspector class. */ |
5450 | |
5451 | class cp_omp_address_inspector : public c_omp_address_inspector |
5452 | { |
5453 | public: |
5454 | cp_omp_address_inspector (location_t loc, tree t) |
5455 | : c_omp_address_inspector (loc, t) |
5456 | { |
5457 | } |
5458 | |
5459 | ~cp_omp_address_inspector () |
5460 | { |
5461 | } |
5462 | |
5463 | bool processing_template_decl_p () |
5464 | { |
5465 | return processing_template_decl; |
5466 | } |
5467 | |
5468 | void emit_unmappable_type_notes (tree t) |
5469 | { |
5470 | if (TREE_TYPE (t) != error_mark_node |
5471 | && !COMPLETE_TYPE_P (TREE_TYPE (t))) |
5472 | cxx_incomplete_type_inform (TREE_TYPE (t)); |
5473 | } |
5474 | |
5475 | tree convert_from_reference (tree x) |
5476 | { |
5477 | return ::convert_from_reference (x); |
5478 | } |
5479 | |
5480 | tree build_array_ref (location_t loc, tree arr, tree idx) |
5481 | { |
5482 | return ::build_array_ref (loc, arr, idx); |
5483 | } |
5484 | |
5485 | bool check_clause (tree clause) |
5486 | { |
5487 | if (TREE_CODE (orig) == COMPONENT_REF |
5488 | && invalid_nonstatic_memfn_p (EXPR_LOCATION (orig), orig, |
5489 | tf_warning_or_error)) |
5490 | return false; |
5491 | if (!c_omp_address_inspector::check_clause (clause)) |
5492 | return false; |
5493 | return true; |
5494 | } |
5495 | }; |
5496 | |
5497 | /* Helper function for handle_omp_array_sections. Called recursively |
5498 | to handle multiple array-section-subscripts. C is the clause, |
5499 | T current expression (initially OMP_CLAUSE_DECL), which is either |
5500 | a TREE_LIST for array-section-subscript (TREE_PURPOSE is low-bound |
5501 | expression if specified, TREE_VALUE length expression if specified, |
5502 | TREE_CHAIN is what it has been specified after, or some decl. |
5503 | TYPES vector is populated with array section types, MAYBE_ZERO_LEN |
5504 | set to true if any of the array-section-subscript could have length |
5505 | of zero (explicit or implicit), FIRST_NON_ONE is the index of the |
5506 | first array-section-subscript which is known not to have length |
5507 | of one. Given say: |
5508 | map(a[:b][2:1][:c][:2][:d][e:f][2:5]) |
5509 | FIRST_NON_ONE will be 3, array-section-subscript [:b], [2:1] and [:c] |
5510 | all are or may have length of 1, array-section-subscript [:2] is the |
5511 | first one known not to have length 1. For array-section-subscript |
5512 | <= FIRST_NON_ONE we diagnose non-contiguous arrays if low bound isn't |
5513 | 0 or length isn't the array domain max + 1, for > FIRST_NON_ONE we |
5514 | can if MAYBE_ZERO_LEN is false. MAYBE_ZERO_LEN will be true in the above |
5515 | case though, as some lengths could be zero. */ |
5516 | |
static tree
handle_omp_array_sections_1 (tree c, tree t, vec<tree> &types,
			     bool &maybe_zero_len, unsigned int &first_non_one,
			     enum c_omp_region_type ort)
{
  tree ret, low_bound, length, type;
  bool openacc = (ort & C_ORT_ACC) != 0;
  /* Base case of the recursion: T is the underlying base expression
     rather than another array-section subscript.  Validate it and
     return the (possibly reference-converted) base.  */
  if (TREE_CODE (t) != OMP_ARRAY_SECTION)
    {
      if (error_operand_p (t))
	return error_mark_node;

      cp_omp_address_inspector ai (OMP_CLAUSE_LOCATION (c), t);
      tree t_refto = ai.maybe_unconvert_ref (t);

      if (!ai.check_clause (clause: c))
	return error_mark_node;
      else if (ai.component_access_p ()
	       && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM))
	t = ai.get_root_term (true);
      else
	t = ai.unconverted_ref_origin ();
      if (t == error_mark_node)
	return error_mark_node;
      ret = t_refto;
      /* A bare member: expand it into a this->member access.  */
      if (TREE_CODE (t) == FIELD_DECL)
	ret = finish_non_static_data_member (decl: t, NULL_TREE, NULL_TREE);
      else if (!VAR_P (t)
	       && (openacc || !EXPR_P (t))
	       && TREE_CODE (t) != PARM_DECL)
	{
	  /* In a template an unresolved base may still become valid on
	     instantiation; defer rather than diagnose.  */
	  if (processing_template_decl && TREE_CODE (t) != OVERLOAD)
	    return NULL_TREE;
	  if (DECL_P (t))
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%qD is not a variable in %qs clause" , t,
		      omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
	  else
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%qE is not a variable in %qs clause" , t,
		      omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
	  return error_mark_node;
	}
      /* Threadprivate variables are only valid in depend/affinity.  */
      else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_AFFINITY
	       && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
	       && VAR_P (t) && CP_DECL_THREAD_LOCAL_P (t))
	{
	  error_at (OMP_CLAUSE_LOCATION (c),
		    "%qD is threadprivate variable in %qs clause" , t,
		    omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
	  return error_mark_node;
	}
      if (type_dependent_expression_p (ret))
	return NULL_TREE;
      ret = convert_from_reference (ret);
      return ret;
    }

  /* For reductions on a member array section, privatize the member
     before recursing so the base refers to the dummy variable.  */
  if ((ort & C_ORT_OMP_DECLARE_SIMD) == C_ORT_OMP
      && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	  || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
	  || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
      && TREE_CODE (TREE_OPERAND (t, 0)) == FIELD_DECL)
    TREE_OPERAND (t, 0) = omp_privatize_field (TREE_OPERAND (t, 0), shared: false);
  /* Recurse on the next-outer subscript (or, eventually, the base).  */
  ret = handle_omp_array_sections_1 (c, TREE_OPERAND (t, 0), types,
				     maybe_zero_len, first_non_one, ort);
  if (ret == error_mark_node || ret == NULL_TREE)
    return ret;

  type = TREE_TYPE (ret);
  low_bound = TREE_OPERAND (t, 1);
  length = TREE_OPERAND (t, 2);
  /* Defer type-dependent bounds until instantiation.  */
  if ((low_bound && type_dependent_expression_p (low_bound))
      || (length && type_dependent_expression_p (length)))
    return NULL_TREE;

  if (low_bound == error_mark_node || length == error_mark_node)
    return error_mark_node;

  if (low_bound && !INTEGRAL_TYPE_P (TREE_TYPE (low_bound)))
    {
      error_at (OMP_CLAUSE_LOCATION (c),
		"low bound %qE of array section does not have integral type" ,
		low_bound);
      return error_mark_node;
    }
  if (length && !INTEGRAL_TYPE_P (TREE_TYPE (length)))
    {
      error_at (OMP_CLAUSE_LOCATION (c),
		"length %qE of array section does not have integral type" ,
		length);
      return error_mark_node;
    }
  if (low_bound)
    low_bound = mark_rvalue_use (low_bound);
  if (length)
    length = mark_rvalue_use (length);
  /* We need to reduce to real constant-values for checks below.  */
  if (length)
    length = fold_simple (length);
  if (low_bound)
    low_bound = fold_simple (low_bound);
  /* Narrow over-wide constants to sizetype so the INTEGER_CST
     comparisons below behave.  */
  if (low_bound
      && TREE_CODE (low_bound) == INTEGER_CST
      && TYPE_PRECISION (TREE_TYPE (low_bound))
	 > TYPE_PRECISION (sizetype))
    low_bound = fold_convert (sizetype, low_bound);
  if (length
      && TREE_CODE (length) == INTEGER_CST
      && TYPE_PRECISION (TREE_TYPE (length))
	 > TYPE_PRECISION (sizetype))
    length = fold_convert (sizetype, length);
  /* An omitted low bound defaults to zero.  */
  if (low_bound == NULL_TREE)
    low_bound = integer_zero_node;

  /* attach/detach map clauses operate on a single pointer, so the
     section must have length exactly 1.  */
  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
	  || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH))
    {
      if (length != integer_one_node)
	{
	  error_at (OMP_CLAUSE_LOCATION (c),
		    "expected single pointer in %qs clause" ,
		    user_omp_clause_code_name (c, openacc));
	  return error_mark_node;
	}
    }
  if (length != NULL_TREE)
    {
      if (!integer_nonzerop (length))
	{
	  /* Zero-length sections are an error for depend/affinity and
	     reductions; for other clauses they are merely possible.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY
	      || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	      || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
	      || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
	    {
	      if (integer_zerop (length))
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "zero length array section in %qs clause" ,
			    omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
		  return error_mark_node;
		}
	    }
	  else
	    maybe_zero_len = true;
	}
      /* Track the first dimension not known to have length 1; used by
	 the caller for contiguity checking.  */
      if (first_non_one == types.length ()
	  && (TREE_CODE (length) != INTEGER_CST || integer_onep (length)))
	first_non_one++;
    }
  if (TREE_CODE (type) == ARRAY_TYPE)
    {
      /* Array type: check the bounds against the (possibly known)
	 array domain.  */
      if (length == NULL_TREE
	  && (TYPE_DOMAIN (type) == NULL_TREE
	      || TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE))
	{
	  error_at (OMP_CLAUSE_LOCATION (c),
		    "for unknown bound array type length expression must "
		    "be specified" );
	  return error_mark_node;
	}
      if (TREE_CODE (low_bound) == INTEGER_CST
	  && tree_int_cst_sgn (low_bound) == -1)
	{
	  error_at (OMP_CLAUSE_LOCATION (c),
		    "negative low bound in array section in %qs clause" ,
		    omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
	  return error_mark_node;
	}
      if (length != NULL_TREE
	  && TREE_CODE (length) == INTEGER_CST
	  && tree_int_cst_sgn (length) == -1)
	{
	  error_at (OMP_CLAUSE_LOCATION (c),
		    "negative length in array section in %qs clause" ,
		    omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
	  return error_mark_node;
	}
      if (TYPE_DOMAIN (type)
	  && TYPE_MAX_VALUE (TYPE_DOMAIN (type))
	  && TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
			== INTEGER_CST)
	{
	  /* SIZE is the number of elements in the array dimension.  */
	  tree size
	    = fold_convert (sizetype, TYPE_MAX_VALUE (TYPE_DOMAIN (type)));
	  size = size_binop (PLUS_EXPR, size, size_one_node);
	  if (TREE_CODE (low_bound) == INTEGER_CST)
	    {
	      if (tree_int_cst_lt (t1: size, t2: low_bound))
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "low bound %qE above array section size "
			    "in %qs clause" , low_bound,
			    omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
		  return error_mark_node;
		}
	      if (tree_int_cst_equal (size, low_bound))
		{
		  /* Low bound exactly at the end: the section is
		     necessarily zero-length.  */
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY
		      || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
		      || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		      || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
		      || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
		    {
		      error_at (OMP_CLAUSE_LOCATION (c),
				"zero length array section in %qs clause" ,
				omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
		      return error_mark_node;
		    }
		  maybe_zero_len = true;
		}
	      else if (length == NULL_TREE
		       && first_non_one == types.length ()
		       && tree_int_cst_equal
			    (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
			     low_bound))
		first_non_one++;
	    }
	  else if (length == NULL_TREE)
	    {
	      /* [lb:] with non-constant lb: length is size - lb, which
		 might be zero.  */
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_AFFINITY
		  && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
		  && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
		  && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION
		  && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TASK_REDUCTION)
		maybe_zero_len = true;
	      if (first_non_one == types.length ())
		first_non_one++;
	    }
	  if (length && TREE_CODE (length) == INTEGER_CST)
	    {
	      if (tree_int_cst_lt (t1: size, t2: length))
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "length %qE above array section size "
			    "in %qs clause" , length,
			    omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
		  return error_mark_node;
		}
	      if (TREE_CODE (low_bound) == INTEGER_CST)
		{
		  /* Check lb + length does not run past the array.  */
		  tree lbpluslen
		    = size_binop (PLUS_EXPR,
				  fold_convert (sizetype, low_bound),
				  fold_convert (sizetype, length));
		  if (TREE_CODE (lbpluslen) == INTEGER_CST
		      && tree_int_cst_lt (t1: size, t2: lbpluslen))
		    {
		      error_at (OMP_CLAUSE_LOCATION (c),
				"high bound %qE above array section size "
				"in %qs clause" , lbpluslen,
				omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
		      return error_mark_node;
		    }
		}
	    }
	}
      else if (length == NULL_TREE)
	{
	  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_AFFINITY
	      && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
	      && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
	      && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION
	      && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TASK_REDUCTION)
	    maybe_zero_len = true;
	  if (first_non_one == types.length ())
	    first_non_one++;
	}

      /* For [lb:] we will need to evaluate lb more than once.  */
      if (length == NULL_TREE && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
	{
	  tree lb = cp_save_expr (low_bound);
	  if (lb != low_bound)
	    {
	      TREE_OPERAND (t, 1) = lb;
	      low_bound = lb;
	    }
	}
    }
  else if (TYPE_PTR_P (type))
    {
      /* Pointer type: a length is mandatory, since the extent cannot
	 be deduced from the type.  */
      if (length == NULL_TREE)
	{
	  if (TREE_CODE (ret) == PARM_DECL && DECL_ARRAY_PARAMETER_P (ret))
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "for array function parameter length expression "
		      "must be specified" );
	  else
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "for pointer type length expression must be specified" );
	  return error_mark_node;
	}
      if (length != NULL_TREE
	  && TREE_CODE (length) == INTEGER_CST
	  && tree_int_cst_sgn (length) == -1)
	{
	  error_at (OMP_CLAUSE_LOCATION (c),
		    "negative length in array section in %qs clause" ,
		    omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
	  return error_mark_node;
	}
      /* If there is a pointer type anywhere but in the very first
	 array-section-subscript, the array section could be non-contiguous.  */
      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_AFFINITY
	  && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
	  && TREE_CODE (TREE_OPERAND (t, 0)) == OMP_ARRAY_SECTION)
	{
	  /* If any prior dimension has a non-one length, then deem this
	     array section as non-contiguous.  */
	  for (tree d = TREE_OPERAND (t, 0); TREE_CODE (d) == OMP_ARRAY_SECTION;
	       d = TREE_OPERAND (d, 0))
	    {
	      tree d_length = TREE_OPERAND (d, 2);
	      if (d_length == NULL_TREE || !integer_onep (d_length))
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "array section is not contiguous in %qs clause" ,
			    omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
		  return error_mark_node;
		}
	    }
	}
    }
  else
    {
      error_at (OMP_CLAUSE_LOCATION (c),
		"%qE does not have pointer or array type" , ret);
      return error_mark_node;
    }
  /* Record this dimension's type for the caller's contiguity/size
     computations (depend clauses don't need it).  */
  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
    types.safe_push (TREE_TYPE (ret));
  /* We will need to evaluate lb more than once.  */
  tree lb = cp_save_expr (low_bound);
  if (lb != low_bound)
    {
      TREE_OPERAND (t, 1) = lb;
      low_bound = lb;
    }
  /* Temporarily disable -fstrong-eval-order for array reductions.
     The SAVE_EXPR and COMPOUND_EXPR added if low_bound has side-effects
     is something the middle-end can't cope with and more importantly,
     it needs to be the actual base variable that is privatized, not some
     temporary assigned previous value of it.  That, together with OpenMP
     saying how many times the side-effects are evaluated is unspecified,
     makes int *a, *b; ... reduction(+:a[a = b, 3:10]) really unspecified.  */
  warning_sentinel s (flag_strong_eval_order,
		      OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		      || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
		      || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION);
  ret = grok_array_decl (OMP_CLAUSE_LOCATION (c), ret, low_bound, NULL,
			 tf_warning_or_error);
  return ret;
}
5875 | |
5876 | /* Handle array sections for clause C. */ |
5877 | |
static bool
handle_omp_array_sections (tree &c, enum c_omp_region_type ort)
{
  bool maybe_zero_len = false;
  unsigned int first_non_one = 0;
  auto_vec<tree, 10> types;
  tree *tp = &OMP_CLAUSE_DECL (c);
  /* depend/affinity clauses with iterators wrap the decl in a
     TREE_LIST whose TREE_PURPOSE is the iterator TREE_VEC; the actual
     section lives in TREE_VALUE.  */
  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY)
      && TREE_CODE (*tp) == TREE_LIST
      && TREE_PURPOSE (*tp)
      && TREE_CODE (TREE_PURPOSE (*tp)) == TREE_VEC)
    tp = &TREE_VALUE (*tp);
  /* Recursively validate each subscript and build the element access
     for the first element of the section.  */
  tree first = handle_omp_array_sections_1 (c, t: *tp, types,
					    maybe_zero_len, first_non_one,
					    ort);
  if (first == error_mark_node)
    return true;
  if (first == NULL_TREE)
    return false;
  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
      || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY)
    {
      tree t = *tp;
      tree tem = NULL_TREE;
      if (processing_template_decl)
	return false;
      /* Need to evaluate side effects in the length expressions
	 if any.  */
      while (TREE_CODE (t) == TREE_LIST)
	{
	  if (TREE_VALUE (t) && TREE_SIDE_EFFECTS (TREE_VALUE (t)))
	    {
	      if (tem == NULL_TREE)
		tem = TREE_VALUE (t);
	      else
		tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem),
			      TREE_VALUE (t), tem);
	    }
	  t = TREE_CHAIN (t);
	}
      if (tem)
	first = build2 (COMPOUND_EXPR, TREE_TYPE (first), tem, first);
      *tp = first;
    }
  else
    {
      unsigned int num = types.length (), i;
      tree t, side_effects = NULL_TREE, size = NULL_TREE;
      tree condition = NULL_TREE;

      if (int_size_in_bytes (TREE_TYPE (first)) <= 0)
	maybe_zero_len = true;
      if (processing_template_decl && maybe_zero_len)
	return false;

      /* Walk the subscripts outermost-first (i counts down), checking
	 contiguity and accumulating the total SIZE in bytes.  */
      for (i = num, t = OMP_CLAUSE_DECL (c); i > 0;
	   t = TREE_OPERAND (t, 0))
	{
	  gcc_assert (TREE_CODE (t) == OMP_ARRAY_SECTION);

	  tree low_bound = TREE_OPERAND (t, 1);
	  tree length = TREE_OPERAND (t, 2);

	  i--;
	  if (low_bound
	      && TREE_CODE (low_bound) == INTEGER_CST
	      && TYPE_PRECISION (TREE_TYPE (low_bound))
		 > TYPE_PRECISION (sizetype))
	    low_bound = fold_convert (sizetype, low_bound);
	  if (length
	      && TREE_CODE (length) == INTEGER_CST
	      && TYPE_PRECISION (TREE_TYPE (length))
		 > TYPE_PRECISION (sizetype))
	    length = fold_convert (sizetype, length);
	  if (low_bound == NULL_TREE)
	    low_bound = integer_zero_node;
	  if (!maybe_zero_len && i > first_non_one)
	    {
	      /* Dimensions past FIRST_NON_ONE must span the whole
		 array, else the section is non-contiguous.  */
	      if (integer_nonzerop (low_bound))
		goto do_warn_noncontiguous;
	      if (length != NULL_TREE
		  && TREE_CODE (length) == INTEGER_CST
		  && TYPE_DOMAIN (types[i])
		  && TYPE_MAX_VALUE (TYPE_DOMAIN (types[i]))
		  && TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (types[i])))
		     == INTEGER_CST)
		{
		  tree size;
		  size = size_binop (PLUS_EXPR,
				     TYPE_MAX_VALUE (TYPE_DOMAIN (types[i])),
				     size_one_node);
		  if (!tree_int_cst_equal (length, size))
		    {
		     do_warn_noncontiguous:
		      error_at (OMP_CLAUSE_LOCATION (c),
				"array section is not contiguous in %qs "
				"clause" ,
				omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
		      return true;
		    }
		}
	      /* Collect side effects of lengths that won't otherwise be
		 evaluated in the generated size expression.  */
	      if (!processing_template_decl
		  && length != NULL_TREE
		  && TREE_SIDE_EFFECTS (length))
		{
		  if (side_effects == NULL_TREE)
		    side_effects = length;
		  else
		    side_effects = build2 (COMPOUND_EXPR,
					   TREE_TYPE (side_effects),
					   length, side_effects);
		}
	    }
	  else if (processing_template_decl)
	    continue;
	  else
	    {
	      tree l;

	      if (i > first_non_one
		  && ((length && integer_nonzerop (length))
		      || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		      || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
		      || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION))
		continue;
	      /* L is this dimension's element count; if no length was
		 given it is the rest of the array from LOW_BOUND.  */
	      if (length)
		l = fold_convert (sizetype, length);
	      else
		{
		  l = size_binop (PLUS_EXPR,
				  TYPE_MAX_VALUE (TYPE_DOMAIN (types[i])),
				  size_one_node);
		  l = size_binop (MINUS_EXPR, l,
				  fold_convert (sizetype, low_bound));
		}
	      if (i > first_non_one)
		{
		  /* Dimensions that may be zero-length contribute a
		     runtime non-zero test instead of a factor.  */
		  l = fold_build2 (NE_EXPR, boolean_type_node, l,
				   size_zero_node);
		  if (condition == NULL_TREE)
		    condition = l;
		  else
		    condition = fold_build2 (BIT_AND_EXPR, boolean_type_node,
					     l, condition);
		}
	      else if (size == NULL_TREE)
		{
		  size = size_in_bytes (TREE_TYPE (types[i]));
		  tree eltype = TREE_TYPE (types[num - 1]);
		  while (TREE_CODE (eltype) == ARRAY_TYPE)
		    eltype = TREE_TYPE (eltype);
		  /* For reductions SIZE counts elements, not bytes.  */
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		      || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
		      || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
		    size = size_binop (EXACT_DIV_EXPR, size,
				       size_in_bytes (eltype));
		  size = size_binop (MULT_EXPR, size, l);
		  if (condition)
		    size = fold_build3 (COND_EXPR, sizetype, condition,
					size, size_zero_node);
		}
	      else
		size = size_binop (MULT_EXPR, size, l);
	    }
	}
      if (!processing_template_decl)
	{
	  if (side_effects)
	    size = build2 (COMPOUND_EXPR, sizetype, side_effects, size);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
	      || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
	    {
	      /* For reductions, rewrite the clause decl as a MEM_REF of
		 an array type covering the whole section, offset by the
		 distance from the base to the first element.  */
	      size = size_binop (MINUS_EXPR, size, size_one_node);
	      size = save_expr (size);
	      tree index_type = build_index_type (size);
	      tree eltype = TREE_TYPE (first);
	      while (TREE_CODE (eltype) == ARRAY_TYPE)
		eltype = TREE_TYPE (eltype);
	      tree type = build_array_type (eltype, index_type);
	      tree ptype = build_pointer_type (eltype);
	      if (TYPE_REF_P (TREE_TYPE (t))
		  && INDIRECT_TYPE_P (TREE_TYPE (TREE_TYPE (t))))
		t = convert_from_reference (t);
	      else if (TREE_CODE (TREE_TYPE (t)) == ARRAY_TYPE)
		t = build_fold_addr_expr (t);
	      /* T2 = &first - t, the byte offset of the section start.  */
	      tree t2 = build_fold_addr_expr (first);
	      t2 = fold_convert_loc (OMP_CLAUSE_LOCATION (c),
				     ptrdiff_type_node, t2);
	      t2 = fold_build2_loc (OMP_CLAUSE_LOCATION (c), MINUS_EXPR,
				    ptrdiff_type_node, t2,
				    fold_convert_loc (OMP_CLAUSE_LOCATION (c),
						      ptrdiff_type_node, t));
	      if (tree_fits_shwi_p (t2))
		t = build2 (MEM_REF, type, t,
			    build_int_cst (ptype, tree_to_shwi (t2)));
	      else
		{
		  /* Non-constant offset: add it to the pointer and use
		     a zero-offset MEM_REF.  */
		  t2 = fold_convert_loc (OMP_CLAUSE_LOCATION (c),
					 sizetype, t2);
		  t = build2_loc (OMP_CLAUSE_LOCATION (c), code: POINTER_PLUS_EXPR,
				  TREE_TYPE (t), arg0: t, arg1: t2);
		  t = build2 (MEM_REF, type, t, build_int_cst (ptype, 0));
		}
	      OMP_CLAUSE_DECL (c) = t;
	      return false;
	    }
	  OMP_CLAUSE_DECL (c) = first;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
	    return false;
	  /* attach/detach maps keep their original size operand.  */
	  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
	      || (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
		  && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH
		  && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DETACH))
	    OMP_CLAUSE_SIZE (c) = size;
	  if (TREE_CODE (t) == FIELD_DECL)
	    t = finish_non_static_data_member (decl: t, NULL_TREE, NULL_TREE);

	  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
	    return false;

	  if (TREE_CODE (first) == INDIRECT_REF)
	    {
	      /* Detect and skip adding extra nodes for pointer-to-member
		 mappings.  These are unsupported for now.  */
	      tree tmp = TREE_OPERAND (first, 0);

	      if (TREE_CODE (tmp) == NON_LVALUE_EXPR)
		tmp = TREE_OPERAND (tmp, 0);

	      if (TREE_CODE (tmp) == INDIRECT_REF)
		tmp = TREE_OPERAND (tmp, 0);

	      if (TREE_CODE (tmp) == POINTER_PLUS_EXPR)
		{
		  tree offset = TREE_OPERAND (tmp, 1);
		  STRIP_NOPS (offset);
		  if (TYPE_PTRMEM_P (TREE_TYPE (offset)))
		    {
		      sorry_at (OMP_CLAUSE_LOCATION (c),
				"pointer-to-member mapping %qE not supported" ,
				OMP_CLAUSE_DECL (c));
		      return true;
		    }
		}
	    }

	  /* FIRST represents the first item of data that we are mapping.
	     E.g. if we're mapping an array, FIRST might resemble
	     "foo.bar.myarray[0]".  */

	  auto_vec<omp_addr_token *, 10> addr_tokens;

	  if (!omp_parse_expr (addr_tokens, first))
	    return true;

	  cp_omp_address_inspector ai (OMP_CLAUSE_LOCATION (c), t);

	  /* Expand the map clause into the additional nodes (pointer
	     attachment etc.) the address's shape requires.  */
	  tree nc = ai.expand_map_clause (c, first, addr_tokens, ort);
	  if (nc != error_mark_node)
	    {
	      using namespace omp_addr_tokenizer;

	      if (ai.maybe_zero_length_array_section (c))
		OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c) = 1;

	      /* !!! If we're accessing a base decl via chained access
		 methods (e.g. multiple indirections), duplicate clause
		 detection won't work properly.  Skip it in that case.  */
	      if ((addr_tokens[0]->type == STRUCTURE_BASE
		   || addr_tokens[0]->type == ARRAY_BASE)
		  && addr_tokens[0]->u.structure_base_kind == BASE_DECL
		  && addr_tokens[1]->type == ACCESS_METHOD
		  && omp_access_chain_p (addr_tokens, 1))
		c = nc;

	      return false;
	    }
	}
    }
  return false;
}
6161 | |
6162 | /* Return identifier to look up for omp declare reduction. */ |
6163 | |
6164 | tree |
6165 | omp_reduction_id (enum tree_code reduction_code, tree reduction_id, tree type) |
6166 | { |
6167 | const char *p = NULL; |
6168 | const char *m = NULL; |
6169 | switch (reduction_code) |
6170 | { |
6171 | case PLUS_EXPR: |
6172 | case MULT_EXPR: |
6173 | case MINUS_EXPR: |
6174 | case BIT_AND_EXPR: |
6175 | case BIT_XOR_EXPR: |
6176 | case BIT_IOR_EXPR: |
6177 | case TRUTH_ANDIF_EXPR: |
6178 | case TRUTH_ORIF_EXPR: |
6179 | reduction_id = ovl_op_identifier (isass: false, code: reduction_code); |
6180 | break; |
6181 | case MIN_EXPR: |
6182 | p = "min" ; |
6183 | break; |
6184 | case MAX_EXPR: |
6185 | p = "max" ; |
6186 | break; |
6187 | default: |
6188 | break; |
6189 | } |
6190 | |
6191 | if (p == NULL) |
6192 | { |
6193 | if (TREE_CODE (reduction_id) != IDENTIFIER_NODE) |
6194 | return error_mark_node; |
6195 | p = IDENTIFIER_POINTER (reduction_id); |
6196 | } |
6197 | |
6198 | if (type != NULL_TREE) |
6199 | m = mangle_type_string (TYPE_MAIN_VARIANT (type)); |
6200 | |
6201 | const char prefix[] = "omp declare reduction " ; |
6202 | size_t lenp = sizeof (prefix); |
6203 | if (strncmp (s1: p, s2: prefix, n: lenp - 1) == 0) |
6204 | lenp = 1; |
6205 | size_t len = strlen (s: p); |
6206 | size_t lenm = m ? strlen (s: m) + 1 : 0; |
6207 | char *name = XALLOCAVEC (char, lenp + len + lenm); |
6208 | if (lenp > 1) |
6209 | memcpy (dest: name, src: prefix, n: lenp - 1); |
6210 | memcpy (dest: name + lenp - 1, src: p, n: len + 1); |
6211 | if (m) |
6212 | { |
6213 | name[lenp + len - 1] = '~'; |
6214 | memcpy (dest: name + lenp + len, src: m, n: lenm); |
6215 | } |
6216 | return get_identifier (name); |
6217 | } |
6218 | |
6219 | /* Lookup OpenMP UDR ID for TYPE, return the corresponding artificial |
6220 | FUNCTION_DECL or NULL_TREE if not found. */ |
6221 | |
static tree
omp_reduction_lookup (location_t loc, tree id, tree type, tree *baselinkp,
		      vec<tree> *ambiguousp)
{
  tree orig_id = id;
  tree baselink = NULL_TREE;
  if (identifier_p (t: id))
    {
      /* Plain identifier: build the "omp declare reduction" name for
	 TYPE, do ordinary lookup, and fall back to argument-dependent
	 lookup if unqualified lookup left a bare identifier.  */
      cp_id_kind idk;
      bool nonint_cst_expression_p;
      const char *error_msg;
      id = omp_reduction_id (reduction_code: ERROR_MARK, reduction_id: id, type);
      tree decl = lookup_name (name: id);
      if (decl == NULL_TREE)
	decl = error_mark_node;
      id = finish_id_expression (id_expression: id, decl, NULL_TREE, idk: &idk, integral_constant_expression_p: false, allow_non_integral_constant_expression_p: true,
				 non_integral_constant_expression_p: &nonint_cst_expression_p, template_p: false, done: true, address_p: false,
				 template_arg_p: false, error_msg: &error_msg, location: loc);
      if (idk == CP_ID_KIND_UNQUALIFIED
	  && identifier_p (t: id))
	{
	  vec<tree, va_gc> *args = NULL;
	  vec_safe_push (v&: args, obj: build_reference_type (type));
	  id = perform_koenig_lookup (fn_expr: id, args, complain: tf_none);
	}
    }
  else if (TREE_CODE (id) == SCOPE_REF)
    /* Qualified name: look the UDR identifier up in the named scope.  */
    id = lookup_qualified_name (TREE_OPERAND (id, 0),
				name: omp_reduction_id (reduction_code: ERROR_MARK,
						  TREE_OPERAND (id, 1),
						  type),
				LOOK_want::NORMAL, false);
  tree fns = id;
  id = NULL_TREE;
  if (fns && is_overloaded_fn (fns))
    {
      /* Select the overload whose first parameter is a reference to
	 TYPE itself.  */
      for (lkp_iterator iter (get_fns (fns)); iter; ++iter)
	{
	  tree fndecl = *iter;
	  if (TREE_CODE (fndecl) == FUNCTION_DECL)
	    {
	      tree argtype = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
	      if (same_type_p (TREE_TYPE (argtype), type))
		{
		  id = fndecl;
		  break;
		}
	    }
	}

      if (id && BASELINK_P (fns))
	{
	  /* Remember the baselink so the access check can be done,
	     either here or by the outermost caller.  */
	  if (baselinkp)
	    *baselinkp = fns;
	  else
	    baselink = fns;
	}
    }

  if (!id && CLASS_TYPE_P (type) && TYPE_BINFO (type))
    {
      /* Nothing matched TYPE directly; recurse into its direct bases
	 and diagnose if more than one base contributes a UDR.  */
      auto_vec<tree> ambiguous;
      tree binfo = TYPE_BINFO (type), base_binfo, ret = NULL_TREE;
      unsigned int ix;
      if (ambiguousp == NULL)
	ambiguousp = &ambiguous;
      for (ix = 0; BINFO_BASE_ITERATE (binfo, ix, base_binfo); ix++)
	{
	  id = omp_reduction_lookup (loc, id: orig_id, BINFO_TYPE (base_binfo),
				     baselinkp: baselinkp ? baselinkp : &baselink,
				     ambiguousp);
	  if (id == NULL_TREE)
	    continue;
	  if (!ambiguousp->is_empty ())
	    ambiguousp->safe_push (obj: id);
	  else if (ret != NULL_TREE)
	    {
	      /* A second distinct match: record both candidates.  */
	      ambiguousp->safe_push (obj: ret);
	      ambiguousp->safe_push (obj: id);
	      ret = NULL_TREE;
	    }
	  else
	    ret = id;
	}
      /* If a caller supplied AMBIGUOUSP, let the outermost recursion
	 level emit the diagnostic.  */
      if (ambiguousp != &ambiguous)
	return ret;
      if (!ambiguous.is_empty ())
	{
	  const char *str = _("candidates are:" );
	  unsigned int idx;
	  tree udr;
	  error_at (loc, "user defined reduction lookup is ambiguous" );
	  FOR_EACH_VEC_ELT (ambiguous, idx, udr)
	    {
	      inform (DECL_SOURCE_LOCATION (udr), "%s %#qD" , str, udr);
	      if (idx == 0)
		str = get_spaces (str);
	    }
	  ret = error_mark_node;
	  baselink = NULL_TREE;
	}
      id = ret;
    }
  if (id && baselink)
    perform_or_defer_access_check (BASELINK_BINFO (baselink),
				   decl: id, diag_decl: id, complain: tf_warning_or_error);
  return id;
}
6330 | |
6331 | /* Helper function for cp_parser_omp_declare_reduction_exprs |
6332 | and tsubst_omp_udr. |
6333 | Remove CLEANUP_STMT for data (omp_priv variable). |
6334 | Also append INIT_EXPR for DECL_INITIAL of omp_priv after its |
6335 | DECL_EXPR. */ |
6336 | |
tree
cp_remove_omp_priv_cleanup_stmt (tree *tp, int *walk_subtrees, void *data)
{
  if (TYPE_P (*tp))
    /* Types contain no statements to rewrite; don't walk into them.  */
    *walk_subtrees = 0;
  else if (TREE_CODE (*tp) == CLEANUP_STMT && CLEANUP_DECL (*tp) == (tree) data)
    /* Drop the cleanup for the omp_priv variable, keeping its body.  */
    *tp = CLEANUP_BODY (*tp);
  else if (TREE_CODE (*tp) == DECL_EXPR)
    {
      tree decl = DECL_EXPR_DECL (*tp);
      if (!processing_template_decl
	  && decl == (tree) data
	  && DECL_INITIAL (decl)
	  && DECL_INITIAL (decl) != error_mark_node)
	{
	  /* Replace the DECL_EXPR with a two-statement list: the
	     DECL_EXPR itself followed by an explicit INIT_EXPR, moving
	     the initializer out of DECL_INITIAL.  */
	  tree list = NULL_TREE;
	  append_to_statement_list_force (*tp, &list);
	  tree init_expr = build2 (INIT_EXPR, void_type_node,
				   decl, DECL_INITIAL (decl));
	  DECL_INITIAL (decl) = NULL_TREE;
	  append_to_statement_list_force (init_expr, &list);
	  *tp = list;
	}
    }
  return NULL_TREE;
}
6363 | |
6364 | /* Data passed from cp_check_omp_declare_reduction to |
6365 | cp_check_omp_declare_reduction_r. */ |
6366 | |
struct cp_check_omp_declare_reduction_data
{
  /* Location of the UDR, used for diagnostics.  */
  location_t loc;
  /* Up to 7 statements from the UDR body: [0]/[1] are the omp_out/omp_in
     DECL_EXPRs and [2] the combiner; [3]/[4] are the omp_priv/omp_orig
     DECL_EXPRs, [5] the initializer and [6] an optional trailing
     DECL_EXPR.  */
  tree stmts[7];
  /* True while checking the combiner, false for the initializer.  */
  bool combiner_p;
};
6373 | |
6374 | /* Helper function for cp_check_omp_declare_reduction, called via |
6375 | cp_walk_tree. */ |
6376 | |
6377 | static tree |
6378 | cp_check_omp_declare_reduction_r (tree *tp, int *, void *data) |
6379 | { |
6380 | struct cp_check_omp_declare_reduction_data *udr_data |
6381 | = (struct cp_check_omp_declare_reduction_data *) data; |
6382 | if (SSA_VAR_P (*tp) |
6383 | && !DECL_ARTIFICIAL (*tp) |
6384 | && *tp != DECL_EXPR_DECL (udr_data->stmts[udr_data->combiner_p ? 0 : 3]) |
6385 | && *tp != DECL_EXPR_DECL (udr_data->stmts[udr_data->combiner_p ? 1 : 4])) |
6386 | { |
6387 | location_t loc = udr_data->loc; |
6388 | if (udr_data->combiner_p) |
6389 | error_at (loc, "%<#pragma omp declare reduction%> combiner refers to " |
6390 | "variable %qD which is not %<omp_out%> nor %<omp_in%>" , |
6391 | *tp); |
6392 | else |
6393 | error_at (loc, "%<#pragma omp declare reduction%> initializer refers " |
6394 | "to variable %qD which is not %<omp_priv%> nor " |
6395 | "%<omp_orig%>" , |
6396 | *tp); |
6397 | return *tp; |
6398 | } |
6399 | return NULL_TREE; |
6400 | } |
6401 | |
6402 | /* Diagnose violation of OpenMP #pragma omp declare reduction restrictions. */ |
6403 | |
bool
cp_check_omp_declare_reduction (tree udr)
{
  tree type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (udr)));
  gcc_assert (TYPE_REF_P (type));
  type = TREE_TYPE (type);
  int i;
  location_t loc = DECL_SOURCE_LOCATION (udr);

  if (type == error_mark_node)
    return false;
  if (ARITHMETIC_TYPE_P (type))
    {
      /* Arithmetic types may not redeclare the predefined reduction
	 operators; compare the UDR's mangled name against each.  */
      static enum tree_code predef_codes[]
	= { PLUS_EXPR, MULT_EXPR, MINUS_EXPR, BIT_AND_EXPR, BIT_XOR_EXPR,
	    BIT_IOR_EXPR, TRUTH_ANDIF_EXPR, TRUTH_ORIF_EXPR };
      for (i = 0; i < 8; i++)
	{
	  tree id = omp_reduction_id (reduction_code: predef_codes[i], NULL_TREE, NULL_TREE);
	  const char *n1 = IDENTIFIER_POINTER (DECL_NAME (udr));
	  const char *n2 = IDENTIFIER_POINTER (id);
	  if (strncmp (s1: n1, s2: n2, IDENTIFIER_LENGTH (id)) == 0
	      && (n1[IDENTIFIER_LENGTH (id)] == '~'
		  || n1[IDENTIFIER_LENGTH (id)] == '\0'))
	    break;
	}

      /* NOTE(review): COMPLEX_EXPR is an expression code, so comparing
	 it against TREE_CODE of a type node is always unequal and this
	 condition is always true here; COMPLEX_TYPE was presumably
	 intended -- confirm.  */
      if (i == 8
	  && TREE_CODE (type) != COMPLEX_EXPR)
	{
	  /* Also reject the predeclared min/max reductions.  */
	  const char prefix_minmax[] = "omp declare reduction m" ;
	  size_t prefix_size = sizeof (prefix_minmax) - 1;
	  const char *n = IDENTIFIER_POINTER (DECL_NAME (udr));
	  if (strncmp (IDENTIFIER_POINTER (DECL_NAME (udr)),
		       s2: prefix_minmax, n: prefix_size) == 0
	      && ((n[prefix_size] == 'i' && n[prefix_size + 1] == 'n')
		  || (n[prefix_size] == 'a' && n[prefix_size + 1] == 'x'))
	      && (n[prefix_size + 2] == '~' || n[prefix_size + 2] == '\0'))
	    i = 0;
	}
      if (i < 8)
	{
	  error_at (loc, "predeclared arithmetic type %qT in "
			 "%<#pragma omp declare reduction%>" , type);
	  return false;
	}
    }
  else if (FUNC_OR_METHOD_TYPE_P (type)
	   || TREE_CODE (type) == ARRAY_TYPE)
    {
      error_at (loc, "function or array type %qT in "
		     "%<#pragma omp declare reduction%>" , type);
      return false;
    }
  else if (TYPE_REF_P (type))
    {
      error_at (loc, "reference type %qT in %<#pragma omp declare reduction%>" ,
		type);
      return false;
    }
  else if (TYPE_QUALS_NO_ADDR_SPACE (type))
    {
      error_at (loc, "%<const%>, %<volatile%> or %<__restrict%>-qualified "
		     "type %qT in %<#pragma omp declare reduction%>" , type);
      return false;
    }

  tree body = DECL_SAVED_TREE (udr);
  if (body == NULL_TREE || TREE_CODE (body) != STATEMENT_LIST)
    return true;

  /* Collect the UDR body statements and walk the combiner and the
     initializer to diagnose references to variables other than the
     special omp_* ones.  */
  tree_stmt_iterator tsi;
  struct cp_check_omp_declare_reduction_data data;
  memset (s: data.stmts, c: 0, n: sizeof data.stmts);
  for (i = 0, tsi = tsi_start (t: body);
       i < 7 && !tsi_end_p (i: tsi);
       i++, tsi_next (i: &tsi))
    data.stmts[i] = tsi_stmt (i: tsi);
  data.loc = loc;
  gcc_assert (tsi_end_p (tsi));
  if (i >= 3)
    {
      gcc_assert (TREE_CODE (data.stmts[0]) == DECL_EXPR
		  && TREE_CODE (data.stmts[1]) == DECL_EXPR);
      /* A suppressed warning on omp_out's DECL_EXPR marks an already
	 diagnosed UDR; don't diagnose it twice.  */
      if (warning_suppressed_p (DECL_EXPR_DECL (data.stmts[0]) /* What warning? */))
	return true;
      data.combiner_p = true;
      if (cp_walk_tree (&data.stmts[2], cp_check_omp_declare_reduction_r,
			&data, NULL))
	suppress_warning (DECL_EXPR_DECL (data.stmts[0]) /* What warning? */);
    }
  if (i >= 6)
    {
      gcc_assert (TREE_CODE (data.stmts[3]) == DECL_EXPR
		  && TREE_CODE (data.stmts[4]) == DECL_EXPR);
      data.combiner_p = false;
      /* Check both the initializer statement and omp_priv's
	 DECL_INITIAL.  */
      if (cp_walk_tree (&data.stmts[5], cp_check_omp_declare_reduction_r,
			&data, NULL)
	  || cp_walk_tree (&DECL_INITIAL (DECL_EXPR_DECL (data.stmts[3])),
			   cp_check_omp_declare_reduction_r, &data, NULL))
	suppress_warning (DECL_EXPR_DECL (data.stmts[0]) /* What warning? */);
      if (i == 7)
	gcc_assert (TREE_CODE (data.stmts[6]) == DECL_EXPR);
    }
  return true;
}
6510 | |
/* Helper function of finish_omp_clauses.  Clone STMT as if we were making
   an inline call, but remap the OMP_DECL1 VAR_DECL (omp_out resp. omp_orig)
   to PLACEHOLDER and the OMP_DECL2 VAR_DECL (omp_in resp. omp_priv) to
   DECL.  */
6515 | |
static tree
clone_omp_udr (tree stmt, tree omp_decl1, tree omp_decl2,
	       tree decl, tree placeholder)
{
  copy_body_data id;
  hash_map<tree, tree> decl_map;

  /* Map the UDR's special variables to the caller-provided trees.  */
  decl_map.put (k: omp_decl1, v: placeholder);
  decl_map.put (k: omp_decl2, v: decl);
  /* Set up the copy_body_data as if inlining STMT from the UDR's
     artificial function into the current function.  */
  memset (s: &id, c: 0, n: sizeof (id));
  id.src_fn = DECL_CONTEXT (omp_decl1);
  id.dst_fn = current_function_decl;
  id.src_cfun = DECL_STRUCT_FUNCTION (id.src_fn);
  id.decl_map = &decl_map;

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  id.transform_new_cfg = true;
  id.transform_return_to_modify = false;
  id.eh_lp_nr = 0;
  walk_tree (&stmt, copy_tree_body_r, &id, NULL);
  return stmt;
}
6539 | |
6540 | /* Helper function of finish_omp_clauses, called via cp_walk_tree. |
6541 | Find OMP_CLAUSE_PLACEHOLDER (passed in DATA) in *TP. */ |
6542 | |
6543 | static tree |
6544 | find_omp_placeholder_r (tree *tp, int *, void *data) |
6545 | { |
6546 | if (*tp == (tree) data) |
6547 | return *tp; |
6548 | return NULL_TREE; |
6549 | } |
6550 | |
6551 | /* Helper function of finish_omp_clauses. Handle OMP_CLAUSE_REDUCTION C. |
6552 | Return true if there is some error and the clause should be removed. */ |
6553 | |
static bool
finish_omp_reduction_clause (tree c, bool *need_default_ctor, bool *need_dtor)
{
  tree t = OMP_CLAUSE_DECL (c);
  bool predefined = false;
  if (TREE_CODE (t) == TREE_LIST)
    {
      /* A TREE_LIST decl is only expected inside a template; nothing
	 can be checked yet.  */
      gcc_assert (processing_template_decl);
      return false;
    }
  /* Strip MEM_REF and reference wrappers to find the element type the
     reduction actually operates on.  */
  tree type = TREE_TYPE (t);
  if (TREE_CODE (t) == MEM_REF)
    type = TREE_TYPE (type);
  if (TYPE_REF_P (type))
    type = TREE_TYPE (type);
  if (TREE_CODE (type) == ARRAY_TYPE)
    {
      /* Reduction over an array: compute the element count and rewrite
	 the clause decl as a MEM_REF with an array type of that size.  */
      tree oatype = type;
      gcc_assert (TREE_CODE (t) != MEM_REF);
      while (TREE_CODE (type) == ARRAY_TYPE)
	type = TREE_TYPE (type);
      if (!processing_template_decl)
	{
	  t = require_complete_type (t);
	  if (t == error_mark_node
	      || !complete_type_or_else (oatype, NULL_TREE))
	    return true;
	  tree size = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (oatype),
				  TYPE_SIZE_UNIT (type));
	  if (integer_zerop (size))
	    {
	      error_at (OMP_CLAUSE_LOCATION (c),
			"%qE in %<reduction%> clause is a zero size array" ,
			omp_clause_printable_decl (decl: t));
	      return true;
	    }
	  size = size_binop (MINUS_EXPR, size, size_one_node);
	  size = save_expr (size);
	  tree index_type = build_index_type (size);
	  tree atype = build_array_type (type, index_type);
	  tree ptype = build_pointer_type (type);
	  if (TREE_CODE (TREE_TYPE (t)) == ARRAY_TYPE)
	    t = build_fold_addr_expr (t);
	  t = build2 (MEM_REF, atype, t, build_int_cst (ptype, 0));
	  OMP_CLAUSE_DECL (c) = t;
	}
    }
  if (type == error_mark_node)
    return true;
  else if (ARITHMETIC_TYPE_P (type))
    /* Decide whether the reduction operator is one of the predefined
       ones for this arithmetic type.  */
    switch (OMP_CLAUSE_REDUCTION_CODE (c))
      {
      case PLUS_EXPR:
      case MULT_EXPR:
      case MINUS_EXPR:
      case TRUTH_ANDIF_EXPR:
      case TRUTH_ORIF_EXPR:
	predefined = true;
	break;
      case MIN_EXPR:
      case MAX_EXPR:
	/* min/max are not predefined for complex types.  */
	if (TREE_CODE (type) == COMPLEX_TYPE)
	  break;
	predefined = true;
	break;
      case BIT_AND_EXPR:
      case BIT_IOR_EXPR:
      case BIT_XOR_EXPR:
	/* Bitwise operators are not predefined for floating-point or
	   complex types.  */
	if (FLOAT_TYPE_P (type) || TREE_CODE (type) == COMPLEX_TYPE)
	  break;
	predefined = true;
	break;
      default:
	break;
      }
  else if (TYPE_READONLY (type))
    {
      error_at (OMP_CLAUSE_LOCATION (c),
		"%qE has const type for %<reduction%>" ,
		omp_clause_printable_decl (decl: t));
      return true;
    }
  else if (!processing_template_decl)
    {
      t = require_complete_type (t);
      if (t == error_mark_node)
	return true;
      OMP_CLAUSE_DECL (c) = t;
    }

  if (predefined)
    {
      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL_TREE;
      return false;
    }
  else if (processing_template_decl)
    {
      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == error_mark_node)
	return true;
      return false;
    }

  tree id = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);

  type = TYPE_MAIN_VARIANT (type);
  OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL_TREE;
  if (id == NULL_TREE)
    id = omp_reduction_id (OMP_CLAUSE_REDUCTION_CODE (c),
			   NULL_TREE, NULL_TREE);
  /* Look up a user defined reduction matching TYPE.  */
  id = omp_reduction_lookup (OMP_CLAUSE_LOCATION (c), id, type, NULL, NULL);
  if (id)
    {
      if (id == error_mark_node)
	return true;
      mark_used (id);
      tree body = DECL_SAVED_TREE (id);
      if (!body)
	return true;
      if (TREE_CODE (body) == STATEMENT_LIST)
	{
	  /* Collect the up-to-7 statements of the UDR artificial
	     function: omp_out/omp_in DECL_EXPRs plus the combiner,
	     then optionally omp_priv/omp_orig DECL_EXPRs plus the
	     initializer.  */
	  tree_stmt_iterator tsi;
	  tree placeholder = NULL_TREE, decl_placeholder = NULL_TREE;
	  int i;
	  tree stmts[7];
	  tree atype = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (id)));
	  atype = TREE_TYPE (atype);
	  bool need_static_cast = !same_type_p (type, atype);
	  memset (s: stmts, c: 0, n: sizeof stmts);
	  for (i = 0, tsi = tsi_start (t: body);
	       i < 7 && !tsi_end_p (i: tsi);
	       i++, tsi_next (i: &tsi))
	    stmts[i] = tsi_stmt (i: tsi);
	  gcc_assert (tsi_end_p (tsi));

	  if (i >= 3)
	    {
	      /* Build the merge expression: clone the combiner with
		 omp_out/omp_in remapped to the placeholder and the
		 clause decl (or DECL placeholder for MEM_REFs).  */
	      gcc_assert (TREE_CODE (stmts[0]) == DECL_EXPR
			  && TREE_CODE (stmts[1]) == DECL_EXPR);
	      placeholder = build_lang_decl (VAR_DECL, NULL_TREE, type);
	      DECL_ARTIFICIAL (placeholder) = 1;
	      DECL_IGNORED_P (placeholder) = 1;
	      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = placeholder;
	      if (TREE_CODE (t) == MEM_REF)
		{
		  decl_placeholder = build_lang_decl (VAR_DECL, NULL_TREE,
						      type);
		  DECL_ARTIFICIAL (decl_placeholder) = 1;
		  DECL_IGNORED_P (decl_placeholder) = 1;
		  OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = decl_placeholder;
		}
	      if (TREE_ADDRESSABLE (DECL_EXPR_DECL (stmts[0])))
		cxx_mark_addressable (placeholder);
	      if (TREE_ADDRESSABLE (DECL_EXPR_DECL (stmts[1]))
		  && (decl_placeholder
		      || !TYPE_REF_P (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
		cxx_mark_addressable (decl_placeholder ? decl_placeholder
				      : OMP_CLAUSE_DECL (c));
	      tree omp_out = placeholder;
	      tree omp_in = decl_placeholder ? decl_placeholder
			    : convert_from_reference (OMP_CLAUSE_DECL (c));
	      if (need_static_cast)
		{
		  /* The UDR was found on a base class of TYPE; cast
		     both operands to a reference to the base.  */
		  tree rtype = build_reference_type (atype);
		  omp_out = build_static_cast (input_location,
					       rtype, omp_out,
					       tf_warning_or_error);
		  omp_in = build_static_cast (input_location,
					      rtype, omp_in,
					      tf_warning_or_error);
		  if (omp_out == error_mark_node || omp_in == error_mark_node)
		    return true;
		  omp_out = convert_from_reference (omp_out);
		  omp_in = convert_from_reference (omp_in);
		}
	      OMP_CLAUSE_REDUCTION_MERGE (c)
		= clone_omp_udr (stmt: stmts[2], DECL_EXPR_DECL (stmts[0]),
				 DECL_EXPR_DECL (stmts[1]), decl: omp_in, placeholder: omp_out);
	    }
	  if (i >= 6)
	    {
	      /* Build the init expression: clone the user initializer
		 with omp_priv/omp_orig remapped similarly.  */
	      gcc_assert (TREE_CODE (stmts[3]) == DECL_EXPR
			  && TREE_CODE (stmts[4]) == DECL_EXPR);
	      if (TREE_ADDRESSABLE (DECL_EXPR_DECL (stmts[3]))
		  && (decl_placeholder
		      || !TYPE_REF_P (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
		cxx_mark_addressable (decl_placeholder ? decl_placeholder
				      : OMP_CLAUSE_DECL (c));
	      if (TREE_ADDRESSABLE (DECL_EXPR_DECL (stmts[4])))
		cxx_mark_addressable (placeholder);
	      tree omp_priv = decl_placeholder ? decl_placeholder
			      : convert_from_reference (OMP_CLAUSE_DECL (c));
	      tree omp_orig = placeholder;
	      if (need_static_cast)
		{
		  if (i == 7)
		    {
		      error_at (OMP_CLAUSE_LOCATION (c),
				"user defined reduction with constructor "
				"initializer for base class %qT" , atype);
		      return true;
		    }
		  tree rtype = build_reference_type (atype);
		  omp_priv = build_static_cast (input_location,
						rtype, omp_priv,
						tf_warning_or_error);
		  omp_orig = build_static_cast (input_location,
						rtype, omp_orig,
						tf_warning_or_error);
		  if (omp_priv == error_mark_node
		      || omp_orig == error_mark_node)
		    return true;
		  omp_priv = convert_from_reference (omp_priv);
		  omp_orig = convert_from_reference (omp_orig);
		}
	      if (i == 6)
		*need_default_ctor = true;
	      OMP_CLAUSE_REDUCTION_INIT (c)
		= clone_omp_udr (stmt: stmts[5], DECL_EXPR_DECL (stmts[4]),
				 DECL_EXPR_DECL (stmts[3]),
				 decl: omp_priv, placeholder: omp_orig);
	      /* Record whether the initializer references omp_orig.  */
	      if (cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
				find_omp_placeholder_r, placeholder, NULL))
		OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c) = 1;
	    }
	  else if (i >= 3)
	    {
	      /* No user initializer: default-construct non-POD class
		 types, otherwise zero-initialize.  */
	      if (CLASS_TYPE_P (type) && !pod_type_p (type))
		*need_default_ctor = true;
	      else
		{
		  tree init;
		  tree v = decl_placeholder ? decl_placeholder
			   : convert_from_reference (t);
		  if (AGGREGATE_TYPE_P (TREE_TYPE (v)))
		    init = build_constructor (TREE_TYPE (v), NULL);
		  else
		    init = fold_convert (TREE_TYPE (v), integer_zero_node);
		  OMP_CLAUSE_REDUCTION_INIT (c)
		    = cp_build_init_expr (t: v, i: init);
		}
	    }
	}
    }
  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
    *need_dtor = true;
  else
    {
      error_at (OMP_CLAUSE_LOCATION (c),
		"user defined reduction not found for %qE" ,
		omp_clause_printable_decl (decl: t));
      return true;
    }
  if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
    gcc_assert (TYPE_SIZE_UNIT (type)
		&& TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
  return false;
}
6811 | |
6812 | /* Called from finish_struct_1. linear(this) or linear(this:step) |
6813 | clauses might not be finalized yet because the class has been incomplete |
6814 | when parsing #pragma omp declare simd methods. Fix those up now. */ |
6815 | |
void
finish_omp_declare_simd_methods (tree t)
{
  if (processing_template_decl)
    return;

  /* Scan every implicit-object member function of T for deferred
     "omp declare simd" linear(this:step) clauses.  */
  for (tree x = TYPE_FIELDS (t); x; x = DECL_CHAIN (x))
    {
      if (TREE_CODE (x) == USING_DECL
	  || !DECL_IOBJ_MEMBER_FUNCTION_P (x))
	continue;
      tree ods = lookup_attribute (attr_name: "omp declare simd" , DECL_ATTRIBUTES (x));
      if (!ods || !TREE_VALUE (ods))
	continue;
      for (tree c = TREE_VALUE (TREE_VALUE (ods)); c; c = OMP_CLAUSE_CHAIN (c))
	/* OMP_CLAUSE_DECL of 0 identifies a linear(this) clause; a
	   pointer-typed step means it hasn't been scaled yet.  */
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	    && integer_zerop (OMP_CLAUSE_DECL (c))
	    && OMP_CLAUSE_LINEAR_STEP (c)
	    && TYPE_PTR_P (TREE_TYPE (OMP_CLAUSE_LINEAR_STEP (c))))
	  {
	    /* Now that T is complete, scale the step by sizeof (T).  */
	    tree s = OMP_CLAUSE_LINEAR_STEP (c);
	    s = fold_convert_loc (OMP_CLAUSE_LOCATION (c), sizetype, s);
	    s = fold_build2_loc (OMP_CLAUSE_LOCATION (c), MULT_EXPR,
				 sizetype, s, TYPE_SIZE_UNIT (t));
	    OMP_CLAUSE_LINEAR_STEP (c) = s;
	  }
    }
}
6844 | |
6845 | /* Adjust sink depend/doacross clause to take into account pointer offsets. |
6846 | |
6847 | Return TRUE if there was a problem processing the offset, and the |
6848 | whole clause should be removed. */ |
6849 | |
static bool
cp_finish_omp_clause_doacross_sink (tree sink_clause)
{
  tree t = OMP_CLAUSE_DECL (sink_clause);
  gcc_assert (TREE_CODE (t) == TREE_LIST);

  /* Make sure we don't adjust things twice for templates.  */
  if (processing_template_decl)
    return false;

  /* Each TREE_LIST node pairs an iteration variable (TREE_VALUE) with
     its sink offset (TREE_PURPOSE).  */
  for (; t; t = TREE_CHAIN (t))
    {
      tree decl = TREE_VALUE (t);
      if (TYPE_PTR_P (TREE_TYPE (decl)))
	{
	  /* For pointer variables recompute the offset in bytes as
	     (decl +/- |offset|) - decl, so that pointer arithmetic
	     accounts for the pointee size.  */
	  tree offset = TREE_PURPOSE (t);
	  bool neg = wi::neg_p (x: wi::to_wide (t: offset));
	  offset = fold_unary (ABS_EXPR, TREE_TYPE (offset), offset);
	  decl = mark_rvalue_use (decl);
	  decl = convert_from_reference (decl);
	  tree t2 = pointer_int_sum (OMP_CLAUSE_LOCATION (sink_clause),
				     neg ? MINUS_EXPR : PLUS_EXPR,
				     decl, offset);
	  t2 = fold_build2_loc (OMP_CLAUSE_LOCATION (sink_clause),
				MINUS_EXPR, sizetype,
				fold_convert (sizetype, t2),
				fold_convert (sizetype, decl));
	  if (t2 == error_mark_node)
	    return true;
	  TREE_PURPOSE (t) = t2;
	}
    }
  return false;
}
6884 | |
/* Finish OpenMP iterators ITER.  Return true if they are erroneous
   and clauses containing them should be removed.  */
6887 | |
static bool
cp_omp_finish_iterators (tree iter)
{
  bool ret = false;
  for (tree it = iter; it; it = TREE_CHAIN (it))
    {
      /* Each iterator is a TREE_VEC: { var, begin, end, step, ... }.  */
      tree var = TREE_VEC_ELT (it, 0);
      tree begin = TREE_VEC_ELT (it, 1);
      tree end = TREE_VEC_ELT (it, 2);
      tree step = TREE_VEC_ELT (it, 3);
      tree orig_step;
      tree type = TREE_TYPE (var);
      location_t loc = DECL_SOURCE_LOCATION (var);
      if (type == error_mark_node)
	{
	  ret = true;
	  continue;
	}
      /* Defer checking of type-dependent iterators to instantiation.  */
      if (type_dependent_expression_p (var))
	continue;
      if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
	{
	  error_at (loc, "iterator %qD has neither integral nor pointer type" ,
		    var);
	  ret = true;
	  continue;
	}
      else if (TYPE_READONLY (type))
	{
	  error_at (loc, "iterator %qD has const qualified type" , var);
	  ret = true;
	  continue;
	}
      if (type_dependent_expression_p (begin)
	  || type_dependent_expression_p (end)
	  || type_dependent_expression_p (step))
	continue;
      else if (error_operand_p (t: step))
	{
	  ret = true;
	  continue;
	}
      else if (!INTEGRAL_TYPE_P (TREE_TYPE (step)))
	{
	  error_at (EXPR_LOC_OR_LOC (step, loc),
		    "iterator step with non-integral type" );
	  ret = true;
	  continue;
	}

      /* Convert begin/end to the iterator type and step to the matching
	 integral type (sizetype for pointer iterators).  */
      begin = mark_rvalue_use (begin);
      end = mark_rvalue_use (end);
      step = mark_rvalue_use (step);
      begin = cp_build_c_cast (input_location, type, begin,
			       tf_warning_or_error);
      end = cp_build_c_cast (input_location, type, end,
			     tf_warning_or_error);
      orig_step = step;
      if (!processing_template_decl)
	step = orig_step = save_expr (step);
      tree stype = POINTER_TYPE_P (type) ? sizetype : type;
      step = cp_build_c_cast (input_location, stype, step,
			      tf_warning_or_error);
      if (POINTER_TYPE_P (type) && !processing_template_decl)
	{
	  /* Express the step in bytes: (begin + step) - begin, using
	     pointer arithmetic to factor in the pointee size.  */
	  begin = save_expr (begin);
	  step = pointer_int_sum (loc, PLUS_EXPR, begin, step);
	  step = fold_build2_loc (loc, MINUS_EXPR, sizetype,
				  fold_convert (sizetype, step),
				  fold_convert (sizetype, begin));
	  step = fold_convert (ssizetype, step);
	}
      if (!processing_template_decl)
	{
	  begin = maybe_constant_value (begin);
	  end = maybe_constant_value (end);
	  step = maybe_constant_value (step);
	  orig_step = maybe_constant_value (orig_step);
	}
      if (integer_zerop (step))
	{
	  error_at (loc, "iterator %qD has zero step" , var);
	  ret = true;
	  continue;
	}

      if (begin == error_mark_node
	  || end == error_mark_node
	  || step == error_mark_node
	  || orig_step == error_mark_node)
	{
	  ret = true;
	  continue;
	}

      if (!processing_template_decl)
	{
	  begin = fold_build_cleanup_point_expr (TREE_TYPE (begin), expr: begin);
	  end = fold_build_cleanup_point_expr (TREE_TYPE (end), expr: end);
	  step = fold_build_cleanup_point_expr (TREE_TYPE (step), expr: step);
	  orig_step = fold_build_cleanup_point_expr (TREE_TYPE (orig_step),
						     expr: orig_step);
	}
      /* Diagnose any later iterator whose begin/end/step expression
	 refers to this (outer) iterator's variable.  */
      hash_set<tree> pset;
      tree it2;
      for (it2 = TREE_CHAIN (it); it2; it2 = TREE_CHAIN (it2))
	{
	  tree var2 = TREE_VEC_ELT (it2, 0);
	  tree begin2 = TREE_VEC_ELT (it2, 1);
	  tree end2 = TREE_VEC_ELT (it2, 2);
	  tree step2 = TREE_VEC_ELT (it2, 3);
	  location_t loc2 = DECL_SOURCE_LOCATION (var2);
	  if (cp_walk_tree (&begin2, find_omp_placeholder_r, var, &pset))
	    {
	      error_at (EXPR_LOC_OR_LOC (begin2, loc2),
			"begin expression refers to outer iterator %qD" , var);
	      break;
	    }
	  else if (cp_walk_tree (&end2, find_omp_placeholder_r, var, &pset))
	    {
	      error_at (EXPR_LOC_OR_LOC (end2, loc2),
			"end expression refers to outer iterator %qD" , var);
	      break;
	    }
	  else if (cp_walk_tree (&step2, find_omp_placeholder_r, var, &pset))
	    {
	      error_at (EXPR_LOC_OR_LOC (step2, loc2),
			"step expression refers to outer iterator %qD" , var);
	      break;
	    }
	}
      if (it2)
	{
	  ret = true;
	  continue;
	}
      /* Store the processed expressions back into the TREE_VEC; outside
	 templates slot 3 holds the (possibly byte-scaled) step and
	 slot 4 the original step.  */
      TREE_VEC_ELT (it, 1) = begin;
      TREE_VEC_ELT (it, 2) = end;
      if (processing_template_decl)
	TREE_VEC_ELT (it, 3) = orig_step;
      else
	{
	  TREE_VEC_ELT (it, 3) = step;
	  TREE_VEC_ELT (it, 4) = orig_step;
	}
    }
  return ret;
}
7036 | |
7037 | /* Ensure that pointers are used in OpenACC attach and detach clauses. |
7038 | Return true if an error has been detected. */ |
7039 | |
7040 | static bool |
7041 | cp_oacc_check_attachments (tree c) |
7042 | { |
7043 | if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP) |
7044 | return false; |
7045 | |
7046 | /* OpenACC attach / detach clauses must be pointers. */ |
7047 | if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH |
7048 | || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH) |
7049 | { |
7050 | tree t = OMP_CLAUSE_DECL (c); |
7051 | tree type; |
7052 | |
7053 | while (TREE_CODE (t) == OMP_ARRAY_SECTION) |
7054 | t = TREE_OPERAND (t, 0); |
7055 | |
7056 | type = TREE_TYPE (t); |
7057 | |
7058 | if (TREE_CODE (type) == REFERENCE_TYPE) |
7059 | type = TREE_TYPE (type); |
7060 | |
7061 | if (TREE_CODE (type) != POINTER_TYPE) |
7062 | { |
7063 | error_at (OMP_CLAUSE_LOCATION (c), "expected pointer in %qs clause" , |
7064 | user_omp_clause_code_name (c, true)); |
7065 | return true; |
7066 | } |
7067 | } |
7068 | |
7069 | return false; |
7070 | } |
7071 | |
7072 | /* For all elements of CLAUSES, validate them vs OpenMP constraints. |
7073 | Remove any elements from the list that are invalid. */ |
7074 | |
7075 | tree |
7076 | finish_omp_clauses (tree clauses, enum c_omp_region_type ort) |
7077 | { |
7078 | bitmap_head generic_head, firstprivate_head, lastprivate_head; |
7079 | bitmap_head aligned_head, map_head, map_field_head, map_firstprivate_head; |
7080 | bitmap_head oacc_reduction_head, is_on_device_head; |
7081 | tree c, t, *pc; |
7082 | tree safelen = NULL_TREE; |
7083 | bool openacc = (ort & C_ORT_ACC) != 0; |
7084 | bool branch_seen = false; |
7085 | bool copyprivate_seen = false; |
7086 | bool ordered_seen = false; |
7087 | bool order_seen = false; |
7088 | bool schedule_seen = false; |
7089 | bool oacc_async = false; |
7090 | bool indir_component_ref_p = false; |
7091 | tree last_iterators = NULL_TREE; |
7092 | bool last_iterators_remove = false; |
7093 | /* 1 if normal/task reduction has been seen, -1 if inscan reduction |
7094 | has been seen, -2 if mixed inscan/normal reduction diagnosed. */ |
7095 | int reduction_seen = 0; |
7096 | bool allocate_seen = false; |
7097 | tree detach_seen = NULL_TREE; |
7098 | bool mergeable_seen = false; |
7099 | bool implicit_moved = false; |
7100 | bool target_in_reduction_seen = false; |
7101 | |
7102 | bitmap_obstack_initialize (NULL); |
7103 | bitmap_initialize (head: &generic_head, obstack: &bitmap_default_obstack); |
7104 | bitmap_initialize (head: &firstprivate_head, obstack: &bitmap_default_obstack); |
7105 | bitmap_initialize (head: &lastprivate_head, obstack: &bitmap_default_obstack); |
7106 | bitmap_initialize (head: &aligned_head, obstack: &bitmap_default_obstack); |
7107 | /* If ort == C_ORT_OMP_DECLARE_SIMD used as uniform_head instead. */ |
7108 | bitmap_initialize (head: &map_head, obstack: &bitmap_default_obstack); |
7109 | bitmap_initialize (head: &map_field_head, obstack: &bitmap_default_obstack); |
7110 | bitmap_initialize (head: &map_firstprivate_head, obstack: &bitmap_default_obstack); |
7111 | /* If ort == C_ORT_OMP used as nontemporal_head or use_device_xxx_head |
7112 | instead and for ort == C_ORT_OMP_TARGET used as in_reduction_head. */ |
7113 | bitmap_initialize (head: &oacc_reduction_head, obstack: &bitmap_default_obstack); |
7114 | bitmap_initialize (head: &is_on_device_head, obstack: &bitmap_default_obstack); |
7115 | |
7116 | if (openacc) |
7117 | for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c)) |
7118 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ASYNC) |
7119 | { |
7120 | oacc_async = true; |
7121 | break; |
7122 | } |
7123 | |
7124 | tree *grp_start_p = NULL, grp_sentinel = NULL_TREE; |
7125 | |
7126 | for (pc = &clauses, c = clauses; c ; c = *pc) |
7127 | { |
7128 | bool remove = false; |
7129 | bool field_ok = false; |
7130 | |
7131 | /* We've reached the end of a list of expanded nodes. Reset the group |
7132 | start pointer. */ |
7133 | if (c == grp_sentinel) |
7134 | grp_start_p = NULL; |
7135 | |
7136 | switch (OMP_CLAUSE_CODE (c)) |
7137 | { |
7138 | case OMP_CLAUSE_SHARED: |
7139 | field_ok = ((ort & C_ORT_OMP_DECLARE_SIMD) == C_ORT_OMP); |
7140 | goto check_dup_generic; |
7141 | case OMP_CLAUSE_PRIVATE: |
7142 | field_ok = ((ort & C_ORT_OMP_DECLARE_SIMD) == C_ORT_OMP); |
7143 | goto check_dup_generic; |
7144 | case OMP_CLAUSE_REDUCTION: |
7145 | if (reduction_seen == 0) |
7146 | reduction_seen = OMP_CLAUSE_REDUCTION_INSCAN (c) ? -1 : 1; |
7147 | else if (reduction_seen != -2 |
7148 | && reduction_seen != (OMP_CLAUSE_REDUCTION_INSCAN (c) |
7149 | ? -1 : 1)) |
7150 | { |
7151 | error_at (OMP_CLAUSE_LOCATION (c), |
7152 | "%<inscan%> and non-%<inscan%> %<reduction%> clauses " |
7153 | "on the same construct" ); |
7154 | reduction_seen = -2; |
7155 | } |
7156 | /* FALLTHRU */ |
7157 | case OMP_CLAUSE_IN_REDUCTION: |
7158 | case OMP_CLAUSE_TASK_REDUCTION: |
7159 | field_ok = ((ort & C_ORT_OMP_DECLARE_SIMD) == C_ORT_OMP); |
7160 | t = OMP_CLAUSE_DECL (c); |
7161 | if (TREE_CODE (t) == OMP_ARRAY_SECTION) |
7162 | { |
7163 | if (handle_omp_array_sections (c, ort)) |
7164 | { |
7165 | remove = true; |
7166 | break; |
7167 | } |
7168 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION |
7169 | && OMP_CLAUSE_REDUCTION_INSCAN (c)) |
7170 | { |
7171 | error_at (OMP_CLAUSE_LOCATION (c), |
7172 | "%<inscan%> %<reduction%> clause with array " |
7173 | "section" ); |
7174 | remove = true; |
7175 | break; |
7176 | } |
7177 | if (TREE_CODE (t) == OMP_ARRAY_SECTION) |
7178 | { |
7179 | while (TREE_CODE (t) == OMP_ARRAY_SECTION) |
7180 | t = TREE_OPERAND (t, 0); |
7181 | } |
7182 | else |
7183 | { |
7184 | gcc_assert (TREE_CODE (t) == MEM_REF); |
7185 | t = TREE_OPERAND (t, 0); |
7186 | if (TREE_CODE (t) == POINTER_PLUS_EXPR) |
7187 | t = TREE_OPERAND (t, 0); |
7188 | if (TREE_CODE (t) == ADDR_EXPR |
7189 | || INDIRECT_REF_P (t)) |
7190 | t = TREE_OPERAND (t, 0); |
7191 | } |
7192 | tree n = omp_clause_decl_field (decl: t); |
7193 | if (n) |
7194 | t = n; |
7195 | goto check_dup_generic_t; |
7196 | } |
7197 | if (oacc_async) |
7198 | cxx_mark_addressable (t); |
7199 | goto check_dup_generic; |
7200 | case OMP_CLAUSE_COPYPRIVATE: |
7201 | copyprivate_seen = true; |
7202 | field_ok = ((ort & C_ORT_OMP_DECLARE_SIMD) == C_ORT_OMP); |
7203 | goto check_dup_generic; |
7204 | case OMP_CLAUSE_COPYIN: |
7205 | goto check_dup_generic; |
7206 | case OMP_CLAUSE_LINEAR: |
7207 | field_ok = ((ort & C_ORT_OMP_DECLARE_SIMD) == C_ORT_OMP); |
7208 | t = OMP_CLAUSE_DECL (c); |
7209 | if (ort != C_ORT_OMP_DECLARE_SIMD |
7210 | && OMP_CLAUSE_LINEAR_KIND (c) != OMP_CLAUSE_LINEAR_DEFAULT) |
7211 | { |
7212 | if (OMP_CLAUSE_LINEAR_OLD_LINEAR_MODIFIER (c)) |
7213 | { |
7214 | error_at (OMP_CLAUSE_LOCATION (c), |
7215 | "modifier should not be specified in %<linear%> " |
7216 | "clause on %<simd%> or %<for%> constructs when " |
7217 | "not using OpenMP 5.2 modifiers" ); |
7218 | OMP_CLAUSE_LINEAR_KIND (c) = OMP_CLAUSE_LINEAR_DEFAULT; |
7219 | } |
7220 | else if (OMP_CLAUSE_LINEAR_KIND (c) != OMP_CLAUSE_LINEAR_VAL) |
7221 | { |
7222 | error_at (OMP_CLAUSE_LOCATION (c), |
7223 | "modifier other than %<val%> specified in " |
7224 | "%<linear%> clause on %<simd%> or %<for%> " |
7225 | "constructs when using OpenMP 5.2 modifiers" ); |
7226 | OMP_CLAUSE_LINEAR_KIND (c) = OMP_CLAUSE_LINEAR_DEFAULT; |
7227 | } |
7228 | } |
7229 | if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL) |
7230 | && !type_dependent_expression_p (t)) |
7231 | { |
7232 | tree type = TREE_TYPE (t); |
7233 | if ((OMP_CLAUSE_LINEAR_KIND (c) == OMP_CLAUSE_LINEAR_REF |
7234 | || OMP_CLAUSE_LINEAR_KIND (c) == OMP_CLAUSE_LINEAR_UVAL) |
7235 | && !TYPE_REF_P (type)) |
7236 | { |
7237 | error_at (OMP_CLAUSE_LOCATION (c), |
7238 | "linear clause with %qs modifier applied to " |
7239 | "non-reference variable with %qT type" , |
7240 | OMP_CLAUSE_LINEAR_KIND (c) == OMP_CLAUSE_LINEAR_REF |
7241 | ? "ref" : "uval" , TREE_TYPE (t)); |
7242 | remove = true; |
7243 | break; |
7244 | } |
7245 | if (TYPE_REF_P (type)) |
7246 | type = TREE_TYPE (type); |
7247 | if (OMP_CLAUSE_LINEAR_KIND (c) != OMP_CLAUSE_LINEAR_REF) |
7248 | { |
7249 | if (!INTEGRAL_TYPE_P (type) |
7250 | && !TYPE_PTR_P (type)) |
7251 | { |
7252 | error_at (OMP_CLAUSE_LOCATION (c), |
7253 | "linear clause applied to non-integral " |
7254 | "non-pointer variable with %qT type" , |
7255 | TREE_TYPE (t)); |
7256 | remove = true; |
7257 | break; |
7258 | } |
7259 | } |
7260 | } |
7261 | t = OMP_CLAUSE_LINEAR_STEP (c); |
7262 | if (t == NULL_TREE) |
7263 | t = integer_one_node; |
7264 | if (t == error_mark_node) |
7265 | { |
7266 | remove = true; |
7267 | break; |
7268 | } |
7269 | else if (!type_dependent_expression_p (t) |
7270 | && !INTEGRAL_TYPE_P (TREE_TYPE (t)) |
7271 | && (ort != C_ORT_OMP_DECLARE_SIMD |
7272 | || TREE_CODE (t) != PARM_DECL |
7273 | || !TYPE_REF_P (TREE_TYPE (t)) |
7274 | || !INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (t))))) |
7275 | { |
7276 | error_at (OMP_CLAUSE_LOCATION (c), |
7277 | "linear step expression must be integral" ); |
7278 | remove = true; |
7279 | break; |
7280 | } |
7281 | else |
7282 | { |
7283 | t = mark_rvalue_use (t); |
7284 | if (ort == C_ORT_OMP_DECLARE_SIMD && TREE_CODE (t) == PARM_DECL) |
7285 | { |
7286 | OMP_CLAUSE_LINEAR_VARIABLE_STRIDE (c) = 1; |
7287 | goto check_dup_generic; |
7288 | } |
7289 | if (!processing_template_decl |
7290 | && (VAR_P (OMP_CLAUSE_DECL (c)) |
7291 | || TREE_CODE (OMP_CLAUSE_DECL (c)) == PARM_DECL)) |
7292 | { |
7293 | if (ort == C_ORT_OMP_DECLARE_SIMD) |
7294 | { |
7295 | t = maybe_constant_value (t); |
7296 | if (TREE_CODE (t) != INTEGER_CST) |
7297 | { |
7298 | error_at (OMP_CLAUSE_LOCATION (c), |
7299 | "%<linear%> clause step %qE is neither " |
7300 | "constant nor a parameter" , t); |
7301 | remove = true; |
7302 | break; |
7303 | } |
7304 | } |
7305 | t = fold_build_cleanup_point_expr (TREE_TYPE (t), expr: t); |
7306 | tree type = TREE_TYPE (OMP_CLAUSE_DECL (c)); |
7307 | if (TYPE_REF_P (type)) |
7308 | type = TREE_TYPE (type); |
7309 | if (OMP_CLAUSE_LINEAR_KIND (c) == OMP_CLAUSE_LINEAR_REF) |
7310 | { |
7311 | type = build_pointer_type (type); |
7312 | tree d = fold_convert (type, OMP_CLAUSE_DECL (c)); |
7313 | t = pointer_int_sum (OMP_CLAUSE_LOCATION (c), PLUS_EXPR, |
7314 | d, t); |
7315 | t = fold_build2_loc (OMP_CLAUSE_LOCATION (c), |
7316 | MINUS_EXPR, sizetype, |
7317 | fold_convert (sizetype, t), |
7318 | fold_convert (sizetype, d)); |
7319 | if (t == error_mark_node) |
7320 | { |
7321 | remove = true; |
7322 | break; |
7323 | } |
7324 | } |
7325 | else if (TYPE_PTR_P (type) |
7326 | /* Can't multiply the step yet if *this |
7327 | is still incomplete type. */ |
7328 | && (ort != C_ORT_OMP_DECLARE_SIMD |
7329 | || TREE_CODE (OMP_CLAUSE_DECL (c)) != PARM_DECL |
7330 | || !DECL_ARTIFICIAL (OMP_CLAUSE_DECL (c)) |
7331 | || DECL_NAME (OMP_CLAUSE_DECL (c)) |
7332 | != this_identifier |
7333 | || !TYPE_BEING_DEFINED (TREE_TYPE (type)))) |
7334 | { |
7335 | tree d = convert_from_reference (OMP_CLAUSE_DECL (c)); |
7336 | t = pointer_int_sum (OMP_CLAUSE_LOCATION (c), PLUS_EXPR, |
7337 | d, t); |
7338 | t = fold_build2_loc (OMP_CLAUSE_LOCATION (c), |
7339 | MINUS_EXPR, sizetype, |
7340 | fold_convert (sizetype, t), |
7341 | fold_convert (sizetype, d)); |
7342 | if (t == error_mark_node) |
7343 | { |
7344 | remove = true; |
7345 | break; |
7346 | } |
7347 | } |
7348 | else |
7349 | t = fold_convert (type, t); |
7350 | } |
7351 | OMP_CLAUSE_LINEAR_STEP (c) = t; |
7352 | } |
7353 | goto check_dup_generic; |
7354 | check_dup_generic: |
7355 | t = omp_clause_decl_field (OMP_CLAUSE_DECL (c)); |
7356 | if (t) |
7357 | { |
7358 | if (!remove && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_SHARED) |
7359 | omp_note_field_privatization (f: t, OMP_CLAUSE_DECL (c)); |
7360 | } |
7361 | else |
7362 | t = OMP_CLAUSE_DECL (c); |
7363 | check_dup_generic_t: |
7364 | if (t == current_class_ptr |
7365 | && ((ort != C_ORT_OMP_DECLARE_SIMD && !openacc) |
7366 | || (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR |
7367 | && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_UNIFORM))) |
7368 | { |
7369 | error_at (OMP_CLAUSE_LOCATION (c), |
7370 | "%<this%> allowed in OpenMP only in %<declare simd%>" |
7371 | " clauses" ); |
7372 | remove = true; |
7373 | break; |
7374 | } |
7375 | if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL |
7376 | && (!field_ok || TREE_CODE (t) != FIELD_DECL)) |
7377 | { |
7378 | if (processing_template_decl && TREE_CODE (t) != OVERLOAD) |
7379 | break; |
7380 | if (DECL_P (t)) |
7381 | error_at (OMP_CLAUSE_LOCATION (c), |
7382 | "%qD is not a variable in clause %qs" , t, |
7383 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
7384 | else |
7385 | error_at (OMP_CLAUSE_LOCATION (c), |
7386 | "%qE is not a variable in clause %qs" , t, |
7387 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
7388 | remove = true; |
7389 | } |
7390 | else if ((openacc |
7391 | && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION) |
7392 | || (ort == C_ORT_OMP |
7393 | && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR |
7394 | || (OMP_CLAUSE_CODE (c) |
7395 | == OMP_CLAUSE_USE_DEVICE_ADDR))) |
7396 | || (ort == C_ORT_OMP_TARGET |
7397 | && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)) |
7398 | { |
7399 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION |
7400 | && (bitmap_bit_p (&generic_head, DECL_UID (t)) |
7401 | || bitmap_bit_p (&firstprivate_head, DECL_UID (t)))) |
7402 | { |
7403 | error_at (OMP_CLAUSE_LOCATION (c), |
7404 | "%qD appears more than once in data-sharing " |
7405 | "clauses" , t); |
7406 | remove = true; |
7407 | break; |
7408 | } |
7409 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION) |
7410 | target_in_reduction_seen = true; |
7411 | if (bitmap_bit_p (&oacc_reduction_head, DECL_UID (t))) |
7412 | { |
7413 | error_at (OMP_CLAUSE_LOCATION (c), |
7414 | openacc |
7415 | ? "%qD appears more than once in reduction clauses" |
7416 | : "%qD appears more than once in data clauses" , |
7417 | t); |
7418 | remove = true; |
7419 | } |
7420 | else |
7421 | bitmap_set_bit (&oacc_reduction_head, DECL_UID (t)); |
7422 | } |
7423 | else if (bitmap_bit_p (&generic_head, DECL_UID (t)) |
7424 | || bitmap_bit_p (&firstprivate_head, DECL_UID (t)) |
7425 | || bitmap_bit_p (&lastprivate_head, DECL_UID (t)) |
7426 | || bitmap_bit_p (&map_firstprivate_head, DECL_UID (t))) |
7427 | { |
7428 | error_at (OMP_CLAUSE_LOCATION (c), |
7429 | "%qD appears more than once in data clauses" , t); |
7430 | remove = true; |
7431 | } |
7432 | else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE |
7433 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR |
7434 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR) |
7435 | && bitmap_bit_p (&map_head, DECL_UID (t))) |
7436 | { |
7437 | if (openacc) |
7438 | error_at (OMP_CLAUSE_LOCATION (c), |
7439 | "%qD appears more than once in data clauses" , t); |
7440 | else |
7441 | error_at (OMP_CLAUSE_LOCATION (c), |
7442 | "%qD appears both in data and map clauses" , t); |
7443 | remove = true; |
7444 | } |
7445 | else |
7446 | bitmap_set_bit (&generic_head, DECL_UID (t)); |
7447 | if (!field_ok) |
7448 | break; |
7449 | handle_field_decl: |
7450 | if (!remove |
7451 | && TREE_CODE (t) == FIELD_DECL |
7452 | && t == OMP_CLAUSE_DECL (c)) |
7453 | { |
7454 | OMP_CLAUSE_DECL (c) |
7455 | = omp_privatize_field (t, shared: (OMP_CLAUSE_CODE (c) |
7456 | == OMP_CLAUSE_SHARED)); |
7457 | if (OMP_CLAUSE_DECL (c) == error_mark_node) |
7458 | remove = true; |
7459 | } |
7460 | break; |
7461 | |
7462 | case OMP_CLAUSE_FIRSTPRIVATE: |
7463 | if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c) && !implicit_moved) |
7464 | { |
7465 | move_implicit: |
7466 | implicit_moved = true; |
7467 | /* Move firstprivate and map clauses with |
7468 | OMP_CLAUSE_{FIRSTPRIVATE,MAP}_IMPLICIT set to the end of |
7469 | clauses chain. */ |
7470 | tree cl1 = NULL_TREE, cl2 = NULL_TREE; |
7471 | tree *pc1 = pc, *pc2 = &cl1, *pc3 = &cl2; |
7472 | while (*pc1) |
7473 | if (OMP_CLAUSE_CODE (*pc1) == OMP_CLAUSE_FIRSTPRIVATE |
7474 | && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (*pc1)) |
7475 | { |
7476 | *pc3 = *pc1; |
7477 | pc3 = &OMP_CLAUSE_CHAIN (*pc3); |
7478 | *pc1 = OMP_CLAUSE_CHAIN (*pc1); |
7479 | } |
7480 | else if (OMP_CLAUSE_CODE (*pc1) == OMP_CLAUSE_MAP |
7481 | && OMP_CLAUSE_MAP_IMPLICIT (*pc1)) |
7482 | { |
7483 | *pc2 = *pc1; |
7484 | pc2 = &OMP_CLAUSE_CHAIN (*pc2); |
7485 | *pc1 = OMP_CLAUSE_CHAIN (*pc1); |
7486 | } |
7487 | else |
7488 | pc1 = &OMP_CLAUSE_CHAIN (*pc1); |
7489 | *pc3 = NULL; |
7490 | *pc2 = cl2; |
7491 | *pc1 = cl1; |
7492 | continue; |
7493 | } |
7494 | t = omp_clause_decl_field (OMP_CLAUSE_DECL (c)); |
7495 | if (t) |
7496 | omp_note_field_privatization (f: t, OMP_CLAUSE_DECL (c)); |
7497 | else |
7498 | t = OMP_CLAUSE_DECL (c); |
7499 | if (!openacc && t == current_class_ptr) |
7500 | { |
7501 | error_at (OMP_CLAUSE_LOCATION (c), |
7502 | "%<this%> allowed in OpenMP only in %<declare simd%>" |
7503 | " clauses" ); |
7504 | remove = true; |
7505 | break; |
7506 | } |
7507 | if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL |
7508 | && ((ort & C_ORT_OMP_DECLARE_SIMD) != C_ORT_OMP |
7509 | || TREE_CODE (t) != FIELD_DECL)) |
7510 | { |
7511 | if (processing_template_decl && TREE_CODE (t) != OVERLOAD) |
7512 | break; |
7513 | if (DECL_P (t)) |
7514 | error_at (OMP_CLAUSE_LOCATION (c), |
7515 | "%qD is not a variable in clause %<firstprivate%>" , |
7516 | t); |
7517 | else |
7518 | error_at (OMP_CLAUSE_LOCATION (c), |
7519 | "%qE is not a variable in clause %<firstprivate%>" , |
7520 | t); |
7521 | remove = true; |
7522 | } |
7523 | else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c) |
7524 | && !OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT_TARGET (c) |
7525 | && bitmap_bit_p (&map_firstprivate_head, DECL_UID (t))) |
7526 | remove = true; |
7527 | else if (bitmap_bit_p (&generic_head, DECL_UID (t)) |
7528 | || bitmap_bit_p (&firstprivate_head, DECL_UID (t)) |
7529 | || bitmap_bit_p (&map_firstprivate_head, DECL_UID (t))) |
7530 | { |
7531 | error_at (OMP_CLAUSE_LOCATION (c), |
7532 | "%qD appears more than once in data clauses" , t); |
7533 | remove = true; |
7534 | } |
7535 | else if (bitmap_bit_p (&map_head, DECL_UID (t)) |
7536 | || bitmap_bit_p (&map_field_head, DECL_UID (t))) |
7537 | { |
7538 | if (openacc) |
7539 | error_at (OMP_CLAUSE_LOCATION (c), |
7540 | "%qD appears more than once in data clauses" , t); |
7541 | else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c) |
7542 | && !OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT_TARGET (c)) |
7543 | /* Silently drop the clause. */; |
7544 | else |
7545 | error_at (OMP_CLAUSE_LOCATION (c), |
7546 | "%qD appears both in data and map clauses" , t); |
7547 | remove = true; |
7548 | } |
7549 | else |
7550 | bitmap_set_bit (&firstprivate_head, DECL_UID (t)); |
7551 | goto handle_field_decl; |
7552 | |
7553 | case OMP_CLAUSE_LASTPRIVATE: |
7554 | t = omp_clause_decl_field (OMP_CLAUSE_DECL (c)); |
7555 | if (t) |
7556 | omp_note_field_privatization (f: t, OMP_CLAUSE_DECL (c)); |
7557 | else |
7558 | t = OMP_CLAUSE_DECL (c); |
7559 | if (!openacc && t == current_class_ptr) |
7560 | { |
7561 | error_at (OMP_CLAUSE_LOCATION (c), |
7562 | "%<this%> allowed in OpenMP only in %<declare simd%>" |
7563 | " clauses" ); |
7564 | remove = true; |
7565 | break; |
7566 | } |
7567 | if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL |
7568 | && ((ort & C_ORT_OMP_DECLARE_SIMD) != C_ORT_OMP |
7569 | || TREE_CODE (t) != FIELD_DECL)) |
7570 | { |
7571 | if (processing_template_decl && TREE_CODE (t) != OVERLOAD) |
7572 | break; |
7573 | if (DECL_P (t)) |
7574 | error_at (OMP_CLAUSE_LOCATION (c), |
7575 | "%qD is not a variable in clause %<lastprivate%>" , |
7576 | t); |
7577 | else |
7578 | error_at (OMP_CLAUSE_LOCATION (c), |
7579 | "%qE is not a variable in clause %<lastprivate%>" , |
7580 | t); |
7581 | remove = true; |
7582 | } |
7583 | else if (bitmap_bit_p (&generic_head, DECL_UID (t)) |
7584 | || bitmap_bit_p (&lastprivate_head, DECL_UID (t))) |
7585 | { |
7586 | error_at (OMP_CLAUSE_LOCATION (c), |
7587 | "%qD appears more than once in data clauses" , t); |
7588 | remove = true; |
7589 | } |
7590 | else |
7591 | bitmap_set_bit (&lastprivate_head, DECL_UID (t)); |
7592 | goto handle_field_decl; |
7593 | |
7594 | case OMP_CLAUSE_IF: |
7595 | case OMP_CLAUSE_SELF: |
7596 | t = OMP_CLAUSE_OPERAND (c, 0); |
7597 | t = maybe_convert_cond (cond: t); |
7598 | if (t == error_mark_node) |
7599 | remove = true; |
7600 | else if (!processing_template_decl) |
7601 | t = fold_build_cleanup_point_expr (TREE_TYPE (t), expr: t); |
7602 | OMP_CLAUSE_OPERAND (c, 0) = t; |
7603 | break; |
7604 | |
7605 | case OMP_CLAUSE_FINAL: |
7606 | t = OMP_CLAUSE_FINAL_EXPR (c); |
7607 | t = maybe_convert_cond (cond: t); |
7608 | if (t == error_mark_node) |
7609 | remove = true; |
7610 | else if (!processing_template_decl) |
7611 | t = fold_build_cleanup_point_expr (TREE_TYPE (t), expr: t); |
7612 | OMP_CLAUSE_FINAL_EXPR (c) = t; |
7613 | break; |
7614 | |
7615 | case OMP_CLAUSE_GANG: |
7616 | /* Operand 1 is the gang static: argument. */ |
7617 | t = OMP_CLAUSE_OPERAND (c, 1); |
7618 | if (t != NULL_TREE) |
7619 | { |
7620 | if (t == error_mark_node) |
7621 | remove = true; |
7622 | else if (!type_dependent_expression_p (t) |
7623 | && !INTEGRAL_TYPE_P (TREE_TYPE (t))) |
7624 | { |
7625 | error_at (OMP_CLAUSE_LOCATION (c), |
7626 | "%<gang%> static expression must be integral" ); |
7627 | remove = true; |
7628 | } |
7629 | else |
7630 | { |
7631 | t = mark_rvalue_use (t); |
7632 | if (!processing_template_decl) |
7633 | { |
7634 | t = maybe_constant_value (t); |
7635 | if (TREE_CODE (t) == INTEGER_CST |
7636 | && tree_int_cst_sgn (t) != 1 |
7637 | && t != integer_minus_one_node) |
7638 | { |
7639 | warning_at (OMP_CLAUSE_LOCATION (c), 0, |
7640 | "%<gang%> static value must be " |
7641 | "positive" ); |
7642 | t = integer_one_node; |
7643 | } |
7644 | t = fold_build_cleanup_point_expr (TREE_TYPE (t), expr: t); |
7645 | } |
7646 | } |
7647 | OMP_CLAUSE_OPERAND (c, 1) = t; |
7648 | } |
7649 | /* Check operand 0, the num argument. */ |
7650 | /* FALLTHRU */ |
7651 | |
7652 | case OMP_CLAUSE_WORKER: |
7653 | case OMP_CLAUSE_VECTOR: |
7654 | if (OMP_CLAUSE_OPERAND (c, 0) == NULL_TREE) |
7655 | break; |
7656 | /* FALLTHRU */ |
7657 | |
7658 | case OMP_CLAUSE_NUM_TASKS: |
7659 | case OMP_CLAUSE_NUM_TEAMS: |
7660 | case OMP_CLAUSE_NUM_THREADS: |
7661 | case OMP_CLAUSE_NUM_GANGS: |
7662 | case OMP_CLAUSE_NUM_WORKERS: |
7663 | case OMP_CLAUSE_VECTOR_LENGTH: |
7664 | t = OMP_CLAUSE_OPERAND (c, 0); |
7665 | if (t == error_mark_node) |
7666 | remove = true; |
7667 | else if (!type_dependent_expression_p (t) |
7668 | && !INTEGRAL_TYPE_P (TREE_TYPE (t))) |
7669 | { |
7670 | switch (OMP_CLAUSE_CODE (c)) |
7671 | { |
7672 | case OMP_CLAUSE_GANG: |
7673 | error_at (OMP_CLAUSE_LOCATION (c), |
7674 | "%<gang%> num expression must be integral" ); break; |
7675 | case OMP_CLAUSE_VECTOR: |
7676 | error_at (OMP_CLAUSE_LOCATION (c), |
7677 | "%<vector%> length expression must be integral" ); |
7678 | break; |
7679 | case OMP_CLAUSE_WORKER: |
7680 | error_at (OMP_CLAUSE_LOCATION (c), |
7681 | "%<worker%> num expression must be integral" ); |
7682 | break; |
7683 | default: |
7684 | error_at (OMP_CLAUSE_LOCATION (c), |
7685 | "%qs expression must be integral" , |
7686 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
7687 | } |
7688 | remove = true; |
7689 | } |
7690 | else |
7691 | { |
7692 | t = mark_rvalue_use (t); |
7693 | if (!processing_template_decl) |
7694 | { |
7695 | t = maybe_constant_value (t); |
7696 | if (TREE_CODE (t) == INTEGER_CST |
7697 | && tree_int_cst_sgn (t) != 1) |
7698 | { |
7699 | switch (OMP_CLAUSE_CODE (c)) |
7700 | { |
7701 | case OMP_CLAUSE_GANG: |
7702 | warning_at (OMP_CLAUSE_LOCATION (c), 0, |
7703 | "%<gang%> num value must be positive" ); |
7704 | break; |
7705 | case OMP_CLAUSE_VECTOR: |
7706 | warning_at (OMP_CLAUSE_LOCATION (c), 0, |
7707 | "%<vector%> length value must be " |
7708 | "positive" ); |
7709 | break; |
7710 | case OMP_CLAUSE_WORKER: |
7711 | warning_at (OMP_CLAUSE_LOCATION (c), 0, |
7712 | "%<worker%> num value must be " |
7713 | "positive" ); |
7714 | break; |
7715 | default: |
7716 | warning_at (OMP_CLAUSE_LOCATION (c), |
7717 | (flag_openmp || flag_openmp_simd) |
7718 | ? OPT_Wopenmp : 0, |
7719 | "%qs value must be positive" , |
7720 | omp_clause_code_name |
7721 | [OMP_CLAUSE_CODE (c)]); |
7722 | } |
7723 | t = integer_one_node; |
7724 | } |
7725 | t = fold_build_cleanup_point_expr (TREE_TYPE (t), expr: t); |
7726 | } |
7727 | OMP_CLAUSE_OPERAND (c, 0) = t; |
7728 | } |
7729 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS |
7730 | && OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c) |
7731 | && !remove) |
7732 | { |
7733 | t = OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c); |
7734 | if (t == error_mark_node) |
7735 | remove = true; |
7736 | else if (!type_dependent_expression_p (t) |
7737 | && !INTEGRAL_TYPE_P (TREE_TYPE (t))) |
7738 | { |
7739 | error_at (OMP_CLAUSE_LOCATION (c), |
7740 | "%qs expression must be integral" , |
7741 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
7742 | remove = true; |
7743 | } |
7744 | else |
7745 | { |
7746 | t = mark_rvalue_use (t); |
7747 | if (!processing_template_decl) |
7748 | { |
7749 | t = maybe_constant_value (t); |
7750 | if (TREE_CODE (t) == INTEGER_CST |
7751 | && tree_int_cst_sgn (t) != 1) |
7752 | { |
7753 | warning_at (OMP_CLAUSE_LOCATION (c), OPT_Wopenmp, |
7754 | "%qs value must be positive" , |
7755 | omp_clause_code_name |
7756 | [OMP_CLAUSE_CODE (c)]); |
7757 | t = NULL_TREE; |
7758 | } |
7759 | else |
7760 | t = fold_build_cleanup_point_expr (TREE_TYPE (t), expr: t); |
7761 | tree upper = OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (c); |
7762 | if (t |
7763 | && TREE_CODE (t) == INTEGER_CST |
7764 | && TREE_CODE (upper) == INTEGER_CST |
7765 | && tree_int_cst_lt (t1: upper, t2: t)) |
7766 | { |
7767 | warning_at (OMP_CLAUSE_LOCATION (c), OPT_Wopenmp, |
7768 | "%<num_teams%> lower bound %qE bigger " |
7769 | "than upper bound %qE" , t, upper); |
7770 | t = NULL_TREE; |
7771 | } |
7772 | } |
7773 | OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c) = t; |
7774 | } |
7775 | } |
7776 | break; |
7777 | |
7778 | case OMP_CLAUSE_SCHEDULE: |
7779 | t = OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c); |
7780 | if (t == NULL) |
7781 | ; |
7782 | else if (t == error_mark_node) |
7783 | remove = true; |
7784 | else if (!type_dependent_expression_p (t) |
7785 | && !INTEGRAL_TYPE_P (TREE_TYPE (t))) |
7786 | { |
7787 | error_at (OMP_CLAUSE_LOCATION (c), |
7788 | "schedule chunk size expression must be integral" ); |
7789 | remove = true; |
7790 | } |
7791 | else |
7792 | { |
7793 | t = mark_rvalue_use (t); |
7794 | if (!processing_template_decl) |
7795 | { |
7796 | t = maybe_constant_value (t); |
7797 | if (TREE_CODE (t) == INTEGER_CST |
7798 | && tree_int_cst_sgn (t) != 1) |
7799 | { |
7800 | warning_at (OMP_CLAUSE_LOCATION (c), OPT_Wopenmp, |
7801 | "chunk size value must be positive" ); |
7802 | t = integer_one_node; |
7803 | } |
7804 | t = fold_build_cleanup_point_expr (TREE_TYPE (t), expr: t); |
7805 | } |
7806 | OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c) = t; |
7807 | } |
7808 | if (!remove) |
7809 | schedule_seen = true; |
7810 | break; |
7811 | |
7812 | case OMP_CLAUSE_SIMDLEN: |
7813 | case OMP_CLAUSE_SAFELEN: |
7814 | t = OMP_CLAUSE_OPERAND (c, 0); |
7815 | if (t == error_mark_node) |
7816 | remove = true; |
7817 | else if (!type_dependent_expression_p (t) |
7818 | && !INTEGRAL_TYPE_P (TREE_TYPE (t))) |
7819 | { |
7820 | error_at (OMP_CLAUSE_LOCATION (c), |
7821 | "%qs length expression must be integral" , |
7822 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
7823 | remove = true; |
7824 | } |
7825 | else |
7826 | { |
7827 | t = mark_rvalue_use (t); |
7828 | if (!processing_template_decl) |
7829 | { |
7830 | t = maybe_constant_value (t); |
7831 | if (TREE_CODE (t) != INTEGER_CST |
7832 | || tree_int_cst_sgn (t) != 1) |
7833 | { |
7834 | error_at (OMP_CLAUSE_LOCATION (c), |
7835 | "%qs length expression must be positive " |
7836 | "constant integer expression" , |
7837 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
7838 | remove = true; |
7839 | } |
7840 | } |
7841 | OMP_CLAUSE_OPERAND (c, 0) = t; |
7842 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SAFELEN) |
7843 | safelen = c; |
7844 | } |
7845 | break; |
7846 | |
7847 | case OMP_CLAUSE_ASYNC: |
7848 | t = OMP_CLAUSE_ASYNC_EXPR (c); |
7849 | if (t == error_mark_node) |
7850 | remove = true; |
7851 | else if (!type_dependent_expression_p (t) |
7852 | && !INTEGRAL_TYPE_P (TREE_TYPE (t))) |
7853 | { |
7854 | error_at (OMP_CLAUSE_LOCATION (c), |
7855 | "%<async%> expression must be integral" ); |
7856 | remove = true; |
7857 | } |
7858 | else |
7859 | { |
7860 | t = mark_rvalue_use (t); |
7861 | if (!processing_template_decl) |
7862 | t = fold_build_cleanup_point_expr (TREE_TYPE (t), expr: t); |
7863 | OMP_CLAUSE_ASYNC_EXPR (c) = t; |
7864 | } |
7865 | break; |
7866 | |
7867 | case OMP_CLAUSE_WAIT: |
7868 | t = OMP_CLAUSE_WAIT_EXPR (c); |
7869 | if (t == error_mark_node) |
7870 | remove = true; |
7871 | else if (!processing_template_decl) |
7872 | t = fold_build_cleanup_point_expr (TREE_TYPE (t), expr: t); |
7873 | OMP_CLAUSE_WAIT_EXPR (c) = t; |
7874 | break; |
7875 | |
7876 | case OMP_CLAUSE_THREAD_LIMIT: |
7877 | t = OMP_CLAUSE_THREAD_LIMIT_EXPR (c); |
7878 | if (t == error_mark_node) |
7879 | remove = true; |
7880 | else if (!type_dependent_expression_p (t) |
7881 | && !INTEGRAL_TYPE_P (TREE_TYPE (t))) |
7882 | { |
7883 | error_at (OMP_CLAUSE_LOCATION (c), |
7884 | "%<thread_limit%> expression must be integral" ); |
7885 | remove = true; |
7886 | } |
7887 | else |
7888 | { |
7889 | t = mark_rvalue_use (t); |
7890 | if (!processing_template_decl) |
7891 | { |
7892 | t = maybe_constant_value (t); |
7893 | if (TREE_CODE (t) == INTEGER_CST |
7894 | && tree_int_cst_sgn (t) != 1) |
7895 | { |
7896 | warning_at (OMP_CLAUSE_LOCATION (c), OPT_Wopenmp, |
7897 | "%<thread_limit%> value must be positive" ); |
7898 | t = integer_one_node; |
7899 | } |
7900 | t = fold_build_cleanup_point_expr (TREE_TYPE (t), expr: t); |
7901 | } |
7902 | OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = t; |
7903 | } |
7904 | break; |
7905 | |
7906 | case OMP_CLAUSE_DEVICE: |
7907 | t = OMP_CLAUSE_DEVICE_ID (c); |
7908 | if (t == error_mark_node) |
7909 | remove = true; |
7910 | else if (!type_dependent_expression_p (t) |
7911 | && !INTEGRAL_TYPE_P (TREE_TYPE (t))) |
7912 | { |
7913 | error_at (OMP_CLAUSE_LOCATION (c), |
7914 | "%<device%> id must be integral" ); |
7915 | remove = true; |
7916 | } |
7917 | else if (OMP_CLAUSE_DEVICE_ANCESTOR (c) |
7918 | && TREE_CODE (t) == INTEGER_CST |
7919 | && !integer_onep (t)) |
7920 | { |
7921 | error_at (OMP_CLAUSE_LOCATION (c), |
7922 | "the %<device%> clause expression must evaluate to " |
7923 | "%<1%>" ); |
7924 | remove = true; |
7925 | } |
7926 | else |
7927 | { |
7928 | t = mark_rvalue_use (t); |
7929 | if (!processing_template_decl) |
7930 | t = fold_build_cleanup_point_expr (TREE_TYPE (t), expr: t); |
7931 | OMP_CLAUSE_DEVICE_ID (c) = t; |
7932 | } |
7933 | break; |
7934 | |
7935 | case OMP_CLAUSE_DIST_SCHEDULE: |
7936 | t = OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (c); |
7937 | if (t == NULL) |
7938 | ; |
7939 | else if (t == error_mark_node) |
7940 | remove = true; |
7941 | else if (!type_dependent_expression_p (t) |
7942 | && !INTEGRAL_TYPE_P (TREE_TYPE (t))) |
7943 | { |
7944 | error_at (OMP_CLAUSE_LOCATION (c), |
7945 | "%<dist_schedule%> chunk size expression must be " |
7946 | "integral" ); |
7947 | remove = true; |
7948 | } |
7949 | else |
7950 | { |
7951 | t = mark_rvalue_use (t); |
7952 | if (!processing_template_decl) |
7953 | t = fold_build_cleanup_point_expr (TREE_TYPE (t), expr: t); |
7954 | OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (c) = t; |
7955 | } |
7956 | break; |
7957 | |
7958 | case OMP_CLAUSE_ALIGNED: |
7959 | t = OMP_CLAUSE_DECL (c); |
7960 | if (t == current_class_ptr && ort != C_ORT_OMP_DECLARE_SIMD) |
7961 | { |
7962 | error_at (OMP_CLAUSE_LOCATION (c), |
7963 | "%<this%> allowed in OpenMP only in %<declare simd%>" |
7964 | " clauses" ); |
7965 | remove = true; |
7966 | break; |
7967 | } |
7968 | if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL) |
7969 | { |
7970 | if (processing_template_decl && TREE_CODE (t) != OVERLOAD) |
7971 | break; |
7972 | if (DECL_P (t)) |
7973 | error_at (OMP_CLAUSE_LOCATION (c), |
7974 | "%qD is not a variable in %<aligned%> clause" , t); |
7975 | else |
7976 | error_at (OMP_CLAUSE_LOCATION (c), |
7977 | "%qE is not a variable in %<aligned%> clause" , t); |
7978 | remove = true; |
7979 | } |
7980 | else if (!type_dependent_expression_p (t) |
7981 | && !TYPE_PTR_P (TREE_TYPE (t)) |
7982 | && TREE_CODE (TREE_TYPE (t)) != ARRAY_TYPE |
7983 | && (!TYPE_REF_P (TREE_TYPE (t)) |
7984 | || (!INDIRECT_TYPE_P (TREE_TYPE (TREE_TYPE (t))) |
7985 | && (TREE_CODE (TREE_TYPE (TREE_TYPE (t))) |
7986 | != ARRAY_TYPE)))) |
7987 | { |
7988 | error_at (OMP_CLAUSE_LOCATION (c), |
7989 | "%qE in %<aligned%> clause is neither a pointer nor " |
7990 | "an array nor a reference to pointer or array" , t); |
7991 | remove = true; |
7992 | } |
7993 | else if (bitmap_bit_p (&aligned_head, DECL_UID (t))) |
7994 | { |
7995 | error_at (OMP_CLAUSE_LOCATION (c), |
7996 | "%qD appears more than once in %<aligned%> clauses" , |
7997 | t); |
7998 | remove = true; |
7999 | } |
8000 | else |
8001 | bitmap_set_bit (&aligned_head, DECL_UID (t)); |
8002 | t = OMP_CLAUSE_ALIGNED_ALIGNMENT (c); |
8003 | if (t == error_mark_node) |
8004 | remove = true; |
8005 | else if (t == NULL_TREE) |
8006 | break; |
8007 | else if (!type_dependent_expression_p (t) |
8008 | && !INTEGRAL_TYPE_P (TREE_TYPE (t))) |
8009 | { |
8010 | error_at (OMP_CLAUSE_LOCATION (c), |
8011 | "%<aligned%> clause alignment expression must " |
8012 | "be integral" ); |
8013 | remove = true; |
8014 | } |
8015 | else |
8016 | { |
8017 | t = mark_rvalue_use (t); |
8018 | if (!processing_template_decl) |
8019 | { |
8020 | t = maybe_constant_value (t); |
8021 | if (TREE_CODE (t) != INTEGER_CST |
8022 | || tree_int_cst_sgn (t) != 1) |
8023 | { |
8024 | error_at (OMP_CLAUSE_LOCATION (c), |
8025 | "%<aligned%> clause alignment expression must " |
8026 | "be positive constant integer expression" ); |
8027 | remove = true; |
8028 | } |
8029 | else |
8030 | t = fold_build_cleanup_point_expr (TREE_TYPE (t), expr: t); |
8031 | } |
8032 | OMP_CLAUSE_ALIGNED_ALIGNMENT (c) = t; |
8033 | } |
8034 | break; |
8035 | |
case OMP_CLAUSE_NONTEMPORAL:
/* 'nontemporal' takes a plain list of variables; reject
   non-variables and duplicates.  The oacc_reduction_head bitmap is
   reused here to track nontemporal-clause duplicates.  */
t = OMP_CLAUSE_DECL (c);
if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL)
{
/* In templates, defer diagnosis of dependent operands.  */
if (processing_template_decl && TREE_CODE (t) != OVERLOAD)
break;
if (DECL_P (t))
error_at (OMP_CLAUSE_LOCATION (c),
"%qD is not a variable in %<nontemporal%> clause" ,
t);
else
error_at (OMP_CLAUSE_LOCATION (c),
"%qE is not a variable in %<nontemporal%> clause" ,
t);
remove = true;
}
else if (bitmap_bit_p (&oacc_reduction_head, DECL_UID (t)))
{
error_at (OMP_CLAUSE_LOCATION (c),
"%qD appears more than once in %<nontemporal%> "
"clauses" , t);
remove = true;
}
else
bitmap_set_bit (&oacc_reduction_head, DECL_UID (t));
break;
8062 | |
8063 | case OMP_CLAUSE_ALLOCATE: |
8064 | t = omp_clause_decl_field (OMP_CLAUSE_DECL (c)); |
8065 | if (t) |
8066 | omp_note_field_privatization (f: t, OMP_CLAUSE_DECL (c)); |
8067 | else |
8068 | t = OMP_CLAUSE_DECL (c); |
8069 | if (t == current_class_ptr) |
8070 | { |
8071 | error_at (OMP_CLAUSE_LOCATION (c), |
8072 | "%<this%> not allowed in %<allocate%> clause" ); |
8073 | remove = true; |
8074 | break; |
8075 | } |
8076 | if (!VAR_P (t) |
8077 | && TREE_CODE (t) != PARM_DECL |
8078 | && TREE_CODE (t) != FIELD_DECL) |
8079 | { |
8080 | if (processing_template_decl && TREE_CODE (t) != OVERLOAD) |
8081 | break; |
8082 | if (DECL_P (t)) |
8083 | error_at (OMP_CLAUSE_LOCATION (c), |
8084 | "%qD is not a variable in %<allocate%> clause" , t); |
8085 | else |
8086 | error_at (OMP_CLAUSE_LOCATION (c), |
8087 | "%qE is not a variable in %<allocate%> clause" , t); |
8088 | remove = true; |
8089 | } |
8090 | else if (bitmap_bit_p (&aligned_head, DECL_UID (t))) |
8091 | { |
8092 | warning_at (OMP_CLAUSE_LOCATION (c), OPT_Wopenmp, |
8093 | "%qD appears more than once in %<allocate%> clauses" , |
8094 | t); |
8095 | remove = true; |
8096 | } |
8097 | else |
8098 | { |
8099 | bitmap_set_bit (&aligned_head, DECL_UID (t)); |
8100 | allocate_seen = true; |
8101 | } |
8102 | tree allocator, align; |
8103 | align = OMP_CLAUSE_ALLOCATE_ALIGN (c); |
8104 | if (error_operand_p (t: align)) |
8105 | { |
8106 | remove = true; |
8107 | break; |
8108 | } |
8109 | if (align) |
8110 | { |
8111 | if (!type_dependent_expression_p (align) |
8112 | && !INTEGRAL_TYPE_P (TREE_TYPE (align))) |
8113 | { |
8114 | error_at (OMP_CLAUSE_LOCATION (c), |
8115 | "%<allocate%> clause %<align%> modifier " |
8116 | "argument needs to be positive constant " |
8117 | "power of two integer expression" ); |
8118 | remove = true; |
8119 | } |
8120 | else |
8121 | { |
8122 | align = mark_rvalue_use (align); |
8123 | if (!processing_template_decl) |
8124 | { |
8125 | align = maybe_constant_value (align); |
8126 | if (TREE_CODE (align) != INTEGER_CST |
8127 | || !tree_fits_uhwi_p (align) |
8128 | || !integer_pow2p (align)) |
8129 | { |
8130 | error_at (OMP_CLAUSE_LOCATION (c), |
8131 | "%<allocate%> clause %<align%> modifier " |
8132 | "argument needs to be positive constant " |
8133 | "power of two integer expression" ); |
8134 | remove = true; |
8135 | } |
8136 | } |
8137 | } |
8138 | OMP_CLAUSE_ALLOCATE_ALIGN (c) = align; |
8139 | } |
8140 | allocator = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c); |
8141 | if (error_operand_p (t: allocator)) |
8142 | { |
8143 | remove = true; |
8144 | break; |
8145 | } |
8146 | if (allocator == NULL_TREE) |
8147 | goto handle_field_decl; |
8148 | tree allocatort; |
8149 | allocatort = TYPE_MAIN_VARIANT (TREE_TYPE (allocator)); |
8150 | if (!type_dependent_expression_p (allocator) |
8151 | && (TREE_CODE (allocatort) != ENUMERAL_TYPE |
8152 | || TYPE_NAME (allocatort) == NULL_TREE |
8153 | || TREE_CODE (TYPE_NAME (allocatort)) != TYPE_DECL |
8154 | || (DECL_NAME (TYPE_NAME (allocatort)) |
8155 | != get_identifier ("omp_allocator_handle_t" )) |
8156 | || (TYPE_CONTEXT (allocatort) |
8157 | != DECL_CONTEXT (global_namespace)))) |
8158 | { |
8159 | error_at (OMP_CLAUSE_LOCATION (c), |
8160 | "%<allocate%> clause allocator expression has " |
8161 | "type %qT rather than %<omp_allocator_handle_t%>" , |
8162 | TREE_TYPE (allocator)); |
8163 | remove = true; |
8164 | break; |
8165 | } |
8166 | else |
8167 | { |
8168 | allocator = mark_rvalue_use (allocator); |
8169 | if (!processing_template_decl) |
8170 | allocator = maybe_constant_value (allocator); |
8171 | OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) = allocator; |
8172 | } |
8173 | goto handle_field_decl; |
8174 | |
8175 | case OMP_CLAUSE_DOACROSS: |
8176 | t = OMP_CLAUSE_DECL (c); |
8177 | if (t == NULL_TREE) |
8178 | break; |
8179 | if (OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK) |
8180 | { |
8181 | if (cp_finish_omp_clause_doacross_sink (sink_clause: c)) |
8182 | remove = true; |
8183 | break; |
8184 | } |
8185 | gcc_unreachable (); |
8186 | case OMP_CLAUSE_DEPEND: |
8187 | case OMP_CLAUSE_AFFINITY: |
8188 | t = OMP_CLAUSE_DECL (c); |
8189 | if (TREE_CODE (t) == TREE_LIST |
8190 | && TREE_PURPOSE (t) |
8191 | && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC) |
8192 | { |
8193 | if (TREE_PURPOSE (t) != last_iterators) |
8194 | last_iterators_remove |
8195 | = cp_omp_finish_iterators (TREE_PURPOSE (t)); |
8196 | last_iterators = TREE_PURPOSE (t); |
8197 | t = TREE_VALUE (t); |
8198 | if (last_iterators_remove) |
8199 | t = error_mark_node; |
8200 | } |
8201 | else |
8202 | last_iterators = NULL_TREE; |
8203 | |
8204 | if (TREE_CODE (t) == OMP_ARRAY_SECTION) |
8205 | { |
8206 | if (handle_omp_array_sections (c, ort)) |
8207 | remove = true; |
8208 | else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND |
8209 | && (OMP_CLAUSE_DEPEND_KIND (c) |
8210 | == OMP_CLAUSE_DEPEND_DEPOBJ)) |
8211 | { |
8212 | error_at (OMP_CLAUSE_LOCATION (c), |
8213 | "%<depend%> clause with %<depobj%> dependence " |
8214 | "type on array section" ); |
8215 | remove = true; |
8216 | } |
8217 | break; |
8218 | } |
8219 | if (t == error_mark_node) |
8220 | remove = true; |
8221 | else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND |
8222 | && t == ridpointers[RID_OMP_ALL_MEMORY]) |
8223 | { |
8224 | if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_OUT |
8225 | && OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_INOUT) |
8226 | { |
8227 | error_at (OMP_CLAUSE_LOCATION (c), |
8228 | "%<omp_all_memory%> used with %<depend%> kind " |
8229 | "other than %<out%> or %<inout%>" ); |
8230 | remove = true; |
8231 | } |
8232 | if (processing_template_decl) |
8233 | break; |
8234 | } |
8235 | else if (processing_template_decl && TREE_CODE (t) != OVERLOAD) |
8236 | break; |
8237 | else if (!lvalue_p (t)) |
8238 | { |
8239 | if (DECL_P (t)) |
8240 | error_at (OMP_CLAUSE_LOCATION (c), |
8241 | "%qD is not lvalue expression nor array section " |
8242 | "in %qs clause" , t, |
8243 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
8244 | else |
8245 | error_at (OMP_CLAUSE_LOCATION (c), |
8246 | "%qE is not lvalue expression nor array section " |
8247 | "in %qs clause" , t, |
8248 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
8249 | remove = true; |
8250 | } |
8251 | else if (TREE_CODE (t) == COMPONENT_REF |
8252 | && TREE_CODE (TREE_OPERAND (t, 1)) == FIELD_DECL |
8253 | && DECL_BIT_FIELD (TREE_OPERAND (t, 1))) |
8254 | { |
8255 | error_at (OMP_CLAUSE_LOCATION (c), |
8256 | "bit-field %qE in %qs clause" , t, |
8257 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
8258 | remove = true; |
8259 | } |
8260 | else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND |
8261 | && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_DEPOBJ) |
8262 | { |
8263 | if (!c_omp_depend_t_p (TYPE_REF_P (TREE_TYPE (t)) |
8264 | ? TREE_TYPE (TREE_TYPE (t)) |
8265 | : TREE_TYPE (t))) |
8266 | { |
8267 | error_at (OMP_CLAUSE_LOCATION (c), |
8268 | "%qE does not have %<omp_depend_t%> type in " |
8269 | "%<depend%> clause with %<depobj%> dependence " |
8270 | "type" , t); |
8271 | remove = true; |
8272 | } |
8273 | } |
8274 | else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND |
8275 | && c_omp_depend_t_p (TYPE_REF_P (TREE_TYPE (t)) |
8276 | ? TREE_TYPE (TREE_TYPE (t)) |
8277 | : TREE_TYPE (t))) |
8278 | { |
8279 | error_at (OMP_CLAUSE_LOCATION (c), |
8280 | "%qE should not have %<omp_depend_t%> type in " |
8281 | "%<depend%> clause with dependence type other than " |
8282 | "%<depobj%>" , t); |
8283 | remove = true; |
8284 | } |
8285 | if (!remove) |
8286 | { |
8287 | if (t == ridpointers[RID_OMP_ALL_MEMORY]) |
8288 | t = null_pointer_node; |
8289 | else |
8290 | { |
8291 | tree addr = cp_build_addr_expr (t, tf_warning_or_error); |
8292 | if (addr == error_mark_node) |
8293 | { |
8294 | remove = true; |
8295 | break; |
8296 | } |
8297 | t = cp_build_indirect_ref (OMP_CLAUSE_LOCATION (c), |
8298 | addr, RO_UNARY_STAR, |
8299 | tf_warning_or_error); |
8300 | if (t == error_mark_node) |
8301 | { |
8302 | remove = true; |
8303 | break; |
8304 | } |
8305 | } |
8306 | if (TREE_CODE (OMP_CLAUSE_DECL (c)) == TREE_LIST |
8307 | && TREE_PURPOSE (OMP_CLAUSE_DECL (c)) |
8308 | && (TREE_CODE (TREE_PURPOSE (OMP_CLAUSE_DECL (c))) |
8309 | == TREE_VEC)) |
8310 | TREE_VALUE (OMP_CLAUSE_DECL (c)) = t; |
8311 | else |
8312 | OMP_CLAUSE_DECL (c) = t; |
8313 | } |
8314 | break; |
case OMP_CLAUSE_DETACH:
t = OMP_CLAUSE_DECL (c);
/* At most one 'detach' clause may appear on a task construct.  */
if (detach_seen)
{
error_at (OMP_CLAUSE_LOCATION (c),
"too many %qs clauses on a task construct" ,
"detach" );
remove = true;
break;
}
else if (error_operand_p (t))
{
remove = true;
break;
}
else
{
/* The event handle must have the enum type named
   omp_event_handle_t.  */
tree type = TYPE_MAIN_VARIANT (TREE_TYPE (t));
if (!type_dependent_expression_p (t)
&& (!INTEGRAL_TYPE_P (type)
|| TREE_CODE (type) != ENUMERAL_TYPE
|| TYPE_NAME (type) == NULL_TREE
|| (DECL_NAME (TYPE_NAME (type))
!= get_identifier ("omp_event_handle_t" ))))
{
error_at (OMP_CLAUSE_LOCATION (c),
"%<detach%> clause event handle "
"has type %qT rather than "
"%<omp_event_handle_t%>" ,
type);
remove = true;
}
/* Remember the clause for later cross-clause checks and make
   the handle addressable.  */
detach_seen = c;
cxx_mark_addressable (t);
}
break;
8351 | |
8352 | case OMP_CLAUSE_MAP: |
8353 | if (OMP_CLAUSE_MAP_IMPLICIT (c) && !implicit_moved) |
8354 | goto move_implicit; |
8355 | /* FALLTHRU */ |
8356 | case OMP_CLAUSE_TO: |
8357 | case OMP_CLAUSE_FROM: |
8358 | case OMP_CLAUSE__CACHE_: |
8359 | { |
8360 | using namespace omp_addr_tokenizer; |
8361 | auto_vec<omp_addr_token *, 10> addr_tokens; |
8362 | |
8363 | t = OMP_CLAUSE_DECL (c); |
8364 | if (TREE_CODE (t) == OMP_ARRAY_SECTION) |
8365 | { |
8366 | grp_start_p = pc; |
8367 | grp_sentinel = OMP_CLAUSE_CHAIN (c); |
8368 | |
8369 | if (handle_omp_array_sections (c, ort)) |
8370 | remove = true; |
8371 | else |
8372 | { |
8373 | t = OMP_CLAUSE_DECL (c); |
8374 | if (TREE_CODE (t) != OMP_ARRAY_SECTION |
8375 | && !type_dependent_expression_p (t) |
8376 | && !omp_mappable_type (TREE_TYPE (t))) |
8377 | { |
8378 | error_at (OMP_CLAUSE_LOCATION (c), |
8379 | "array section does not have mappable type " |
8380 | "in %qs clause" , |
8381 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
8382 | if (TREE_TYPE (t) != error_mark_node |
8383 | && !COMPLETE_TYPE_P (TREE_TYPE (t))) |
8384 | cxx_incomplete_type_inform (TREE_TYPE (t)); |
8385 | remove = true; |
8386 | } |
8387 | while (TREE_CODE (t) == ARRAY_REF) |
8388 | t = TREE_OPERAND (t, 0); |
8389 | |
8390 | if (type_dependent_expression_p (t)) |
8391 | break; |
8392 | |
8393 | cp_omp_address_inspector ai (OMP_CLAUSE_LOCATION (c), t); |
8394 | |
8395 | if (!ai.map_supported_p () |
8396 | || !omp_parse_expr (addr_tokens, t)) |
8397 | { |
8398 | sorry_at (OMP_CLAUSE_LOCATION (c), |
8399 | "unsupported map expression %qE" , |
8400 | OMP_CLAUSE_DECL (c)); |
8401 | remove = true; |
8402 | break; |
8403 | } |
8404 | |
8405 | /* This check is to determine if this will be the only map |
8406 | node created for this clause. Otherwise, we'll check |
8407 | the following FIRSTPRIVATE_POINTER, |
8408 | FIRSTPRIVATE_REFERENCE or ATTACH_DETACH node on the next |
8409 | iteration(s) of the loop. */ |
8410 | if (addr_tokens.length () >= 4 |
8411 | && addr_tokens[0]->type == STRUCTURE_BASE |
8412 | && addr_tokens[0]->u.structure_base_kind == BASE_DECL |
8413 | && addr_tokens[1]->type == ACCESS_METHOD |
8414 | && addr_tokens[2]->type == COMPONENT_SELECTOR |
8415 | && addr_tokens[3]->type == ACCESS_METHOD |
8416 | && (addr_tokens[3]->u.access_kind == ACCESS_DIRECT |
8417 | || (addr_tokens[3]->u.access_kind |
8418 | == ACCESS_INDEXED_ARRAY))) |
8419 | { |
8420 | tree rt = addr_tokens[1]->expr; |
8421 | |
8422 | gcc_assert (DECL_P (rt)); |
8423 | |
8424 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
8425 | && OMP_CLAUSE_MAP_IMPLICIT (c) |
8426 | && (bitmap_bit_p (&map_head, DECL_UID (rt)) |
8427 | || bitmap_bit_p (&map_field_head, DECL_UID (rt)) |
8428 | || bitmap_bit_p (&map_firstprivate_head, |
8429 | DECL_UID (rt)))) |
8430 | { |
8431 | remove = true; |
8432 | break; |
8433 | } |
8434 | if (bitmap_bit_p (&map_field_head, DECL_UID (rt))) |
8435 | break; |
8436 | if (bitmap_bit_p (&map_head, DECL_UID (rt))) |
8437 | { |
8438 | if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP) |
8439 | error_at (OMP_CLAUSE_LOCATION (c), |
8440 | "%qD appears more than once in motion" |
8441 | " clauses" , rt); |
8442 | else if (openacc) |
8443 | error_at (OMP_CLAUSE_LOCATION (c), |
8444 | "%qD appears more than once in data" |
8445 | " clauses" , rt); |
8446 | else |
8447 | error_at (OMP_CLAUSE_LOCATION (c), |
8448 | "%qD appears more than once in map" |
8449 | " clauses" , rt); |
8450 | remove = true; |
8451 | } |
8452 | else |
8453 | { |
8454 | bitmap_set_bit (&map_head, DECL_UID (rt)); |
8455 | bitmap_set_bit (&map_field_head, DECL_UID (rt)); |
8456 | } |
8457 | } |
8458 | } |
8459 | if (cp_oacc_check_attachments (c)) |
8460 | remove = true; |
8461 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
8462 | && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH |
8463 | || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH) |
8464 | && !OMP_CLAUSE_SIZE (c)) |
8465 | /* In this case, we have a single array element which is a |
8466 | pointer, and we already set OMP_CLAUSE_SIZE in |
8467 | handle_omp_array_sections above. For attach/detach |
8468 | clauses, reset the OMP_CLAUSE_SIZE (representing a bias) |
8469 | to zero here. */ |
8470 | OMP_CLAUSE_SIZE (c) = size_zero_node; |
8471 | break; |
8472 | } |
8473 | else if (type_dependent_expression_p (t)) |
8474 | break; |
8475 | else if (!omp_parse_expr (addr_tokens, t)) |
8476 | { |
8477 | sorry_at (OMP_CLAUSE_LOCATION (c), |
8478 | "unsupported map expression %qE" , |
8479 | OMP_CLAUSE_DECL (c)); |
8480 | remove = true; |
8481 | break; |
8482 | } |
8483 | if (t == error_mark_node) |
8484 | { |
8485 | remove = true; |
8486 | break; |
8487 | } |
8488 | /* OpenACC attach / detach clauses must be pointers. */ |
8489 | if (cp_oacc_check_attachments (c)) |
8490 | { |
8491 | remove = true; |
8492 | break; |
8493 | } |
8494 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
8495 | && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH |
8496 | || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH) |
8497 | && !OMP_CLAUSE_SIZE (c)) |
8498 | /* For attach/detach clauses, set OMP_CLAUSE_SIZE (representing a |
8499 | bias) to zero here, so it is not set erroneously to the |
8500 | pointer size later on in gimplify.cc. */ |
8501 | OMP_CLAUSE_SIZE (c) = size_zero_node; |
8502 | |
8503 | cp_omp_address_inspector ai (OMP_CLAUSE_LOCATION (c), t); |
8504 | |
8505 | if (!ai.check_clause (clause: c)) |
8506 | { |
8507 | remove = true; |
8508 | break; |
8509 | } |
8510 | |
8511 | if (!ai.map_supported_p ()) |
8512 | { |
8513 | sorry_at (OMP_CLAUSE_LOCATION (c), |
8514 | "unsupported map expression %qE" , |
8515 | OMP_CLAUSE_DECL (c)); |
8516 | remove = true; |
8517 | break; |
8518 | } |
8519 | |
8520 | gcc_assert ((addr_tokens[0]->type == ARRAY_BASE |
8521 | || addr_tokens[0]->type == STRUCTURE_BASE) |
8522 | && addr_tokens[1]->type == ACCESS_METHOD); |
8523 | |
8524 | t = addr_tokens[1]->expr; |
8525 | |
8526 | /* This is used to prevent cxx_mark_addressable from being called |
8527 | on 'this' for expressions like 'this->a', i.e. typical member |
8528 | accesses. */ |
8529 | indir_component_ref_p |
8530 | = (addr_tokens[0]->type == STRUCTURE_BASE |
8531 | && addr_tokens[1]->u.access_kind != ACCESS_DIRECT); |
8532 | |
8533 | if (addr_tokens[0]->u.structure_base_kind != BASE_DECL) |
8534 | goto skip_decl_checks; |
8535 | |
8536 | /* For OpenMP, we can access a struct "t" and "t.d" on the same |
8537 | mapping. OpenACC allows multiple fields of the same structure |
8538 | to be written. */ |
8539 | if (addr_tokens[0]->type == STRUCTURE_BASE |
8540 | && (bitmap_bit_p (&map_field_head, DECL_UID (t)) |
8541 | || (!openacc && bitmap_bit_p (&map_head, DECL_UID (t))))) |
8542 | goto skip_decl_checks; |
8543 | |
8544 | if (!processing_template_decl && TREE_CODE (t) == FIELD_DECL) |
8545 | { |
8546 | OMP_CLAUSE_DECL (c) |
8547 | = finish_non_static_data_member (decl: t, NULL_TREE, NULL_TREE); |
8548 | break; |
8549 | } |
8550 | if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL) |
8551 | { |
8552 | if (processing_template_decl && TREE_CODE (t) != OVERLOAD) |
8553 | break; |
8554 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
8555 | && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER |
8556 | || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER |
8557 | || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH |
8558 | || (!openacc && EXPR_P (t)))) |
8559 | break; |
8560 | if (DECL_P (t)) |
8561 | error_at (OMP_CLAUSE_LOCATION (c), |
8562 | "%qD is not a variable in %qs clause" , t, |
8563 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
8564 | else |
8565 | error_at (OMP_CLAUSE_LOCATION (c), |
8566 | "%qE is not a variable in %qs clause" , t, |
8567 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
8568 | remove = true; |
8569 | } |
8570 | else if (VAR_P (t) && CP_DECL_THREAD_LOCAL_P (t)) |
8571 | { |
8572 | error_at (OMP_CLAUSE_LOCATION (c), |
8573 | "%qD is threadprivate variable in %qs clause" , t, |
8574 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
8575 | remove = true; |
8576 | } |
8577 | else if (!processing_template_decl |
8578 | && !TYPE_REF_P (TREE_TYPE (t)) |
8579 | && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP |
8580 | || (OMP_CLAUSE_MAP_KIND (c) |
8581 | != GOMP_MAP_FIRSTPRIVATE_POINTER)) |
8582 | && !indir_component_ref_p |
8583 | && (t != current_class_ptr |
8584 | || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP |
8585 | || OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH_DETACH) |
8586 | && !cxx_mark_addressable (t)) |
8587 | remove = true; |
8588 | else if (!(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
8589 | && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER |
8590 | || (OMP_CLAUSE_MAP_KIND (c) |
8591 | == GOMP_MAP_FIRSTPRIVATE_POINTER) |
8592 | || (OMP_CLAUSE_MAP_KIND (c) |
8593 | == GOMP_MAP_ATTACH_DETACH))) |
8594 | && t == OMP_CLAUSE_DECL (c) |
8595 | && !type_dependent_expression_p (t) |
8596 | && !omp_mappable_type (TYPE_REF_P (TREE_TYPE (t)) |
8597 | ? TREE_TYPE (TREE_TYPE (t)) |
8598 | : TREE_TYPE (t))) |
8599 | { |
8600 | error_at (OMP_CLAUSE_LOCATION (c), |
8601 | "%qD does not have a mappable type in %qs clause" , t, |
8602 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
8603 | if (TREE_TYPE (t) != error_mark_node |
8604 | && !COMPLETE_TYPE_P (TREE_TYPE (t))) |
8605 | cxx_incomplete_type_inform (TREE_TYPE (t)); |
8606 | remove = true; |
8607 | } |
8608 | else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
8609 | && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FORCE_DEVICEPTR |
8610 | && !type_dependent_expression_p (t) |
8611 | && !INDIRECT_TYPE_P (TREE_TYPE (t))) |
8612 | { |
8613 | error_at (OMP_CLAUSE_LOCATION (c), |
8614 | "%qD is not a pointer variable" , t); |
8615 | remove = true; |
8616 | } |
8617 | else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
8618 | && OMP_CLAUSE_MAP_IMPLICIT (c) |
8619 | && (bitmap_bit_p (&map_head, DECL_UID (t)) |
8620 | || bitmap_bit_p (&map_field_head, DECL_UID (t)) |
8621 | || bitmap_bit_p (&map_firstprivate_head, |
8622 | DECL_UID (t)))) |
8623 | remove = true; |
8624 | else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
8625 | && (OMP_CLAUSE_MAP_KIND (c) |
8626 | == GOMP_MAP_FIRSTPRIVATE_POINTER)) |
8627 | { |
8628 | if (bitmap_bit_p (&generic_head, DECL_UID (t)) |
8629 | || bitmap_bit_p (&firstprivate_head, DECL_UID (t)) |
8630 | || bitmap_bit_p (&map_firstprivate_head, DECL_UID (t))) |
8631 | { |
8632 | error_at (OMP_CLAUSE_LOCATION (c), |
8633 | "%qD appears more than once in data clauses" , t); |
8634 | remove = true; |
8635 | } |
8636 | else if (bitmap_bit_p (&map_head, DECL_UID (t)) |
8637 | && !bitmap_bit_p (&map_field_head, DECL_UID (t)) |
8638 | && openacc) |
8639 | { |
8640 | error_at (OMP_CLAUSE_LOCATION (c), |
8641 | "%qD appears more than once in data clauses" , t); |
8642 | remove = true; |
8643 | } |
8644 | else |
8645 | bitmap_set_bit (&map_firstprivate_head, DECL_UID (t)); |
8646 | } |
8647 | else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
8648 | && (OMP_CLAUSE_MAP_KIND (c) |
8649 | == GOMP_MAP_FIRSTPRIVATE_REFERENCE)) |
8650 | bitmap_set_bit (&map_firstprivate_head, DECL_UID (t)); |
8651 | else if (bitmap_bit_p (&map_head, DECL_UID (t)) |
8652 | && !bitmap_bit_p (&map_field_head, DECL_UID (t)) |
8653 | && ort != C_ORT_OMP |
8654 | && ort != C_ORT_OMP_EXIT_DATA) |
8655 | { |
8656 | if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP) |
8657 | error_at (OMP_CLAUSE_LOCATION (c), |
8658 | "%qD appears more than once in motion clauses" , t); |
8659 | else if (openacc) |
8660 | error_at (OMP_CLAUSE_LOCATION (c), |
8661 | "%qD appears more than once in data clauses" , t); |
8662 | else |
8663 | error_at (OMP_CLAUSE_LOCATION (c), |
8664 | "%qD appears more than once in map clauses" , t); |
8665 | remove = true; |
8666 | } |
8667 | else if (openacc && bitmap_bit_p (&generic_head, DECL_UID (t))) |
8668 | { |
8669 | error_at (OMP_CLAUSE_LOCATION (c), |
8670 | "%qD appears more than once in data clauses" , t); |
8671 | remove = true; |
8672 | } |
8673 | else if (bitmap_bit_p (&firstprivate_head, DECL_UID (t)) |
8674 | || bitmap_bit_p (&is_on_device_head, DECL_UID (t))) |
8675 | { |
8676 | if (openacc) |
8677 | error_at (OMP_CLAUSE_LOCATION (c), |
8678 | "%qD appears more than once in data clauses" , t); |
8679 | else |
8680 | error_at (OMP_CLAUSE_LOCATION (c), |
8681 | "%qD appears both in data and map clauses" , t); |
8682 | remove = true; |
8683 | } |
8684 | else if (!omp_access_chain_p (addr_tokens, 1)) |
8685 | { |
8686 | bitmap_set_bit (&map_head, DECL_UID (t)); |
8687 | |
8688 | tree decl = OMP_CLAUSE_DECL (c); |
8689 | if (t != decl |
8690 | && (TREE_CODE (decl) == COMPONENT_REF |
8691 | || (INDIRECT_REF_P (decl) |
8692 | && (TREE_CODE (TREE_OPERAND (decl, 0)) |
8693 | == COMPONENT_REF) |
8694 | && TYPE_REF_P (TREE_TYPE (TREE_OPERAND (decl, |
8695 | 0)))))) |
8696 | bitmap_set_bit (&map_field_head, DECL_UID (t)); |
8697 | } |
8698 | |
8699 | skip_decl_checks: |
8700 | /* If we call ai.expand_map_clause in handle_omp_array_sections, |
8701 | the containing loop (here) iterates through the new nodes |
8702 | created by that expansion. Avoid expanding those again (just |
8703 | by checking the node type). */ |
8704 | if (!remove |
8705 | && !processing_template_decl |
8706 | && ort != C_ORT_DECLARE_SIMD |
8707 | && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP |
8708 | || (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER |
8709 | && (OMP_CLAUSE_MAP_KIND (c) |
8710 | != GOMP_MAP_FIRSTPRIVATE_REFERENCE) |
8711 | && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER |
8712 | && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH_DETACH |
8713 | && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH |
8714 | && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH))) |
8715 | { |
8716 | grp_start_p = pc; |
8717 | grp_sentinel = OMP_CLAUSE_CHAIN (c); |
8718 | tree nc = ai.expand_map_clause (c, OMP_CLAUSE_DECL (c), |
8719 | addr_tokens, ort); |
8720 | if (nc != error_mark_node) |
8721 | c = nc; |
8722 | } |
8723 | } |
8724 | break; |
8725 | |
case OMP_CLAUSE_ENTER:
case OMP_CLAUSE_LINK:
/* 'enter'/'to' and 'link' on 'declare target': the operand must be
   a function (enter only) or a non-threadprivate variable with a
   mappable type, and may appear only once per directive.  */
t = OMP_CLAUSE_DECL (c);
const char *cname;
cname = omp_clause_code_name[OMP_CLAUSE_CODE (c)];
/* Diagnostics spell the clause "to" when it was written with the
   legacy 'to' syntax rather than 'enter'.  */
if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ENTER
&& OMP_CLAUSE_ENTER_TO (c))
cname = "to" ;
if (TREE_CODE (t) == FUNCTION_DECL
&& OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ENTER)
;
else if (!VAR_P (t))
{
if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ENTER)
{
if (TREE_CODE (t) == TEMPLATE_ID_EXPR)
error_at (OMP_CLAUSE_LOCATION (c),
"template %qE in clause %qs" , t, cname);
else if (really_overloaded_fn (t))
error_at (OMP_CLAUSE_LOCATION (c),
"overloaded function name %qE in clause %qs" , t,
cname);
else
error_at (OMP_CLAUSE_LOCATION (c),
"%qE is neither a variable nor a function name "
"in clause %qs" , t, cname);
}
else
error_at (OMP_CLAUSE_LOCATION (c),
"%qE is not a variable in clause %qs" , t, cname);
remove = true;
}
else if (DECL_THREAD_LOCAL_P (t))
{
error_at (OMP_CLAUSE_LOCATION (c),
"%qD is threadprivate variable in %qs clause" , t,
cname);
remove = true;
}
else if (!omp_mappable_type (TREE_TYPE (t)))
{
error_at (OMP_CLAUSE_LOCATION (c),
"%qD does not have a mappable type in %qs clause" , t,
cname);
if (TREE_TYPE (t) != error_mark_node
&& !COMPLETE_TYPE_P (TREE_TYPE (t)))
cxx_incomplete_type_inform (TREE_TYPE (t));
remove = true;
}
if (remove)
break;
/* generic_head tracks decls already named on this directive.  */
if (bitmap_bit_p (&generic_head, DECL_UID (t)))
{
error_at (OMP_CLAUSE_LOCATION (c),
"%qE appears more than once on the same "
"%<declare target%> directive" , t);
remove = true;
}
else
bitmap_set_bit (&generic_head, DECL_UID (t));
break;
8787 | |
case OMP_CLAUSE_UNIFORM:
/* 'uniform' (declare simd) must name a parameter of the
   function being declared.  */
t = OMP_CLAUSE_DECL (c);
if (TREE_CODE (t) != PARM_DECL)
{
if (processing_template_decl)
break;
if (DECL_P (t))
error_at (OMP_CLAUSE_LOCATION (c),
"%qD is not an argument in %<uniform%> clause" , t);
else
error_at (OMP_CLAUSE_LOCATION (c),
"%qE is not an argument in %<uniform%> clause" , t);
remove = true;
break;
}
/* map_head bitmap is used as uniform_head if declare_simd. */
bitmap_set_bit (&map_head, DECL_UID (t));
goto check_dup_generic;
8806 | |
8807 | case OMP_CLAUSE_GRAINSIZE: |
8808 | t = OMP_CLAUSE_GRAINSIZE_EXPR (c); |
8809 | if (t == error_mark_node) |
8810 | remove = true; |
8811 | else if (!type_dependent_expression_p (t) |
8812 | && !INTEGRAL_TYPE_P (TREE_TYPE (t))) |
8813 | { |
8814 | error_at (OMP_CLAUSE_LOCATION (c), |
8815 | "%<grainsize%> expression must be integral" ); |
8816 | remove = true; |
8817 | } |
8818 | else |
8819 | { |
8820 | t = mark_rvalue_use (t); |
8821 | if (!processing_template_decl) |
8822 | { |
8823 | t = maybe_constant_value (t); |
8824 | if (TREE_CODE (t) == INTEGER_CST |
8825 | && tree_int_cst_sgn (t) != 1) |
8826 | { |
8827 | warning_at (OMP_CLAUSE_LOCATION (c), OPT_Wopenmp, |
8828 | "%<grainsize%> value must be positive" ); |
8829 | t = integer_one_node; |
8830 | } |
8831 | t = fold_build_cleanup_point_expr (TREE_TYPE (t), expr: t); |
8832 | } |
8833 | OMP_CLAUSE_GRAINSIZE_EXPR (c) = t; |
8834 | } |
8835 | break; |
8836 | |
8837 | case OMP_CLAUSE_PRIORITY: |
8838 | t = OMP_CLAUSE_PRIORITY_EXPR (c); |
8839 | if (t == error_mark_node) |
8840 | remove = true; |
8841 | else if (!type_dependent_expression_p (t) |
8842 | && !INTEGRAL_TYPE_P (TREE_TYPE (t))) |
8843 | { |
8844 | error_at (OMP_CLAUSE_LOCATION (c), |
8845 | "%<priority%> expression must be integral" ); |
8846 | remove = true; |
8847 | } |
8848 | else |
8849 | { |
8850 | t = mark_rvalue_use (t); |
8851 | if (!processing_template_decl) |
8852 | { |
8853 | t = maybe_constant_value (t); |
8854 | if (TREE_CODE (t) == INTEGER_CST |
8855 | && tree_int_cst_sgn (t) == -1) |
8856 | { |
8857 | warning_at (OMP_CLAUSE_LOCATION (c), OPT_Wopenmp, |
8858 | "%<priority%> value must be non-negative" ); |
8859 | t = integer_one_node; |
8860 | } |
8861 | t = fold_build_cleanup_point_expr (TREE_TYPE (t), expr: t); |
8862 | } |
8863 | OMP_CLAUSE_PRIORITY_EXPR (c) = t; |
8864 | } |
8865 | break; |
8866 | |
8867 | case OMP_CLAUSE_HINT: |
8868 | t = OMP_CLAUSE_HINT_EXPR (c); |
8869 | if (t == error_mark_node) |
8870 | remove = true; |
8871 | else if (!type_dependent_expression_p (t) |
8872 | && !INTEGRAL_TYPE_P (TREE_TYPE (t))) |
8873 | { |
8874 | error_at (OMP_CLAUSE_LOCATION (c), |
8875 | "%<hint%> expression must be integral" ); |
8876 | remove = true; |
8877 | } |
8878 | else |
8879 | { |
8880 | t = mark_rvalue_use (t); |
8881 | if (!processing_template_decl) |
8882 | { |
8883 | t = maybe_constant_value (t); |
8884 | t = fold_build_cleanup_point_expr (TREE_TYPE (t), expr: t); |
8885 | if (TREE_CODE (t) != INTEGER_CST) |
8886 | { |
8887 | error_at (OMP_CLAUSE_LOCATION (c), |
8888 | "%<hint%> expression must be constant integer " |
8889 | "expression" ); |
8890 | remove = true; |
8891 | } |
8892 | } |
8893 | OMP_CLAUSE_HINT_EXPR (c) = t; |
8894 | } |
8895 | break; |
8896 | |
8897 | case OMP_CLAUSE_FILTER: |
8898 | t = OMP_CLAUSE_FILTER_EXPR (c); |
8899 | if (t == error_mark_node) |
8900 | remove = true; |
8901 | else if (!type_dependent_expression_p (t) |
8902 | && !INTEGRAL_TYPE_P (TREE_TYPE (t))) |
8903 | { |
8904 | error_at (OMP_CLAUSE_LOCATION (c), |
8905 | "%<filter%> expression must be integral" ); |
8906 | remove = true; |
8907 | } |
8908 | else |
8909 | { |
8910 | t = mark_rvalue_use (t); |
8911 | if (!processing_template_decl) |
8912 | { |
8913 | t = maybe_constant_value (t); |
8914 | t = fold_build_cleanup_point_expr (TREE_TYPE (t), expr: t); |
8915 | } |
8916 | OMP_CLAUSE_FILTER_EXPR (c) = t; |
8917 | } |
8918 | break; |
8919 | |
8920 | case OMP_CLAUSE_IS_DEVICE_PTR: |
8921 | case OMP_CLAUSE_USE_DEVICE_PTR: |
8922 | field_ok = (ort & C_ORT_OMP_DECLARE_SIMD) == C_ORT_OMP; |
8923 | t = OMP_CLAUSE_DECL (c); |
8924 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR) |
8925 | bitmap_set_bit (&is_on_device_head, DECL_UID (t)); |
8926 | if (!type_dependent_expression_p (t)) |
8927 | { |
8928 | tree type = TREE_TYPE (t); |
8929 | if (!TYPE_PTR_P (type) |
8930 | && (!TYPE_REF_P (type) || !TYPE_PTR_P (TREE_TYPE (type)))) |
8931 | { |
8932 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR |
8933 | && ort == C_ORT_OMP) |
8934 | { |
8935 | error_at (OMP_CLAUSE_LOCATION (c), |
8936 | "%qs variable is neither a pointer " |
8937 | "nor reference to pointer" , |
8938 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
8939 | remove = true; |
8940 | } |
8941 | else if (TREE_CODE (type) != ARRAY_TYPE |
8942 | && (!TYPE_REF_P (type) |
8943 | || TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE)) |
8944 | { |
8945 | error_at (OMP_CLAUSE_LOCATION (c), |
8946 | "%qs variable is neither a pointer, nor an " |
8947 | "array nor reference to pointer or array" , |
8948 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
8949 | remove = true; |
8950 | } |
8951 | } |
8952 | } |
8953 | goto check_dup_generic; |
8954 | |
8955 | case OMP_CLAUSE_HAS_DEVICE_ADDR: |
8956 | t = OMP_CLAUSE_DECL (c); |
8957 | if (TREE_CODE (t) == OMP_ARRAY_SECTION) |
8958 | { |
8959 | if (handle_omp_array_sections (c, ort)) |
8960 | remove = true; |
8961 | else |
8962 | { |
8963 | t = OMP_CLAUSE_DECL (c); |
8964 | while (TREE_CODE (t) == OMP_ARRAY_SECTION) |
8965 | t = TREE_OPERAND (t, 0); |
8966 | while (INDIRECT_REF_P (t) |
8967 | || TREE_CODE (t) == ARRAY_REF) |
8968 | t = TREE_OPERAND (t, 0); |
8969 | } |
8970 | } |
8971 | if (VAR_P (t) || TREE_CODE (t) == PARM_DECL) |
8972 | { |
8973 | bitmap_set_bit (&is_on_device_head, DECL_UID (t)); |
8974 | if (!processing_template_decl |
8975 | && !cxx_mark_addressable (t)) |
8976 | remove = true; |
8977 | } |
8978 | goto check_dup_generic_t; |
8979 | |
8980 | case OMP_CLAUSE_USE_DEVICE_ADDR: |
8981 | field_ok = true; |
8982 | t = OMP_CLAUSE_DECL (c); |
8983 | if (!processing_template_decl |
8984 | && (VAR_P (t) || TREE_CODE (t) == PARM_DECL) |
8985 | && !TYPE_REF_P (TREE_TYPE (t)) |
8986 | && !cxx_mark_addressable (t)) |
8987 | remove = true; |
8988 | goto check_dup_generic; |
8989 | |
8990 | case OMP_CLAUSE_NOWAIT: |
8991 | case OMP_CLAUSE_DEFAULT: |
8992 | case OMP_CLAUSE_UNTIED: |
8993 | case OMP_CLAUSE_COLLAPSE: |
8994 | case OMP_CLAUSE_PARALLEL: |
8995 | case OMP_CLAUSE_FOR: |
8996 | case OMP_CLAUSE_SECTIONS: |
8997 | case OMP_CLAUSE_TASKGROUP: |
8998 | case OMP_CLAUSE_PROC_BIND: |
8999 | case OMP_CLAUSE_DEVICE_TYPE: |
9000 | case OMP_CLAUSE_NOGROUP: |
9001 | case OMP_CLAUSE_THREADS: |
9002 | case OMP_CLAUSE_SIMD: |
9003 | case OMP_CLAUSE_DEFAULTMAP: |
9004 | case OMP_CLAUSE_BIND: |
9005 | case OMP_CLAUSE_AUTO: |
9006 | case OMP_CLAUSE_INDEPENDENT: |
9007 | case OMP_CLAUSE_SEQ: |
9008 | case OMP_CLAUSE_IF_PRESENT: |
9009 | case OMP_CLAUSE_FINALIZE: |
9010 | case OMP_CLAUSE_NOHOST: |
9011 | case OMP_CLAUSE_INDIRECT: |
9012 | break; |
9013 | |
9014 | case OMP_CLAUSE_MERGEABLE: |
9015 | mergeable_seen = true; |
9016 | break; |
9017 | |
9018 | case OMP_CLAUSE_TILE: |
9019 | for (tree list = OMP_CLAUSE_TILE_LIST (c); !remove && list; |
9020 | list = TREE_CHAIN (list)) |
9021 | { |
9022 | t = TREE_VALUE (list); |
9023 | |
9024 | if (t == error_mark_node) |
9025 | remove = true; |
9026 | else if (!type_dependent_expression_p (t) |
9027 | && !INTEGRAL_TYPE_P (TREE_TYPE (t))) |
9028 | { |
9029 | error_at (OMP_CLAUSE_LOCATION (c), |
9030 | "%<tile%> argument needs integral type" ); |
9031 | remove = true; |
9032 | } |
9033 | else |
9034 | { |
9035 | t = mark_rvalue_use (t); |
9036 | if (!processing_template_decl) |
9037 | { |
9038 | /* Zero is used to indicate '*', we permit you |
9039 | to get there via an ICE of value zero. */ |
9040 | t = maybe_constant_value (t); |
9041 | if (!tree_fits_shwi_p (t) |
9042 | || tree_to_shwi (t) < 0) |
9043 | { |
9044 | error_at (OMP_CLAUSE_LOCATION (c), |
9045 | "%<tile%> argument needs positive " |
9046 | "integral constant" ); |
9047 | remove = true; |
9048 | } |
9049 | t = fold_build_cleanup_point_expr (TREE_TYPE (t), expr: t); |
9050 | } |
9051 | } |
9052 | |
9053 | /* Update list item. */ |
9054 | TREE_VALUE (list) = t; |
9055 | } |
9056 | break; |
9057 | |
9058 | case OMP_CLAUSE_ORDERED: |
9059 | ordered_seen = true; |
9060 | break; |
9061 | |
9062 | case OMP_CLAUSE_ORDER: |
9063 | if (order_seen) |
9064 | remove = true; |
9065 | else |
9066 | order_seen = true; |
9067 | break; |
9068 | |
9069 | case OMP_CLAUSE_INBRANCH: |
9070 | case OMP_CLAUSE_NOTINBRANCH: |
9071 | if (branch_seen) |
9072 | { |
9073 | error_at (OMP_CLAUSE_LOCATION (c), |
9074 | "%<inbranch%> clause is incompatible with " |
9075 | "%<notinbranch%>" ); |
9076 | remove = true; |
9077 | } |
9078 | branch_seen = true; |
9079 | break; |
9080 | |
9081 | case OMP_CLAUSE_INCLUSIVE: |
9082 | case OMP_CLAUSE_EXCLUSIVE: |
9083 | t = omp_clause_decl_field (OMP_CLAUSE_DECL (c)); |
9084 | if (!t) |
9085 | t = OMP_CLAUSE_DECL (c); |
9086 | if (t == current_class_ptr) |
9087 | { |
9088 | error_at (OMP_CLAUSE_LOCATION (c), |
9089 | "%<this%> allowed in OpenMP only in %<declare simd%>" |
9090 | " clauses" ); |
9091 | remove = true; |
9092 | break; |
9093 | } |
9094 | if (!VAR_P (t) |
9095 | && TREE_CODE (t) != PARM_DECL |
9096 | && TREE_CODE (t) != FIELD_DECL) |
9097 | { |
9098 | if (processing_template_decl && TREE_CODE (t) != OVERLOAD) |
9099 | break; |
9100 | if (DECL_P (t)) |
9101 | error_at (OMP_CLAUSE_LOCATION (c), |
9102 | "%qD is not a variable in clause %qs" , t, |
9103 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
9104 | else |
9105 | error_at (OMP_CLAUSE_LOCATION (c), |
9106 | "%qE is not a variable in clause %qs" , t, |
9107 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
9108 | remove = true; |
9109 | } |
9110 | break; |
9111 | |
9112 | default: |
9113 | gcc_unreachable (); |
9114 | } |
9115 | |
9116 | if (remove) |
9117 | { |
9118 | if (grp_start_p) |
9119 | { |
9120 | /* If we found a clause to remove, we want to remove the whole |
9121 | expanded group, otherwise gimplify |
9122 | (omp_resolve_clause_dependencies) can get confused. */ |
9123 | *grp_start_p = grp_sentinel; |
9124 | pc = grp_start_p; |
9125 | grp_start_p = NULL; |
9126 | } |
9127 | else |
9128 | *pc = OMP_CLAUSE_CHAIN (c); |
9129 | } |
9130 | else |
9131 | pc = &OMP_CLAUSE_CHAIN (c); |
9132 | } |
9133 | |
9134 | if (reduction_seen < 0 && (ordered_seen || schedule_seen)) |
9135 | reduction_seen = -2; |
9136 | |
9137 | for (pc = &clauses, c = clauses; c ; c = *pc) |
9138 | { |
9139 | enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c); |
9140 | bool remove = false; |
9141 | bool need_complete_type = false; |
9142 | bool need_default_ctor = false; |
9143 | bool need_copy_ctor = false; |
9144 | bool need_copy_assignment = false; |
9145 | bool need_implicitly_determined = false; |
9146 | bool need_dtor = false; |
9147 | tree type, inner_type; |
9148 | |
9149 | switch (c_kind) |
9150 | { |
9151 | case OMP_CLAUSE_SHARED: |
9152 | need_implicitly_determined = true; |
9153 | break; |
9154 | case OMP_CLAUSE_PRIVATE: |
9155 | need_complete_type = true; |
9156 | need_default_ctor = true; |
9157 | need_dtor = true; |
9158 | need_implicitly_determined = true; |
9159 | break; |
9160 | case OMP_CLAUSE_FIRSTPRIVATE: |
9161 | need_complete_type = true; |
9162 | need_copy_ctor = true; |
9163 | need_dtor = true; |
9164 | need_implicitly_determined = true; |
9165 | break; |
9166 | case OMP_CLAUSE_LASTPRIVATE: |
9167 | need_complete_type = true; |
9168 | need_copy_assignment = true; |
9169 | need_implicitly_determined = true; |
9170 | break; |
9171 | case OMP_CLAUSE_REDUCTION: |
9172 | if (reduction_seen == -2) |
9173 | OMP_CLAUSE_REDUCTION_INSCAN (c) = 0; |
9174 | if (OMP_CLAUSE_REDUCTION_INSCAN (c)) |
9175 | need_copy_assignment = true; |
9176 | need_implicitly_determined = true; |
9177 | break; |
9178 | case OMP_CLAUSE_IN_REDUCTION: |
9179 | case OMP_CLAUSE_TASK_REDUCTION: |
9180 | case OMP_CLAUSE_INCLUSIVE: |
9181 | case OMP_CLAUSE_EXCLUSIVE: |
9182 | need_implicitly_determined = true; |
9183 | break; |
9184 | case OMP_CLAUSE_LINEAR: |
9185 | if (ort != C_ORT_OMP_DECLARE_SIMD) |
9186 | need_implicitly_determined = true; |
9187 | else if (OMP_CLAUSE_LINEAR_VARIABLE_STRIDE (c) |
9188 | && !bitmap_bit_p (&map_head, |
9189 | DECL_UID (OMP_CLAUSE_LINEAR_STEP (c)))) |
9190 | { |
9191 | error_at (OMP_CLAUSE_LOCATION (c), |
9192 | "%<linear%> clause step is a parameter %qD not " |
9193 | "specified in %<uniform%> clause" , |
9194 | OMP_CLAUSE_LINEAR_STEP (c)); |
9195 | *pc = OMP_CLAUSE_CHAIN (c); |
9196 | continue; |
9197 | } |
9198 | break; |
9199 | case OMP_CLAUSE_COPYPRIVATE: |
9200 | need_copy_assignment = true; |
9201 | break; |
9202 | case OMP_CLAUSE_COPYIN: |
9203 | need_copy_assignment = true; |
9204 | break; |
9205 | case OMP_CLAUSE_SIMDLEN: |
9206 | if (safelen |
9207 | && !processing_template_decl |
9208 | && tree_int_cst_lt (OMP_CLAUSE_SAFELEN_EXPR (safelen), |
9209 | OMP_CLAUSE_SIMDLEN_EXPR (c))) |
9210 | { |
9211 | error_at (OMP_CLAUSE_LOCATION (c), |
9212 | "%<simdlen%> clause value is bigger than " |
9213 | "%<safelen%> clause value" ); |
9214 | OMP_CLAUSE_SIMDLEN_EXPR (c) |
9215 | = OMP_CLAUSE_SAFELEN_EXPR (safelen); |
9216 | } |
9217 | pc = &OMP_CLAUSE_CHAIN (c); |
9218 | continue; |
9219 | case OMP_CLAUSE_SCHEDULE: |
9220 | if (ordered_seen |
9221 | && (OMP_CLAUSE_SCHEDULE_KIND (c) |
9222 | & OMP_CLAUSE_SCHEDULE_NONMONOTONIC)) |
9223 | { |
9224 | error_at (OMP_CLAUSE_LOCATION (c), |
9225 | "%<nonmonotonic%> schedule modifier specified " |
9226 | "together with %<ordered%> clause" ); |
9227 | OMP_CLAUSE_SCHEDULE_KIND (c) |
9228 | = (enum omp_clause_schedule_kind) |
9229 | (OMP_CLAUSE_SCHEDULE_KIND (c) |
9230 | & ~OMP_CLAUSE_SCHEDULE_NONMONOTONIC); |
9231 | } |
9232 | if (reduction_seen == -2) |
9233 | error_at (OMP_CLAUSE_LOCATION (c), |
9234 | "%qs clause specified together with %<inscan%> " |
9235 | "%<reduction%> clause" , "schedule" ); |
9236 | pc = &OMP_CLAUSE_CHAIN (c); |
9237 | continue; |
9238 | case OMP_CLAUSE_NOGROUP: |
9239 | if (reduction_seen) |
9240 | { |
9241 | error_at (OMP_CLAUSE_LOCATION (c), |
9242 | "%<nogroup%> clause must not be used together with " |
9243 | "%<reduction%> clause" ); |
9244 | *pc = OMP_CLAUSE_CHAIN (c); |
9245 | continue; |
9246 | } |
9247 | pc = &OMP_CLAUSE_CHAIN (c); |
9248 | continue; |
9249 | case OMP_CLAUSE_ORDERED: |
9250 | if (reduction_seen == -2) |
9251 | error_at (OMP_CLAUSE_LOCATION (c), |
9252 | "%qs clause specified together with %<inscan%> " |
9253 | "%<reduction%> clause" , "ordered" ); |
9254 | pc = &OMP_CLAUSE_CHAIN (c); |
9255 | continue; |
9256 | case OMP_CLAUSE_ORDER: |
9257 | if (ordered_seen) |
9258 | { |
9259 | error_at (OMP_CLAUSE_LOCATION (c), |
9260 | "%<order%> clause must not be used together " |
9261 | "with %<ordered%>" ); |
9262 | *pc = OMP_CLAUSE_CHAIN (c); |
9263 | continue; |
9264 | } |
9265 | pc = &OMP_CLAUSE_CHAIN (c); |
9266 | continue; |
9267 | case OMP_CLAUSE_DETACH: |
9268 | if (mergeable_seen) |
9269 | { |
9270 | error_at (OMP_CLAUSE_LOCATION (c), |
9271 | "%<detach%> clause must not be used together with " |
9272 | "%<mergeable%> clause" ); |
9273 | *pc = OMP_CLAUSE_CHAIN (c); |
9274 | continue; |
9275 | } |
9276 | pc = &OMP_CLAUSE_CHAIN (c); |
9277 | continue; |
9278 | case OMP_CLAUSE_MAP: |
9279 | if (target_in_reduction_seen && !processing_template_decl) |
9280 | { |
9281 | t = OMP_CLAUSE_DECL (c); |
9282 | while (handled_component_p (t) |
9283 | || INDIRECT_REF_P (t) |
9284 | || TREE_CODE (t) == ADDR_EXPR |
9285 | || TREE_CODE (t) == MEM_REF |
9286 | || TREE_CODE (t) == NON_LVALUE_EXPR) |
9287 | t = TREE_OPERAND (t, 0); |
9288 | if (DECL_P (t) |
9289 | && bitmap_bit_p (&oacc_reduction_head, DECL_UID (t))) |
9290 | OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1; |
9291 | } |
9292 | pc = &OMP_CLAUSE_CHAIN (c); |
9293 | continue; |
9294 | case OMP_CLAUSE_NOWAIT: |
9295 | if (copyprivate_seen) |
9296 | { |
9297 | error_at (OMP_CLAUSE_LOCATION (c), |
9298 | "%<nowait%> clause must not be used together " |
9299 | "with %<copyprivate%>" ); |
9300 | *pc = OMP_CLAUSE_CHAIN (c); |
9301 | continue; |
9302 | } |
9303 | /* FALLTHRU */ |
9304 | default: |
9305 | pc = &OMP_CLAUSE_CHAIN (c); |
9306 | continue; |
9307 | } |
9308 | |
9309 | t = OMP_CLAUSE_DECL (c); |
9310 | switch (c_kind) |
9311 | { |
9312 | case OMP_CLAUSE_LASTPRIVATE: |
9313 | if (DECL_P (t) |
9314 | && !bitmap_bit_p (&firstprivate_head, DECL_UID (t))) |
9315 | { |
9316 | need_default_ctor = true; |
9317 | need_dtor = true; |
9318 | } |
9319 | break; |
9320 | |
9321 | case OMP_CLAUSE_REDUCTION: |
9322 | case OMP_CLAUSE_IN_REDUCTION: |
9323 | case OMP_CLAUSE_TASK_REDUCTION: |
9324 | if (allocate_seen) |
9325 | { |
9326 | if (TREE_CODE (t) == MEM_REF) |
9327 | { |
9328 | t = TREE_OPERAND (t, 0); |
9329 | if (TREE_CODE (t) == POINTER_PLUS_EXPR) |
9330 | t = TREE_OPERAND (t, 0); |
9331 | if (TREE_CODE (t) == ADDR_EXPR |
9332 | || INDIRECT_REF_P (t)) |
9333 | t = TREE_OPERAND (t, 0); |
9334 | if (DECL_P (t)) |
9335 | bitmap_clear_bit (&aligned_head, DECL_UID (t)); |
9336 | } |
9337 | else if (TREE_CODE (t) == OMP_ARRAY_SECTION) |
9338 | { |
9339 | while (TREE_CODE (t) == OMP_ARRAY_SECTION) |
9340 | t = TREE_OPERAND (t, 0); |
9341 | if (DECL_P (t)) |
9342 | bitmap_clear_bit (&aligned_head, DECL_UID (t)); |
9343 | t = OMP_CLAUSE_DECL (c); |
9344 | } |
9345 | else if (DECL_P (t)) |
9346 | bitmap_clear_bit (&aligned_head, DECL_UID (t)); |
9347 | t = OMP_CLAUSE_DECL (c); |
9348 | } |
9349 | if (processing_template_decl |
9350 | && !VAR_P (t) && TREE_CODE (t) != PARM_DECL) |
9351 | break; |
9352 | if (finish_omp_reduction_clause (c, need_default_ctor: &need_default_ctor, |
9353 | need_dtor: &need_dtor)) |
9354 | remove = true; |
9355 | else |
9356 | t = OMP_CLAUSE_DECL (c); |
9357 | break; |
9358 | |
9359 | case OMP_CLAUSE_COPYIN: |
9360 | if (processing_template_decl |
9361 | && !VAR_P (t) && TREE_CODE (t) != PARM_DECL) |
9362 | break; |
9363 | if (!VAR_P (t) || !CP_DECL_THREAD_LOCAL_P (t)) |
9364 | { |
9365 | error_at (OMP_CLAUSE_LOCATION (c), |
9366 | "%qE must be %<threadprivate%> for %<copyin%>" , t); |
9367 | remove = true; |
9368 | } |
9369 | break; |
9370 | |
9371 | default: |
9372 | break; |
9373 | } |
9374 | |
9375 | if (processing_template_decl |
9376 | && !VAR_P (t) && TREE_CODE (t) != PARM_DECL) |
9377 | { |
9378 | pc = &OMP_CLAUSE_CHAIN (c); |
9379 | continue; |
9380 | } |
9381 | |
9382 | if (need_complete_type || need_copy_assignment) |
9383 | { |
9384 | t = require_complete_type (t); |
9385 | if (t == error_mark_node) |
9386 | remove = true; |
9387 | else if (!processing_template_decl |
9388 | && TYPE_REF_P (TREE_TYPE (t)) |
9389 | && !complete_type_or_else (TREE_TYPE (TREE_TYPE (t)), t)) |
9390 | remove = true; |
9391 | } |
9392 | if (need_implicitly_determined) |
9393 | { |
9394 | const char *share_name = NULL; |
9395 | |
9396 | if (allocate_seen |
9397 | && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_SHARED |
9398 | && DECL_P (t)) |
9399 | bitmap_clear_bit (&aligned_head, DECL_UID (t)); |
9400 | |
9401 | if (VAR_P (t) && CP_DECL_THREAD_LOCAL_P (t)) |
9402 | share_name = "threadprivate" ; |
9403 | else switch (cxx_omp_predetermined_sharing_1 (t)) |
9404 | { |
9405 | case OMP_CLAUSE_DEFAULT_UNSPECIFIED: |
9406 | break; |
9407 | case OMP_CLAUSE_DEFAULT_SHARED: |
9408 | if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED |
9409 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE) |
9410 | && c_omp_predefined_variable (t)) |
9411 | /* The __func__ variable and similar function-local predefined |
9412 | variables may be listed in a shared or firstprivate |
9413 | clause. */ |
9414 | break; |
9415 | if (VAR_P (t) |
9416 | && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE |
9417 | && TREE_STATIC (t) |
9418 | && cxx_omp_const_qual_no_mutable (t)) |
9419 | { |
9420 | tree ctx = CP_DECL_CONTEXT (t); |
9421 | /* const qualified static data members without mutable |
9422 | member may be specified in firstprivate clause. */ |
9423 | if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx)) |
9424 | break; |
9425 | } |
9426 | share_name = "shared" ; |
9427 | break; |
9428 | case OMP_CLAUSE_DEFAULT_PRIVATE: |
9429 | share_name = "private" ; |
9430 | break; |
9431 | default: |
9432 | gcc_unreachable (); |
9433 | } |
9434 | if (share_name) |
9435 | { |
9436 | error_at (OMP_CLAUSE_LOCATION (c), |
9437 | "%qE is predetermined %qs for %qs" , |
9438 | omp_clause_printable_decl (decl: t), share_name, |
9439 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
9440 | remove = true; |
9441 | } |
9442 | else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_SHARED |
9443 | && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE |
9444 | && cxx_omp_const_qual_no_mutable (t)) |
9445 | { |
9446 | error_at (OMP_CLAUSE_LOCATION (c), |
9447 | "%<const%> qualified %qE without %<mutable%> member " |
9448 | "may appear only in %<shared%> or %<firstprivate%> " |
9449 | "clauses" , omp_clause_printable_decl (decl: t)); |
9450 | remove = true; |
9451 | } |
9452 | } |
9453 | |
9454 | if (detach_seen |
9455 | && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED |
9456 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE |
9457 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE |
9458 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE) |
9459 | && OMP_CLAUSE_DECL (c) == OMP_CLAUSE_DECL (detach_seen)) |
9460 | { |
9461 | error_at (OMP_CLAUSE_LOCATION (c), |
9462 | "the event handle of a %<detach%> clause " |
9463 | "should not be in a data-sharing clause" ); |
9464 | remove = true; |
9465 | } |
9466 | |
9467 | /* We're interested in the base element, not arrays. */ |
9468 | inner_type = type = TREE_TYPE (t); |
9469 | if ((need_complete_type |
9470 | || need_copy_assignment |
9471 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION |
9472 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION |
9473 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION) |
9474 | && TYPE_REF_P (inner_type)) |
9475 | inner_type = TREE_TYPE (inner_type); |
9476 | while (TREE_CODE (inner_type) == ARRAY_TYPE) |
9477 | inner_type = TREE_TYPE (inner_type); |
9478 | |
9479 | /* Check for special function availability by building a call to one. |
9480 | Save the results, because later we won't be in the right context |
9481 | for making these queries. */ |
9482 | if (CLASS_TYPE_P (inner_type) |
9483 | && COMPLETE_TYPE_P (inner_type) |
9484 | && (need_default_ctor || need_copy_ctor |
9485 | || need_copy_assignment || need_dtor) |
9486 | && !type_dependent_expression_p (t) |
9487 | && cxx_omp_create_clause_info (c, type: inner_type, need_default_ctor, |
9488 | need_copy_ctor, need_copy_assignment, |
9489 | need_dtor)) |
9490 | remove = true; |
9491 | |
9492 | if (!remove |
9493 | && c_kind == OMP_CLAUSE_SHARED |
9494 | && processing_template_decl) |
9495 | { |
9496 | t = omp_clause_decl_field (OMP_CLAUSE_DECL (c)); |
9497 | if (t) |
9498 | OMP_CLAUSE_DECL (c) = t; |
9499 | } |
9500 | |
9501 | if (remove) |
9502 | *pc = OMP_CLAUSE_CHAIN (c); |
9503 | else |
9504 | pc = &OMP_CLAUSE_CHAIN (c); |
9505 | } |
9506 | |
9507 | if (allocate_seen) |
9508 | for (pc = &clauses, c = clauses; c ; c = *pc) |
9509 | { |
9510 | bool remove = false; |
9511 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE |
9512 | && !OMP_CLAUSE_ALLOCATE_COMBINED (c) |
9513 | && DECL_P (OMP_CLAUSE_DECL (c)) |
9514 | && bitmap_bit_p (&aligned_head, DECL_UID (OMP_CLAUSE_DECL (c)))) |
9515 | { |
9516 | error_at (OMP_CLAUSE_LOCATION (c), |
9517 | "%qD specified in %<allocate%> clause but not in " |
9518 | "an explicit privatization clause" , OMP_CLAUSE_DECL (c)); |
9519 | remove = true; |
9520 | } |
9521 | if (remove) |
9522 | *pc = OMP_CLAUSE_CHAIN (c); |
9523 | else |
9524 | pc = &OMP_CLAUSE_CHAIN (c); |
9525 | } |
9526 | |
9527 | bitmap_obstack_release (NULL); |
9528 | return clauses; |
9529 | } |
9530 | |
9531 | /* Start processing OpenMP clauses that can include any |
9532 | privatization clauses for non-static data members. */ |
9533 | |
9534 | tree |
9535 | push_omp_privatization_clauses (bool ignore_next) |
9536 | { |
9537 | if (omp_private_member_ignore_next) |
9538 | { |
9539 | omp_private_member_ignore_next = ignore_next; |
9540 | return NULL_TREE; |
9541 | } |
9542 | omp_private_member_ignore_next = ignore_next; |
9543 | if (omp_private_member_map) |
9544 | omp_private_member_vec.safe_push (error_mark_node); |
9545 | return push_stmt_list (); |
9546 | } |
9547 | |
9548 | /* Revert remapping of any non-static data members since |
9549 | the last push_omp_privatization_clauses () call. */ |
9550 | |
9551 | void |
9552 | pop_omp_privatization_clauses (tree stmt) |
9553 | { |
9554 | if (stmt == NULL_TREE) |
9555 | return; |
9556 | stmt = pop_stmt_list (stmt); |
9557 | if (omp_private_member_map) |
9558 | { |
9559 | while (!omp_private_member_vec.is_empty ()) |
9560 | { |
9561 | tree t = omp_private_member_vec.pop (); |
9562 | if (t == error_mark_node) |
9563 | { |
9564 | add_stmt (t: stmt); |
9565 | return; |
9566 | } |
9567 | bool no_decl_expr = t == integer_zero_node; |
9568 | if (no_decl_expr) |
9569 | t = omp_private_member_vec.pop (); |
9570 | tree *v = omp_private_member_map->get (k: t); |
9571 | gcc_assert (v); |
9572 | if (!no_decl_expr) |
9573 | add_decl_expr (decl: *v); |
9574 | omp_private_member_map->remove (k: t); |
9575 | } |
9576 | delete omp_private_member_map; |
9577 | omp_private_member_map = NULL; |
9578 | } |
9579 | add_stmt (t: stmt); |
9580 | } |
9581 | |
9582 | /* Remember OpenMP privatization clauses mapping and clear it. |
9583 | Used for lambdas. */ |
9584 | |
9585 | void |
9586 | save_omp_privatization_clauses (vec<tree> &save) |
9587 | { |
9588 | save = vNULL; |
9589 | if (omp_private_member_ignore_next) |
9590 | save.safe_push (integer_one_node); |
9591 | omp_private_member_ignore_next = false; |
9592 | if (!omp_private_member_map) |
9593 | return; |
9594 | |
9595 | while (!omp_private_member_vec.is_empty ()) |
9596 | { |
9597 | tree t = omp_private_member_vec.pop (); |
9598 | if (t == error_mark_node) |
9599 | { |
9600 | save.safe_push (obj: t); |
9601 | continue; |
9602 | } |
9603 | tree n = t; |
9604 | if (t == integer_zero_node) |
9605 | t = omp_private_member_vec.pop (); |
9606 | tree *v = omp_private_member_map->get (k: t); |
9607 | gcc_assert (v); |
9608 | save.safe_push (obj: *v); |
9609 | save.safe_push (obj: t); |
9610 | if (n != t) |
9611 | save.safe_push (obj: n); |
9612 | } |
9613 | delete omp_private_member_map; |
9614 | omp_private_member_map = NULL; |
9615 | } |
9616 | |
9617 | /* Restore OpenMP privatization clauses mapping saved by the |
9618 | above function. */ |
9619 | |
9620 | void |
9621 | restore_omp_privatization_clauses (vec<tree> &save) |
9622 | { |
9623 | gcc_assert (omp_private_member_vec.is_empty ()); |
9624 | omp_private_member_ignore_next = false; |
9625 | if (save.is_empty ()) |
9626 | return; |
9627 | if (save.length () == 1 && save[0] == integer_one_node) |
9628 | { |
9629 | omp_private_member_ignore_next = true; |
9630 | save.release (); |
9631 | return; |
9632 | } |
9633 | |
9634 | omp_private_member_map = new hash_map <tree, tree>; |
9635 | while (!save.is_empty ()) |
9636 | { |
9637 | tree t = save.pop (); |
9638 | tree n = t; |
9639 | if (t != error_mark_node) |
9640 | { |
9641 | if (t == integer_one_node) |
9642 | { |
9643 | omp_private_member_ignore_next = true; |
9644 | gcc_assert (save.is_empty ()); |
9645 | break; |
9646 | } |
9647 | if (t == integer_zero_node) |
9648 | t = save.pop (); |
9649 | tree &v = omp_private_member_map->get_or_insert (k: t); |
9650 | v = save.pop (); |
9651 | } |
9652 | omp_private_member_vec.safe_push (obj: t); |
9653 | if (n != t) |
9654 | omp_private_member_vec.safe_push (obj: n); |
9655 | } |
9656 | save.release (); |
9657 | } |
9658 | |
9659 | /* For all variables in the tree_list VARS, mark them as thread local. */ |
9660 | |
9661 | void |
9662 | finish_omp_threadprivate (tree vars) |
9663 | { |
9664 | tree t; |
9665 | |
9666 | /* Mark every variable in VARS to be assigned thread local storage. */ |
9667 | for (t = vars; t; t = TREE_CHAIN (t)) |
9668 | { |
9669 | tree v = TREE_PURPOSE (t); |
9670 | |
9671 | if (error_operand_p (t: v)) |
9672 | ; |
9673 | else if (!VAR_P (v)) |
9674 | error ("%<threadprivate%> %qD is not file, namespace " |
9675 | "or block scope variable" , v); |
9676 | /* If V had already been marked threadprivate, it doesn't matter |
9677 | whether it had been used prior to this point. */ |
9678 | else if (TREE_USED (v) |
9679 | && (DECL_LANG_SPECIFIC (v) == NULL |
9680 | || !CP_DECL_THREADPRIVATE_P (v))) |
9681 | error ("%qE declared %<threadprivate%> after first use" , v); |
9682 | else if (! TREE_STATIC (v) && ! DECL_EXTERNAL (v)) |
9683 | error ("automatic variable %qE cannot be %<threadprivate%>" , v); |
9684 | else if (! COMPLETE_TYPE_P (complete_type (TREE_TYPE (v)))) |
9685 | error ("%<threadprivate%> %qE has incomplete type" , v); |
9686 | else if (TREE_STATIC (v) && TYPE_P (CP_DECL_CONTEXT (v)) |
9687 | && CP_DECL_CONTEXT (v) != current_class_type) |
9688 | error ("%<threadprivate%> %qE directive not " |
9689 | "in %qT definition" , v, CP_DECL_CONTEXT (v)); |
9690 | else |
9691 | { |
9692 | /* Allocate a LANG_SPECIFIC structure for V, if needed. */ |
9693 | if (DECL_LANG_SPECIFIC (v) == NULL) |
9694 | retrofit_lang_decl (v); |
9695 | |
9696 | if (! CP_DECL_THREAD_LOCAL_P (v)) |
9697 | { |
9698 | CP_DECL_THREAD_LOCAL_P (v) = true; |
9699 | set_decl_tls_model (v, decl_default_tls_model (v)); |
9700 | /* If rtl has been already set for this var, call |
9701 | make_decl_rtl once again, so that encode_section_info |
9702 | has a chance to look at the new decl flags. */ |
9703 | if (DECL_RTL_SET_P (v)) |
9704 | make_decl_rtl (v); |
9705 | } |
9706 | CP_DECL_THREADPRIVATE_P (v) = 1; |
9707 | } |
9708 | } |
9709 | } |
9710 | |
9711 | /* Build an OpenMP structured block. */ |
9712 | |
9713 | tree |
9714 | begin_omp_structured_block (void) |
9715 | { |
9716 | return do_pushlevel (sk: sk_omp); |
9717 | } |
9718 | |
9719 | tree |
9720 | finish_omp_structured_block (tree block) |
9721 | { |
9722 | return do_poplevel (stmt_list: block); |
9723 | } |
9724 | |
9725 | /* Similarly, except force the retention of the BLOCK. */ |
9726 | |
9727 | tree |
9728 | begin_omp_parallel (void) |
9729 | { |
9730 | keep_next_level (true); |
9731 | return begin_omp_structured_block (); |
9732 | } |
9733 | |
9734 | /* Generate OACC_DATA, with CLAUSES and BLOCK as its compound |
9735 | statement. */ |
9736 | |
9737 | tree |
9738 | finish_oacc_data (tree clauses, tree block) |
9739 | { |
9740 | tree stmt; |
9741 | |
9742 | block = finish_omp_structured_block (block); |
9743 | |
9744 | stmt = make_node (OACC_DATA); |
9745 | TREE_TYPE (stmt) = void_type_node; |
9746 | OACC_DATA_CLAUSES (stmt) = clauses; |
9747 | OACC_DATA_BODY (stmt) = block; |
9748 | |
9749 | return add_stmt (t: stmt); |
9750 | } |
9751 | |
9752 | /* Generate OACC_HOST_DATA, with CLAUSES and BLOCK as its compound |
9753 | statement. */ |
9754 | |
9755 | tree |
9756 | finish_oacc_host_data (tree clauses, tree block) |
9757 | { |
9758 | tree stmt; |
9759 | |
9760 | block = finish_omp_structured_block (block); |
9761 | |
9762 | stmt = make_node (OACC_HOST_DATA); |
9763 | TREE_TYPE (stmt) = void_type_node; |
9764 | OACC_HOST_DATA_CLAUSES (stmt) = clauses; |
9765 | OACC_HOST_DATA_BODY (stmt) = block; |
9766 | |
9767 | return add_stmt (t: stmt); |
9768 | } |
9769 | |
9770 | /* Generate OMP construct CODE, with BODY and CLAUSES as its compound |
9771 | statement. */ |
9772 | |
9773 | tree |
9774 | finish_omp_construct (enum tree_code code, tree body, tree clauses) |
9775 | { |
9776 | body = finish_omp_structured_block (block: body); |
9777 | |
9778 | tree stmt = make_node (code); |
9779 | TREE_TYPE (stmt) = void_type_node; |
9780 | OMP_BODY (stmt) = body; |
9781 | OMP_CLAUSES (stmt) = clauses; |
9782 | |
9783 | return add_stmt (t: stmt); |
9784 | } |
9785 | |
9786 | /* Used to walk OpenMP target directive body. */ |
9787 | |
struct omp_target_walk_data
{
  /* Holds the 'this' expression found in current function.  */
  tree current_object;

  /* True if the 'this' expression was accessed in the target body.  */
  bool this_expr_accessed;

  /* For non-static functions, record which pointer-typed members were
     accessed, and the whole expression.  Keyed by the FIELD_DECL; the
     value is the full access expression recorded on first sight.  */
  hash_map<tree, tree> ptr_members_accessed;

  /* Record which lambda objects were accessed in target body.  */
  hash_set<tree> lambda_objects_accessed;

  /* For lambda functions, the __closure object expression of the current
     function, and the set of captured variables accessed in target body.  */
  tree current_closure;
  hash_set<tree> closure_vars_accessed;

  /* Local variables declared inside a BIND_EXPR, used to filter out such
     variables when recording lambda_objects_accessed.  */
  hash_set<tree> local_decls;
};
9812 | |
9813 | /* Helper function of finish_omp_target_clauses, called via |
9814 | cp_walk_tree_without_duplicates. Traverse body of OpenMP target |
9815 | directive *TP, and fill out omp_target_walk_data passed in *PTR. */ |
9816 | |
static tree
finish_omp_target_clauses_r (tree *tp, int *walk_subtrees, void *ptr)
{
  tree t = *tp;
  struct omp_target_walk_data *data = (struct omp_target_walk_data *) ptr;
  tree current_object = data->current_object;
  tree current_closure = data->current_closure;

  /* References inside of these expression codes shouldn't incur any
     form of mapping, so return early.  */
  if (TREE_CODE (t) == SIZEOF_EXPR
      || TREE_CODE (t) == ALIGNOF_EXPR)
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  /* Don't walk into clauses hanging off the directive itself; only the
     body is of interest here.  */
  if (TREE_CODE (t) == OMP_CLAUSE)
    return NULL_TREE;

  if (current_object)
    {
      tree this_expr = TREE_OPERAND (current_object, 0);

      /* A bare use of 'this': remember it so that a map(*this) clause
	 can be synthesized by the caller.  */
      if (operand_equal_p (t, this_expr))
	{
	  data->this_expr_accessed = true;
	  *walk_subtrees = 0;
	  return NULL_TREE;
	}

      /* An access to a pointer-typed (or reference-to-pointer) non-static
	 member through 'this': record the FIELD_DECL and access expression
	 so an attach/zero-length-section map can be created for it.  */
      if (TREE_CODE (t) == COMPONENT_REF
	  && POINTER_TYPE_P (TREE_TYPE (t))
	  && operand_equal_p (TREE_OPERAND (t, 0), current_object)
	  && TREE_CODE (TREE_OPERAND (t, 1)) == FIELD_DECL)
	{
	  data->this_expr_accessed = true;
	  tree fld = TREE_OPERAND (t, 1);
	  if (data->ptr_members_accessed.get (k: fld) == NULL)
	    {
	      if (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE)
		t = convert_from_reference (t);
	      data->ptr_members_accessed.put (k: fld, v: t);
	    }
	  *walk_subtrees = 0;
	  return NULL_TREE;
	}
    }

  /* When the current_function_decl is a lambda function, the closure object
     argument's type seems to not yet have fields layed out, so a recording
     of DECL_VALUE_EXPRs during the target body walk seems the only way to
     find them.  */
  if (current_closure
      && (VAR_P (t)
	  || TREE_CODE (t) == PARM_DECL
	  || TREE_CODE (t) == RESULT_DECL)
      && DECL_HAS_VALUE_EXPR_P (t)
      && TREE_CODE (DECL_VALUE_EXPR (t)) == COMPONENT_REF
      && operand_equal_p (current_closure,
			  TREE_OPERAND (DECL_VALUE_EXPR (t), 0)))
    {
      if (!data->closure_vars_accessed.contains (k: t))
	data->closure_vars_accessed.add (k: t);
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  /* Record variables declared in nested BIND_EXPRs; such locals must not
     later be treated as captured lambda objects needing maps.  */
  if (TREE_CODE (t) == BIND_EXPR)
    {
      tree block = BIND_EXPR_BLOCK (t);
      for (tree var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
	if (!data->local_decls.contains (k: var))
	  data->local_decls.add (k: var);
      return NULL_TREE;
    }

  /* An expression of lambda closure type: record it so maps for the
     closure object and its captured pointers/references can be built.  */
  if (TREE_TYPE (t) && LAMBDA_TYPE_P (TREE_TYPE (t)))
    {
      tree lt = TREE_TYPE (t);
      gcc_assert (CLASS_TYPE_P (lt));

      if (!data->lambda_objects_accessed.contains (k: t)
	  /* Do not prepare to create target maps for locally declared
	     lambdas or anonymous ones.  */
	  && !data->local_decls.contains (k: t)
	  && TREE_CODE (t) != TARGET_EXPR)
	data->lambda_objects_accessed.add (k: t);
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  return NULL_TREE;
}
9911 | |
9912 | /* Helper function for finish_omp_target, and also from tsubst_expr. |
9913 | Create additional clauses for mapping of non-static members, lambda objects, |
9914 | etc. */ |
9915 | |
void
finish_omp_target_clauses (location_t loc, tree body, tree *clauses_ptr)
{
  omp_target_walk_data data;
  data.this_expr_accessed = false;
  data.current_object = NULL_TREE;

  /* In a non-static member function, resolve the enclosing object so that
     uses of 'this' in the target body can be detected by the walker.  */
  if (DECL_NONSTATIC_MEMBER_P (current_function_decl) && current_class_ptr)
    if (tree ct = current_nonlambda_class_type ())
      {
	tree object = maybe_dummy_object (ct, NULL);
	object = maybe_resolve_dummy (object, true);
	data.current_object = object;
      }

  if (DECL_LAMBDA_FUNCTION_P (current_function_decl))
    {
      /* The closure object is the first (implicit) argument of the lambda's
	 operator(); dereference it for comparison against DECL_VALUE_EXPRs
	 of captured variables.  */
      tree closure = DECL_ARGUMENTS (current_function_decl);
      data.current_closure = build_indirect_ref (loc, closure, RO_UNARY_STAR);
    }
  else
    data.current_closure = NULL_TREE;

  /* Collect accesses to 'this', pointer members, captured closure
     variables, and lambda objects from the target region body.  */
  cp_walk_tree_without_duplicates (&body, finish_omp_target_clauses_r, &data);

  auto_vec<tree, 16> new_clauses;

  tree omp_target_this_expr = NULL_TREE;
  tree *explicit_this_deref_map = NULL;
  if (data.this_expr_accessed)
    {
      omp_target_this_expr = TREE_OPERAND (data.current_object, 0);

      /* See if explicit user-specified map(this[:]) clause already exists.
	 If not, we create an implicit map(tofrom:this[:1]) clause.  */
      for (tree *cp = clauses_ptr; *cp; cp = &OMP_CLAUSE_CHAIN (*cp))
	if (OMP_CLAUSE_CODE (*cp) == OMP_CLAUSE_MAP
	    && (TREE_CODE (OMP_CLAUSE_DECL (*cp)) == INDIRECT_REF
		|| TREE_CODE (OMP_CLAUSE_DECL (*cp)) == MEM_REF)
	    && operand_equal_p (TREE_OPERAND (OMP_CLAUSE_DECL (*cp), 0),
				omp_target_this_expr))
	  {
	    explicit_this_deref_map = cp;
	    break;
	  }
    }

  if (DECL_LAMBDA_FUNCTION_P (current_function_decl)
      && (data.this_expr_accessed
	  || !data.closure_vars_accessed.is_empty ()))
    {
      /* For lambda functions, we need to first create a copy of the
	 __closure object.  */
      tree closure = DECL_ARGUMENTS (current_function_decl);
      tree c = build_omp_clause (loc, OMP_CLAUSE_MAP);
      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
      OMP_CLAUSE_DECL (c)
	= build_indirect_ref (loc, closure, RO_UNARY_STAR);
      OMP_CLAUSE_SIZE (c)
	= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (closure)));
      new_clauses.safe_push (obj: c);

      tree closure_obj = OMP_CLAUSE_DECL (c);
      tree closure_type = TREE_TYPE (closure_obj);

      gcc_assert (LAMBDA_TYPE_P (closure_type)
		  && CLASS_TYPE_P (closure_type));

      /* Rebind the __closure pointer on the device to the mapped copy.  */
      tree c2 = build_omp_clause (loc, OMP_CLAUSE_MAP);
      OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_FIRSTPRIVATE_POINTER);
      OMP_CLAUSE_DECL (c2) = closure;
      OMP_CLAUSE_SIZE (c2) = size_zero_node;
      new_clauses.safe_push (obj: c2);
    }

  if (data.this_expr_accessed)
    {
      /* If the this-expr was accessed, create a map(*this) clause.  */
      enum gomp_map_kind kind = GOMP_MAP_TOFROM;
      if (explicit_this_deref_map)
	{
	  tree this_map = *explicit_this_deref_map;
	  tree nc = OMP_CLAUSE_CHAIN (this_map);
	  gcc_assert (nc != NULL_TREE
		      && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
		      && (OMP_CLAUSE_MAP_KIND (nc)
			  == GOMP_MAP_FIRSTPRIVATE_POINTER));
	  kind = OMP_CLAUSE_MAP_KIND (this_map);
	  /* Remove the original 'map(*this) map(firstprivate_ptr:this)'
	     two-map sequence away from the chain.  */
	  *explicit_this_deref_map = OMP_CLAUSE_CHAIN (nc);
	}
      tree c = build_omp_clause (loc, OMP_CLAUSE_MAP);
      OMP_CLAUSE_SET_MAP_KIND (c, kind);
      OMP_CLAUSE_DECL (c)
	= build_indirect_ref (loc, omp_target_this_expr, RO_UNARY_STAR);
      OMP_CLAUSE_SIZE (c)
	= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (omp_target_this_expr)));
      new_clauses.safe_push (obj: c);

      /* If we're in a lambda function, the this-pointer will actually be
	 '__closure->this', a mapped member of __closure, hence always_pointer.
	 Otherwise it's a firstprivate pointer.  */
      enum gomp_map_kind ptr_kind
	= (DECL_LAMBDA_FUNCTION_P (current_function_decl)
	   ? GOMP_MAP_ALWAYS_POINTER
	   : GOMP_MAP_FIRSTPRIVATE_POINTER);
      c = build_omp_clause (loc, OMP_CLAUSE_MAP);
      OMP_CLAUSE_SET_MAP_KIND (c, ptr_kind);
      OMP_CLAUSE_DECL (c) = omp_target_this_expr;
      OMP_CLAUSE_SIZE (c) = size_zero_node;
      new_clauses.safe_push (obj: c);
    }

  if (DECL_LAMBDA_FUNCTION_P (current_function_decl))
    {
      if (omp_target_this_expr)
	{
	  STRIP_NOPS (omp_target_this_expr);
	  gcc_assert (DECL_HAS_VALUE_EXPR_P (omp_target_this_expr));
	  omp_target_this_expr = DECL_VALUE_EXPR (omp_target_this_expr);
	}

      /* For each captured variable of pointer or reference type, build the
	 map sequence in terms of the '__closure->member' expression.  */
      for (hash_set<tree>::iterator i = data.closure_vars_accessed.begin ();
	   i != data.closure_vars_accessed.end (); ++i)
	{
	  tree orig_decl = *i;
	  tree closure_expr = DECL_VALUE_EXPR (orig_decl);

	  if (TREE_CODE (TREE_TYPE (orig_decl)) == POINTER_TYPE
	      || TREE_CODE (TREE_TYPE (orig_decl)) == REFERENCE_TYPE)
	    {
	      /* this-pointer is processed above, outside this loop.  */
	      if (omp_target_this_expr
		  && operand_equal_p (closure_expr, omp_target_this_expr))
		continue;

	      bool ptr_p = TREE_CODE (TREE_TYPE (orig_decl)) == POINTER_TYPE;
	      enum gomp_map_kind kind, ptr_kind, nc_kind;
	      tree size;

	      if (ptr_p)
		{
		  /* For pointers, default mapped as zero-length array
		     section.  */
		  kind = GOMP_MAP_ALLOC;
		  nc_kind = GOMP_MAP_FIRSTPRIVATE_POINTER;
		  ptr_kind = GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION;
		  size = size_zero_node;
		}
	      else
		{
		  /* For references, default mapped as appearing on map
		     clause.  */
		  kind = GOMP_MAP_TOFROM;
		  nc_kind = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
		  ptr_kind = GOMP_MAP_ALWAYS_POINTER;
		  size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (closure_expr)));
		}

	      for (tree *p = clauses_ptr; *p; p = &OMP_CLAUSE_CHAIN (*p))
		if (OMP_CLAUSE_CODE (*p) == OMP_CLAUSE_MAP
		    && (TREE_CODE (OMP_CLAUSE_DECL (*p)) == INDIRECT_REF
			|| TREE_CODE (OMP_CLAUSE_DECL (*p)) == MEM_REF)
		    && operand_equal_p (TREE_OPERAND (OMP_CLAUSE_DECL (*p), 0),
					orig_decl))
		  {
		    /* If this was already specified by user as a map,
		       save the user specified map kind, delete the
		       "map(*ptr/ref), map(firstprivate ptr/ref)" sequence,
		       and insert our own sequence:
		       "map(*__closure->ptr/ref), map(<ptr_kind>:__closure->ref"
		    */
		    tree nc = OMP_CLAUSE_CHAIN (*p);
		    gcc_assert (nc != NULL_TREE
				&& OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
				&& OMP_CLAUSE_MAP_KIND (nc) == nc_kind);
		    /* Update with user specified kind and size.  */
		    kind = OMP_CLAUSE_MAP_KIND (*p);
		    size = OMP_CLAUSE_SIZE (*p);
		    *p = OMP_CLAUSE_CHAIN (nc);
		    break;
		  }

	      tree c = build_omp_clause (loc, OMP_CLAUSE_MAP);
	      OMP_CLAUSE_SET_MAP_KIND (c, kind);
	      OMP_CLAUSE_DECL (c)
		= build_indirect_ref (loc, closure_expr, RO_UNARY_STAR);
	      OMP_CLAUSE_SIZE (c) = size;
	      if (ptr_p)
		OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c) = 1;
	      new_clauses.safe_push (obj: c);

	      c = build_omp_clause (loc, OMP_CLAUSE_MAP);
	      OMP_CLAUSE_SET_MAP_KIND (c, ptr_kind);
	      OMP_CLAUSE_DECL (c) = closure_expr;
	      OMP_CLAUSE_SIZE (c) = size_zero_node;
	      new_clauses.safe_push (obj: c);
	    }
	}
    }

  if (!data.ptr_members_accessed.is_empty ())
    for (hash_map<tree, tree>::iterator i = data.ptr_members_accessed.begin ();
	 i != data.ptr_members_accessed.end (); ++i)
      {
	/* For each referenced member that is of pointer or reference-to-pointer
	   type, create the equivalent of map(alloc:this->ptr[:0]).  */
	tree field_decl = (*i).first;
	tree ptr_member = (*i).second;

	for (tree c = *clauses_ptr; c; c = OMP_CLAUSE_CHAIN (c))
	  {
	    if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
	      continue;
	    /* If map(this->ptr[:N]) already exists, avoid creating another
	       such map.  */
	    tree decl = OMP_CLAUSE_DECL (c);
	    if ((TREE_CODE (decl) == INDIRECT_REF
		 || TREE_CODE (decl) == MEM_REF)
		&& operand_equal_p (TREE_OPERAND (decl, 0), ptr_member))
	      goto next_ptr_member;
	  }

	if (!cxx_mark_addressable (ptr_member))
	  gcc_unreachable ();

	if (TREE_CODE (TREE_TYPE (field_decl)) == REFERENCE_TYPE)
	  {
	    /* For reference to pointers, we need to map the referenced
	       pointer first for things to be correct.  */
	    tree ptr_member_type = TREE_TYPE (ptr_member);

	    /* Map pointer target as zero-length array section.  */
	    tree c = build_omp_clause (loc, OMP_CLAUSE_MAP);
	    OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALLOC);
	    OMP_CLAUSE_DECL (c)
	      = build1 (INDIRECT_REF, TREE_TYPE (ptr_member_type), ptr_member);
	    OMP_CLAUSE_SIZE (c) = size_zero_node;
	    OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c) = 1;

	    /* Map pointer to zero-length array section.  */
	    tree c2 = build_omp_clause (loc, OMP_CLAUSE_MAP);
	    OMP_CLAUSE_SET_MAP_KIND
	      (c2, GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION);
	    OMP_CLAUSE_DECL (c2) = ptr_member;
	    OMP_CLAUSE_SIZE (c2) = size_zero_node;

	    /* Attach reference-to-pointer field to pointer.  */
	    tree c3 = build_omp_clause (loc, OMP_CLAUSE_MAP);
	    OMP_CLAUSE_SET_MAP_KIND (c3, GOMP_MAP_ATTACH);
	    OMP_CLAUSE_DECL (c3) = TREE_OPERAND (ptr_member, 0);
	    OMP_CLAUSE_SIZE (c3) = size_zero_node;

	    new_clauses.safe_push (obj: c);
	    new_clauses.safe_push (obj: c2);
	    new_clauses.safe_push (obj: c3);
	  }
	else if (TREE_CODE (TREE_TYPE (field_decl)) == POINTER_TYPE)
	  {
	    /* Map pointer target as zero-length array section.  */
	    tree c = build_omp_clause (loc, OMP_CLAUSE_MAP);
	    OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALLOC);
	    OMP_CLAUSE_DECL (c) = build_indirect_ref (loc, ptr_member,
						      RO_UNARY_STAR);
	    OMP_CLAUSE_SIZE (c) = size_zero_node;
	    OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c) = 1;

	    /* Attach zero-length array section to pointer.  */
	    tree c2 = build_omp_clause (loc, OMP_CLAUSE_MAP);
	    OMP_CLAUSE_SET_MAP_KIND
	      (c2, GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION);
	    OMP_CLAUSE_DECL (c2) = ptr_member;
	    OMP_CLAUSE_SIZE (c2) = size_zero_node;

	    new_clauses.safe_push (obj: c);
	    new_clauses.safe_push (obj: c2);
	  }
	else
	  gcc_unreachable ();

      next_ptr_member:
	;
      }

  /* Map each accessed lambda object (a 'to' map of the closure), plus the
     attach/pointer maps for its pointer- and reference-typed captures.  */
  for (hash_set<tree>::iterator i = data.lambda_objects_accessed.begin ();
       i != data.lambda_objects_accessed.end (); ++i)
    {
      tree lobj = *i;
      if (TREE_CODE (lobj) == TARGET_EXPR)
	lobj = TREE_OPERAND (lobj, 0);

      tree lt = TREE_TYPE (lobj);
      gcc_assert (LAMBDA_TYPE_P (lt) && CLASS_TYPE_P (lt));

      tree lc = build_omp_clause (loc, OMP_CLAUSE_MAP);
      OMP_CLAUSE_SET_MAP_KIND (lc, GOMP_MAP_TO);
      OMP_CLAUSE_DECL (lc) = lobj;
      OMP_CLAUSE_SIZE (lc) = TYPE_SIZE_UNIT (lt);
      new_clauses.safe_push (obj: lc);

      for (tree fld = TYPE_FIELDS (lt); fld; fld = DECL_CHAIN (fld))
	{
	  if (TREE_CODE (TREE_TYPE (fld)) == POINTER_TYPE)
	    {
	      /* Pointer capture: zero-length section plus attach.  */
	      tree exp = build3 (COMPONENT_REF, TREE_TYPE (fld),
				 lobj, fld, NULL_TREE);
	      tree c = build_omp_clause (loc, OMP_CLAUSE_MAP);
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALLOC);
	      OMP_CLAUSE_DECL (c)
		= build_indirect_ref (loc, exp, RO_UNARY_STAR);
	      OMP_CLAUSE_SIZE (c) = size_zero_node;
	      OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c) = 1;
	      new_clauses.safe_push (obj: c);

	      c = build_omp_clause (loc, OMP_CLAUSE_MAP);
	      OMP_CLAUSE_SET_MAP_KIND
		(c, GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION);
	      OMP_CLAUSE_DECL (c) = exp;
	      OMP_CLAUSE_SIZE (c) = size_zero_node;
	      new_clauses.safe_push (obj: c);
	    }
	  else if (TREE_CODE (TREE_TYPE (fld)) == REFERENCE_TYPE)
	    {
	      /* Reference capture: map the referenced object and rebind
		 the reference on the device.  */
	      tree exp = build3 (COMPONENT_REF, TREE_TYPE (fld),
				 lobj, fld, NULL_TREE);
	      tree c = build_omp_clause (loc, OMP_CLAUSE_MAP);
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
	      OMP_CLAUSE_DECL (c)
		= build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
	      OMP_CLAUSE_SIZE (c)
		= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (exp)));
	      new_clauses.safe_push (obj: c);

	      c = build_omp_clause (loc, OMP_CLAUSE_MAP);
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALWAYS_POINTER);
	      OMP_CLAUSE_DECL (c) = exp;
	      OMP_CLAUSE_SIZE (c) = size_zero_node;
	      new_clauses.safe_push (obj: c);
	    }
	}
    }

  /* Prepend the synthesized clauses (in order) to the existing chain.  */
  tree c = *clauses_ptr;
  for (int i = new_clauses.length () - 1; i >= 0; i--)
    {
      OMP_CLAUSE_CHAIN (new_clauses[i]) = c;
      c = new_clauses[i];
    }
  *clauses_ptr = c;
}
10267 | |
10268 | /* Called from cp_parser_omp_target. Create additional implicit clauses for |
10269 | OpenMP target directives, and do sanity checks. */ |
10270 | |
10271 | tree |
10272 | finish_omp_target (location_t loc, tree clauses, tree body, bool combined_p) |
10273 | { |
10274 | if (!processing_template_decl) |
10275 | finish_omp_target_clauses (loc, body, clauses_ptr: &clauses); |
10276 | |
10277 | tree stmt = make_node (OMP_TARGET); |
10278 | TREE_TYPE (stmt) = void_type_node; |
10279 | OMP_TARGET_CLAUSES (stmt) = clauses; |
10280 | OMP_TARGET_BODY (stmt) = body; |
10281 | OMP_TARGET_COMBINED (stmt) = combined_p; |
10282 | SET_EXPR_LOCATION (stmt, loc); |
10283 | |
10284 | tree c = clauses; |
10285 | while (c) |
10286 | { |
10287 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP) |
10288 | switch (OMP_CLAUSE_MAP_KIND (c)) |
10289 | { |
10290 | case GOMP_MAP_TO: |
10291 | case GOMP_MAP_ALWAYS_TO: |
10292 | case GOMP_MAP_PRESENT_TO: |
10293 | case GOMP_MAP_ALWAYS_PRESENT_TO: |
10294 | case GOMP_MAP_FROM: |
10295 | case GOMP_MAP_ALWAYS_FROM: |
10296 | case GOMP_MAP_PRESENT_FROM: |
10297 | case GOMP_MAP_ALWAYS_PRESENT_FROM: |
10298 | case GOMP_MAP_TOFROM: |
10299 | case GOMP_MAP_ALWAYS_TOFROM: |
10300 | case GOMP_MAP_PRESENT_TOFROM: |
10301 | case GOMP_MAP_ALWAYS_PRESENT_TOFROM: |
10302 | case GOMP_MAP_ALLOC: |
10303 | case GOMP_MAP_PRESENT_ALLOC: |
10304 | case GOMP_MAP_FIRSTPRIVATE_POINTER: |
10305 | case GOMP_MAP_FIRSTPRIVATE_REFERENCE: |
10306 | case GOMP_MAP_ALWAYS_POINTER: |
10307 | case GOMP_MAP_ATTACH_DETACH: |
10308 | case GOMP_MAP_ATTACH: |
10309 | case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION: |
10310 | case GOMP_MAP_POINTER: |
10311 | case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION: |
10312 | break; |
10313 | default: |
10314 | error_at (OMP_CLAUSE_LOCATION (c), |
10315 | "%<#pragma omp target%> with map-type other " |
10316 | "than %<to%>, %<from%>, %<tofrom%> or %<alloc%> " |
10317 | "on %<map%> clause" ); |
10318 | break; |
10319 | } |
10320 | c = OMP_CLAUSE_CHAIN (c); |
10321 | } |
10322 | return add_stmt (t: stmt); |
10323 | } |
10324 | |
10325 | tree |
10326 | finish_omp_parallel (tree clauses, tree body) |
10327 | { |
10328 | tree stmt; |
10329 | |
10330 | body = finish_omp_structured_block (block: body); |
10331 | |
10332 | stmt = make_node (OMP_PARALLEL); |
10333 | TREE_TYPE (stmt) = void_type_node; |
10334 | OMP_PARALLEL_CLAUSES (stmt) = clauses; |
10335 | OMP_PARALLEL_BODY (stmt) = body; |
10336 | |
10337 | return add_stmt (t: stmt); |
10338 | } |
10339 | |
/* Begin an OpenMP task construct: open its structured block, keeping the
   next binding level so the task body gets its own scope.  Returns the
   statement list token to be passed to finish_omp_task.  */

tree
begin_omp_task (void)
{
  keep_next_level (true);
  return begin_omp_structured_block ();
}
10346 | |
10347 | tree |
10348 | finish_omp_task (tree clauses, tree body) |
10349 | { |
10350 | tree stmt; |
10351 | |
10352 | body = finish_omp_structured_block (block: body); |
10353 | |
10354 | stmt = make_node (OMP_TASK); |
10355 | TREE_TYPE (stmt) = void_type_node; |
10356 | OMP_TASK_CLAUSES (stmt) = clauses; |
10357 | OMP_TASK_BODY (stmt) = body; |
10358 | |
10359 | return add_stmt (t: stmt); |
10360 | } |
10361 | |
10362 | /* Helper function for finish_omp_for. Convert Ith random access iterator |
10363 | into integral iterator. Return FALSE if successful. */ |
10364 | |
static bool
handle_omp_for_class_iterator (int i, location_t locus, enum tree_code code,
			       tree declv, tree orig_declv, tree initv,
			       tree condv, tree incrv, tree *body,
			       tree *pre_body, tree &clauses,
			       int collapse, int ordered)
{
  tree diff, iter_init, iter_incr = NULL, last;
  tree incr_var = NULL, orig_pre_body, orig_body, c;
  tree decl = TREE_VEC_ELT (declv, i);
  tree init = TREE_VEC_ELT (initv, i);
  tree cond = TREE_VEC_ELT (condv, i);
  tree incr = TREE_VEC_ELT (incrv, i);
  tree iter = decl;
  location_t elocus = locus;

  if (init && EXPR_HAS_LOCATION (init))
    elocus = EXPR_LOCATION (init);

  /* Validate the loop condition: it must compare the iterator (possibly
     after canonicalizing 'expr OP iter' to 'iter OP' expr') and the
     comparison must be well-formed for the iterator's class type.  */
  switch (TREE_CODE (cond))
    {
    case GT_EXPR:
    case GE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case NE_EXPR:
      if (TREE_OPERAND (cond, 1) == iter)
	cond = build2 (swap_tree_comparison (TREE_CODE (cond)),
		       TREE_TYPE (cond), iter, TREE_OPERAND (cond, 0));
      if (TREE_OPERAND (cond, 0) != iter)
	cond = error_mark_node;
      else
	{
	  tree tem = build_x_binary_op (EXPR_LOCATION (cond),
					TREE_CODE (cond),
					iter, ERROR_MARK,
					TREE_OPERAND (cond, 1), ERROR_MARK,
					NULL_TREE, NULL, tf_warning_or_error);
	  if (error_operand_p (t: tem))
	    return true;
	}
      break;
    default:
      cond = error_mark_node;
      break;
    }
  if (cond == error_mark_node)
    {
      error_at (elocus, "invalid controlling predicate");
      return true;
    }
  /* The trip count is derived from 'bound - iter'; the random access
     iterator's operator- must yield an integral difference.  */
  diff = build_x_binary_op (elocus, MINUS_EXPR,
			    TREE_OPERAND (cond, 1), ERROR_MARK,
			    iter, ERROR_MARK,
			    NULL_TREE, NULL, tf_warning_or_error);
  diff = cp_fully_fold (diff);
  if (error_operand_p (t: diff))
    return true;
  if (TREE_CODE (TREE_TYPE (diff)) != INTEGER_TYPE)
    {
      error_at (elocus, "difference between %qE and %qD does not have integer type",
		TREE_OPERAND (cond, 1), iter);
      return true;
    }
  if (!c_omp_check_loop_iv_exprs (locus, code, orig_declv, i,
				  TREE_VEC_ELT (declv, i), NULL_TREE,
				  cond, cp_walk_subtrees))
    return true;

  /* Validate the increment and normalize it to an integral step INCR;
     ITER_INCR is the expression that advances the class iterator.  */
  switch (TREE_CODE (incr))
    {
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
      if (TREE_OPERAND (incr, 0) != iter)
	{
	  incr = error_mark_node;
	  break;
	}
      iter_incr = build_x_unary_op (EXPR_LOCATION (incr),
				    TREE_CODE (incr), iter,
				    NULL_TREE, tf_warning_or_error);
      if (error_operand_p (t: iter_incr))
	return true;
      else if (TREE_CODE (incr) == PREINCREMENT_EXPR
	       || TREE_CODE (incr) == POSTINCREMENT_EXPR)
	incr = integer_one_node;
      else
	incr = integer_minus_one_node;
      break;
    case MODIFY_EXPR:
      if (TREE_OPERAND (incr, 0) != iter)
	incr = error_mark_node;
      else if (TREE_CODE (TREE_OPERAND (incr, 1)) == PLUS_EXPR
	       || TREE_CODE (TREE_OPERAND (incr, 1)) == MINUS_EXPR)
	{
	  tree rhs = TREE_OPERAND (incr, 1);
	  if (TREE_OPERAND (rhs, 0) == iter)
	    {
	      /* 'iter = iter +/- step' form.  */
	      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs, 1)))
		  != INTEGER_TYPE)
		incr = error_mark_node;
	      else
		{
		  iter_incr = build_x_modify_expr (EXPR_LOCATION (rhs),
						   iter, TREE_CODE (rhs),
						   TREE_OPERAND (rhs, 1),
						   NULL_TREE,
						   tf_warning_or_error);
		  if (error_operand_p (t: iter_incr))
		    return true;
		  incr = TREE_OPERAND (rhs, 1);
		  incr = cp_convert (TREE_TYPE (diff), incr,
				     tf_warning_or_error);
		  if (TREE_CODE (rhs) == MINUS_EXPR)
		    {
		      incr = build1 (NEGATE_EXPR, TREE_TYPE (diff), incr);
		      incr = fold_simple (incr);
		    }
		  /* Non-constant step: recompute the iterator increment
		     later from the (possibly cached) step value.  */
		  if (TREE_CODE (incr) != INTEGER_CST
		      && (TREE_CODE (incr) != NOP_EXPR
			  || (TREE_CODE (TREE_OPERAND (incr, 0))
			      != INTEGER_CST)))
		    iter_incr = NULL;
		}
	    }
	  else if (TREE_OPERAND (rhs, 1) == iter)
	    {
	      /* 'iter = step + iter' form; only PLUS is commutative.  */
	      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs, 0))) != INTEGER_TYPE
		  || TREE_CODE (rhs) != PLUS_EXPR)
		incr = error_mark_node;
	      else
		{
		  iter_incr = build_x_binary_op (EXPR_LOCATION (rhs),
						 PLUS_EXPR,
						 TREE_OPERAND (rhs, 0),
						 ERROR_MARK, iter,
						 ERROR_MARK, NULL_TREE, NULL,
						 tf_warning_or_error);
		  if (error_operand_p (t: iter_incr))
		    return true;
		  iter_incr = build_x_modify_expr (EXPR_LOCATION (rhs),
						   iter, NOP_EXPR,
						   iter_incr, NULL_TREE,
						   tf_warning_or_error);
		  if (error_operand_p (t: iter_incr))
		    return true;
		  incr = TREE_OPERAND (rhs, 0);
		  iter_incr = NULL;
		}
	    }
	  else
	    incr = error_mark_node;
	}
      else
	incr = error_mark_node;
      break;
    default:
      incr = error_mark_node;
      break;
    }

  if (incr == error_mark_node)
    {
      error_at (elocus, "invalid increment expression");
      return true;
    }

  incr = cp_convert (TREE_TYPE (diff), incr, tf_warning_or_error);
  incr = cp_fully_fold (incr);
  /* Look for a lastprivate (or, for taskloop/loop, private) clause naming
     the iterator; C records a matched lastprivate clause, LOOP_IV_SEEN a
     clause marking the iterator for taskloop/loop handling.  */
  tree loop_iv_seen = NULL_TREE;
  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	&& OMP_CLAUSE_DECL (c) == iter)
      {
	if (code == OMP_TASKLOOP || code == OMP_LOOP)
	  {
	    loop_iv_seen = c;
	    OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c) = 1;
	  }
	break;
      }
    else if ((code == OMP_TASKLOOP || code == OMP_LOOP)
	     && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
	     && OMP_CLAUSE_DECL (c) == iter)
      {
	loop_iv_seen = c;
	if (code == OMP_TASKLOOP)
	  OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c) = 1;
      }

  /* DECL becomes the new integral loop counter; LAST tracks the counter
     value that the class iterator has been advanced to so far.  */
  decl = create_temporary_var (TREE_TYPE (diff));
  pushdecl (decl);
  add_decl_expr (decl);
  last = create_temporary_var (TREE_TYPE (diff));
  pushdecl (last);
  add_decl_expr (decl: last);
  if (c && iter_incr == NULL && TREE_CODE (incr) != INTEGER_CST
      && (!ordered || (i < collapse && collapse > 1)))
    {
      /* Cache a non-constant step in a temporary for the lastprivate
	 finalization code.  */
      incr_var = create_temporary_var (TREE_TYPE (diff));
      pushdecl (incr_var);
      add_decl_expr (decl: incr_var);
    }
  gcc_assert (stmts_are_full_exprs_p ());
  tree diffvar = NULL_TREE;
  if (code == OMP_TASKLOOP)
    {
      /* Taskloop needs the iterator and LAST firstprivatized so the task
	 body sees their initial values.  */
      if (!loop_iv_seen)
	{
	  tree ivc = build_omp_clause (locus, OMP_CLAUSE_FIRSTPRIVATE);
	  OMP_CLAUSE_DECL (ivc) = iter;
	  cxx_omp_finish_clause (ivc, NULL, false);
	  OMP_CLAUSE_CHAIN (ivc) = clauses;
	  clauses = ivc;
	}
      tree lvc = build_omp_clause (locus, OMP_CLAUSE_FIRSTPRIVATE);
      OMP_CLAUSE_DECL (lvc) = last;
      OMP_CLAUSE_CHAIN (lvc) = clauses;
      clauses = lvc;
      diffvar = create_temporary_var (TREE_TYPE (diff));
      pushdecl (diffvar);
      add_decl_expr (decl: diffvar);
    }
  else if (code == OMP_LOOP)
    {
      if (!loop_iv_seen)
	{
	  /* While iterators on the loop construct are predetermined
	     lastprivate, if the decl is not declared inside of the
	     loop, OMP_CLAUSE_LASTPRIVATE should have been added
	     already.  */
	  loop_iv_seen = build_omp_clause (locus, OMP_CLAUSE_FIRSTPRIVATE);
	  OMP_CLAUSE_DECL (loop_iv_seen) = iter;
	  OMP_CLAUSE_CHAIN (loop_iv_seen) = clauses;
	  clauses = loop_iv_seen;
	}
      else if (OMP_CLAUSE_CODE (loop_iv_seen) == OMP_CLAUSE_PRIVATE)
	{
	  OMP_CLAUSE_PRIVATE_DEBUG (loop_iv_seen) = 0;
	  OMP_CLAUSE_PRIVATE_OUTER_REF (loop_iv_seen) = 0;
	  OMP_CLAUSE_CODE (loop_iv_seen) = OMP_CLAUSE_FIRSTPRIVATE;
	}
      if (OMP_CLAUSE_CODE (loop_iv_seen) == OMP_CLAUSE_FIRSTPRIVATE)
	cxx_omp_finish_clause (loop_iv_seen, NULL, false);
    }

  /* Build the pre-body: initialize the class iterator and LAST, and cache
     the step/trip-count temporaries if used.  */
  orig_pre_body = *pre_body;
  *pre_body = push_stmt_list ();
  if (orig_pre_body)
    add_stmt (t: orig_pre_body);
  if (init != NULL)
    finish_expr_stmt (expr: build_x_modify_expr (elocus,
					 iter, NOP_EXPR, init,
					 NULL_TREE, tf_warning_or_error));
  init = build_int_cst (TREE_TYPE (diff), 0);
  if (c && iter_incr == NULL
      && (!ordered || (i < collapse && collapse > 1)))
    {
      if (incr_var)
	{
	  finish_expr_stmt (expr: build_x_modify_expr (elocus,
					       incr_var, NOP_EXPR,
					       incr, NULL_TREE,
					       tf_warning_or_error));
	  incr = incr_var;
	}
      iter_incr = build_x_modify_expr (elocus,
				       iter, PLUS_EXPR, incr,
				       NULL_TREE, tf_warning_or_error);
    }
  if (c && ordered && i < collapse && collapse > 1)
    iter_incr = incr;
  finish_expr_stmt (expr: build_x_modify_expr (elocus,
				       last, NOP_EXPR, init,
				       NULL_TREE, tf_warning_or_error));
  if (diffvar)
    {
      finish_expr_stmt (expr: build_x_modify_expr (elocus,
					   diffvar, NOP_EXPR,
					   diff, NULL_TREE, tf_warning_or_error));
      diff = diffvar;
    }
  *pre_body = pop_stmt_list (*pre_body);

  /* The rewritten loop runs DECL from 0, comparing against DIFF and
     stepping by INCR.  */
  cond = cp_build_binary_op (elocus,
			     TREE_CODE (cond), decl, diff,
			     tf_warning_or_error);
  incr = build_modify_expr (elocus, decl, NULL_TREE, PLUS_EXPR,
			    elocus, incr, NULL_TREE);

  /* Prepend to the body: advance the class iterator by (DECL - LAST) and
     record the new position in LAST.  */
  orig_body = *body;
  *body = push_stmt_list ();
  iter_init = build2 (MINUS_EXPR, TREE_TYPE (diff), decl, last);
  iter_init = build_x_modify_expr (elocus,
				   iter, PLUS_EXPR, iter_init,
				   NULL_TREE, tf_warning_or_error);
  if (iter_init != error_mark_node)
    iter_init = build1 (NOP_EXPR, void_type_node, iter_init);
  finish_expr_stmt (expr: iter_init);
  finish_expr_stmt (expr: build_x_modify_expr (elocus,
				       last, NOP_EXPR, decl,
				       NULL_TREE, tf_warning_or_error));
  add_stmt (t: orig_body);
  *body = pop_stmt_list (*body);

  if (c)
    {
      /* Lastprivate finalization: leave the class iterator at its final
	 position after the last iteration.  */
      OMP_CLAUSE_LASTPRIVATE_STMT (c) = push_stmt_list ();
      if (!ordered)
	finish_expr_stmt (expr: iter_incr);
      else
	{
	  iter_init = decl;
	  if (i < collapse && collapse > 1 && !error_operand_p (t: iter_incr))
	    iter_init = build2 (PLUS_EXPR, TREE_TYPE (diff),
				iter_init, iter_incr);
	  iter_init = build2 (MINUS_EXPR, TREE_TYPE (diff), iter_init, last);
	  iter_init = build_x_modify_expr (elocus,
					   iter, PLUS_EXPR, iter_init,
					   NULL_TREE, tf_warning_or_error);
	  if (iter_init != error_mark_node)
	    iter_init = build1 (NOP_EXPR, void_type_node, iter_init);
	  finish_expr_stmt (expr: iter_init);
	}
      OMP_CLAUSE_LASTPRIVATE_STMT (c)
	= pop_stmt_list (OMP_CLAUSE_LASTPRIVATE_STMT (c));
    }

  /* Record the original iterator (and LAST) so diagnostics and later
     processing can refer back to the source-level loop variable.  */
  if (TREE_CODE (TREE_VEC_ELT (orig_declv, i)) == TREE_LIST)
    {
      tree t = TREE_VEC_ELT (orig_declv, i);
      gcc_assert (TREE_PURPOSE (t) == NULL_TREE
		  && TREE_VALUE (t) == NULL_TREE
		  && TREE_CODE (TREE_CHAIN (t)) == TREE_VEC);
      TREE_PURPOSE (t) = TREE_VEC_ELT (declv, i);
      TREE_VALUE (t) = last;
    }
  else
    TREE_VEC_ELT (orig_declv, i)
      = tree_cons (TREE_VEC_ELT (declv, i), last, NULL_TREE);
  /* Install the rewritten integral iteration into the vectors.  */
  TREE_VEC_ELT (declv, i) = decl;
  TREE_VEC_ELT (initv, i) = init;
  TREE_VEC_ELT (condv, i) = cond;
  TREE_VEC_ELT (incrv, i) = incr;

  return false;
}
10714 | |
10715 | /* Build and validate an OMP_FOR statement. CLAUSES, BODY, COND, INCR |
10716 | are directly for their associated operands in the statement. DECL |
10717 | and INIT are a combo; if DECL is NULL then INIT ought to be a |
10718 | MODIFY_EXPR, and the DECL should be extracted. PRE_BODY are |
10719 | optional statements that need to go before the loop into its |
10720 | sk_omp scope. */ |
10721 | |
tree
finish_omp_for (location_t locus, enum tree_code code, tree declv,
		tree orig_declv, tree initv, tree condv, tree incrv,
		tree body, tree pre_body, vec<tree> *orig_inits, tree clauses)
{
  tree omp_for = NULL, orig_incr = NULL;
  tree decl = NULL, init, cond, incr;
  location_t elocus;
  int i;
  int collapse = 1;
  int ordered = 0;
  /* Per-loop location of each init expression; used both for diagnostics
     and when rebuilding the MODIFY_EXPRs further down.  */
  auto_vec<location_t> init_locv;

  /* The four parallel TREE_VECs must describe the same loop nest.  */
  gcc_assert (TREE_VEC_LENGTH (declv) == TREE_VEC_LENGTH (initv));
  gcc_assert (TREE_VEC_LENGTH (declv) == TREE_VEC_LENGTH (condv));
  gcc_assert (TREE_VEC_LENGTH (declv) == TREE_VEC_LENGTH (incrv));
  if (TREE_VEC_LENGTH (declv) > 1)
    {
      tree c;

      /* Work out how many loops the construct collapses: a tile clause
	 takes precedence over collapse; if more loops are associated
	 than collapsed, record the full depth in ORDERED.  */
      c = omp_find_clause (clauses, kind: OMP_CLAUSE_TILE);
      if (c)
	collapse = list_length (OMP_CLAUSE_TILE_LIST (c));
      else
	{
	  c = omp_find_clause (clauses, kind: OMP_CLAUSE_COLLAPSE);
	  if (c)
	    collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
	  if (collapse != TREE_VEC_LENGTH (declv))
	    ordered = TREE_VEC_LENGTH (declv);
	}
    }
  /* First pass: record init locations, extract the iteration variable
     from the init expression when the parser didn't provide it, and
     diagnose missing cond/incr.  */
  for (i = 0; i < TREE_VEC_LENGTH (declv); i++)
    {
      decl = TREE_VEC_ELT (declv, i);
      init = TREE_VEC_ELT (initv, i);
      cond = TREE_VEC_ELT (condv, i);
      incr = TREE_VEC_ELT (incrv, i);
      elocus = locus;

      /* We are going to throw out the init's original MODIFY_EXPR or
	 MODOP_EXPR below.  Save its location so we can use it when
	 reconstructing the expression farther down.  Alternatively, if the
	 initializer is a binding of the iteration variable, save
	 that location.  Any of these locations in the initialization clause
	 for the current nested loop are better than using the argument locus,
	 that points to the "for" of the outermost loop in the nest.  */
      if (init && EXPR_HAS_LOCATION (init))
	elocus = EXPR_LOCATION (init);
      else if (decl && INDIRECT_REF_P (decl) && EXPR_HAS_LOCATION (decl))
	/* This can happen for class iterators.  */
	elocus = EXPR_LOCATION (decl);
      else if (decl && DECL_P (decl))
	{
	  if (DECL_SOURCE_LOCATION (decl) != UNKNOWN_LOCATION)
	    elocus = DECL_SOURCE_LOCATION (decl);
	  else if (DECL_INITIAL (decl)
		   && EXPR_HAS_LOCATION (DECL_INITIAL (decl)))
	    elocus = EXPR_LOCATION (DECL_INITIAL (decl));
	}
      init_locv.safe_push (obj: elocus);

      if (decl == NULL)
	{
	  /* No explicit decl: pull the variable out of an assignment-form
	     init ("i = lb" as MODIFY_EXPR or MODOP_EXPR).  */
	  if (init != NULL)
	    switch (TREE_CODE (init))
	      {
	      case MODIFY_EXPR:
		decl = TREE_OPERAND (init, 0);
		init = TREE_OPERAND (init, 1);
		break;
	      case MODOP_EXPR:
		if (TREE_CODE (TREE_OPERAND (init, 1)) == NOP_EXPR)
		  {
		    decl = TREE_OPERAND (init, 0);
		    init = TREE_OPERAND (init, 2);
		  }
		break;
	      default:
		break;
	      }

	  if (decl == NULL)
	    {
	      error_at (locus,
			"expected iteration declaration or initialization" );
	      return NULL;
	    }
	}

      /* NOTE(review): global_namespace appears to be used here as a
	 placeholder marker for loops whose cond/incr are supplied
	 elsewhere — confirm against the parser.  */
      if (cond == global_namespace)
	continue;

      if (cond == NULL)
	{
	  error_at (elocus, "missing controlling predicate" );
	  return NULL;
	}

      if (incr == NULL)
	{
	  error_at (elocus, "missing increment expression" );
	  return NULL;
	}

      TREE_VEC_ELT (declv, i) = decl;
      TREE_VEC_ELT (initv, i) = init;
    }

  /* Diagnose loop bounds that refer to other iteration variables of the
     same loop nest.  */
  if (orig_inits)
    {
      bool fail = false;
      tree orig_init;
      FOR_EACH_VEC_ELT (*orig_inits, i, orig_init)
	if (orig_init
	    && !c_omp_check_loop_iv_exprs (locus, code,
					   orig_declv ? orig_declv : declv, i,
					   TREE_VEC_ELT (declv, i), orig_init,
					   NULL_TREE, cp_walk_subtrees))
	  fail = true;
      if (fail)
	return NULL;
    }

  /* In a template with dependent expressions, just build a bare OMP_FOR
     node to be re-processed at instantiation time.  */
  if (dependent_omp_for_p (declv, initv, condv, incrv))
    {
      tree stmt;

      stmt = make_node (code);

      for (i = 0; i < TREE_VEC_LENGTH (declv); i++)
	{
	  /* This is really just a place-holder.  We'll be decomposing this
	     again and going through the cp_build_modify_expr path below when
	     we instantiate the thing.  */
	  TREE_VEC_ELT (initv, i)
	    = build2_loc (loc: init_locv[i], code: MODIFY_EXPR, void_type_node,
			  TREE_VEC_ELT (declv, i), TREE_VEC_ELT (initv, i));
	}

      TREE_TYPE (stmt) = void_type_node;
      OMP_FOR_INIT (stmt) = initv;
      OMP_FOR_COND (stmt) = condv;
      OMP_FOR_INCR (stmt) = incrv;
      OMP_FOR_BODY (stmt) = body;
      OMP_FOR_PRE_BODY (stmt) = pre_body;
      OMP_FOR_CLAUSES (stmt) = clauses;

      SET_EXPR_LOCATION (stmt, locus);
      return add_stmt (t: stmt);
    }

  if (!orig_declv)
    orig_declv = copy_node (declv);

  if (processing_template_decl)
    orig_incr = make_tree_vec (TREE_VEC_LENGTH (incrv));

  /* Second pass: validate iterator types, lower class iterators, and
     build the final init expressions.  Note I is only advanced at the
     bottom; the class-iterator path re-processes the slot.  */
  for (i = 0; i < TREE_VEC_LENGTH (declv); )
    {
      decl = TREE_VEC_ELT (declv, i);
      init = TREE_VEC_ELT (initv, i);
      cond = TREE_VEC_ELT (condv, i);
      incr = TREE_VEC_ELT (incrv, i);
      if (orig_incr)
	TREE_VEC_ELT (orig_incr, i) = incr;
      elocus = init_locv[i];

      if (!DECL_P (decl))
	{
	  error_at (elocus, "expected iteration declaration or initialization" );
	  return NULL;
	}

      if (incr && TREE_CODE (incr) == MODOP_EXPR)
	{
	  /* Lower "i OP= step" into a plain MODIFY_EXPR, keeping the
	     original for template re-substitution in ORIG_INCR.  */
	  if (orig_incr)
	    TREE_VEC_ELT (orig_incr, i) = incr;
	  incr = cp_build_modify_expr (elocus, TREE_OPERAND (incr, 0),
				       TREE_CODE (TREE_OPERAND (incr, 1)),
				       TREE_OPERAND (incr, 2),
				       tf_warning_or_error);
	}

      if (CLASS_TYPE_P (TREE_TYPE (decl)))
	{
	  /* Class (random-access) iterators are rewritten in terms of an
	     integral counter by handle_omp_for_class_iterator.  */
	  if (code == OMP_SIMD)
	    {
	      error_at (elocus, "%<#pragma omp simd%> used with class "
				"iteration variable %qE" , decl);
	      return NULL;
	    }
	  if (handle_omp_for_class_iterator (i, locus, code, declv, orig_declv,
					     initv, condv, incrv, body: &body,
					     pre_body: &pre_body, clauses,
					     collapse, ordered))
	    return NULL;
	  continue;
	}

      if (!INTEGRAL_TYPE_P (TREE_TYPE (decl))
	  && !TYPE_PTR_P (TREE_TYPE (decl)))
	{
	  error_at (elocus, "invalid type for iteration variable %qE" , decl);
	  return NULL;
	}

      if (!processing_template_decl && TREE_CODE (init) != TREE_VEC)
	init = cp_build_modify_expr (elocus, decl, NOP_EXPR, init,
				     tf_warning_or_error);
      else
	init = build2_loc (loc: elocus, code: MODIFY_EXPR, void_type_node, arg0: decl, arg1: init);
      if (decl == error_mark_node || init == error_mark_node)
	return NULL;

      TREE_VEC_ELT (declv, i) = decl;
      TREE_VEC_ELT (initv, i) = init;
      TREE_VEC_ELT (condv, i) = cond;
      TREE_VEC_ELT (incrv, i) = incr;
      i++;
    }

  if (pre_body && IS_EMPTY_STMT (pre_body))
    pre_body = NULL;

  /* Hand off to the C/C++-shared checker/builder.  */
  omp_for = c_finish_omp_for (locus, code, declv, orig_declv, initv, condv,
			      incrv, body, pre_body,
			      !processing_template_decl);

  /* Check for iterators appearing in lb, b or incr expressions.  */
  if (omp_for && !c_omp_check_loop_iv (omp_for, orig_declv, cp_walk_subtrees))
    omp_for = NULL_TREE;

  if (omp_for == NULL)
    return NULL;

  add_stmt (t: omp_for);

  /* Wrap side-effecting bound/step subexpressions in cleanup points so
     temporaries are destroyed at the right time.  */
  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INCR (omp_for)); i++)
    {
      init = TREE_VEC_ELT (OMP_FOR_INIT (omp_for), i);
      decl = TREE_OPERAND (init, 0);
      cond = TREE_VEC_ELT (OMP_FOR_COND (omp_for), i);
      incr = TREE_VEC_ELT (OMP_FOR_INCR (omp_for), i);

      if (!processing_template_decl)
	{
	  if (TREE_CODE (TREE_OPERAND (init, 1)) == TREE_VEC)
	    {
	      tree t = TREE_VEC_ELT (TREE_OPERAND (init, 1), 1);
	      TREE_VEC_ELT (TREE_OPERAND (init, 1), 1)
		= fold_build_cleanup_point_expr (TREE_TYPE (t), expr: t);
	      t = TREE_VEC_ELT (TREE_OPERAND (init, 1), 2);
	      TREE_VEC_ELT (TREE_OPERAND (init, 1), 2)
		= fold_build_cleanup_point_expr (TREE_TYPE (t), expr: t);
	    }
	  else
	    {
	      tree t = TREE_OPERAND (init, 1);
	      TREE_OPERAND (init, 1)
		= fold_build_cleanup_point_expr (TREE_TYPE (t), expr: t);
	    }
	  if (TREE_CODE (TREE_OPERAND (cond, 1)) == TREE_VEC)
	    {
	      tree t = TREE_VEC_ELT (TREE_OPERAND (cond, 1), 1);
	      TREE_VEC_ELT (TREE_OPERAND (cond, 1), 1)
		= fold_build_cleanup_point_expr (TREE_TYPE (t), expr: t);
	      t = TREE_VEC_ELT (TREE_OPERAND (cond, 1), 2);
	      TREE_VEC_ELT (TREE_OPERAND (cond, 1), 2)
		= fold_build_cleanup_point_expr (TREE_TYPE (t), expr: t);
	    }
	  else
	    {
	      tree t = TREE_OPERAND (cond, 1);
	      TREE_OPERAND (cond, 1)
		= fold_build_cleanup_point_expr (TREE_TYPE (t), expr: t);
	    }
	}

      if (TREE_CODE (incr) != MODIFY_EXPR)
	continue;

      /* Same for the step of "i = i OP step"; leave references to the
	 iteration variable itself alone.  */
      if (TREE_SIDE_EFFECTS (TREE_OPERAND (incr, 1))
	  && BINARY_CLASS_P (TREE_OPERAND (incr, 1))
	  && !processing_template_decl)
	{
	  tree t = TREE_OPERAND (TREE_OPERAND (incr, 1), 0);
	  if (TREE_SIDE_EFFECTS (t)
	      && t != decl
	      && (TREE_CODE (t) != NOP_EXPR
		  || TREE_OPERAND (t, 0) != decl))
	    TREE_OPERAND (TREE_OPERAND (incr, 1), 0)
	      = fold_build_cleanup_point_expr (TREE_TYPE (t), expr: t);

	  t = TREE_OPERAND (TREE_OPERAND (incr, 1), 1);
	  if (TREE_SIDE_EFFECTS (t)
	      && t != decl
	      && (TREE_CODE (t) != NOP_EXPR
		  || TREE_OPERAND (t, 0) != decl))
	    TREE_OPERAND (TREE_OPERAND (incr, 1), 1)
	      = fold_build_cleanup_point_expr (TREE_TYPE (t), expr: t);
	}

      /* In templates, restore the original (unlowered) increment.  */
      if (orig_incr)
	TREE_VEC_ELT (OMP_FOR_INCR (omp_for), i) = TREE_VEC_ELT (orig_incr, i);
    }
  OMP_FOR_CLAUSES (omp_for) = clauses;

  /* For simd loops with non-static data member iterators, we could have added
     OMP_CLAUSE_LINEAR clauses without OMP_CLAUSE_LINEAR_STEP.  As we know the
     step at this point, fill it in.  */
  if (code == OMP_SIMD && !processing_template_decl
      && TREE_VEC_LENGTH (OMP_FOR_INCR (omp_for)) == 1)
    for (tree c = omp_find_clause (clauses, kind: OMP_CLAUSE_LINEAR); c;
	 c = omp_find_clause (OMP_CLAUSE_CHAIN (c), kind: OMP_CLAUSE_LINEAR))
      if (OMP_CLAUSE_LINEAR_STEP (c) == NULL_TREE)
	{
	  decl = TREE_OPERAND (TREE_VEC_ELT (OMP_FOR_INIT (omp_for), 0), 0);
	  gcc_assert (decl == OMP_CLAUSE_DECL (c));
	  incr = TREE_VEC_ELT (OMP_FOR_INCR (omp_for), 0);
	  tree step, stept;
	  switch (TREE_CODE (incr))
	    {
	    case PREINCREMENT_EXPR:
	    case POSTINCREMENT_EXPR:
	      /* c_omp_for_incr_canonicalize_ptr() should have been
		 called to massage things appropriately.  */
	      gcc_assert (!INDIRECT_TYPE_P (TREE_TYPE (decl)));
	      OMP_CLAUSE_LINEAR_STEP (c) = build_int_cst (TREE_TYPE (decl), 1);
	      break;
	    case PREDECREMENT_EXPR:
	    case POSTDECREMENT_EXPR:
	      /* c_omp_for_incr_canonicalize_ptr() should have been
		 called to massage things appropriately.  */
	      gcc_assert (!INDIRECT_TYPE_P (TREE_TYPE (decl)));
	      OMP_CLAUSE_LINEAR_STEP (c)
		= build_int_cst (TREE_TYPE (decl), -1);
	      break;
	    case MODIFY_EXPR:
	      gcc_assert (TREE_OPERAND (incr, 0) == decl);
	      incr = TREE_OPERAND (incr, 1);
	      switch (TREE_CODE (incr))
		{
		case PLUS_EXPR:
		  if (TREE_OPERAND (incr, 1) == decl)
		    step = TREE_OPERAND (incr, 0);
		  else
		    step = TREE_OPERAND (incr, 1);
		  break;
		case MINUS_EXPR:
		case POINTER_PLUS_EXPR:
		  gcc_assert (TREE_OPERAND (incr, 0) == decl);
		  step = TREE_OPERAND (incr, 1);
		  break;
		default:
		  gcc_unreachable ();
		}
	      /* Pointer iterators use a sizetype step.  */
	      stept = TREE_TYPE (decl);
	      if (INDIRECT_TYPE_P (stept))
		stept = sizetype;
	      step = fold_convert (stept, step);
	      if (TREE_CODE (incr) == MINUS_EXPR)
		step = fold_build1 (NEGATE_EXPR, stept, step);
	      OMP_CLAUSE_LINEAR_STEP (c) = step;
	      break;
	    default:
	      gcc_unreachable ();
	    }
	}
  /* Override saved methods on OMP_LOOP's OMP_CLAUSE_LASTPRIVATE_LOOP_IV
     clauses, we need copy ctor for those rather than default ctor,
     plus as for other lastprivates assignment op and dtor.  */
  if (code == OMP_LOOP && !processing_template_decl)
    for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	  && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
	  && cxx_omp_create_clause_info (c, TREE_TYPE (OMP_CLAUSE_DECL (c)),
					 need_default_ctor: false, need_copy_ctor: true, need_copy_assignment: true, need_dtor: true))
	CP_OMP_CLAUSE_INFO (c) = NULL_TREE;

  return omp_for;
}
11104 | |
11105 | /* Code walker for finish_omp_for_block: extract binding of DP->var |
11106 | from its current block and move it to a new BIND_EXPR DP->b |
11107 | surrounding the body of DP->omp_for. */ |
11108 | |
/* Closure passed to finish_omp_for_block_walker.  */
struct fofb_data {
  tree var;	/* Decl to extract from its current BIND_EXPR.  */
  tree b;	/* Lazily-created BIND_EXPR around OMP_FOR's body, or NULL.  */
  tree omp_for;	/* The OMP_FOR statement being fixed up.  */
};
11114 | |
static tree
finish_omp_for_block_walker (tree *tp, int *walk_subtrees, void *dp)
{
  struct fofb_data *fofb = (struct fofb_data *)dp;
  if (TREE_CODE (*tp) == BIND_EXPR)
    /* Scan this BIND_EXPR's variable chain for the decl we're moving.  */
    for (tree *p = &BIND_EXPR_VARS (*tp); *p; p = &DECL_CHAIN (*p))
      {
	if (*p == fofb->var)
	  {
	    /* Unlink the decl from its current chain.  */
	    *p = DECL_CHAIN (*p);
	    if (fofb->b == NULL_TREE)
	      {
		/* First hit: create the new BIND_EXPR (with its own
		   BLOCK) wrapping the OMP_FOR body.  */
		fofb->b = make_node (BLOCK);
		fofb->b = build3 (BIND_EXPR, void_type_node, NULL_TREE,
				  OMP_FOR_BODY (fofb->omp_for), fofb->b);
		TREE_SIDE_EFFECTS (fofb->b) = 1;
		OMP_FOR_BODY (fofb->omp_for) = fofb->b;
	      }
	    /* Push the decl onto the new BIND_EXPR's chain and keep the
	       BLOCK_VARS lists of both blocks in sync.  */
	    DECL_CHAIN (fofb->var) = BIND_EXPR_VARS (fofb->b);
	    BIND_EXPR_VARS (fofb->b) = fofb->var;
	    BLOCK_VARS (BIND_EXPR_BLOCK (fofb->b)) = fofb->var;
	    BLOCK_VARS (BIND_EXPR_BLOCK (*tp)) = BIND_EXPR_VARS (*tp);
	    /* Returning non-NULL stops the walk.  */
	    return *tp;
	  }
      }
  /* Only descend through BIND_EXPRs and STATEMENT_LISTs.  */
  if (TREE_CODE (*tp) != BIND_EXPR && TREE_CODE (*tp) != STATEMENT_LIST)
    *walk_subtrees = false;
  return NULL_TREE;
}
11144 | |
11145 | /* Fix up range for decls. Those decls were pushed into BIND's |
11146 | BIND_EXPR_VARS, or that of a nested BIND_EXPR inside its body, |
11147 | and need to be moved into a new BIND_EXPR surrounding OMP_FOR's body |
11148 | so that processing of combined loop directives can find them. */ |
11149 | tree |
11150 | finish_omp_for_block (tree bind, tree omp_for) |
11151 | { |
11152 | if (omp_for == NULL_TREE |
11153 | || !OMP_FOR_ORIG_DECLS (omp_for) |
11154 | || bind == NULL_TREE) |
11155 | return bind; |
11156 | struct fofb_data fofb; |
11157 | fofb.b = NULL_TREE; |
11158 | fofb.omp_for = omp_for; |
11159 | for (int i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (omp_for)); i++) |
11160 | if (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (omp_for), i)) == TREE_LIST |
11161 | && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (omp_for), i))) |
11162 | { |
11163 | tree v = TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (omp_for), i)); |
11164 | for (int j = 2; j < TREE_VEC_LENGTH (v); j++) |
11165 | { |
11166 | fofb.var = TREE_VEC_ELT (v, j); |
11167 | cp_walk_tree (&bind, finish_omp_for_block_walker, |
11168 | (void *)&fofb, NULL); |
11169 | } |
11170 | } |
11171 | return bind; |
11172 | } |
11173 | |
/* Finish a #pragma omp atomic construct.  CODE is OMP_ATOMIC,
   OMP_ATOMIC_READ or OMP_ATOMIC_CAPTURE_{OLD,NEW}; OPCODE the update
   operation; LHS/RHS/V/LHS1/RHS1/R the operands per the OpenMP grammar;
   MO the memory order and WEAK the compare-and-swap strength.  */
void
finish_omp_atomic (location_t loc, enum tree_code code, enum tree_code opcode,
		   tree lhs, tree rhs, tree v, tree lhs1, tree rhs1, tree r,
		   tree clauses, enum omp_memory_order mo, bool weak)
{
  tree orig_lhs;
  tree orig_rhs;
  tree orig_v;
  tree orig_lhs1;
  tree orig_rhs1;
  tree orig_r;
  bool dependent_p;
  tree stmt;

  /* Keep the unconverted operands for the template rebuild below.  */
  orig_lhs = lhs;
  orig_rhs = rhs;
  orig_v = v;
  orig_lhs1 = lhs1;
  orig_rhs1 = rhs1;
  orig_r = r;
  dependent_p = false;
  stmt = NULL_TREE;

  /* Even in a template, we can detect invalid uses of the atomic
     pragma if neither LHS nor RHS is type-dependent.  */
  if (processing_template_decl)
    {
      dependent_p = (type_dependent_expression_p (lhs)
		     || (rhs && type_dependent_expression_p (rhs))
		     || (v && type_dependent_expression_p (v))
		     || (lhs1 && type_dependent_expression_p (lhs1))
		     || (rhs1 && type_dependent_expression_p (rhs1))
		     || (r
			 && r != void_list_node
			 && type_dependent_expression_p (r)));
      if (clauses)
	{
	  /* Only a single hint clause may appear here; a dependent or
	     non-constant hint defers all checking to instantiation.  */
	  gcc_assert (TREE_CODE (clauses) == OMP_CLAUSE
		      && OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_HINT
		      && OMP_CLAUSE_CHAIN (clauses) == NULL_TREE);
	  if (type_dependent_expression_p (OMP_CLAUSE_HINT_EXPR (clauses))
	      || TREE_CODE (OMP_CLAUSE_HINT_EXPR (clauses)) != INTEGER_CST)
	    dependent_p = true;
	}
    }
  if (!dependent_p)
    {
      /* If the memory operand appears as the first operand of the
	 update, swap so RHS holds the other operand; remember whether
	 that matters for a non-commutative OPCODE.  */
      bool swapped = false;
      if (rhs1 && opcode != COND_EXPR && cp_tree_equal (lhs, rhs))
	{
	  std::swap (a&: rhs, b&: rhs1);
	  swapped = !commutative_tree_code (opcode);
	}
      if (rhs1 && opcode != COND_EXPR && !cp_tree_equal (lhs, rhs1))
	{
	  if (code == OMP_ATOMIC)
	    error ("%<#pragma omp atomic update%> uses two different "
		   "expressions for memory" );
	  else
	    error ("%<#pragma omp atomic capture%> uses two different "
		   "expressions for memory" );
	  return;
	}
      if (lhs1 && !cp_tree_equal (lhs, lhs1))
	{
	  if (code == OMP_ATOMIC)
	    error ("%<#pragma omp atomic update%> uses two different "
		   "expressions for memory" );
	  else
	    error ("%<#pragma omp atomic capture%> uses two different "
		   "expressions for memory" );
	  return;
	}
      stmt = c_finish_omp_atomic (loc, code, opcode, lhs, rhs,
				  v, lhs1, rhs1, r, swapped, mo, weak,
				  processing_template_decl != 0);
      if (stmt == error_mark_node)
	return;
    }
  if (processing_template_decl)
    {
      /* Rebuild the construct from the original operands so it can be
	 decomposed again at instantiation time.  */
      if (code == OMP_ATOMIC_READ)
	{
	  stmt = build_min_nt_loc (loc, OMP_ATOMIC_READ, orig_lhs);
	  OMP_ATOMIC_MEMORY_ORDER (stmt) = mo;
	  stmt = build2 (MODIFY_EXPR, void_type_node, orig_v, stmt);
	}
      else
	{
	  if (opcode == NOP_EXPR)
	    stmt = build2 (MODIFY_EXPR, void_type_node, orig_lhs, orig_rhs);
	  else if (opcode == COND_EXPR)
	    {
	      /* Atomic compare: encode "r = (lhs == rhs); lhs = ?: ".  */
	      stmt = build2 (EQ_EXPR, boolean_type_node, orig_lhs, orig_rhs);
	      if (orig_r)
		stmt = build2 (MODIFY_EXPR, boolean_type_node, orig_r,
			       stmt);
	      stmt = build3 (COND_EXPR, void_type_node, stmt, orig_rhs1,
			     orig_lhs);
	      orig_rhs1 = NULL_TREE;
	    }
	  else
	    stmt = build2 (opcode, void_type_node, orig_lhs, orig_rhs);
	  if (orig_rhs1)
	    stmt = build_min_nt_loc (EXPR_LOCATION (orig_rhs1),
				     COMPOUND_EXPR, orig_rhs1, stmt);
	  if (code != OMP_ATOMIC)
	    {
	      /* Capture forms additionally assign the result to V.  */
	      stmt = build_min_nt_loc (loc, code, orig_lhs1, stmt);
	      OMP_ATOMIC_MEMORY_ORDER (stmt) = mo;
	      OMP_ATOMIC_WEAK (stmt) = weak;
	      stmt = build2 (MODIFY_EXPR, void_type_node, orig_v, stmt);
	    }
	}
      stmt = build2 (OMP_ATOMIC, void_type_node,
		     clauses ? clauses : integer_zero_node, stmt);
      OMP_ATOMIC_MEMORY_ORDER (stmt) = mo;
      OMP_ATOMIC_WEAK (stmt) = weak;
      SET_EXPR_LOCATION (stmt, loc);
    }

  /* Avoid -Wunused-value warnings here, the whole construct has side-effects
     and even if it might be wrapped from fold-const.cc or c-omp.cc wrapped
     in some tree that appears to be unused, the value is not unused.  */
  warning_sentinel w (warn_unused_value);
  finish_expr_stmt (expr: stmt);
}
11301 | |
11302 | void |
11303 | finish_omp_barrier (void) |
11304 | { |
11305 | tree fn = builtin_decl_explicit (fncode: BUILT_IN_GOMP_BARRIER); |
11306 | releasing_vec vec; |
11307 | tree stmt = finish_call_expr (fn, args: &vec, disallow_virtual: false, koenig_p: false, complain: tf_warning_or_error); |
11308 | finish_expr_stmt (expr: stmt); |
11309 | } |
11310 | |
11311 | void |
11312 | finish_omp_depobj (location_t loc, tree depobj, |
11313 | enum omp_clause_depend_kind kind, tree clause) |
11314 | { |
11315 | if (!error_operand_p (t: depobj) && !type_dependent_expression_p (depobj)) |
11316 | { |
11317 | if (!lvalue_p (depobj)) |
11318 | { |
11319 | error_at (EXPR_LOC_OR_LOC (depobj, loc), |
11320 | "%<depobj%> expression is not lvalue expression" ); |
11321 | depobj = error_mark_node; |
11322 | } |
11323 | } |
11324 | |
11325 | if (processing_template_decl) |
11326 | { |
11327 | if (clause == NULL_TREE) |
11328 | clause = build_int_cst (integer_type_node, kind); |
11329 | add_stmt (t: build_min_nt_loc (loc, OMP_DEPOBJ, depobj, clause)); |
11330 | return; |
11331 | } |
11332 | |
11333 | if (!error_operand_p (t: depobj)) |
11334 | { |
11335 | tree addr = cp_build_addr_expr (depobj, tf_warning_or_error); |
11336 | if (addr == error_mark_node) |
11337 | depobj = error_mark_node; |
11338 | else |
11339 | depobj = cp_build_indirect_ref (loc, addr, RO_UNARY_STAR, |
11340 | tf_warning_or_error); |
11341 | } |
11342 | |
11343 | c_finish_omp_depobj (loc, depobj, kind, clause); |
11344 | } |
11345 | |
11346 | void |
11347 | finish_omp_flush (int mo) |
11348 | { |
11349 | tree fn = builtin_decl_explicit (fncode: BUILT_IN_SYNC_SYNCHRONIZE); |
11350 | releasing_vec vec; |
11351 | if (mo != MEMMODEL_LAST && mo != MEMMODEL_SEQ_CST) |
11352 | { |
11353 | fn = builtin_decl_explicit (fncode: BUILT_IN_ATOMIC_THREAD_FENCE); |
11354 | vec->quick_push (obj: build_int_cst (integer_type_node, mo)); |
11355 | } |
11356 | tree stmt = finish_call_expr (fn, args: &vec, disallow_virtual: false, koenig_p: false, complain: tf_warning_or_error); |
11357 | finish_expr_stmt (expr: stmt); |
11358 | } |
11359 | |
11360 | void |
11361 | finish_omp_taskwait (void) |
11362 | { |
11363 | tree fn = builtin_decl_explicit (fncode: BUILT_IN_GOMP_TASKWAIT); |
11364 | releasing_vec vec; |
11365 | tree stmt = finish_call_expr (fn, args: &vec, disallow_virtual: false, koenig_p: false, complain: tf_warning_or_error); |
11366 | finish_expr_stmt (expr: stmt); |
11367 | } |
11368 | |
11369 | void |
11370 | finish_omp_taskyield (void) |
11371 | { |
11372 | tree fn = builtin_decl_explicit (fncode: BUILT_IN_GOMP_TASKYIELD); |
11373 | releasing_vec vec; |
11374 | tree stmt = finish_call_expr (fn, args: &vec, disallow_virtual: false, koenig_p: false, complain: tf_warning_or_error); |
11375 | finish_expr_stmt (expr: stmt); |
11376 | } |
11377 | |
/* Finish a #pragma omp cancel: translate the construct-selector clause
   into the mask argument of GOMP_cancel and evaluate an optional
   if clause as the second argument.  */
void
finish_omp_cancel (tree clauses)
{
  tree fn = builtin_decl_explicit (fncode: BUILT_IN_GOMP_CANCEL);
  /* Mask bits match the libgomp ABI: 1 parallel, 2 for, 4 sections,
     8 taskgroup.  */
  int mask = 0;
  if (omp_find_clause (clauses, kind: OMP_CLAUSE_PARALLEL))
    mask = 1;
  else if (omp_find_clause (clauses, kind: OMP_CLAUSE_FOR))
    mask = 2;
  else if (omp_find_clause (clauses, kind: OMP_CLAUSE_SECTIONS))
    mask = 4;
  else if (omp_find_clause (clauses, kind: OMP_CLAUSE_TASKGROUP))
    mask = 8;
  else
    {
      error ("%<#pragma omp cancel%> must specify one of "
	     "%<parallel%>, %<for%>, %<sections%> or %<taskgroup%> clauses" );
      return;
    }
  releasing_vec vec;
  tree ifc = omp_find_clause (clauses, kind: OMP_CLAUSE_IF);
  if (ifc != NULL_TREE)
    {
      /* Only an unmodified (or "cancel"-modified, represented as
	 VOID_CST) if clause is valid here.  */
      if (OMP_CLAUSE_IF_MODIFIER (ifc) != ERROR_MARK
	  && OMP_CLAUSE_IF_MODIFIER (ifc) != VOID_CST)
	error_at (OMP_CLAUSE_LOCATION (ifc),
		  "expected %<cancel%> %<if%> clause modifier" );
      else
	{
	  /* Diagnose a second if clause with a different modifier.  */
	  tree ifc2 = omp_find_clause (OMP_CLAUSE_CHAIN (ifc), kind: OMP_CLAUSE_IF);
	  if (ifc2 != NULL_TREE)
	    {
	      gcc_assert (OMP_CLAUSE_IF_MODIFIER (ifc) == VOID_CST
			  && OMP_CLAUSE_IF_MODIFIER (ifc2) != ERROR_MARK
			  && OMP_CLAUSE_IF_MODIFIER (ifc2) != VOID_CST);
	      error_at (OMP_CLAUSE_LOCATION (ifc2),
			"expected %<cancel%> %<if%> clause modifier" );
	    }
	}

      /* Reduce the if expression to a boolean; in a template build a
	 "!= 0" comparison to be instantiated later.  */
      if (!processing_template_decl)
	ifc = maybe_convert_cond (OMP_CLAUSE_IF_EXPR (ifc));
      else
	ifc = build_x_binary_op (OMP_CLAUSE_LOCATION (ifc), NE_EXPR,
				 OMP_CLAUSE_IF_EXPR (ifc), ERROR_MARK,
				 integer_zero_node, ERROR_MARK,
				 NULL_TREE, NULL, tf_warning_or_error);
    }
  else
    ifc = boolean_true_node;
  vec->quick_push (obj: build_int_cst (integer_type_node, mask));
  vec->quick_push (obj: ifc);
  tree stmt = finish_call_expr (fn, args: &vec, disallow_virtual: false, koenig_p: false, complain: tf_warning_or_error);
  finish_expr_stmt (expr: stmt);
}
11433 | |
11434 | void |
11435 | finish_omp_cancellation_point (tree clauses) |
11436 | { |
11437 | tree fn = builtin_decl_explicit (fncode: BUILT_IN_GOMP_CANCELLATION_POINT); |
11438 | int mask = 0; |
11439 | if (omp_find_clause (clauses, kind: OMP_CLAUSE_PARALLEL)) |
11440 | mask = 1; |
11441 | else if (omp_find_clause (clauses, kind: OMP_CLAUSE_FOR)) |
11442 | mask = 2; |
11443 | else if (omp_find_clause (clauses, kind: OMP_CLAUSE_SECTIONS)) |
11444 | mask = 4; |
11445 | else if (omp_find_clause (clauses, kind: OMP_CLAUSE_TASKGROUP)) |
11446 | mask = 8; |
11447 | else |
11448 | { |
11449 | error ("%<#pragma omp cancellation point%> must specify one of " |
11450 | "%<parallel%>, %<for%>, %<sections%> or %<taskgroup%> clauses" ); |
11451 | return; |
11452 | } |
11453 | releasing_vec vec |
11454 | = make_tree_vector_single (build_int_cst (integer_type_node, mask)); |
11455 | tree stmt = finish_call_expr (fn, args: &vec, disallow_virtual: false, koenig_p: false, complain: tf_warning_or_error); |
11456 | finish_expr_stmt (expr: stmt); |
11457 | } |
11458 | |
11459 | /* Begin a __transaction_atomic or __transaction_relaxed statement. |
11460 | If PCOMPOUND is non-null, this is for a function-transaction-block, and we |
11461 | should create an extra compound stmt. */ |
11462 | |
11463 | tree |
11464 | begin_transaction_stmt (location_t loc, tree *pcompound, int flags) |
11465 | { |
11466 | tree r; |
11467 | |
11468 | if (pcompound) |
11469 | *pcompound = begin_compound_stmt (flags: 0); |
11470 | |
11471 | r = build_stmt (loc, TRANSACTION_EXPR, NULL_TREE); |
11472 | |
11473 | /* Only add the statement to the function if support enabled. */ |
11474 | if (flag_tm) |
11475 | add_stmt (t: r); |
11476 | else |
11477 | error_at (loc, ((flags & TM_STMT_ATTR_RELAXED) != 0 |
11478 | ? G_("%<__transaction_relaxed%> without " |
11479 | "transactional memory support enabled" ) |
11480 | : G_("%<__transaction_atomic%> without " |
11481 | "transactional memory support enabled" ))); |
11482 | |
11483 | TRANSACTION_EXPR_BODY (r) = push_stmt_list (); |
11484 | TREE_SIDE_EFFECTS (r) = 1; |
11485 | return r; |
11486 | } |
11487 | |
11488 | /* End a __transaction_atomic or __transaction_relaxed statement. |
11489 | If COMPOUND_STMT is non-null, this is for a function-transaction-block, |
11490 | and we should end the compound. If NOEX is non-NULL, we wrap the body in |
11491 | a MUST_NOT_THROW_EXPR with NOEX as condition. */ |
11492 | |
void
finish_transaction_stmt (tree stmt, tree compound_stmt, int flags, tree noex)
{
  /* Pop the body started by begin_transaction_stmt and record the
     attribute flags on the node.  */
  TRANSACTION_EXPR_BODY (stmt) = pop_stmt_list (TRANSACTION_EXPR_BODY (stmt));
  TRANSACTION_EXPR_OUTER (stmt) = (flags & TM_STMT_ATTR_OUTER) != 0;
  TRANSACTION_EXPR_RELAXED (stmt) = (flags & TM_STMT_ATTR_RELAXED) != 0;
  TRANSACTION_EXPR_IS_STMT (stmt) = 1;

  /* noexcept specifications are not allowed for function transactions.  */
  gcc_assert (!(noex && compound_stmt));
  if (noex)
    {
      /* Wrap the body so a throw under a noexcept transaction is
	 diagnosed/terminated via MUST_NOT_THROW_EXPR.  */
      tree body = build_must_not_throw_expr (TRANSACTION_EXPR_BODY (stmt),
					     noex);
      protected_set_expr_location
	(body, EXPR_LOCATION (TRANSACTION_EXPR_BODY (stmt)));
      TREE_SIDE_EFFECTS (body) = 1;
      TRANSACTION_EXPR_BODY (stmt) = body;
    }

  if (compound_stmt)
    finish_compound_stmt (stmt: compound_stmt);
}
11516 | |
11517 | /* Build a __transaction_atomic or __transaction_relaxed expression. If |
11518 | NOEX is non-NULL, we wrap the body in a MUST_NOT_THROW_EXPR with NOEX as |
11519 | condition. */ |
11520 | |
11521 | tree |
11522 | build_transaction_expr (location_t loc, tree expr, int flags, tree noex) |
11523 | { |
11524 | tree ret; |
11525 | if (noex) |
11526 | { |
11527 | expr = build_must_not_throw_expr (expr, noex); |
11528 | protected_set_expr_location (expr, loc); |
11529 | TREE_SIDE_EFFECTS (expr) = 1; |
11530 | } |
11531 | ret = build1 (TRANSACTION_EXPR, TREE_TYPE (expr), expr); |
11532 | if (flags & TM_STMT_ATTR_RELAXED) |
11533 | TRANSACTION_EXPR_RELAXED (ret) = 1; |
11534 | TREE_SIDE_EFFECTS (ret) = 1; |
11535 | SET_EXPR_LOCATION (ret, loc); |
11536 | return ret; |
11537 | } |
11538 | |
/* One-time initialization hook for this file; currently nothing to do.  */
void
init_cp_semantics (void)
{
}
11543 | |
11544 | |
11545 | /* Build a STATIC_ASSERT for a static assertion with the condition |
11546 | CONDITION and the message text MESSAGE. LOCATION is the location |
11547 | of the static assertion in the source code. When MEMBER_P, this |
11548 | static assertion is a member of a class. If SHOW_EXPR_P is true, |
11549 | print the condition (because it was instantiation-dependent). */ |
11550 | |
void
finish_static_assert (tree condition, tree message, location_t location,
		      bool member_p, bool show_expr_p)
{
  tsubst_flags_t complain = tf_warning_or_error;
  /* For a non-string-literal MESSAGE these hold the built calls to
     MESSAGE.size () and MESSAGE.data () respectively.  */
  tree message_sz = NULL_TREE, message_data = NULL_TREE;

  /* Bail out silently on already-diagnosed operands.  */
  if (message == NULL_TREE
      || message == error_mark_node
      || condition == NULL_TREE
      || condition == error_mark_node)
    return;

  if (check_for_bare_parameter_packs (condition)
      || check_for_bare_parameter_packs (message))
    return;

  /* User-generated messages: a non-string-literal MESSAGE must provide
     usable size () and data () members.  Build those member calls now and
     check their convertibility to std::size_t and const char*.  */
  if (TREE_CODE (message) != STRING_CST
      && !type_dependent_expression_p (message))
    {
      message_sz
	= finish_class_member_access_expr (message,
					   get_identifier ("size" ),
					   false, complain);
      if (message_sz != error_mark_node)
	message_data
	  = finish_class_member_access_expr (message,
					     get_identifier ("data" ),
					     false, complain);
      if (message_sz == error_mark_node || message_data == error_mark_node)
	{
	  error_at (location, "%<static_assert%> message must be a string "
			      "literal or object with %<size%> and "
			      "%<data%> members" );
	  return;
	}
      releasing_vec size_args, data_args;
      message_sz = finish_call_expr (fn: message_sz, args: &size_args, disallow_virtual: false, koenig_p: false,
				     complain);
      message_data = finish_call_expr (fn: message_data, args: &data_args, disallow_virtual: false, koenig_p: false,
				       complain);
      if (message_sz == error_mark_node || message_data == error_mark_node)
	return;
      message_sz = build_converted_constant_expr (size_type_node, message_sz,
						  complain);
      if (message_sz == error_mark_node)
	{
	  error_at (location, "%<static_assert%> message %<size()%> "
			      "must be implicitly convertible to "
			      "%<std::size_t%>" );
	  return;
	}
      message_data = build_converted_constant_expr (const_string_type_node,
						    message_data, complain);
      if (message_data == error_mark_node)
	{
	  error_at (location, "%<static_assert%> message %<data()%> "
			      "must be implicitly convertible to "
			      "%<const char*%>" );
	  return;
	}
    }

  /* Save the condition in case it was a concept check.  */
  tree orig_condition = condition;

  if (instantiation_dependent_expression_p (condition)
      || instantiation_dependent_expression_p (message))
    {
      /* We're in a template; build a STATIC_ASSERT and put it in
	 the right place.  NOTE: the DEFER label is also reached by the
	 goto in the CWG2518 handling below, when a failing assertion is
	 seen while processing_template_decl.  */
    defer:
      tree assertion = make_node (STATIC_ASSERT);
      STATIC_ASSERT_CONDITION (assertion) = orig_condition;
      STATIC_ASSERT_MESSAGE (assertion) = message;
      STATIC_ASSERT_SOURCE_LOCATION (assertion) = location;

      if (member_p)
	maybe_add_class_template_decl_list (current_class_type,
					    assertion,
					    /*friend_p=*/0);
      else
	add_stmt (t: assertion);

      return;
    }

  /* Fold the expression and convert it to a boolean value. */
  condition = contextual_conv_bool (condition, complain);
  condition = fold_non_dependent_expr (condition, complain,
				       /*manifestly_const_eval=*/true);

  if (TREE_CODE (condition) == INTEGER_CST && !integer_zerop (condition))
    /* Do nothing; the condition is satisfied. */
    ;
  else
    {
      iloc_sentinel ils (location);

      if (integer_zerop (condition))
	{
	  /* CWG2518: static_assert failure in a template is not IFNDR. */
	  if (processing_template_decl)
	    goto defer;

	  /* Compute the message text (MSG/LEN) to include in the
	     diagnostic.  BUF is heap storage owned here; every early
	     return after allocation must XDELETEVEC it.  */
	  int len;
	  const char *msg = NULL;
	  char *buf = NULL;
	  if (message_sz && message_data)
	    {
	      /* size () must constant-evaluate to a value that fits
		 in an int.  */
	      tree msz = cxx_constant_value (message_sz, NULL_TREE, complain);
	      if (!tree_fits_uhwi_p (msz))
		{
		  error_at (location,
			    "%<static_assert%> message %<size()%> "
			    "must be a constant expression" );
		  return;
		}
	      else if ((unsigned HOST_WIDE_INT) (int) tree_to_uhwi (msz)
		       != tree_to_uhwi (msz))
		{
		  error_at (location,
			    "%<static_assert%> message %<size()%> "
			    "%qE too large" , msz);
		  return;
		}
	      len = tree_to_uhwi (msz);
	      tree data = maybe_constant_value (message_data, NULL_TREE,
						mce_true);
	      if (!reduced_constant_expression_p (data))
		data = NULL_TREE;
	      if (len)
		{
		  /* Try reading the whole string at once; fall back to
		     evaluating data ()[i] character by character.  */
		  if (data)
		    msg = c_getstr (data);
		  if (msg == NULL)
		    buf = XNEWVEC (char, len);
		  for (int i = 0; i < len; ++i)
		    {
		      tree t = message_data;
		      if (i)
			t = build2 (POINTER_PLUS_EXPR,
				    TREE_TYPE (message_data), message_data,
				    size_int (i));
		      t = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (t)), t);
		      tree t2 = cxx_constant_value (t, NULL_TREE, complain);
		      if (!tree_fits_shwi_p (t2))
			{
			  error_at (location,
				    "%<static_assert%> message %<data()[%d]%> "
				    "must be a constant expression" , i);
			  XDELETEVEC (buf);
			  return;
			}
		      if (msg == NULL)
			buf[i] = tree_to_shwi (t2);
		      /* If c_getstr worked, just verify the first and
			 last characters using constant evaluation. */
		      else if (len > 2 && i == 0)
			i = len - 2;
		    }
		  if (msg == NULL)
		    msg = buf;
		}
	      else if (!data)
		{
		  /* We don't have any function to test whether some
		     expression is a core constant expression. So, instead
		     test whether (message.data (), 0) is a constant
		     expression. */
		  data = build2 (COMPOUND_EXPR, integer_type_node,
				 message_data, integer_zero_node);
		  tree t = cxx_constant_value (data, NULL_TREE, complain);
		  if (!integer_zerop (t))
		    {
		      error_at (location,
				"%<static_assert%> message %<data()%> "
				"must be a core constant expression" );
		      return;
		    }
		}
	    }
	  else
	    {
	      /* String-literal message: read the text directly, dividing
		 out the element size and dropping the terminating NUL.  */
	      tree eltype = TREE_TYPE (TREE_TYPE (message));
	      int sz = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (eltype));
	      msg = TREE_STRING_POINTER (message);
	      len = TREE_STRING_LENGTH (message) / sz - 1;
	    }

	  /* See if we can find which clause was failing (for logical AND). */
	  tree bad = find_failing_clause (NULL, orig_condition);
	  /* If not, or its location is unusable, fall back to the previous
	     location. */
	  location_t cloc = cp_expr_loc_or_loc (t: bad, or_loc: location);

	  auto_diagnostic_group d;

	  /* Report the error. */
	  if (len == 0)
	    error_at (cloc, "static assertion failed" );
	  else
	    error_at (cloc, "static assertion failed: %.*s" , len, msg);

	  XDELETEVEC (buf);

	  diagnose_failing_condition (bad, cloc, show_expr_p);
	}
      else if (condition && condition != error_mark_node)
	{
	  /* The condition folded to something non-constant; diagnose
	     why it is not a constant expression.  */
	  error ("non-constant condition for static assertion" );
	  if (require_rvalue_constant_expression (condition))
	    cxx_constant_value (condition);
	}
    }
}
11767 | |
11768 | /* Implements the C++0x decltype keyword. Returns the type of EXPR, |
11769 | suitable for use as a type-specifier. |
11770 | |
11771 | ID_EXPRESSION_OR_MEMBER_ACCESS_P is true when EXPR was parsed as an |
11772 | id-expression or a class member access, FALSE when it was parsed as |
11773 | a full expression. */ |
11774 | |
tree
finish_decltype_type (tree expr, bool id_expression_or_member_access_p,
		      tsubst_flags_t complain)
{
  tree type = NULL_TREE;

  if (!expr || error_operand_p (t: expr))
    return error_mark_node;

  /* decltype takes an expression operand; reject types (including the
     ~T spelling of a destructor name on a type).  */
  if (TYPE_P (expr)
      || TREE_CODE (expr) == TYPE_DECL
      || (TREE_CODE (expr) == BIT_NOT_EXPR
	  && TYPE_P (TREE_OPERAND (expr, 0))))
    {
      if (complain & tf_error)
	error ("argument to %<decltype%> must be an expression" );
      return error_mark_node;
    }

  /* decltype is an unevaluated context. */
  cp_unevaluated u;

  /* Remember whether we entered while processing a template;
     ptds.saved is consulted in the decomposition handling below.  */
  processing_template_decl_sentinel ptds (/*reset=*/false);

  /* Depending on the resolution of DR 1172, we may later need to distinguish
     instantiation-dependent but not type-dependent expressions so that, say,
     A<decltype(sizeof(T))>::U doesn't require 'typename'. */
  if (instantiation_dependent_uneval_expression_p (expr))
    {
      /* NOTE: DEPENDENT is also reached via goto from the lambda-capture
	 handling below when the object parameter's qualifiers are not
	 yet known.  */
    dependent:
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = expr;
      DECLTYPE_TYPE_ID_EXPR_OR_MEMBER_ACCESS_P (type)
	= id_expression_or_member_access_p;
      SET_TYPE_STRUCTURAL_EQUALITY (type);

      return type;
    }
  else if (processing_template_decl)
    {
      expr = instantiate_non_dependent_expr (expr, complain|tf_decltype);
      if (expr == error_mark_node)
	return error_mark_node;
      /* Keep processing_template_decl cleared for the rest of the function
	 (for sake of the call to lvalue_kind below, which handles templated
	 and non-templated COND_EXPR differently). */
      processing_template_decl = 0;
    }

  /* The type denoted by decltype(e) is defined as follows: */

  expr = resolve_nondeduced_context (expr, complain);
  if (!mark_single_function (expr, complain))
    return error_mark_node;

  if (invalid_nonstatic_memfn_p (input_location, expr, complain))
    return error_mark_node;

  if (type_unknown_p (expr))
    {
      if (complain & tf_error)
	error ("%<decltype%> cannot resolve address of overloaded function" );
      return error_mark_node;
    }

  /* To get the size of a static data member declared as an array of
     unknown bound, we need to instantiate it. */
  if (VAR_P (expr)
      && VAR_HAD_UNKNOWN_BOUND (expr)
      && DECL_TEMPLATE_INSTANTIATION (expr))
    instantiate_decl (expr, /*defer_ok*/true, /*expl_inst_mem*/false);

  if (id_expression_or_member_access_p)
    {
      /* If e is an id-expression or a class member access (5.2.5
	 [expr.ref]), decltype(e) is defined as the type of the entity
	 named by e. If there is no such entity, or e names a set of
	 overloaded functions, the program is ill-formed. */
      if (identifier_p (t: expr))
	expr = lookup_name (name: expr);

      if (INDIRECT_REF_P (expr)
	  || TREE_CODE (expr) == VIEW_CONVERT_EXPR)
	/* This can happen when the expression is, e.g., "a.b". Just
	   look at the underlying operand. */
	expr = TREE_OPERAND (expr, 0);

      if (TREE_CODE (expr) == OFFSET_REF
	  || TREE_CODE (expr) == MEMBER_REF
	  || TREE_CODE (expr) == SCOPE_REF)
	/* We're only interested in the field itself. If it is a
	   BASELINK, we will need to see through it in the next
	   step. */
	expr = TREE_OPERAND (expr, 1);

      if (BASELINK_P (expr))
	/* See through BASELINK nodes to the underlying function. */
	expr = BASELINK_FUNCTIONS (expr);

      /* decltype of a decomposition name drops references in the tuple case
	 (unlike decltype of a normal variable) and keeps cv-qualifiers from
	 the containing object in the other cases (unlike decltype of a member
	 access expression). */
      if (DECL_DECOMPOSITION_P (expr))
	{
	  if (ptds.saved)
	    {
	      gcc_checking_assert (DECL_HAS_VALUE_EXPR_P (expr));
	      /* DECL_HAS_VALUE_EXPR_P is always set if
		 processing_template_decl. If lookup_decomp_type
		 returns non-NULL, it is the tuple case. */
	      if (tree ret = lookup_decomp_type (expr))
		return ret;
	    }
	  if (DECL_HAS_VALUE_EXPR_P (expr))
	    /* Expr is an array or struct subobject proxy, handle
	       bit-fields properly. */
	    return unlowered_expr_type (expr);
	  else
	    /* Expr is a reference variable for the tuple case. */
	    return lookup_decomp_type (expr);
	}

      /* Dispatch on the kind of entity named by EXPR.  */
      switch (TREE_CODE (expr))
	{
	case FIELD_DECL:
	  if (DECL_BIT_FIELD_TYPE (expr))
	    {
	      /* For a bit-field, use the declared type, not the
		 lowered storage type.  */
	      type = DECL_BIT_FIELD_TYPE (expr);
	      break;
	    }
	  /* Fall through for fields that aren't bitfields. */
	  gcc_fallthrough ();

	case VAR_DECL:
	  if (is_capture_proxy (expr))
	    {
	      if (is_normal_capture_proxy (expr))
		{
		  /* decltype of a capture names the captured variable's
		     type, minus any reference.  */
		  expr = DECL_CAPTURED_VARIABLE (expr);
		  type = TREE_TYPE (expr);
		  type = non_reference (type);
		}
	      else
		{
		  /* Init-capture: look at the closure member the proxy
		     stands for.  */
		  expr = DECL_VALUE_EXPR (expr);
		  gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
		  expr = TREE_OPERAND (expr, 1);
		  type = TREE_TYPE (expr);
		}
	      break;
	    }
	  /* Fall through for variables that aren't capture proxies. */
	  gcc_fallthrough ();

	case FUNCTION_DECL:
	case CONST_DECL:
	case PARM_DECL:
	case RESULT_DECL:
	case TEMPLATE_PARM_INDEX:
	  expr = mark_type_use (expr);
	  type = TREE_TYPE (expr);
	  if (VAR_P (expr) && DECL_NTTP_OBJECT_P (expr))
	    {
	      /* decltype of an NTTP object is the type of the template
		 parameter, which is the object type modulo cv-quals. */
	      int quals = cp_type_quals (type);
	      gcc_checking_assert (quals & TYPE_QUAL_CONST);
	      type = cv_unqualified (type);
	    }
	  break;

	case ERROR_MARK:
	  type = error_mark_node;
	  break;

	case COMPONENT_REF:
	case COMPOUND_EXPR:
	  mark_type_use (expr);
	  type = is_bitfield_expr_with_lowered_type (expr);
	  if (!type)
	    type = TREE_TYPE (TREE_OPERAND (expr, 1));
	  break;

	case BIT_FIELD_REF:
	  gcc_unreachable ();

	case INTEGER_CST:
	case PTRMEM_CST:
	  /* We can get here when the id-expression refers to an
	     enumerator or non-type template parameter. */
	  type = TREE_TYPE (expr);
	  break;

	default:
	  /* Handle instantiated template non-type arguments. */
	  type = TREE_TYPE (expr);
	  break;
	}
    }
  else
    {
      if (outer_automatic_var_p (STRIP_REFERENCE_REF (expr))
	  && current_function_decl
	  && LAMBDA_FUNCTION_P (current_function_decl))
	{
	  /* [expr.prim.id.unqual]/3: If naming the entity from outside of an
	     unevaluated operand within S would refer to an entity captured by
	     copy in some intervening lambda-expression, then let E be the
	     innermost such lambda-expression.

	     If there is such a lambda-expression and if P is in E's function
	     parameter scope but not its parameter-declaration-clause, then the
	     type of the expression is the type of a class member access
	     expression naming the non-static data member that would be declared
	     for such a capture in the object parameter of the function call
	     operator of E." */
	  /* FIXME: This transformation needs to happen for all uses of an outer
	     local variable inside decltype, not just decltype((x)) (PR83167).
	     And we don't handle nested lambdas properly, where we need to
	     consider the outer lambdas as well (PR112926). */
	  tree decl = STRIP_REFERENCE_REF (expr);
	  tree lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl));
	  tree cap = lookup_name (DECL_NAME (decl), LOOK_where::BLOCK,
				  LOOK_want::HIDDEN_LAMBDA);

	  if (cap && is_capture_proxy (cap))
	    type = TREE_TYPE (cap);
	  else if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) == CPLD_COPY)
	    {
	      /* An implicit copy capture would be declared with the
		 variable's type, dropping a non-function reference.  */
	      type = TREE_TYPE (decl);
	      if (TYPE_REF_P (type)
		  && TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE)
		type = TREE_TYPE (type);
	    }

	  if (type && !TYPE_REF_P (type))
	    {
	      /* Model the member access: combine the member's quals with
		 those of the call operator's object parameter.  */
	      tree obtype = TREE_TYPE (DECL_ARGUMENTS (current_function_decl));
	      if (WILDCARD_TYPE_P (non_reference (obtype)))
		/* We don't know what the eventual obtype quals will be. */
		goto dependent;
	      auto direct_type = [](tree t){
		if (INDIRECT_TYPE_P (t))
		  return TREE_TYPE (t);
		return t;
	      };
	      int const quals = cp_type_quals (type)
			      | cp_type_quals (direct_type (obtype));
	      type = cp_build_qualified_type (type, quals);
	      type = build_reference_type (type);
	    }
	}
      else if (error_operand_p (t: expr))
	type = error_mark_node;
      else if (expr == current_class_ptr)
	/* If the expression is just "this", we want the
	   cv-unqualified pointer for the "this" type. */
	type = TYPE_MAIN_VARIANT (TREE_TYPE (expr));

      if (!type)
	{
	  /* Otherwise, where T is the type of e, if e is an lvalue,
	     decltype(e) is defined as T&; if an xvalue, T&&; otherwise, T. */
	  cp_lvalue_kind clk = lvalue_kind (expr);
	  type = unlowered_expr_type (expr);
	  gcc_assert (!TYPE_REF_P (type));

	  /* For vector types, pick a non-opaque variant. */
	  if (VECTOR_TYPE_P (type))
	    type = strip_typedefs (type);

	  if (clk != clk_none && !(clk & clk_class))
	    type = cp_build_reference_type (type, (clk & clk_rvalueref));
	}
    }

  return type;
}
12054 | |
12055 | /* Called from trait_expr_value to evaluate either __has_nothrow_assign or |
12056 | __has_nothrow_copy, depending on assign_p. Returns true iff all |
12057 | the copy {ctor,assign} fns are nothrow. */ |
12058 | |
12059 | static bool |
12060 | classtype_has_nothrow_assign_or_copy_p (tree type, bool assign_p) |
12061 | { |
12062 | tree fns = NULL_TREE; |
12063 | |
12064 | if (assign_p || TYPE_HAS_COPY_CTOR (type)) |
12065 | fns = get_class_binding (type, assign_p ? assign_op_identifier |
12066 | : ctor_identifier); |
12067 | |
12068 | bool saw_copy = false; |
12069 | for (ovl_iterator iter (fns); iter; ++iter) |
12070 | { |
12071 | tree fn = *iter; |
12072 | |
12073 | if (copy_fn_p (fn) > 0) |
12074 | { |
12075 | saw_copy = true; |
12076 | if (!maybe_instantiate_noexcept (fn) |
12077 | || !TYPE_NOTHROW_P (TREE_TYPE (fn))) |
12078 | return false; |
12079 | } |
12080 | } |
12081 | |
12082 | return saw_copy; |
12083 | } |
12084 | |
12085 | /* Return true if DERIVED is pointer interconvertible base of BASE. */ |
12086 | |
12087 | static bool |
12088 | pointer_interconvertible_base_of_p (tree base, tree derived) |
12089 | { |
12090 | if (base == error_mark_node || derived == error_mark_node) |
12091 | return false; |
12092 | base = TYPE_MAIN_VARIANT (base); |
12093 | derived = TYPE_MAIN_VARIANT (derived); |
12094 | if (!NON_UNION_CLASS_TYPE_P (base) |
12095 | || !NON_UNION_CLASS_TYPE_P (derived)) |
12096 | return false; |
12097 | |
12098 | if (same_type_p (base, derived)) |
12099 | return true; |
12100 | |
12101 | if (!std_layout_type_p (derived)) |
12102 | return false; |
12103 | |
12104 | return uniquely_derived_from_p (base, derived); |
12105 | } |
12106 | |
12107 | /* Helper function for fold_builtin_is_pointer_inverconvertible_with_class, |
12108 | return true if MEMBERTYPE is the type of the first non-static data member |
12109 | of TYPE or for unions of any members. */ |
12110 | static bool |
12111 | first_nonstatic_data_member_p (tree type, tree membertype) |
12112 | { |
12113 | for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field)) |
12114 | { |
12115 | if (TREE_CODE (field) != FIELD_DECL) |
12116 | continue; |
12117 | if (DECL_FIELD_IS_BASE (field) && is_empty_field (field)) |
12118 | continue; |
12119 | if (DECL_FIELD_IS_BASE (field)) |
12120 | return first_nonstatic_data_member_p (TREE_TYPE (field), membertype); |
12121 | if (ANON_AGGR_TYPE_P (TREE_TYPE (field))) |
12122 | { |
12123 | if ((TREE_CODE (TREE_TYPE (field)) == UNION_TYPE |
12124 | || std_layout_type_p (TREE_TYPE (field))) |
12125 | && first_nonstatic_data_member_p (TREE_TYPE (field), membertype)) |
12126 | return true; |
12127 | } |
12128 | else if (same_type_ignoring_top_level_qualifiers_p (TREE_TYPE (field), |
12129 | membertype)) |
12130 | return true; |
12131 | if (TREE_CODE (type) != UNION_TYPE) |
12132 | return false; |
12133 | } |
12134 | return false; |
12135 | } |
12136 | |
12137 | /* Fold __builtin_is_pointer_interconvertible_with_class call. */ |
12138 | |
12139 | tree |
12140 | fold_builtin_is_pointer_inverconvertible_with_class (location_t loc, int nargs, |
12141 | tree *args) |
12142 | { |
12143 | /* Unless users call the builtin directly, the following 3 checks should be |
12144 | ensured from std::is_pointer_interconvertible_with_class function |
12145 | template. */ |
12146 | if (nargs != 1) |
12147 | { |
12148 | error_at (loc, "%<__builtin_is_pointer_interconvertible_with_class%> " |
12149 | "needs a single argument" ); |
12150 | return boolean_false_node; |
12151 | } |
12152 | tree arg = args[0]; |
12153 | if (error_operand_p (t: arg)) |
12154 | return boolean_false_node; |
12155 | if (!TYPE_PTRMEM_P (TREE_TYPE (arg))) |
12156 | { |
12157 | error_at (loc, "%<__builtin_is_pointer_interconvertible_with_class%> " |
12158 | "argument is not pointer to member" ); |
12159 | return boolean_false_node; |
12160 | } |
12161 | |
12162 | if (!TYPE_PTRDATAMEM_P (TREE_TYPE (arg))) |
12163 | return boolean_false_node; |
12164 | |
12165 | tree membertype = TREE_TYPE (TREE_TYPE (arg)); |
12166 | tree basetype = TYPE_OFFSET_BASETYPE (TREE_TYPE (arg)); |
12167 | if (!complete_type_or_else (basetype, NULL_TREE)) |
12168 | return boolean_false_node; |
12169 | |
12170 | if (TREE_CODE (basetype) != UNION_TYPE |
12171 | && !std_layout_type_p (basetype)) |
12172 | return boolean_false_node; |
12173 | |
12174 | if (!first_nonstatic_data_member_p (type: basetype, membertype)) |
12175 | return boolean_false_node; |
12176 | |
12177 | if (TREE_CODE (arg) == PTRMEM_CST) |
12178 | arg = cplus_expand_constant (arg); |
12179 | |
12180 | if (integer_nonzerop (arg)) |
12181 | return boolean_false_node; |
12182 | if (integer_zerop (arg)) |
12183 | return boolean_true_node; |
12184 | |
12185 | return fold_build2 (EQ_EXPR, boolean_type_node, arg, |
12186 | build_zero_cst (TREE_TYPE (arg))); |
12187 | } |
12188 | |
12189 | /* Helper function for is_corresponding_member_aggr. Return true if |
12190 | MEMBERTYPE pointer-to-data-member ARG can be found in anonymous |
12191 | union or structure BASETYPE. */ |
12192 | |
12193 | static bool |
12194 | is_corresponding_member_union (tree basetype, tree membertype, tree arg) |
12195 | { |
12196 | for (tree field = TYPE_FIELDS (basetype); field; field = DECL_CHAIN (field)) |
12197 | if (TREE_CODE (field) != FIELD_DECL || DECL_BIT_FIELD_TYPE (field)) |
12198 | continue; |
12199 | else if (same_type_ignoring_top_level_qualifiers_p (TREE_TYPE (field), |
12200 | membertype)) |
12201 | { |
12202 | if (TREE_CODE (arg) != INTEGER_CST |
12203 | || tree_int_cst_equal (arg, byte_position (field))) |
12204 | return true; |
12205 | } |
12206 | else if (ANON_AGGR_TYPE_P (TREE_TYPE (field))) |
12207 | { |
12208 | tree narg = arg; |
12209 | if (TREE_CODE (basetype) != UNION_TYPE |
12210 | && TREE_CODE (narg) == INTEGER_CST) |
12211 | narg = size_binop (MINUS_EXPR, arg, byte_position (field)); |
12212 | if (is_corresponding_member_union (TREE_TYPE (field), |
12213 | membertype, arg: narg)) |
12214 | return true; |
12215 | } |
12216 | return false; |
12217 | } |
12218 | |
12219 | /* Helper function for fold_builtin_is_corresponding_member call. |
12220 | Return boolean_false_node if MEMBERTYPE1 BASETYPE1::*ARG1 and |
12221 | MEMBERTYPE2 BASETYPE2::*ARG2 aren't corresponding members, |
12222 | boolean_true_node if they are corresponding members, or for |
12223 | non-constant ARG2 the highest member offset for corresponding |
12224 | members. */ |
12225 | |
static tree
is_corresponding_member_aggr (location_t loc, tree basetype1, tree membertype1,
			      tree arg1, tree basetype2, tree membertype2,
			      tree arg2)
{
  tree field1 = TYPE_FIELDS (basetype1);
  tree field2 = TYPE_FIELDS (basetype2);
  /* RET is boolean_false_node until a potential correspondence is
     found; for non-constant ARG1 it tracks the highest matching
     offset seen so far.  */
  tree ret = boolean_false_node;
  /* Walk the two field lists in lockstep over their common initial
     sequence; R becomes false once the sequence ends.  */
  while (1)
    {
      bool r = next_common_initial_sequence (field1, field2);
      if (field1 == NULL_TREE || field2 == NULL_TREE)
	break;
      if (r
	  && same_type_ignoring_top_level_qualifiers_p (TREE_TYPE (field1),
							membertype1)
	  && same_type_ignoring_top_level_qualifiers_p (TREE_TYPE (field2),
							membertype2))
	{
	  /* Both fields are still in the common initial sequence and
	     have the requested member types.  */
	  tree pos = byte_position (field1);
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && tree_int_cst_equal (arg1, pos))
	    {
	      if (TREE_CODE (arg2) == INTEGER_CST)
		return boolean_true_node;
	      /* ARG2 is not constant: report the offset for the
		 caller's runtime comparison.  */
	      return pos;
	    }
	  else if (TREE_CODE (arg1) != INTEGER_CST)
	    ret = pos;
	}
      else if (ANON_AGGR_TYPE_P (TREE_TYPE (field1))
	       && ANON_AGGR_TYPE_P (TREE_TYPE (field2)))
	{
	  /* Matching anonymous aggregates: they can only continue the
	     common sequence if their no_unique_address-ness and bit
	     positions agree.  */
	  if ((!lookup_attribute (attr_name: "no_unique_address" ,
				  DECL_ATTRIBUTES (field1)))
	      != !lookup_attribute (attr_name: "no_unique_address" ,
				    DECL_ATTRIBUTES (field2)))
	    break;
	  if (!tree_int_cst_equal (bit_position (field1),
				   bit_position (field2)))
	    break;
	  /* A constant offset lying outside this anonymous member
	     cannot designate one of its fields.  */
	  bool overlap = true;
	  tree pos = byte_position (field1);
	  if (TREE_CODE (arg1) == INTEGER_CST)
	    {
	      tree off1 = fold_convert (sizetype, arg1);
	      tree sz1 = TYPE_SIZE_UNIT (TREE_TYPE (field1));
	      if (tree_int_cst_lt (t1: off1, t2: pos)
		  || tree_int_cst_le (size_binop (PLUS_EXPR, pos, sz1), t2: off1))
		overlap = false;
	    }
	  if (TREE_CODE (arg2) == INTEGER_CST)
	    {
	      tree off2 = fold_convert (sizetype, arg2);
	      tree sz2 = TYPE_SIZE_UNIT (TREE_TYPE (field2));
	      if (tree_int_cst_lt (t1: off2, t2: pos)
		  || tree_int_cst_le (size_binop (PLUS_EXPR, pos, sz2), t2: off2))
		overlap = false;
	    }
	  if (overlap
	      && NON_UNION_CLASS_TYPE_P (TREE_TYPE (field1))
	      && NON_UNION_CLASS_TYPE_P (TREE_TYPE (field2)))
	    {
	      /* Anonymous structs: recurse with the offsets rebased to
		 the start of the anonymous member.  */
	      tree narg1 = arg1;
	      if (TREE_CODE (arg1) == INTEGER_CST)
		narg1 = size_binop (MINUS_EXPR,
				    fold_convert (sizetype, arg1), pos);
	      tree narg2 = arg2;
	      if (TREE_CODE (arg2) == INTEGER_CST)
		narg2 = size_binop (MINUS_EXPR,
				    fold_convert (sizetype, arg2), pos);
	      tree t1 = TREE_TYPE (field1);
	      tree t2 = TREE_TYPE (field2);
	      tree nret = is_corresponding_member_aggr (loc, basetype1: t1, membertype1,
							arg1: narg1, basetype2: t2, membertype2,
							arg2: narg2);
	      if (nret != boolean_false_node)
		{
		  if (nret == boolean_true_node)
		    return nret;
		  /* Translate the inner offset back into this type.  */
		  if (TREE_CODE (arg1) == INTEGER_CST)
		    return size_binop (PLUS_EXPR, nret, pos);
		  ret = size_binop (PLUS_EXPR, nret, pos);
		}
	    }
	  else if (overlap
		   && TREE_CODE (TREE_TYPE (field1)) == UNION_TYPE
		   && TREE_CODE (TREE_TYPE (field2)) == UNION_TYPE)
	    {
	      /* Anonymous unions: if both offsets could designate a
		 matching union member, the answer is not well defined
		 here; punt with a sorry.  */
	      tree narg1 = arg1;
	      if (TREE_CODE (arg1) == INTEGER_CST)
		narg1 = size_binop (MINUS_EXPR,
				    fold_convert (sizetype, arg1), pos);
	      tree narg2 = arg2;
	      if (TREE_CODE (arg2) == INTEGER_CST)
		narg2 = size_binop (MINUS_EXPR,
				    fold_convert (sizetype, arg2), pos);
	      if (is_corresponding_member_union (TREE_TYPE (field1),
						 membertype: membertype1, arg: narg1)
		  && is_corresponding_member_union (TREE_TYPE (field2),
						    membertype: membertype2, arg: narg2))
		{
		  sorry_at (loc, "%<__builtin_is_corresponding_member%> "
				 "not well defined for anonymous unions" );
		  return boolean_false_node;
		}
	    }
	}
      if (!r)
	break;
      field1 = DECL_CHAIN (field1);
      field2 = DECL_CHAIN (field2);
    }
  return ret;
}
12341 | |
12342 | /* Fold __builtin_is_corresponding_member call. */ |
12343 | |
12344 | tree |
12345 | fold_builtin_is_corresponding_member (location_t loc, int nargs, |
12346 | tree *args) |
12347 | { |
12348 | /* Unless users call the builtin directly, the following 3 checks should be |
12349 | ensured from std::is_corresponding_member function template. */ |
12350 | if (nargs != 2) |
12351 | { |
12352 | error_at (loc, "%<__builtin_is_corresponding_member%> " |
12353 | "needs two arguments" ); |
12354 | return boolean_false_node; |
12355 | } |
12356 | tree arg1 = args[0]; |
12357 | tree arg2 = args[1]; |
12358 | if (error_operand_p (t: arg1) || error_operand_p (t: arg2)) |
12359 | return boolean_false_node; |
12360 | if (!TYPE_PTRMEM_P (TREE_TYPE (arg1)) |
12361 | || !TYPE_PTRMEM_P (TREE_TYPE (arg2))) |
12362 | { |
12363 | error_at (loc, "%<__builtin_is_corresponding_member%> " |
12364 | "argument is not pointer to member" ); |
12365 | return boolean_false_node; |
12366 | } |
12367 | |
12368 | if (!TYPE_PTRDATAMEM_P (TREE_TYPE (arg1)) |
12369 | || !TYPE_PTRDATAMEM_P (TREE_TYPE (arg2))) |
12370 | return boolean_false_node; |
12371 | |
12372 | tree membertype1 = TREE_TYPE (TREE_TYPE (arg1)); |
12373 | tree basetype1 = TYPE_OFFSET_BASETYPE (TREE_TYPE (arg1)); |
12374 | if (!complete_type_or_else (basetype1, NULL_TREE)) |
12375 | return boolean_false_node; |
12376 | |
12377 | tree membertype2 = TREE_TYPE (TREE_TYPE (arg2)); |
12378 | tree basetype2 = TYPE_OFFSET_BASETYPE (TREE_TYPE (arg2)); |
12379 | if (!complete_type_or_else (basetype2, NULL_TREE)) |
12380 | return boolean_false_node; |
12381 | |
12382 | if (!NON_UNION_CLASS_TYPE_P (basetype1) |
12383 | || !NON_UNION_CLASS_TYPE_P (basetype2) |
12384 | || !std_layout_type_p (basetype1) |
12385 | || !std_layout_type_p (basetype2)) |
12386 | return boolean_false_node; |
12387 | |
12388 | /* If the member types aren't layout compatible, then they |
12389 | can't be corresponding members. */ |
12390 | if (!layout_compatible_type_p (membertype1, membertype2)) |
12391 | return boolean_false_node; |
12392 | |
12393 | if (TREE_CODE (arg1) == PTRMEM_CST) |
12394 | arg1 = cplus_expand_constant (arg1); |
12395 | if (TREE_CODE (arg2) == PTRMEM_CST) |
12396 | arg2 = cplus_expand_constant (arg2); |
12397 | |
12398 | if (null_member_pointer_value_p (arg1) |
12399 | || null_member_pointer_value_p (arg2)) |
12400 | return boolean_false_node; |
12401 | |
12402 | if (TREE_CODE (arg1) == INTEGER_CST |
12403 | && TREE_CODE (arg2) == INTEGER_CST |
12404 | && !tree_int_cst_equal (arg1, arg2)) |
12405 | return boolean_false_node; |
12406 | |
12407 | if (TREE_CODE (arg2) == INTEGER_CST |
12408 | && TREE_CODE (arg1) != INTEGER_CST) |
12409 | { |
12410 | std::swap (a&: arg1, b&: arg2); |
12411 | std::swap (a&: membertype1, b&: membertype2); |
12412 | std::swap (a&: basetype1, b&: basetype2); |
12413 | } |
12414 | |
12415 | tree ret = is_corresponding_member_aggr (loc, basetype1, membertype1, arg1, |
12416 | basetype2, membertype2, arg2); |
12417 | if (TREE_TYPE (ret) == boolean_type_node) |
12418 | return ret; |
12419 | /* If both arg1 and arg2 are INTEGER_CSTs, is_corresponding_member_aggr |
12420 | already returns boolean_{true,false}_node whether those particular |
12421 | members are corresponding members or not. Otherwise, if only |
12422 | one of them is INTEGER_CST (canonicalized to first being INTEGER_CST |
12423 | above), it returns boolean_false_node if it is certainly not a |
12424 | corresponding member and otherwise we need to do a runtime check that |
12425 | those two OFFSET_TYPE offsets are equal. |
12426 | If neither of the operands is INTEGER_CST, is_corresponding_member_aggr |
12427 | returns the largest offset at which the members would be corresponding |
12428 | members, so perform arg1 <= ret && arg1 == arg2 runtime check. */ |
12429 | gcc_assert (TREE_CODE (arg2) != INTEGER_CST); |
12430 | if (TREE_CODE (arg1) == INTEGER_CST) |
12431 | return fold_build2 (EQ_EXPR, boolean_type_node, arg1, |
12432 | fold_convert (TREE_TYPE (arg1), arg2)); |
12433 | ret = fold_build2 (LE_EXPR, boolean_type_node, |
12434 | fold_convert (pointer_sized_int_node, arg1), |
12435 | fold_convert (pointer_sized_int_node, ret)); |
12436 | return fold_build2 (TRUTH_AND_EXPR, boolean_type_node, ret, |
12437 | fold_build2 (EQ_EXPR, boolean_type_node, arg1, |
12438 | fold_convert (TREE_TYPE (arg1), arg2))); |
12439 | } |
12440 | |
12441 | /* Actually evaluates the trait. */ |
12442 | |
static bool
trait_expr_value (cp_trait_kind kind, tree type1, tree type2)
{
  /* TYPE1's tree code is captured before any array stripping below,
     because several traits need to know whether the *original* operand
     was a reference or array type.  */
  enum tree_code type_code1;
  tree t;

  type_code1 = TREE_CODE (type1);

  switch (kind)
    {
    /* The __has_nothrow_* traits delegate to the corresponding
       __has_trivial_* trait and otherwise inspect the class's
       (copy/default) special members for noexcept-ness.  */
    case CPTK_HAS_NOTHROW_ASSIGN:
      type1 = strip_array_types (type: type1);
      return (!CP_TYPE_CONST_P (type1) && type_code1 != REFERENCE_TYPE
	      && (trait_expr_value (kind: CPTK_HAS_TRIVIAL_ASSIGN, type1, type2)
		  || (CLASS_TYPE_P (type1)
		      && classtype_has_nothrow_assign_or_copy_p (type: type1,
								 assign_p: true))));

    case CPTK_HAS_NOTHROW_CONSTRUCTOR:
      type1 = strip_array_types (type: type1);
      return (trait_expr_value (kind: CPTK_HAS_TRIVIAL_CONSTRUCTOR, type1, type2)
	      || (CLASS_TYPE_P (type1)
		  && (t = locate_ctor (type1))
		  && maybe_instantiate_noexcept (t)
		  && TYPE_NOTHROW_P (TREE_TYPE (t))));

    case CPTK_HAS_NOTHROW_COPY:
      type1 = strip_array_types (type: type1);
      return (trait_expr_value (kind: CPTK_HAS_TRIVIAL_COPY, type1, type2)
	      || (CLASS_TYPE_P (type1)
		  && classtype_has_nothrow_assign_or_copy_p (type: type1, assign_p: false)));

    case CPTK_HAS_TRIVIAL_ASSIGN:
      /* ??? The standard seems to be missing the "or array of such a class
	 type" wording for this trait.  */
      type1 = strip_array_types (type: type1);
      return (!CP_TYPE_CONST_P (type1) && type_code1 != REFERENCE_TYPE
	      && (trivial_type_p (type1)
		  || (CLASS_TYPE_P (type1)
		      && TYPE_HAS_TRIVIAL_COPY_ASSIGN (type1))));

    case CPTK_HAS_TRIVIAL_CONSTRUCTOR:
      type1 = strip_array_types (type: type1);
      return (trivial_type_p (type1)
	      || (CLASS_TYPE_P (type1) && TYPE_HAS_TRIVIAL_DFLT (type1)));

    case CPTK_HAS_TRIVIAL_COPY:
      /* ??? The standard seems to be missing the "or array of such a class
	 type" wording for this trait.  */
      type1 = strip_array_types (type: type1);
      return (trivial_type_p (type1) || type_code1 == REFERENCE_TYPE
	      || (CLASS_TYPE_P (type1) && TYPE_HAS_TRIVIAL_COPY_CTOR (type1)));

    case CPTK_HAS_TRIVIAL_DESTRUCTOR:
      type1 = strip_array_types (type: type1);
      return (trivial_type_p (type1) || type_code1 == REFERENCE_TYPE
	      || (CLASS_TYPE_P (type1)
		  && TYPE_HAS_TRIVIAL_DESTRUCTOR (type1)));

    case CPTK_HAS_UNIQUE_OBJ_REPRESENTATIONS:
      return type_has_unique_obj_representations (type1);

    case CPTK_HAS_VIRTUAL_DESTRUCTOR:
      return type_has_virtual_destructor (type1);

    case CPTK_IS_ABSTRACT:
      return ABSTRACT_CLASS_TYPE_P (type1);

    case CPTK_IS_AGGREGATE:
      return CP_AGGREGATE_TYPE_P (type1);

    case CPTK_IS_ARRAY:
      return (type_code1 == ARRAY_TYPE
	      /* We don't want to report T[0] as being an array type.
		 This is for compatibility with an implementation of
		 std::is_array by template argument deduction, because
		 compute_array_index_type_loc rejects a zero-size array
		 in SFINAE context.  */
	      && !(TYPE_SIZE (type1) && integer_zerop (TYPE_SIZE (type1))));

    case CPTK_IS_ASSIGNABLE:
      return is_xible (MODIFY_EXPR, type1, type2);

    case CPTK_IS_BASE_OF:
      /* A class is considered its own base here, as in std::is_base_of.  */
      return (NON_UNION_CLASS_TYPE_P (type1) && NON_UNION_CLASS_TYPE_P (type2)
	      && (same_type_ignoring_top_level_qualifiers_p (type1, type2)
		  || DERIVED_FROM_P (type1, type2)));

    case CPTK_IS_BOUNDED_ARRAY:
      /* An array of unknown bound has no TYPE_DOMAIN.  */
      return type_code1 == ARRAY_TYPE && TYPE_DOMAIN (type1);

    case CPTK_IS_CLASS:
      return NON_UNION_CLASS_TYPE_P (type1);

    case CPTK_IS_CONSTRUCTIBLE:
      return is_xible (INIT_EXPR, type1, type2);

    case CPTK_IS_CONVERTIBLE:
      return is_convertible (type1, type2);

    case CPTK_IS_EMPTY:
      return NON_UNION_CLASS_TYPE_P (type1) && CLASSTYPE_EMPTY_P (type1);

    case CPTK_IS_ENUM:
      return type_code1 == ENUMERAL_TYPE;

    case CPTK_IS_FINAL:
      return CLASS_TYPE_P (type1) && CLASSTYPE_FINAL (type1);

    case CPTK_IS_FUNCTION:
      return type_code1 == FUNCTION_TYPE;

    case CPTK_IS_LAYOUT_COMPATIBLE:
      return layout_compatible_type_p (type1, type2);

    case CPTK_IS_LITERAL_TYPE:
      return literal_type_p (type1);

    case CPTK_IS_MEMBER_FUNCTION_POINTER:
      return TYPE_PTRMEMFUNC_P (type1);

    case CPTK_IS_MEMBER_OBJECT_POINTER:
      return TYPE_PTRDATAMEM_P (type1);

    case CPTK_IS_MEMBER_POINTER:
      return TYPE_PTRMEM_P (type1);

    case CPTK_IS_NOTHROW_ASSIGNABLE:
      return is_nothrow_xible (MODIFY_EXPR, type1, type2);

    case CPTK_IS_NOTHROW_CONSTRUCTIBLE:
      return is_nothrow_xible (INIT_EXPR, type1, type2);

    case CPTK_IS_NOTHROW_CONVERTIBLE:
      return is_nothrow_convertible (type1, type2);

    case CPTK_IS_OBJECT:
      /* Object types are everything but function, reference and void
	 types.  */
      return (type_code1 != FUNCTION_TYPE
	      && type_code1 != REFERENCE_TYPE
	      && type_code1 != VOID_TYPE);

    case CPTK_IS_POINTER_INTERCONVERTIBLE_BASE_OF:
      return pointer_interconvertible_base_of_p (base: type1, derived: type2);

    case CPTK_IS_POD:
      return pod_type_p (type1);

    case CPTK_IS_POLYMORPHIC:
      return CLASS_TYPE_P (type1) && TYPE_POLYMORPHIC_P (type1);

    case CPTK_IS_REFERENCE:
      return type_code1 == REFERENCE_TYPE;

    case CPTK_IS_SAME:
      return same_type_p (type1, type2);

    case CPTK_IS_SCOPED_ENUM:
      return SCOPED_ENUM_P (type1);

    case CPTK_IS_STD_LAYOUT:
      return std_layout_type_p (type1);

    case CPTK_IS_TRIVIAL:
      return trivial_type_p (type1);

    case CPTK_IS_TRIVIALLY_ASSIGNABLE:
      return is_trivially_xible (MODIFY_EXPR, type1, type2);

    case CPTK_IS_TRIVIALLY_CONSTRUCTIBLE:
      return is_trivially_xible (INIT_EXPR, type1, type2);

    case CPTK_IS_TRIVIALLY_COPYABLE:
      return trivially_copyable_p (type1);

    case CPTK_IS_UNION:
      return type_code1 == UNION_TYPE;

    case CPTK_REF_CONSTRUCTS_FROM_TEMPORARY:
      return ref_xes_from_temporary (type1, type2, /*direct_init=*/true);

    case CPTK_REF_CONVERTS_FROM_TEMPORARY:
      return ref_xes_from_temporary (type1, type2, /*direct_init=*/false);

    case CPTK_IS_DEDUCIBLE:
      /* Here TYPE1 is a class or alias template, not a type.  */
      return type_targs_deducible_from (type1, type2);

#define DEFTRAIT_TYPE(CODE, NAME, ARITY) \
    case CPTK_##CODE:
#include "cp-trait.def"
#undef DEFTRAIT_TYPE
      /* Type-yielding traits are handled in finish_trait_type.  */
      break;
    }

  gcc_unreachable ();
}
12639 | |
12640 | /* Returns true if TYPE meets the requirements for the specified KIND, |
12641 | false otherwise. |
12642 | |
12643 | When KIND == 1, TYPE must be an array of unknown bound, |
12644 | or (possibly cv-qualified) void, or a complete type. |
12645 | |
12646 | When KIND == 2, TYPE must be a complete type, or array of complete type, |
12647 | or (possibly cv-qualified) void. |
12648 | |
12649 | When KIND == 3: |
12650 | If TYPE is a non-union class type, it must be complete. |
12651 | |
12652 | When KIND == 4: |
12653 | If TYPE is a class type, it must be complete. */ |
12654 | |
12655 | static bool |
12656 | check_trait_type (tree type, int kind = 1) |
12657 | { |
12658 | if (type == NULL_TREE) |
12659 | return true; |
12660 | |
12661 | if (TREE_CODE (type) == TREE_VEC) |
12662 | { |
12663 | for (tree arg : tree_vec_range (type)) |
12664 | if (!check_trait_type (type: arg, kind)) |
12665 | return false; |
12666 | return true; |
12667 | } |
12668 | |
12669 | if (kind == 1 && TREE_CODE (type) == ARRAY_TYPE && !TYPE_DOMAIN (type)) |
12670 | return true; // Array of unknown bound. Don't care about completeness. |
12671 | |
12672 | if (kind == 3 && !NON_UNION_CLASS_TYPE_P (type)) |
12673 | return true; // Not a non-union class type. Don't care about completeness. |
12674 | |
12675 | if (kind == 4 && TREE_CODE (type) == ARRAY_TYPE) |
12676 | return true; // Not a class type. Don't care about completeness. |
12677 | |
12678 | if (VOID_TYPE_P (type)) |
12679 | return true; |
12680 | |
12681 | type = complete_type (strip_array_types (type)); |
12682 | if (!COMPLETE_TYPE_P (type) |
12683 | && cxx_incomplete_type_diagnostic (NULL_TREE, type, diag_kind: DK_PERMERROR) |
12684 | && !flag_permissive) |
12685 | return false; |
12686 | return true; |
12687 | } |
12688 | |
12689 | /* True iff the conversion (if any) would be a direct reference |
12690 | binding, not requiring complete types. This is LWG2939. */ |
12691 | |
12692 | static bool |
12693 | same_type_ref_bind_p (cp_trait_kind kind, tree type1, tree type2) |
12694 | { |
12695 | tree from, to; |
12696 | switch (kind) |
12697 | { |
12698 | /* These put the target type first. */ |
12699 | case CPTK_IS_CONSTRUCTIBLE: |
12700 | case CPTK_IS_NOTHROW_CONSTRUCTIBLE: |
12701 | case CPTK_IS_TRIVIALLY_CONSTRUCTIBLE: |
12702 | case CPTK_REF_CONSTRUCTS_FROM_TEMPORARY: |
12703 | case CPTK_REF_CONVERTS_FROM_TEMPORARY: |
12704 | to = type1; |
12705 | from = type2; |
12706 | break; |
12707 | |
12708 | /* These put it second. */ |
12709 | case CPTK_IS_CONVERTIBLE: |
12710 | case CPTK_IS_NOTHROW_CONVERTIBLE: |
12711 | to = type2; |
12712 | from = type1; |
12713 | break; |
12714 | |
12715 | default: |
12716 | gcc_unreachable (); |
12717 | } |
12718 | |
12719 | if (TREE_CODE (to) != REFERENCE_TYPE || !from) |
12720 | return false; |
12721 | if (TREE_CODE (from) == TREE_VEC && TREE_VEC_LENGTH (from) == 1) |
12722 | from = TREE_VEC_ELT (from, 0); |
12723 | return (TYPE_P (from) |
12724 | && (same_type_ignoring_top_level_qualifiers_p |
12725 | (non_reference (to), non_reference (from)))); |
12726 | } |
12727 | |
12728 | /* Process a trait expression. */ |
12729 | |
tree
finish_trait_expr (location_t loc, cp_trait_kind kind, tree type1, tree type2)
{
  if (type1 == error_mark_node
      || type2 == error_mark_node)
    return error_mark_node;

  /* In a template, build a TRAIT_EXPR to be evaluated at instantiation
     time, when the operand types are no longer dependent.  */
  if (processing_template_decl)
    {
      tree trait_expr = make_node (TRAIT_EXPR);
      TREE_TYPE (trait_expr) = boolean_type_node;
      TRAIT_EXPR_TYPE1 (trait_expr) = type1;
      TRAIT_EXPR_TYPE2 (trait_expr) = type2;
      TRAIT_EXPR_KIND (trait_expr) = kind;
      TRAIT_EXPR_LOCATION (trait_expr) = loc;
      return trait_expr;
    }

  /* Enforce each trait's type-completeness preconditions before
     evaluating it; the KIND values here match check_trait_type.  */
  switch (kind)
    {
    case CPTK_HAS_NOTHROW_ASSIGN:
    case CPTK_HAS_TRIVIAL_ASSIGN:
    case CPTK_HAS_NOTHROW_CONSTRUCTOR:
    case CPTK_HAS_TRIVIAL_CONSTRUCTOR:
    case CPTK_HAS_NOTHROW_COPY:
    case CPTK_HAS_TRIVIAL_COPY:
    case CPTK_HAS_TRIVIAL_DESTRUCTOR:
    case CPTK_HAS_UNIQUE_OBJ_REPRESENTATIONS:
      if (!check_trait_type (type: type1))
	return error_mark_node;
      break;

    case CPTK_IS_LITERAL_TYPE:
    case CPTK_IS_POD:
    case CPTK_IS_STD_LAYOUT:
    case CPTK_IS_TRIVIAL:
    case CPTK_IS_TRIVIALLY_COPYABLE:
      if (!check_trait_type (type: type1, /* kind = */ 2))
	return error_mark_node;
      break;

    case CPTK_IS_ABSTRACT:
    case CPTK_IS_EMPTY:
    case CPTK_IS_POLYMORPHIC:
    case CPTK_HAS_VIRTUAL_DESTRUCTOR:
      if (!check_trait_type (type: type1, /* kind = */ 3))
	return error_mark_node;
      break;

    /* N.B. std::is_aggregate is kind=2 but we don't need a complete element
       type to know whether an array is an aggregate, so use kind=4 here.  */
    case CPTK_IS_AGGREGATE:
    case CPTK_IS_FINAL:
      if (!check_trait_type (type: type1, /* kind = */ 4))
	return error_mark_node;
      break;

    case CPTK_IS_CONSTRUCTIBLE:
    case CPTK_IS_CONVERTIBLE:
    case CPTK_IS_NOTHROW_CONSTRUCTIBLE:
    case CPTK_IS_NOTHROW_CONVERTIBLE:
    case CPTK_IS_TRIVIALLY_CONSTRUCTIBLE:
    case CPTK_REF_CONSTRUCTS_FROM_TEMPORARY:
    case CPTK_REF_CONVERTS_FROM_TEMPORARY:
      /* Don't check completeness for direct reference binding.  */;
      if (same_type_ref_bind_p (kind, type1, type2))
	break;
      gcc_fallthrough ();

    case CPTK_IS_ASSIGNABLE:
    case CPTK_IS_NOTHROW_ASSIGNABLE:
    case CPTK_IS_TRIVIALLY_ASSIGNABLE:
      if (!check_trait_type (type: type1)
	  || !check_trait_type (type: type2))
	return error_mark_node;
      break;

    case CPTK_IS_BASE_OF:
    case CPTK_IS_POINTER_INTERCONVERTIBLE_BASE_OF:
      /* The derived type must be complete (unless it is the same class),
	 since deriving from an incomplete base can't be checked.  */
      if (NON_UNION_CLASS_TYPE_P (type1) && NON_UNION_CLASS_TYPE_P (type2)
	  && !same_type_ignoring_top_level_qualifiers_p (type1, type2)
	  && !complete_type_or_else (type2, NULL_TREE))
	/* We already issued an error.  */
	return error_mark_node;
      break;

    /* These traits have no completeness requirements at all.  */
    case CPTK_IS_ARRAY:
    case CPTK_IS_BOUNDED_ARRAY:
    case CPTK_IS_CLASS:
    case CPTK_IS_ENUM:
    case CPTK_IS_FUNCTION:
    case CPTK_IS_MEMBER_FUNCTION_POINTER:
    case CPTK_IS_MEMBER_OBJECT_POINTER:
    case CPTK_IS_MEMBER_POINTER:
    case CPTK_IS_OBJECT:
    case CPTK_IS_REFERENCE:
    case CPTK_IS_SAME:
    case CPTK_IS_SCOPED_ENUM:
    case CPTK_IS_UNION:
      break;

    case CPTK_IS_LAYOUT_COMPATIBLE:
      /* Both operands must be complete unless they are arrays of unknown
	 bound or (cv) void.  */
      if (!array_of_unknown_bound_p (type1)
	  && TREE_CODE (type1) != VOID_TYPE
	  && !complete_type_or_else (type1, NULL_TREE))
	/* We already issued an error.  */
	return error_mark_node;
      if (!array_of_unknown_bound_p (type2)
	  && TREE_CODE (type2) != VOID_TYPE
	  && !complete_type_or_else (type2, NULL_TREE))
	/* We already issued an error.  */
	return error_mark_node;
      break;

    case CPTK_IS_DEDUCIBLE:
      /* Here TYPE1 is a template, not a type.  */
      if (!DECL_TYPE_TEMPLATE_P (type1))
	{
	  error ("%qD is not a class or alias template", type1);
	  return error_mark_node;
	}
      break;

#define DEFTRAIT_TYPE(CODE, NAME, ARITY) \
    case CPTK_##CODE:
#include "cp-trait.def"
#undef DEFTRAIT_TYPE
      /* Type-yielding traits are handled in finish_trait_type.  */
      gcc_unreachable ();
    }

  tree val = (trait_expr_value (kind, type1, type2)
	      ? boolean_true_node : boolean_false_node);
  return maybe_wrap_with_location (val, loc);
}
12864 | |
12865 | /* Process a trait type. */ |
12866 | |
tree
finish_trait_type (cp_trait_kind kind, tree type1, tree type2,
		   tsubst_flags_t complain)
{
  if (type1 == error_mark_node
      || type2 == error_mark_node)
    return error_mark_node;

  /* In a template, build a TRAIT_TYPE to be resolved at instantiation
     time, when the operand types are no longer dependent.  */
  if (processing_template_decl)
    {
      tree type = cxx_make_type (TRAIT_TYPE);
      TRAIT_TYPE_TYPE1 (type) = type1;
      TRAIT_TYPE_TYPE2 (type) = type2;
      TRAIT_TYPE_KIND_RAW (type) = build_int_cstu (integer_type_node, kind);
      /* These traits are intended to be used in the definition of the ::type
	 member of the corresponding standard library type trait and aren't
	 mangleable (and thus won't appear directly in template signatures),
	 so structural equality should suffice.  */
      SET_TYPE_STRUCTURAL_EQUALITY (type);
      return type;
    }

  switch (kind)
    {
    case CPTK_REMOVE_CV:
      return cv_unqualified (type1);

    case CPTK_REMOVE_CVREF:
      /* Strip a reference first, then any cv-qualifiers.  */
      if (TYPE_REF_P (type1))
	type1 = TREE_TYPE (type1);
      return cv_unqualified (type1);

    case CPTK_REMOVE_POINTER:
      if (TYPE_PTR_P (type1))
	type1 = TREE_TYPE (type1);
      return type1;

    case CPTK_REMOVE_REFERENCE:
      if (TYPE_REF_P (type1))
	type1 = TREE_TYPE (type1);
      return type1;

    case CPTK_TYPE_PACK_ELEMENT:
      /* TYPE1 is the index, TYPE2 the pack of candidate types.  */
      return finish_type_pack_element (idx: type1, types: type2, complain);

    case CPTK_UNDERLYING_TYPE:
      return finish_underlying_type (type: type1);

#define DEFTRAIT_EXPR(CODE, NAME, ARITY) \
    case CPTK_##CODE:
#include "cp-trait.def"
#undef DEFTRAIT_EXPR
      /* Expression-yielding traits are handled in finish_trait_expr.  */
    case CPTK_BASES:
    case CPTK_DIRECT_BASES:
      /* BASES and DIRECT_BASES are handled in finish_bases.  */
      break;
    }

  gcc_unreachable ();
}
12928 | |
12929 | /* Do-nothing variants of functions to handle pragma FLOAT_CONST_DECIMAL64, |
12930 | which is ignored for C++. */ |
12931 | |
void
set_float_const_decimal64 (void)
{
  /* Intentionally empty: this pragma is ignored for C++.  */
}
12936 | |
void
clear_float_const_decimal64 (void)
{
  /* Intentionally empty: this pragma is ignored for C++.  */
}
12941 | |
/* The FLOAT_CONST_DECIMAL64 pragma is ignored for C++, so unsuffixed
   floating constants are never decimal64; always report false.  */

bool
float_const_decimal64_p (void)
{
  /* Use the boolean literal rather than 0 to match the return type.  */
  return false;
}
12947 | |
12948 | |
12949 | /* Return true if T designates the implied `this' parameter. */ |
12950 | |
bool
is_this_parameter (tree t)
{
  /* Anything not a declaration named `this' is immediately out.  */
  if (!DECL_P (t) || DECL_NAME (t) != this_identifier)
    return false;
  /* Sanity-check the forms `this' may take: the actual PARM_DECL, a
     VAR_DECL with a value expression (presumably a captured `this' in a
     lambda -- confirm against the capture machinery), or, when an
     external binding oracle is in use, a plain VAR_DECL.  */
  gcc_assert (TREE_CODE (t) == PARM_DECL
	      || (VAR_P (t) && DECL_HAS_VALUE_EXPR_P (t))
	      || (cp_binding_oracle && VAR_P (t)));
  return true;
}
12961 | |
12962 | /* As above, or a C++23 explicit object parameter. */ |
12963 | |
12964 | bool |
12965 | is_object_parameter (tree t) |
12966 | { |
12967 | if (is_this_parameter (t)) |
12968 | return true; |
12969 | if (TREE_CODE (t) != PARM_DECL) |
12970 | return false; |
12971 | tree ctx = DECL_CONTEXT (t); |
12972 | return (ctx && DECL_XOBJ_MEMBER_FUNCTION_P (ctx) |
12973 | && t == DECL_ARGUMENTS (ctx)); |
12974 | } |
12975 | |
12976 | /* Insert the deduced return type for an auto function. */ |
12977 | |
void
apply_deduced_return_type (tree fco, tree return_type)
{
  tree result;

  if (return_type == error_mark_node)
    return;

  /* A conversion operator's name encodes its return type, so rebuild
     the name to match the deduced type.  */
  if (DECL_CONV_FN_P (fco))
    DECL_NAME (fco) = make_conv_op_name (return_type);

  TREE_TYPE (fco) = change_return_type (return_type, TREE_TYPE (fco));

  maybe_update_postconditions (fco);

  /* Apply the type to the result object.  */

  result = DECL_RESULT (fco);
  if (result == NULL_TREE)
    return;
  if (TREE_TYPE (result) == return_type)
    return;

  /* A non-void deduced type must be complete before we can lay out the
     result object.  */
  if (!processing_template_decl && !VOID_TYPE_P (return_type)
      && !complete_type_or_else (return_type, NULL_TREE))
    return;

  /* We already have a DECL_RESULT from start_preparsed_function.
     Now we need to redo the work it and allocate_struct_function
     did to reflect the new type.  */
  result = build_decl (DECL_SOURCE_LOCATION (result), RESULT_DECL, NULL_TREE,
		       TYPE_MAIN_VARIANT (return_type));
  DECL_ARTIFICIAL (result) = 1;
  DECL_IGNORED_P (result) = 1;
  cp_apply_type_quals_to_decl (cp_type_quals (return_type),
			       result);
  DECL_RESULT (fco) = result;

  /* Update the struct-return bookkeeping in the function structure,
     if it has already been allocated.  */
  if (!processing_template_decl)
    if (function *fun = DECL_STRUCT_FUNCTION (fco))
      {
	bool aggr = aggregate_value_p (result, fco);
#ifdef PCC_STATIC_STRUCT_RETURN
	fun->returns_pcc_struct = aggr;
#endif
	fun->returns_struct = aggr;
      }
}
13026 | |
/* Build a unary fold expression of EXPR over OP.  DIR selects the
   direction: UNARY_RIGHT_FOLD_EXPR for a right fold,
   UNARY_LEFT_FOLD_EXPR for a left fold.  */
13029 | |
static tree
finish_unary_fold_expr (location_t loc, tree expr, int op, tree_code dir)
{
  /* Build a pack expansion (assuming expr has pack type).  */
  if (!uses_parameter_packs (expr))
    {
      error_at (location_of (expr), "operand of fold expression has no "
		"unexpanded parameter packs" );
      return error_mark_node;
    }
  tree pack = make_pack_expansion (expr);

  /* Build the fold expression.  A negative OP encodes the compound
     (op=) form of the operator; store its absolute value and record
     the compound-ness in FOLD_EXPR_MODIFY_P.  */
  tree code = build_int_cstu (integer_type_node, abs (x: op));
  tree fold = build_min_nt_loc (loc, dir, code, pack);
  FOLD_EXPR_MODIFY_P (fold) = (op < 0);
  /* The fold has a dependent operator type until instantiation.  */
  TREE_TYPE (fold) = build_dependent_operator_type (NULL_TREE,
						    FOLD_EXPR_OP (fold),
						    FOLD_EXPR_MODIFY_P (fold));
  return fold;
}
13051 | |
/* Build a left unary fold expression (... op EXPR) of EXPR over OP.  */

tree
finish_left_unary_fold_expr (location_t loc, tree expr, int op)
{
  return finish_unary_fold_expr (loc, expr, op, dir: UNARY_LEFT_FOLD_EXPR);
}
13057 | |
/* Build a right unary fold expression (EXPR op ...) of EXPR over OP.  */

tree
finish_right_unary_fold_expr (location_t loc, tree expr, int op)
{
  return finish_unary_fold_expr (loc, expr, op, dir: UNARY_RIGHT_FOLD_EXPR);
}
13063 | |
/* Build a binary fold expression with pack expansion PACK, initializer
   INIT, operator OP and direction DIR (BINARY_LEFT_FOLD_EXPR or
   BINARY_RIGHT_FOLD_EXPR).  The public overload below determines the
   direction from whichever operand has an unexpanded parameter pack.  */
13067 | |
static tree
finish_binary_fold_expr (location_t loc, tree pack, tree init,
			 int op, tree_code dir)
{
  pack = make_pack_expansion (pack);
  /* A negative OP encodes the compound (op=) form of the operator;
     store its absolute value and record the compound-ness in
     FOLD_EXPR_MODIFY_P.  */
  tree code = build_int_cstu (integer_type_node, abs (x: op));
  tree fold = build_min_nt_loc (loc, dir, code, pack, init);
  FOLD_EXPR_MODIFY_P (fold) = (op < 0);
  /* The fold has a dependent operator type until instantiation.  */
  TREE_TYPE (fold) = build_dependent_operator_type (NULL_TREE,
						    FOLD_EXPR_OP (fold),
						    FOLD_EXPR_MODIFY_P (fold));
  return fold;
}
13081 | |
13082 | tree |
13083 | finish_binary_fold_expr (location_t loc, tree expr1, tree expr2, int op) |
13084 | { |
13085 | // Determine which expr has an unexpanded parameter pack and |
13086 | // set the pack and initial term. |
13087 | bool pack1 = uses_parameter_packs (expr1); |
13088 | bool pack2 = uses_parameter_packs (expr2); |
13089 | if (pack1 && !pack2) |
13090 | return finish_binary_fold_expr (loc, pack: expr1, init: expr2, op, dir: BINARY_RIGHT_FOLD_EXPR); |
13091 | else if (pack2 && !pack1) |
13092 | return finish_binary_fold_expr (loc, pack: expr2, init: expr1, op, dir: BINARY_LEFT_FOLD_EXPR); |
13093 | else |
13094 | { |
13095 | if (pack1) |
13096 | error ("both arguments in binary fold have unexpanded parameter packs" ); |
13097 | else |
13098 | error ("no unexpanded parameter packs in binary fold" ); |
13099 | } |
13100 | return error_mark_node; |
13101 | } |
13102 | |
13103 | /* Finish __builtin_launder (arg). */ |
13104 | |
tree
finish_builtin_launder (location_t loc, tree arg, tsubst_flags_t complain)
{
  /* Keep the undecayed operand around for the template case below.  */
  tree orig_arg = arg;
  if (!type_dependent_expression_p (arg))
    arg = decay_conversion (arg, complain);
  if (error_operand_p (t: arg))
    return error_mark_node;
  /* __builtin_launder only accepts pointers; a dependent operand is
     checked again at instantiation time.  */
  if (!type_dependent_expression_p (arg)
      && !TYPE_PTR_P (TREE_TYPE (arg)))
    {
      error_at (loc, "non-pointer argument to %<__builtin_launder%>");
      return error_mark_node;
    }
  /* In a template keep the original operand so it can be decayed after
     substitution.  */
  if (processing_template_decl)
    arg = orig_arg;
  return build_call_expr_internal_loc (loc, IFN_LAUNDER,
				       TREE_TYPE (arg), 1, arg);
}
13124 | |
13125 | /* Finish __builtin_convertvector (arg, type). */ |
13126 | |
tree
cp_build_vec_convert (tree arg, location_t loc, tree type,
		      tsubst_flags_t complain)
{
  if (error_operand_p (t: type))
    return error_mark_node;
  if (error_operand_p (t: arg))
    return error_mark_node;

  /* If nothing is dependent, let the C family code validate the
     conversion (and diagnose if COMPLAIN allows errors).  */
  tree ret = NULL_TREE;
  if (!type_dependent_expression_p (arg) && !dependent_type_p (type))
    ret = c_build_vec_convert (cp_expr_loc_or_input_loc (t: arg),
			       decay_conversion (arg, complain),
			       loc, type, (complain & tf_error) != 0);

  if (!processing_template_decl)
    return ret;

  /* In a template, build the internal call with the original operand so
     it can be re-checked at instantiation time.  */
  return build_call_expr_internal_loc (loc, IFN_VEC_CONVERT, type, 1, arg);
}
13147 | |
13148 | /* Finish __builtin_bit_cast (type, arg). */ |
13149 | |
tree
cp_build_bit_cast (location_t loc, tree type, tree arg,
		   tsubst_flags_t complain)
{
  if (error_operand_p (t: type))
    return error_mark_node;
  /* Validate the destination type now unless it is dependent, in which
     case the checks are repeated at instantiation time.  */
  if (!dependent_type_p (type))
    {
      if (!complete_type_or_maybe_complain (type, NULL_TREE, complain))
	return error_mark_node;
      if (TREE_CODE (type) == ARRAY_TYPE)
	{
	  /* std::bit_cast for destination ARRAY_TYPE is not possible,
	     as functions may not return an array, so don't bother trying
	     to support this (and then deal with VLAs etc.).  */
	  error_at (loc, "%<__builtin_bit_cast%> destination type %qT "
			 "is an array type", type);
	  return error_mark_node;
	}
      if (!trivially_copyable_p (type))
	{
	  error_at (loc, "%<__builtin_bit_cast%> destination type %qT "
			 "is not trivially copyable", type);
	  return error_mark_node;
	}
    }

  if (error_operand_p (t: arg))
    return error_mark_node;

  /* Validate the source operand, again deferring if it is dependent.  */
  if (!type_dependent_expression_p (arg))
    {
      if (TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE)
	{
	  /* Don't perform array-to-pointer conversion.  */
	  arg = mark_rvalue_use (arg, loc, reject_builtin: true);
	  if (!complete_type_or_maybe_complain (TREE_TYPE (arg), arg, complain))
	    return error_mark_node;
	}
      else
	arg = decay_conversion (arg, complain);

      if (error_operand_p (t: arg))
	return error_mark_node;

      if (!trivially_copyable_p (TREE_TYPE (arg)))
	{
	  error_at (cp_expr_loc_or_loc (t: arg, or_loc: loc),
		    "%<__builtin_bit_cast%> source type %qT "
		    "is not trivially copyable", TREE_TYPE (arg));
	  return error_mark_node;
	}
      /* Source and destination must have exactly the same size.  */
      if (!dependent_type_p (type)
	  && !cp_tree_equal (TYPE_SIZE_UNIT (type),
			     TYPE_SIZE_UNIT (TREE_TYPE (arg))))
	{
	  error_at (loc, "%<__builtin_bit_cast%> source size %qE "
			 "not equal to destination type size %qE",
		    TYPE_SIZE_UNIT (TREE_TYPE (arg)),
		    TYPE_SIZE_UNIT (type));
	  return error_mark_node;
	}
    }

  tree ret = build_min (BIT_CAST_EXPR, type, arg);
  SET_EXPR_LOCATION (ret, loc);

  /* Wrap class-typed results in a TARGET_EXPR so the result object has
     a definite location.  */
  if (!processing_template_decl && CLASS_TYPE_P (type))
    ret = get_target_expr (ret, complain);

  return ret;
}
13222 | |
13223 | /* Diagnose invalid #pragma GCC unroll argument and adjust |
13224 | it if needed. */ |
13225 | |
tree
cp_check_pragma_unroll (location_t loc, tree unroll)
{
  HOST_WIDE_INT lunroll = 0;
  /* A type-dependent argument can't be checked yet; leave it alone.  */
  if (type_dependent_expression_p (unroll))
    ;
  /* Diagnose a non-integral argument, or a non-dependent constant that
     is negative or too large.  Note the condition assigns LUNROLL as a
     side effect for the range check.  */
  else if (!INTEGRAL_TYPE_P (TREE_TYPE (unroll))
	   || (!value_dependent_expression_p (unroll)
	       && (!tree_fits_shwi_p (unroll)
		   || (lunroll = tree_to_shwi (unroll)) < 0
		   || lunroll >= USHRT_MAX)))
    {
      error_at (loc, "%<#pragma GCC unroll%> requires an"
		" assignment-expression that evaluates to a non-negative"
		" integral constant less than %u", USHRT_MAX);
      /* Recover by acting as if no unrolling was requested.  */
      unroll = integer_one_node;
    }
  else if (TREE_CODE (unroll) == INTEGER_CST)
    {
      unroll = fold_convert (integer_type_node, unroll);
      /* An unroll factor of zero means the same as one.  */
      if (integer_zerop (unroll))
	unroll = integer_one_node;
    }
  return unroll;
}
13251 | |
13252 | #include "gt-cp-semantics.h" |
13253 | |