1 | /* Build expressions with type checking for C compiler. |
2 | Copyright (C) 1987-2024 Free Software Foundation, Inc. |
3 | |
4 | This file is part of GCC. |
5 | |
6 | GCC is free software; you can redistribute it and/or modify it under |
7 | the terms of the GNU General Public License as published by the Free |
8 | Software Foundation; either version 3, or (at your option) any later |
9 | version. |
10 | |
11 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
12 | WARRANTY; without even the implied warranty of MERCHANTABILITY or |
13 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
14 | for more details. |
15 | |
16 | You should have received a copy of the GNU General Public License |
17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ |
19 | |
20 | |
21 | /* This file is part of the C front end. |
22 | It contains routines to build C expressions given their operands, |
23 | including computing the types of the result, C-specific error checks, |
24 | and some optimization. */ |
25 | |
26 | #include "config.h" |
27 | #include "system.h" |
28 | #include "coretypes.h" |
29 | #include "memmodel.h" |
30 | #include "target.h" |
31 | #include "function.h" |
32 | #include "bitmap.h" |
33 | #include "c-tree.h" |
34 | #include "gimple-expr.h" |
35 | #include "predict.h" |
36 | #include "stor-layout.h" |
37 | #include "trans-mem.h" |
38 | #include "varasm.h" |
39 | #include "stmt.h" |
40 | #include "langhooks.h" |
41 | #include "c-lang.h" |
42 | #include "intl.h" |
43 | #include "tree-iterator.h" |
44 | #include "gimplify.h" |
45 | #include "tree-inline.h" |
46 | #include "omp-general.h" |
47 | #include "c-family/c-objc.h" |
48 | #include "c-family/c-ubsan.h" |
49 | #include "gomp-constants.h" |
50 | #include "spellcheck-tree.h" |
51 | #include "gcc-rich-location.h" |
52 | #include "stringpool.h" |
53 | #include "attribs.h" |
54 | #include "asan.h" |
55 | #include "realmpfr.h" |
56 | |
57 | /* Possible cases of implicit conversions. Used to select diagnostic messages |
58 | and control folding initializers in convert_for_assignment. */ |
59 | enum impl_conv { |
60 | ic_argpass, |
61 | ic_assign, |
62 | ic_init, |
63 | ic_init_const, |
64 | ic_return |
65 | }; |
66 | |
67 | /* The level of nesting inside "__alignof__". */ |
68 | int in_alignof; |
69 | |
70 | /* The level of nesting inside "sizeof". */ |
71 | int in_sizeof; |
72 | |
73 | /* The level of nesting inside "typeof". */ |
74 | int in_typeof; |
75 | |
76 | /* True when parsing OpenMP loop expressions. */ |
77 | bool c_in_omp_for; |
78 | |
79 | /* True when parsing OpenMP map clause. */ |
80 | bool c_omp_array_section_p; |
81 | |
82 | /* The argument of last parsed sizeof expression, only to be tested |
83 | if expr.original_code == SIZEOF_EXPR. */ |
84 | tree c_last_sizeof_arg; |
85 | location_t c_last_sizeof_loc; |
86 | |
87 | /* Nonzero if we might need to print a "missing braces around |
88 | initializer" message within this initializer. */ |
89 | static int found_missing_braces; |
90 | |
91 | static bool require_constant_value; |
92 | static bool require_constant_elements; |
93 | static bool require_constexpr_value; |
94 | |
95 | static tree qualify_type (tree, tree); |
96 | struct comptypes_data; |
97 | static bool tagged_types_tu_compatible_p (const_tree, const_tree, |
98 | struct comptypes_data *); |
99 | static bool comp_target_types (location_t, tree, tree); |
100 | static bool function_types_compatible_p (const_tree, const_tree, |
101 | struct comptypes_data *); |
102 | static bool type_lists_compatible_p (const_tree, const_tree, |
103 | struct comptypes_data *); |
104 | static tree lookup_field (tree, tree); |
105 | static int convert_arguments (location_t, vec<location_t>, tree, |
106 | vec<tree, va_gc> *, vec<tree, va_gc> *, tree, |
107 | tree); |
108 | static tree pointer_diff (location_t, tree, tree, tree *); |
109 | static tree convert_for_assignment (location_t, location_t, tree, tree, tree, |
110 | enum impl_conv, bool, tree, tree, int, |
111 | int = 0); |
112 | static tree valid_compound_expr_initializer (tree, tree); |
113 | static void push_string (const char *); |
114 | static void push_member_name (tree); |
115 | static int spelling_length (void); |
116 | static char *print_spelling (char *); |
117 | static void warning_init (location_t, int, const char *); |
118 | static tree digest_init (location_t, tree, tree, tree, bool, bool, bool, bool, |
119 | bool, bool); |
120 | static void output_init_element (location_t, tree, tree, bool, tree, tree, bool, |
121 | bool, struct obstack *); |
122 | static void output_pending_init_elements (int, struct obstack *); |
123 | static bool set_designator (location_t, bool, struct obstack *); |
124 | static void push_range_stack (tree, struct obstack *); |
125 | static void add_pending_init (location_t, tree, tree, tree, bool, |
126 | struct obstack *); |
127 | static void set_nonincremental_init (struct obstack *); |
128 | static void set_nonincremental_init_from_string (tree, struct obstack *); |
129 | static tree find_init_member (tree, struct obstack *); |
130 | static void readonly_warning (tree, enum lvalue_use); |
131 | static int lvalue_or_else (location_t, const_tree, enum lvalue_use); |
132 | static void record_maybe_used_decl (tree); |
133 | static bool comptypes_internal (const_tree, const_tree, |
134 | struct comptypes_data *data); |
135 | |
136 | /* Return true if EXP is a null pointer constant, false otherwise. */ |
137 | |
138 | bool |
139 | null_pointer_constant_p (const_tree expr) |
140 | { |
141 | /* This should really operate on c_expr structures, but they aren't |
142 | yet available everywhere required. */ |
143 | tree type = TREE_TYPE (expr); |
144 | |
145 | /* An integer constant expression with the value 0, such an expression |
146 | cast to type void*, or the predefined constant nullptr, are a null |
147 | pointer constant. */ |
148 | if (expr == nullptr_node) |
149 | return true; |
150 | |
151 | return (TREE_CODE (expr) == INTEGER_CST |
152 | && !TREE_OVERFLOW (expr) |
153 | && integer_zerop (expr) |
154 | && (INTEGRAL_TYPE_P (type) |
155 | || (TREE_CODE (type) == POINTER_TYPE |
156 | && VOID_TYPE_P (TREE_TYPE (type)) |
157 | && TYPE_QUALS (TREE_TYPE (type)) == TYPE_UNQUALIFIED))); |
158 | } |
159 | |
160 | /* EXPR may appear in an unevaluated part of an integer constant |
161 | expression, but not in an evaluated part. Wrap it in a |
162 | C_MAYBE_CONST_EXPR, or mark it with TREE_OVERFLOW if it is just an |
163 | INTEGER_CST and we cannot create a C_MAYBE_CONST_EXPR. */ |
164 | |
165 | static tree |
166 | note_integer_operands (tree expr) |
167 | { |
168 | tree ret; |
169 | if (TREE_CODE (expr) == INTEGER_CST && in_late_binary_op) |
170 | { |
171 | ret = copy_node (expr); |
172 | TREE_OVERFLOW (ret) = 1; |
173 | } |
174 | else |
175 | { |
176 | ret = build2 (C_MAYBE_CONST_EXPR, TREE_TYPE (expr), NULL_TREE, expr); |
177 | C_MAYBE_CONST_EXPR_INT_OPERANDS (ret) = 1; |
178 | } |
179 | return ret; |
180 | } |
181 | |
182 | /* Having checked whether EXPR may appear in an unevaluated part of an |
183 | integer constant expression and found that it may, remove any |
184 | C_MAYBE_CONST_EXPR noting this fact and return the resulting |
185 | expression. */ |
186 | |
187 | static inline tree |
188 | remove_c_maybe_const_expr (tree expr) |
189 | { |
190 | if (TREE_CODE (expr) == C_MAYBE_CONST_EXPR) |
191 | return C_MAYBE_CONST_EXPR_EXPR (expr); |
192 | else |
193 | return expr; |
194 | } |
195 | |
/* Cache node recording that the pair of tagged types (T1, T2) has
   already been visited; entries are chained through NEXT.
   NOTE(review): no uses of this struct are visible in this chunk --
   confirm it is still referenced elsewhere in the file.  */

struct tagged_tu_seen_cache {
  const struct tagged_tu_seen_cache * next;
  const_tree t1;
  const_tree t2;
};
203 | |
204 | /* Do `exp = require_complete_type (loc, exp);' to make sure exp |
205 | does not have an incomplete type. (That includes void types.) |
206 | LOC is the location of the use. */ |
207 | |
208 | tree |
209 | require_complete_type (location_t loc, tree value) |
210 | { |
211 | tree type = TREE_TYPE (value); |
212 | |
213 | if (error_operand_p (t: value)) |
214 | return error_mark_node; |
215 | |
216 | /* First, detect a valid value with a complete type. */ |
217 | if (COMPLETE_TYPE_P (type)) |
218 | return value; |
219 | |
220 | c_incomplete_type_error (loc, value, type); |
221 | return error_mark_node; |
222 | } |
223 | |
/* Print an error message for invalid use of an incomplete type.
   VALUE is the expression that was used (or 0 if that isn't known)
   and TYPE is the type that was invalid.  LOC is the location for
   the error.  */

void
c_incomplete_type_error (location_t loc, const_tree value, const_tree type)
{
  /* Avoid duplicate error message.  */
  if (TREE_CODE (type) == ERROR_MARK)
    return;

  /* If the use names a specific variable or parameter, mention it.  */
  if (value != NULL_TREE && (VAR_P (value) || TREE_CODE (value) == PARM_DECL))
    error_at (loc, "%qD has an incomplete type %qT", value, type);
  else
    {
    retry:
      /* We must print an error message.  Be clever about what it says.  */

      switch (TREE_CODE (type))
	{
	case RECORD_TYPE:
	case UNION_TYPE:
	case ENUMERAL_TYPE:
	  /* Fall through to the tag/typedef diagnostics below.  */
	  break;

	case VOID_TYPE:
	  error_at (loc, "invalid use of void expression");
	  return;

	case ARRAY_TYPE:
	  if (TYPE_DOMAIN (type))
	    {
	      /* A domain without a max value is a flexible array
		 member; otherwise the array itself is complete and
		 the problem lies in its element type, so retry on
		 that.  */
	      if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL)
		{
		  error_at (loc, "invalid use of flexible array member");
		  return;
		}
	      type = TREE_TYPE (type);
	      goto retry;
	    }
	  error_at (loc, "invalid use of array with unspecified bounds");
	  return;

	default:
	  /* Only the codes above can be incomplete here.  */
	  gcc_unreachable ();
	}

      /* An IDENTIFIER_NODE name means the tag was never defined;
	 a TYPE_DECL name means an incomplete typedef.  */
      if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE)
	error_at (loc, "invalid use of undefined type %qT", type);
      else
	/* If this type has a typedef-name, the TYPE_NAME is a TYPE_DECL.  */
	error_at (loc, "invalid use of incomplete typedef %qT", type);
    }
}
279 | |
280 | /* Given a type, apply default promotions wrt unnamed function |
281 | arguments and return the new type. */ |
282 | |
283 | tree |
284 | c_type_promotes_to (tree type) |
285 | { |
286 | tree ret = NULL_TREE; |
287 | |
288 | if (TYPE_MAIN_VARIANT (type) == float_type_node) |
289 | ret = double_type_node; |
290 | else if (c_promoting_integer_type_p (type)) |
291 | { |
292 | /* Preserve unsignedness if not really getting any wider. */ |
293 | if (TYPE_UNSIGNED (type) |
294 | && (TYPE_PRECISION (type) == TYPE_PRECISION (integer_type_node))) |
295 | ret = unsigned_type_node; |
296 | else |
297 | ret = integer_type_node; |
298 | } |
299 | |
300 | if (ret != NULL_TREE) |
301 | return (TYPE_ATOMIC (type) |
302 | ? c_build_qualified_type (ret, TYPE_QUAL_ATOMIC) |
303 | : ret); |
304 | |
305 | return type; |
306 | } |
307 | |
308 | /* Return true if between two named address spaces, whether there is a superset |
309 | named address space that encompasses both address spaces. If there is a |
310 | superset, return which address space is the superset. */ |
311 | |
312 | static bool |
313 | addr_space_superset (addr_space_t as1, addr_space_t as2, addr_space_t *common) |
314 | { |
315 | if (as1 == as2) |
316 | { |
317 | *common = as1; |
318 | return true; |
319 | } |
320 | else if (targetm.addr_space.subset_p (as1, as2)) |
321 | { |
322 | *common = as2; |
323 | return true; |
324 | } |
325 | else if (targetm.addr_space.subset_p (as2, as1)) |
326 | { |
327 | *common = as1; |
328 | return true; |
329 | } |
330 | else |
331 | return false; |
332 | } |
333 | |
334 | /* Return a variant of TYPE which has all the type qualifiers of LIKE |
335 | as well as those of TYPE. */ |
336 | |
337 | static tree |
338 | qualify_type (tree type, tree like) |
339 | { |
340 | addr_space_t as_type = TYPE_ADDR_SPACE (type); |
341 | addr_space_t as_like = TYPE_ADDR_SPACE (like); |
342 | addr_space_t as_common; |
343 | |
344 | /* If the two named address spaces are different, determine the common |
345 | superset address space. If there isn't one, raise an error. */ |
346 | if (!addr_space_superset (as1: as_type, as2: as_like, common: &as_common)) |
347 | { |
348 | as_common = as_type; |
349 | error ("%qT and %qT are in disjoint named address spaces" , |
350 | type, like); |
351 | } |
352 | |
353 | return c_build_qualified_type (type, |
354 | TYPE_QUALS_NO_ADDR_SPACE (type) |
355 | | TYPE_QUALS_NO_ADDR_SPACE_NO_ATOMIC (like) |
356 | | ENCODE_QUAL_ADDR_SPACE (as_common)); |
357 | } |
358 | |
359 | |
360 | /* If NTYPE is a type of a non-variadic function with a prototype |
361 | and OTYPE is a type of a function without a prototype and ATTRS |
362 | contains attribute format, diagnosess and removes it from ATTRS. |
363 | Returns the result of build_type_attribute_variant of NTYPE and |
364 | the (possibly) modified ATTRS. */ |
365 | |
366 | static tree |
367 | build_functype_attribute_variant (tree ntype, tree otype, tree attrs) |
368 | { |
369 | if (!prototype_p (otype) |
370 | && prototype_p (ntype) |
371 | && lookup_attribute (attr_name: "format" , list: attrs)) |
372 | { |
373 | warning_at (input_location, OPT_Wattributes, |
374 | "%qs attribute cannot be applied to a function that " |
375 | "does not take variable arguments" , "format" ); |
376 | attrs = remove_attribute ("format" , attrs); |
377 | } |
378 | return build_type_attribute_variant (ntype, attrs); |
379 | |
380 | } |
/* Return the composite type of two compatible types.

   We assume that comptypes has already been done and returned
   nonzero; if that isn't so, this may crash.  In particular, we
   assume that qualifiers match.  */

/* Linked stack of (T1, T2) -> COMPOSITE entries for struct/union
   composites currently under construction; used by
   composite_type_internal to terminate recursion on mutually
   referential tagged types.  */
struct composite_cache {
  tree t1;
  tree t2;
  tree composite;
  struct composite_cache* next;
};
393 | |
394 | tree |
395 | composite_type_internal (tree t1, tree t2, struct composite_cache* cache) |
396 | { |
397 | enum tree_code code1; |
398 | enum tree_code code2; |
399 | tree attributes; |
400 | |
401 | /* Save time if the two types are the same. */ |
402 | |
403 | if (t1 == t2) return t1; |
404 | |
405 | /* If one type is nonsense, use the other. */ |
406 | if (t1 == error_mark_node) |
407 | return t2; |
408 | if (t2 == error_mark_node) |
409 | return t1; |
410 | |
411 | code1 = TREE_CODE (t1); |
412 | code2 = TREE_CODE (t2); |
413 | |
414 | /* Merge the attributes. */ |
415 | attributes = targetm.merge_type_attributes (t1, t2); |
416 | |
417 | /* If one is an enumerated type and the other is the compatible |
418 | integer type, the composite type might be either of the two |
419 | (DR#013 question 3). For consistency, use the enumerated type as |
420 | the composite type. */ |
421 | |
422 | if (code1 == ENUMERAL_TYPE |
423 | && (code2 == INTEGER_TYPE |
424 | || code2 == BOOLEAN_TYPE)) |
425 | return t1; |
426 | if (code2 == ENUMERAL_TYPE |
427 | && (code1 == INTEGER_TYPE |
428 | || code1 == BOOLEAN_TYPE)) |
429 | return t2; |
430 | |
431 | gcc_assert (code1 == code2); |
432 | |
433 | switch (code1) |
434 | { |
435 | case POINTER_TYPE: |
436 | /* For two pointers, do this recursively on the target type. */ |
437 | { |
438 | tree pointed_to_1 = TREE_TYPE (t1); |
439 | tree pointed_to_2 = TREE_TYPE (t2); |
440 | tree target = composite_type_internal (t1: pointed_to_1, |
441 | t2: pointed_to_2, cache); |
442 | t1 = build_pointer_type_for_mode (target, TYPE_MODE (t1), false); |
443 | t1 = build_type_attribute_variant (t1, attributes); |
444 | return qualify_type (type: t1, like: t2); |
445 | } |
446 | |
447 | case ARRAY_TYPE: |
448 | { |
449 | tree elt = composite_type_internal (TREE_TYPE (t1), TREE_TYPE (t2), |
450 | cache); |
451 | int quals; |
452 | tree unqual_elt; |
453 | tree d1 = TYPE_DOMAIN (t1); |
454 | tree d2 = TYPE_DOMAIN (t2); |
455 | bool d1_variable, d2_variable; |
456 | bool d1_zero, d2_zero; |
457 | bool t1_complete, t2_complete; |
458 | |
459 | /* We should not have any type quals on arrays at all. */ |
460 | gcc_assert (!TYPE_QUALS_NO_ADDR_SPACE (t1) |
461 | && !TYPE_QUALS_NO_ADDR_SPACE (t2)); |
462 | |
463 | t1_complete = COMPLETE_TYPE_P (t1); |
464 | t2_complete = COMPLETE_TYPE_P (t2); |
465 | |
466 | d1_zero = d1 == NULL_TREE || !TYPE_MAX_VALUE (d1); |
467 | d2_zero = d2 == NULL_TREE || !TYPE_MAX_VALUE (d2); |
468 | |
469 | d1_variable = (!d1_zero |
470 | && (TREE_CODE (TYPE_MIN_VALUE (d1)) != INTEGER_CST |
471 | || TREE_CODE (TYPE_MAX_VALUE (d1)) != INTEGER_CST)); |
472 | d2_variable = (!d2_zero |
473 | && (TREE_CODE (TYPE_MIN_VALUE (d2)) != INTEGER_CST |
474 | || TREE_CODE (TYPE_MAX_VALUE (d2)) != INTEGER_CST)); |
475 | d1_variable = d1_variable || (d1_zero && C_TYPE_VARIABLE_SIZE (t1)); |
476 | d2_variable = d2_variable || (d2_zero && C_TYPE_VARIABLE_SIZE (t2)); |
477 | |
478 | /* Save space: see if the result is identical to one of the args. */ |
479 | if (elt == TREE_TYPE (t1) && TYPE_DOMAIN (t1) |
480 | && (d2_variable || d2_zero || !d1_variable)) |
481 | return build_type_attribute_variant (t1, attributes); |
482 | if (elt == TREE_TYPE (t2) && TYPE_DOMAIN (t2) |
483 | && (d1_variable || d1_zero || !d2_variable)) |
484 | return build_type_attribute_variant (t2, attributes); |
485 | |
486 | if (elt == TREE_TYPE (t1) && !TYPE_DOMAIN (t2) && !TYPE_DOMAIN (t1)) |
487 | return build_type_attribute_variant (t1, attributes); |
488 | if (elt == TREE_TYPE (t2) && !TYPE_DOMAIN (t2) && !TYPE_DOMAIN (t1)) |
489 | return build_type_attribute_variant (t2, attributes); |
490 | |
491 | /* Merge the element types, and have a size if either arg has |
492 | one. We may have qualifiers on the element types. To set |
493 | up TYPE_MAIN_VARIANT correctly, we need to form the |
494 | composite of the unqualified types and add the qualifiers |
495 | back at the end. */ |
496 | quals = TYPE_QUALS (strip_array_types (elt)); |
497 | unqual_elt = c_build_qualified_type (elt, TYPE_UNQUALIFIED); |
498 | t1 = build_array_type (unqual_elt, |
499 | TYPE_DOMAIN ((TYPE_DOMAIN (t1) |
500 | && (d2_variable |
501 | || d2_zero |
502 | || !d1_variable)) |
503 | ? t1 |
504 | : t2)); |
505 | /* Ensure a composite type involving a zero-length array type |
506 | is a zero-length type not an incomplete type. */ |
507 | if (d1_zero && d2_zero |
508 | && (t1_complete || t2_complete) |
509 | && !COMPLETE_TYPE_P (t1)) |
510 | { |
511 | TYPE_SIZE (t1) = bitsize_zero_node; |
512 | TYPE_SIZE_UNIT (t1) = size_zero_node; |
513 | } |
514 | t1 = c_build_qualified_type (t1, quals); |
515 | return build_type_attribute_variant (t1, attributes); |
516 | } |
517 | |
518 | case RECORD_TYPE: |
519 | case UNION_TYPE: |
520 | if (flag_isoc23 && !comptypes_same_p (t1, t2)) |
521 | { |
522 | gcc_checking_assert (COMPLETE_TYPE_P (t1) && COMPLETE_TYPE_P (t2)); |
523 | gcc_checking_assert (!TYPE_NAME (t1) || comptypes (t1, t2)); |
524 | |
525 | /* If a composite type for these two types is already under |
526 | construction, return it. */ |
527 | |
528 | for (struct composite_cache *c = cache; c != NULL; c = c->next) |
529 | if (c->t1 == t1 && c->t2 == t2) |
530 | return c->composite; |
531 | |
532 | /* Otherwise, create a new type node and link it into the cache. */ |
533 | |
534 | tree n = make_node (code1); |
535 | SET_TYPE_STRUCTURAL_EQUALITY (n); |
536 | TYPE_NAME (n) = TYPE_NAME (t1); |
537 | |
538 | struct composite_cache cache2 = { .t1: t1, .t2: t2, .composite: n, .next: cache }; |
539 | cache = &cache2; |
540 | |
541 | tree f1 = TYPE_FIELDS (t1); |
542 | tree f2 = TYPE_FIELDS (t2); |
543 | tree fields = NULL_TREE; |
544 | |
545 | for (tree a = f1, b = f2; a && b; |
546 | a = DECL_CHAIN (a), b = DECL_CHAIN (b)) |
547 | { |
548 | tree ta = TREE_TYPE (a); |
549 | tree tb = TREE_TYPE (b); |
550 | |
551 | if (DECL_C_BIT_FIELD (a)) |
552 | { |
553 | ta = DECL_BIT_FIELD_TYPE (a); |
554 | tb = DECL_BIT_FIELD_TYPE (b); |
555 | } |
556 | |
557 | gcc_assert (DECL_NAME (a) == DECL_NAME (b)); |
558 | gcc_checking_assert (!DECL_NAME (a) || comptypes (ta, tb)); |
559 | |
560 | tree t = composite_type_internal (t1: ta, t2: tb, cache); |
561 | tree f = build_decl (input_location, FIELD_DECL, DECL_NAME (a), t); |
562 | |
563 | DECL_PACKED (f) = DECL_PACKED (a); |
564 | SET_DECL_ALIGN (f, DECL_ALIGN (a)); |
565 | DECL_ATTRIBUTES (f) = DECL_ATTRIBUTES (a); |
566 | C_DECL_VARIABLE_SIZE (f) = C_TYPE_VARIABLE_SIZE (t); |
567 | |
568 | finish_decl (f, input_location, NULL, NULL, NULL); |
569 | |
570 | if (DECL_C_BIT_FIELD (a)) |
571 | { |
572 | /* This will be processed by finish_struct. */ |
573 | SET_DECL_C_BIT_FIELD (f); |
574 | DECL_INITIAL (f) = build_int_cst (integer_type_node, |
575 | tree_to_uhwi (DECL_SIZE (a))); |
576 | DECL_NONADDRESSABLE_P (f) = true; |
577 | DECL_PADDING_P (f) = !DECL_NAME (a); |
578 | } |
579 | |
580 | DECL_CHAIN (f) = fields; |
581 | fields = f; |
582 | } |
583 | |
584 | fields = nreverse (fields); |
585 | |
586 | /* Setup the struct/union type. Because we inherit all variably |
587 | modified components, we can ignore the size expression. */ |
588 | tree expr = NULL_TREE; |
589 | |
590 | /* Set TYPE_STUB_DECL for debugging symbols. */ |
591 | TYPE_STUB_DECL (n) = pushdecl (build_decl (input_location, TYPE_DECL, |
592 | NULL_TREE, n)); |
593 | |
594 | n = finish_struct (input_location, n, fields, attributes, NULL, |
595 | expr: &expr); |
596 | |
597 | n = qualify_type (type: n, like: t1); |
598 | |
599 | gcc_checking_assert (!TYPE_NAME (n) || comptypes (n, t1)); |
600 | gcc_checking_assert (!TYPE_NAME (n) || comptypes (n, t2)); |
601 | |
602 | return n; |
603 | } |
604 | /* FALLTHRU */ |
605 | case ENUMERAL_TYPE: |
606 | if (attributes != NULL) |
607 | { |
608 | /* Try harder not to create a new aggregate type. */ |
609 | if (attribute_list_equal (TYPE_ATTRIBUTES (t1), attributes)) |
610 | return t1; |
611 | if (attribute_list_equal (TYPE_ATTRIBUTES (t2), attributes)) |
612 | return t2; |
613 | } |
614 | return build_type_attribute_variant (t1, attributes); |
615 | |
616 | case FUNCTION_TYPE: |
617 | /* Function types: prefer the one that specified arg types. |
618 | If both do, merge the arg types. Also merge the return types. */ |
619 | { |
620 | tree valtype = composite_type_internal (TREE_TYPE (t1), |
621 | TREE_TYPE (t2), cache); |
622 | tree p1 = TYPE_ARG_TYPES (t1); |
623 | tree p2 = TYPE_ARG_TYPES (t2); |
624 | int len; |
625 | tree newargs, n; |
626 | int i; |
627 | |
628 | /* Save space: see if the result is identical to one of the args. */ |
629 | if (valtype == TREE_TYPE (t1) && !TYPE_ARG_TYPES (t2)) |
630 | return build_functype_attribute_variant (ntype: t1, otype: t2, attrs: attributes); |
631 | if (valtype == TREE_TYPE (t2) && !TYPE_ARG_TYPES (t1)) |
632 | return build_functype_attribute_variant (ntype: t2, otype: t1, attrs: attributes); |
633 | |
634 | /* Simple way if one arg fails to specify argument types. */ |
635 | if (TYPE_ARG_TYPES (t1) == NULL_TREE) |
636 | { |
637 | t1 = build_function_type (valtype, TYPE_ARG_TYPES (t2), |
638 | TYPE_NO_NAMED_ARGS_STDARG_P (t2)); |
639 | t1 = build_type_attribute_variant (t1, attributes); |
640 | return qualify_type (type: t1, like: t2); |
641 | } |
642 | if (TYPE_ARG_TYPES (t2) == NULL_TREE) |
643 | { |
644 | t1 = build_function_type (valtype, TYPE_ARG_TYPES (t1), |
645 | TYPE_NO_NAMED_ARGS_STDARG_P (t1)); |
646 | t1 = build_type_attribute_variant (t1, attributes); |
647 | return qualify_type (type: t1, like: t2); |
648 | } |
649 | |
650 | /* If both args specify argument types, we must merge the two |
651 | lists, argument by argument. */ |
652 | |
653 | for (len = 0, newargs = p1; |
654 | newargs && newargs != void_list_node; |
655 | len++, newargs = TREE_CHAIN (newargs)) |
656 | ; |
657 | |
658 | for (i = 0; i < len; i++) |
659 | newargs = tree_cons (NULL_TREE, NULL_TREE, newargs); |
660 | |
661 | n = newargs; |
662 | |
663 | for (; p1 && p1 != void_list_node; |
664 | p1 = TREE_CHAIN (p1), p2 = TREE_CHAIN (p2), n = TREE_CHAIN (n)) |
665 | { |
666 | tree mv1 = TREE_VALUE (p1); |
667 | if (mv1 && mv1 != error_mark_node |
668 | && TREE_CODE (mv1) != ARRAY_TYPE) |
669 | mv1 = TYPE_MAIN_VARIANT (mv1); |
670 | |
671 | tree mv2 = TREE_VALUE (p2); |
672 | if (mv2 && mv2 != error_mark_node |
673 | && TREE_CODE (mv2) != ARRAY_TYPE) |
674 | mv2 = TYPE_MAIN_VARIANT (mv2); |
675 | |
676 | /* A null type means arg type is not specified. |
677 | Take whatever the other function type has. */ |
678 | if (TREE_VALUE (p1) == NULL_TREE) |
679 | { |
680 | TREE_VALUE (n) = TREE_VALUE (p2); |
681 | goto parm_done; |
682 | } |
683 | if (TREE_VALUE (p2) == NULL_TREE) |
684 | { |
685 | TREE_VALUE (n) = TREE_VALUE (p1); |
686 | goto parm_done; |
687 | } |
688 | |
689 | /* Given wait (union {union wait *u; int *i} *) |
690 | and wait (union wait *), |
691 | prefer union wait * as type of parm. */ |
692 | if (TREE_CODE (TREE_VALUE (p1)) == UNION_TYPE |
693 | && TREE_VALUE (p1) != TREE_VALUE (p2)) |
694 | { |
695 | tree memb; |
696 | for (memb = TYPE_FIELDS (TREE_VALUE (p1)); |
697 | memb; memb = DECL_CHAIN (memb)) |
698 | { |
699 | tree mv3 = TREE_TYPE (memb); |
700 | if (mv3 && mv3 != error_mark_node |
701 | && TREE_CODE (mv3) != ARRAY_TYPE) |
702 | mv3 = TYPE_MAIN_VARIANT (mv3); |
703 | if (comptypes (mv3, mv2)) |
704 | { |
705 | TREE_VALUE (n) = composite_type_internal (TREE_TYPE (memb), |
706 | TREE_VALUE (p2), |
707 | cache); |
708 | pedwarn (input_location, OPT_Wpedantic, |
709 | "function types not truly compatible in ISO C" ); |
710 | goto parm_done; |
711 | } |
712 | } |
713 | } |
714 | if (TREE_CODE (TREE_VALUE (p2)) == UNION_TYPE |
715 | && TREE_VALUE (p2) != TREE_VALUE (p1)) |
716 | { |
717 | tree memb; |
718 | for (memb = TYPE_FIELDS (TREE_VALUE (p2)); |
719 | memb; memb = DECL_CHAIN (memb)) |
720 | { |
721 | tree mv3 = TREE_TYPE (memb); |
722 | if (mv3 && mv3 != error_mark_node |
723 | && TREE_CODE (mv3) != ARRAY_TYPE) |
724 | mv3 = TYPE_MAIN_VARIANT (mv3); |
725 | if (comptypes (mv3, mv1)) |
726 | { |
727 | TREE_VALUE (n) |
728 | = composite_type_internal (TREE_TYPE (memb), |
729 | TREE_VALUE (p1), |
730 | cache); |
731 | pedwarn (input_location, OPT_Wpedantic, |
732 | "function types not truly compatible in ISO C" ); |
733 | goto parm_done; |
734 | } |
735 | } |
736 | } |
737 | TREE_VALUE (n) = composite_type_internal (t1: mv1, t2: mv2, cache); |
738 | parm_done: ; |
739 | } |
740 | |
741 | t1 = build_function_type (valtype, newargs); |
742 | t1 = qualify_type (type: t1, like: t2); |
743 | } |
744 | /* FALLTHRU */ |
745 | |
746 | default: |
747 | return build_type_attribute_variant (t1, attributes); |
748 | } |
749 | } |
750 | |
751 | tree |
752 | composite_type (tree t1, tree t2) |
753 | { |
754 | struct composite_cache cache = { }; |
755 | return composite_type_internal (t1, t2, cache: &cache); |
756 | } |
757 | |
758 | /* Return the type of a conditional expression between pointers to |
759 | possibly differently qualified versions of compatible types. |
760 | |
761 | We assume that comp_target_types has already been done and returned |
762 | true; if that isn't so, this may crash. */ |
763 | |
764 | static tree |
765 | common_pointer_type (tree t1, tree t2) |
766 | { |
767 | tree attributes; |
768 | tree pointed_to_1, mv1; |
769 | tree pointed_to_2, mv2; |
770 | tree target; |
771 | unsigned target_quals; |
772 | addr_space_t as1, as2, as_common; |
773 | int quals1, quals2; |
774 | |
775 | /* Save time if the two types are the same. */ |
776 | |
777 | if (t1 == t2) return t1; |
778 | |
779 | /* If one type is nonsense, use the other. */ |
780 | if (t1 == error_mark_node) |
781 | return t2; |
782 | if (t2 == error_mark_node) |
783 | return t1; |
784 | |
785 | gcc_assert (TREE_CODE (t1) == POINTER_TYPE |
786 | && TREE_CODE (t2) == POINTER_TYPE); |
787 | |
788 | /* Merge the attributes. */ |
789 | attributes = targetm.merge_type_attributes (t1, t2); |
790 | |
791 | /* Find the composite type of the target types, and combine the |
792 | qualifiers of the two types' targets. Do not lose qualifiers on |
793 | array element types by taking the TYPE_MAIN_VARIANT. */ |
794 | mv1 = pointed_to_1 = TREE_TYPE (t1); |
795 | mv2 = pointed_to_2 = TREE_TYPE (t2); |
796 | if (TREE_CODE (mv1) != ARRAY_TYPE) |
797 | mv1 = TYPE_MAIN_VARIANT (pointed_to_1); |
798 | if (TREE_CODE (mv2) != ARRAY_TYPE) |
799 | mv2 = TYPE_MAIN_VARIANT (pointed_to_2); |
800 | target = composite_type (t1: mv1, t2: mv2); |
801 | |
802 | /* Strip array types to get correct qualifier for pointers to arrays */ |
803 | quals1 = TYPE_QUALS_NO_ADDR_SPACE (strip_array_types (pointed_to_1)); |
804 | quals2 = TYPE_QUALS_NO_ADDR_SPACE (strip_array_types (pointed_to_2)); |
805 | |
806 | /* For function types do not merge const qualifiers, but drop them |
807 | if used inconsistently. The middle-end uses these to mark const |
808 | and noreturn functions. */ |
809 | if (TREE_CODE (pointed_to_1) == FUNCTION_TYPE) |
810 | target_quals = (quals1 & quals2); |
811 | else |
812 | target_quals = (quals1 | quals2); |
813 | |
814 | /* If the two named address spaces are different, determine the common |
815 | superset address space. This is guaranteed to exist due to the |
816 | assumption that comp_target_type returned true. */ |
817 | as1 = TYPE_ADDR_SPACE (pointed_to_1); |
818 | as2 = TYPE_ADDR_SPACE (pointed_to_2); |
819 | if (!addr_space_superset (as1, as2, common: &as_common)) |
820 | gcc_unreachable (); |
821 | |
822 | target_quals |= ENCODE_QUAL_ADDR_SPACE (as_common); |
823 | |
824 | t1 = build_pointer_type (c_build_qualified_type (target, target_quals)); |
825 | return build_type_attribute_variant (t1, attributes); |
826 | } |
827 | |
828 | /* Return the common type for two arithmetic types under the usual |
829 | arithmetic conversions. The default conversions have already been |
830 | applied, and enumerated types converted to their compatible integer |
831 | types. The resulting type is unqualified and has no attributes. |
832 | |
833 | This is the type for the result of most arithmetic operations |
834 | if the operands have the given two types. */ |
835 | |
static tree
c_common_type (tree t1, tree t2)
{
  enum tree_code code1;
  enum tree_code code2;

  /* If one type is nonsense, use the other.  */
  if (t1 == error_mark_node)
    return t2;
  if (t2 == error_mark_node)
    return t1;

  /* Arithmetic is done on the unqualified main variants.  */
  if (TYPE_QUALS (t1) != TYPE_UNQUALIFIED)
    t1 = TYPE_MAIN_VARIANT (t1);

  if (TYPE_QUALS (t2) != TYPE_UNQUALIFIED)
    t2 = TYPE_MAIN_VARIANT (t2);

  /* Reduce each attribute list to the attributes that affect type
     identity, so spurious attributes do not defeat the t1 == t2
     fast path below.  */
  if (TYPE_ATTRIBUTES (t1) != NULL_TREE)
    {
      tree attrs = affects_type_identity_attributes (TYPE_ATTRIBUTES (t1));
      t1 = build_type_attribute_variant (t1, attrs);
    }

  if (TYPE_ATTRIBUTES (t2) != NULL_TREE)
    {
      tree attrs = affects_type_identity_attributes (TYPE_ATTRIBUTES (t2));
      t2 = build_type_attribute_variant (t2, attrs);
    }

  /* Save time if the two types are the same.  */

  if (t1 == t2) return t1;

  code1 = TREE_CODE (t1);
  code2 = TREE_CODE (t2);

  /* Only arithmetic types (and vectors thereof) may reach here.  */
  gcc_assert (code1 == VECTOR_TYPE || code1 == COMPLEX_TYPE
	      || code1 == FIXED_POINT_TYPE || code1 == REAL_TYPE
	      || code1 == INTEGER_TYPE || code1 == BITINT_TYPE);
  gcc_assert (code2 == VECTOR_TYPE || code2 == COMPLEX_TYPE
	      || code2 == FIXED_POINT_TYPE || code2 == REAL_TYPE
	      || code2 == INTEGER_TYPE || code2 == BITINT_TYPE);

  /* When one operand is a decimal float type, the other operand cannot be
     a generic float type or a complex type.  We also disallow vector types
     here.  */
  if ((DECIMAL_FLOAT_TYPE_P (t1) || DECIMAL_FLOAT_TYPE_P (t2))
      && !(DECIMAL_FLOAT_TYPE_P (t1) && DECIMAL_FLOAT_TYPE_P (t2)))
    {
      if (code1 == VECTOR_TYPE || code2 == VECTOR_TYPE)
	{
	  error ("cannot mix operands of decimal floating and vector types" );
	  return error_mark_node;
	}
      if (code1 == COMPLEX_TYPE || code2 == COMPLEX_TYPE)
	{
	  error ("cannot mix operands of decimal floating and complex types" );
	  return error_mark_node;
	}
      if (code1 == REAL_TYPE && code2 == REAL_TYPE)
	{
	  error ("cannot mix operands of decimal floating "
		 "and other floating types" );
	  return error_mark_node;
	}
    }

  /* If one type is a vector type, return that type.  (How the usual
     arithmetic conversions apply to the vector types extension is not
     precisely specified.)  */
  if (code1 == VECTOR_TYPE)
    return t1;

  if (code2 == VECTOR_TYPE)
    return t2;

  /* If one type is complex, form the common type of the non-complex
     components, then make that complex.  Use T1 or T2 if it is the
     required type.  */
  if (code1 == COMPLEX_TYPE || code2 == COMPLEX_TYPE)
    {
      tree subtype1 = code1 == COMPLEX_TYPE ? TREE_TYPE (t1) : t1;
      tree subtype2 = code2 == COMPLEX_TYPE ? TREE_TYPE (t2) : t2;
      tree subtype = c_common_type (t1: subtype1, t2: subtype2);

      if (code1 == COMPLEX_TYPE && TREE_TYPE (t1) == subtype)
	return t1;
      else if (code2 == COMPLEX_TYPE && TREE_TYPE (t2) == subtype)
	return t2;
      else if (TREE_CODE (subtype) == BITINT_TYPE)
	{
	  /* _Complex of a bit-precise integer type is not supported;
	     report and recover with one of the complex operands.  */
	  sorry ("%<_Complex _BitInt(%d)%> unsupported" ,
		 TYPE_PRECISION (subtype));
	  return code1 == COMPLEX_TYPE ? t1 : t2;
	}
      else
	return build_complex_type (subtype);
    }

  /* If only one is real, use it as the result.  */

  if (code1 == REAL_TYPE && code2 != REAL_TYPE)
    return t1;

  if (code2 == REAL_TYPE && code1 != REAL_TYPE)
    return t2;

  /* If both are real and either are decimal floating point types, use
     the decimal floating point type with the greater precision.  */

  if (code1 == REAL_TYPE && code2 == REAL_TYPE)
    {
      if (TYPE_MAIN_VARIANT (t1) == dfloat128_type_node
	  || TYPE_MAIN_VARIANT (t2) == dfloat128_type_node)
	return dfloat128_type_node;
      else if (TYPE_MAIN_VARIANT (t1) == dfloat64_type_node
	       || TYPE_MAIN_VARIANT (t2) == dfloat64_type_node)
	return dfloat64_type_node;
      else if (TYPE_MAIN_VARIANT (t1) == dfloat32_type_node
	       || TYPE_MAIN_VARIANT (t2) == dfloat32_type_node)
	return dfloat32_type_node;
    }

  /* Deal with fixed-point types.  */
  if (code1 == FIXED_POINT_TYPE || code2 == FIXED_POINT_TYPE)
    {
      unsigned int unsignedp = 0, satp = 0;
      scalar_mode m1, m2;
      unsigned int fbit1, ibit1, fbit2, ibit2, max_fbit, max_ibit;

      m1 = SCALAR_TYPE_MODE (t1);
      m2 = SCALAR_TYPE_MODE (t2);

      /* If one input type is saturating, the result type is saturating.  */
      if (TYPE_SATURATING (t1) || TYPE_SATURATING (t2))
	satp = 1;

      /* If both fixed-point types are unsigned, the result type is unsigned.
	 When mixing fixed-point and integer types, follow the sign of the
	 fixed-point type.
	 Otherwise, the result type is signed.  */
      if ((TYPE_UNSIGNED (t1) && TYPE_UNSIGNED (t2)
	   && code1 == FIXED_POINT_TYPE && code2 == FIXED_POINT_TYPE)
	  || (code1 == FIXED_POINT_TYPE && code2 != FIXED_POINT_TYPE
	      && TYPE_UNSIGNED (t1))
	  || (code1 != FIXED_POINT_TYPE && code2 == FIXED_POINT_TYPE
	      && TYPE_UNSIGNED (t2)))
	unsignedp = 1;

      /* The result type is signed.  */
      if (unsignedp == 0)
	{
	  /* If the input type is unsigned, we need to convert to the
	     signed type.  */
	  if (code1 == FIXED_POINT_TYPE && TYPE_UNSIGNED (t1))
	    {
	      enum mode_class mclass = (enum mode_class) 0;
	      if (GET_MODE_CLASS (m1) == MODE_UFRACT)
		mclass = MODE_FRACT;
	      else if (GET_MODE_CLASS (m1) == MODE_UACCUM)
		mclass = MODE_ACCUM;
	      else
		gcc_unreachable ();
	      m1 = as_a <scalar_mode>
		(m: mode_for_size (GET_MODE_PRECISION (mode: m1), mclass, 0));
	    }
	  if (code2 == FIXED_POINT_TYPE && TYPE_UNSIGNED (t2))
	    {
	      enum mode_class mclass = (enum mode_class) 0;
	      if (GET_MODE_CLASS (m2) == MODE_UFRACT)
		mclass = MODE_FRACT;
	      else if (GET_MODE_CLASS (m2) == MODE_UACCUM)
		mclass = MODE_ACCUM;
	      else
		gcc_unreachable ();
	      m2 = as_a <scalar_mode>
		(m: mode_for_size (GET_MODE_PRECISION (mode: m2), mclass, 0));
	    }
	}

      /* Gather the integral and fractional bit counts of each operand;
	 the result must be wide enough for both.  */
      if (code1 == FIXED_POINT_TYPE)
	{
	  fbit1 = GET_MODE_FBIT (m1);
	  ibit1 = GET_MODE_IBIT (m1);
	}
      else
	{
	  fbit1 = 0;
	  /* Signed integers need to subtract one sign bit.  */
	  ibit1 = TYPE_PRECISION (t1) - (!TYPE_UNSIGNED (t1));
	}

      if (code2 == FIXED_POINT_TYPE)
	{
	  fbit2 = GET_MODE_FBIT (m2);
	  ibit2 = GET_MODE_IBIT (m2);
	}
      else
	{
	  fbit2 = 0;
	  /* Signed integers need to subtract one sign bit.  */
	  ibit2 = TYPE_PRECISION (t2) - (!TYPE_UNSIGNED (t2));
	}

      max_ibit = ibit1 >= ibit2 ?  ibit1 : ibit2;
      max_fbit = fbit1 >= fbit2 ?  fbit1 : fbit2;
      return c_common_fixed_point_type_for_size (max_ibit, max_fbit, unsignedp,
						 satp);
    }

  /* Both real or both integers; use the one with greater precision.  */

  if (TYPE_PRECISION (t1) > TYPE_PRECISION (t2))
    return t1;
  else if (TYPE_PRECISION (t2) > TYPE_PRECISION (t1))
    return t2;

  /* Same precision.  Prefer long longs to longs to ints when the
     same precision, following the C99 rules on integer type rank
     (which are equivalent to the C90 rules for C90 types).  */

  if (TYPE_MAIN_VARIANT (t1) == long_long_unsigned_type_node
      || TYPE_MAIN_VARIANT (t2) == long_long_unsigned_type_node)
    return long_long_unsigned_type_node;

  if (TYPE_MAIN_VARIANT (t1) == long_long_integer_type_node
      || TYPE_MAIN_VARIANT (t2) == long_long_integer_type_node)
    {
      /* Preserve unsignedness from the other operand, which a signed
	 long long of the same precision cannot represent.  */
      if (TYPE_UNSIGNED (t1) || TYPE_UNSIGNED (t2))
	return long_long_unsigned_type_node;
      else
	return long_long_integer_type_node;
    }

  if (TYPE_MAIN_VARIANT (t1) == long_unsigned_type_node
      || TYPE_MAIN_VARIANT (t2) == long_unsigned_type_node)
    return long_unsigned_type_node;

  if (TYPE_MAIN_VARIANT (t1) == long_integer_type_node
      || TYPE_MAIN_VARIANT (t2) == long_integer_type_node)
    {
      /* But preserve unsignedness from the other type,
	 since long cannot hold all the values of an unsigned int.  */
      if (TYPE_UNSIGNED (t1) || TYPE_UNSIGNED (t2))
	return long_unsigned_type_node;
      else
	return long_integer_type_node;
    }

  /* For floating types of the same TYPE_PRECISION (which we here
     assume means either the same set of values, or sets of values
     neither a subset of the other, with behavior being undefined in
     the latter case), follow the rules from TS 18661-3: prefer
     interchange types _FloatN, then standard types long double,
     double, float, then extended types _FloatNx.  For extended types,
     check them starting with _Float128x as that seems most consistent
     in spirit with preferring long double to double; for interchange
     types, also check in that order for consistency although it's not
     possible for more than one of them to have the same
     precision.  */
  tree mv1 = TYPE_MAIN_VARIANT (t1);
  tree mv2 = TYPE_MAIN_VARIANT (t2);

  for (int i = NUM_FLOATN_TYPES - 1; i >= 0; i--)
    if (mv1 == FLOATN_TYPE_NODE (i) || mv2 == FLOATN_TYPE_NODE (i))
      return FLOATN_TYPE_NODE (i);

  /* Likewise, prefer long double to double even if same size.  */
  if (mv1 == long_double_type_node || mv2 == long_double_type_node)
    return long_double_type_node;

  /* Likewise, prefer double to float even if same size.
     We got a couple of embedded targets with 32 bit doubles, and the
     pdp11 might have 64 bit floats.  */
  if (mv1 == double_type_node || mv2 == double_type_node)
    return double_type_node;

  if (mv1 == float_type_node || mv2 == float_type_node)
    return float_type_node;

  for (int i = NUM_FLOATNX_TYPES - 1; i >= 0; i--)
    if (mv1 == FLOATNX_TYPE_NODE (i) || mv2 == FLOATNX_TYPE_NODE (i))
      return FLOATNX_TYPE_NODE (i);

  if ((code1 == BITINT_TYPE || code2 == BITINT_TYPE) && code1 != code2)
    {
      /* Prefer any other integral types over bit-precise integer types.  */
      if (TYPE_UNSIGNED (t1) == TYPE_UNSIGNED (t2))
	return code1 == BITINT_TYPE ? t2 : t1;
      /* If BITINT_TYPE is unsigned and the other type is a signed
	 non-BITINT_TYPE with the same precision, the latter has higher
	 rank; both operands are then converted to the unsigned integer
	 type corresponding to that signed integer type.  */
      if (TYPE_UNSIGNED (code1 == BITINT_TYPE ? t1 : t2))
	return c_common_unsigned_type (code1 == BITINT_TYPE ? t2 : t1);
    }

  /* Otherwise prefer the unsigned one.  */

  if (TYPE_UNSIGNED (t1))
    return t1;
  else
    return t2;
}
1142 | |
1143 | /* Wrapper around c_common_type that is used by c-common.cc and other |
1144 | front end optimizations that remove promotions. ENUMERAL_TYPEs |
1145 | are allowed here and are converted to their compatible integer types. |
1146 | BOOLEAN_TYPEs are allowed here and return either boolean_type_node or |
1147 | preferably a non-Boolean type as the common type. */ |
1148 | tree |
1149 | common_type (tree t1, tree t2) |
1150 | { |
1151 | if (TREE_CODE (t1) == ENUMERAL_TYPE) |
1152 | t1 = ENUM_UNDERLYING_TYPE (t1); |
1153 | if (TREE_CODE (t2) == ENUMERAL_TYPE) |
1154 | t2 = ENUM_UNDERLYING_TYPE (t2); |
1155 | |
1156 | /* If both types are BOOLEAN_TYPE, then return boolean_type_node. */ |
1157 | if (TREE_CODE (t1) == BOOLEAN_TYPE |
1158 | && TREE_CODE (t2) == BOOLEAN_TYPE) |
1159 | return boolean_type_node; |
1160 | |
1161 | /* If either type is BOOLEAN_TYPE, then return the other. */ |
1162 | if (TREE_CODE (t1) == BOOLEAN_TYPE) |
1163 | return t2; |
1164 | if (TREE_CODE (t2) == BOOLEAN_TYPE) |
1165 | return t1; |
1166 | |
1167 | return c_common_type (t1, t2); |
1168 | } |
1169 | |
/* Accumulated facts discovered while comparing two types; shared by the
   comptypes_* entry points and the recursive comparison helpers.  Flags
   are only ever set to true, never reset.  */
struct comptypes_data {
  bool enum_and_int_p;		/* An enum matched a compatible integer
				   type somewhere in the comparison.  */
  bool different_types_p;	/* Types compatible but different enough
				   not to be valid in C11 typedef
				   redeclarations.  */
  bool warning_needed;		/* Compatible, but using the types
				   together may deserve a warning.  */
  bool anon_field;		/* The member just compared was an
				   unnamed (anonymous) field.  */
  bool equiv;			/* Checking equivalence for aliasing
				   purposes; size mismatches handled
				   specially.  */

  /* Stack-allocated linked list of tagged-type pairs currently being
     compared, used to cut recursion cycles.  */
  const struct tagged_tu_seen_cache* cache;
};
1179 | |
1180 | /* Return 1 if TYPE1 and TYPE2 are compatible types for assignment |
1181 | or various other operations. Return 2 if they are compatible |
1182 | but a warning may be needed if you use them together. */ |
1183 | |
1184 | int |
1185 | comptypes (tree type1, tree type2) |
1186 | { |
1187 | struct comptypes_data data = { }; |
1188 | bool ret = comptypes_internal (type1, type2, data: &data); |
1189 | |
1190 | return ret ? (data.warning_needed ? 2 : 1) : 0; |
1191 | } |
1192 | |
1193 | |
1194 | /* Like comptypes, but it returns non-zero only for identical |
1195 | types. */ |
1196 | |
1197 | bool |
1198 | comptypes_same_p (tree type1, tree type2) |
1199 | { |
1200 | struct comptypes_data data = { }; |
1201 | bool ret = comptypes_internal (type1, type2, data: &data); |
1202 | |
1203 | if (data.different_types_p) |
1204 | return false; |
1205 | |
1206 | return ret; |
1207 | } |
1208 | |
1209 | |
1210 | /* Like comptypes, but if it returns non-zero because enum and int are |
1211 | compatible, it sets *ENUM_AND_INT_P to true. */ |
1212 | |
1213 | int |
1214 | comptypes_check_enum_int (tree type1, tree type2, bool *enum_and_int_p) |
1215 | { |
1216 | struct comptypes_data data = { }; |
1217 | bool ret = comptypes_internal (type1, type2, data: &data); |
1218 | *enum_and_int_p = data.enum_and_int_p; |
1219 | |
1220 | return ret ? (data.warning_needed ? 2 : 1) : 0; |
1221 | } |
1222 | |
1223 | /* Like comptypes, but if it returns nonzero for different types, it |
1224 | sets *DIFFERENT_TYPES_P to true. */ |
1225 | |
1226 | int |
1227 | comptypes_check_different_types (tree type1, tree type2, |
1228 | bool *different_types_p) |
1229 | { |
1230 | struct comptypes_data data = { }; |
1231 | bool ret = comptypes_internal (type1, type2, data: &data); |
1232 | *different_types_p = data.different_types_p; |
1233 | |
1234 | return ret ? (data.warning_needed ? 2 : 1) : 0; |
1235 | } |
1236 | |
1237 | |
1238 | /* Like comptypes, but if it returns nonzero for struct and union |
1239 | types considered equivalent for aliasing purposes. */ |
1240 | |
1241 | bool |
1242 | comptypes_equiv_p (tree type1, tree type2) |
1243 | { |
1244 | struct comptypes_data data = { }; |
1245 | data.equiv = true; |
1246 | bool ret = comptypes_internal (type1, type2, data: &data); |
1247 | |
1248 | return ret; |
1249 | } |
1250 | |
1251 | |
1252 | /* Return true if TYPE1 and TYPE2 are compatible types for assignment |
1253 | or various other operations. If they are compatible but a warning may |
1254 | be needed if you use them together, 'warning_needed' in DATA is set. |
1255 | If one type is an enum and the other a compatible integer type, then |
1256 | this sets 'enum_and_int_p' in DATA to true (it is never set to |
1257 | false). If the types are compatible but different enough not to be |
1258 | permitted in C11 typedef redeclarations, then this sets |
1259 | 'different_types_p' in DATA to true; it is never set to |
1260 | false, but may or may not be set if the types are incompatible. |
1261 | This differs from comptypes, in that we don't free the seen |
1262 | types. */ |
1263 | |
static bool
comptypes_internal (const_tree type1, const_tree type2,
		    struct comptypes_data *data)
{
  const_tree t1 = type1;
  const_tree t2 = type2;

  /* Suppress errors caused by previously reported errors.  */

  if (t1 == t2 || !t1 || !t2
      || TREE_CODE (t1) == ERROR_MARK || TREE_CODE (t2) == ERROR_MARK)
    return true;

  /* Enumerated types are compatible with integer types, but this is
     not transitive: two enumerated types in the same translation unit
     are compatible with each other only if they are the same type.  */

  if (TREE_CODE (t1) == ENUMERAL_TYPE
      && COMPLETE_TYPE_P (t1)
      && TREE_CODE (t2) != ENUMERAL_TYPE)
    {
      /* Compare the enum's underlying integer type instead, and record
	 that an enum/int match happened (unless the other side is void,
	 which cannot match anyway).  */
      t1 = ENUM_UNDERLYING_TYPE (t1);
      if (TREE_CODE (t2) != VOID_TYPE)
	{
	  data->enum_and_int_p = true;
	  data->different_types_p = true;
	}
    }
  else if (TREE_CODE (t2) == ENUMERAL_TYPE
	   && COMPLETE_TYPE_P (t2)
	   && TREE_CODE (t1) != ENUMERAL_TYPE)
    {
      t2 = ENUM_UNDERLYING_TYPE (t2);
      if (TREE_CODE (t1) != VOID_TYPE)
	{
	  data->enum_and_int_p = true;
	  data->different_types_p = true;
	}
    }

  /* The enum replacement above may have made the two types equal.  */
  if (t1 == t2)
    return true;

  /* Different classes of types can't be compatible.  */

  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;

  /* Qualifiers must match. C99 6.7.3p9 */

  if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
    return false;

  /* Allow for two different type nodes which have essentially the same
     definition.  Note that we already checked for equality of the type
     qualifiers (just above).  Arrays are excluded since their
     compatibility depends on the domain, checked below.  */

  if (TREE_CODE (t1) != ARRAY_TYPE
      && TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
    return true;

  int attrval;

  /* 1 if no need for warning yet, 2 if warning cause has been seen.  */
  if (!(attrval = comp_type_attributes (t1, t2)))
    return false;

  if (2 == attrval)
    data->warning_needed = true;

  switch (TREE_CODE (t1))
    {
    case INTEGER_TYPE:
    case FIXED_POINT_TYPE:
    case REAL_TYPE:
    case BITINT_TYPE:
      /* With these nodes, we can't determine type equivalence by
	 looking at what is stored in the nodes themselves, because
	 two nodes might have different TYPE_MAIN_VARIANTs but still
	 represent the same type.  For example, wchar_t and int could
	 have the same properties (TYPE_PRECISION, TYPE_MIN_VALUE,
	 TYPE_MAX_VALUE, etc.), but have different TYPE_MAIN_VARIANTs
	 and are distinct types.  On the other hand, int and the
	 following typedef

	   typedef int INT __attribute((may_alias));

	 have identical properties, different TYPE_MAIN_VARIANTs, but
	 represent the same type.  The canonical type system keeps
	 track of equivalence in this case, so we fall back on it.  */
      return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);

    case POINTER_TYPE:
      /* Do not remove mode information.  */
      if (TYPE_MODE (t1) != TYPE_MODE (t2))
	return false;
      return comptypes_internal (TREE_TYPE (t1), TREE_TYPE (t2), data);

    case FUNCTION_TYPE:
      return function_types_compatible_p (t1, t2, data);

    case ARRAY_TYPE:
      {
	tree d1 = TYPE_DOMAIN (t1);
	tree d2 = TYPE_DOMAIN (t2);
	bool d1_variable, d2_variable;
	bool d1_zero, d2_zero;

	/* Target types must match incl. qualifiers.  */
	if (!comptypes_internal (TREE_TYPE (t1), TREE_TYPE (t2), data))
	  return false;

	/* An array with a specified size is "different" from one
	   without, even though they may be compatible.  */
	if ((d1 == NULL_TREE) != (d2 == NULL_TREE))
	  data->different_types_p = true;
	/* Ignore size mismatches.  */
	if (data->equiv)
	  return true;
	/* Sizes must match unless one is missing or variable.  */
	if (d1 == NULL_TREE || d2 == NULL_TREE || d1 == d2)
	  return true;

	/* NOTE(review): a domain without a max value presumably stands
	   for a zero-length or incomplete array — confirm against how
	   TYPE_DOMAIN is built for such arrays.  */
	d1_zero = !TYPE_MAX_VALUE (d1);
	d2_zero = !TYPE_MAX_VALUE (d2);

	d1_variable = (!d1_zero
		       && (TREE_CODE (TYPE_MIN_VALUE (d1)) != INTEGER_CST
			   || TREE_CODE (TYPE_MAX_VALUE (d1)) != INTEGER_CST));
	d2_variable = (!d2_zero
		       && (TREE_CODE (TYPE_MIN_VALUE (d2)) != INTEGER_CST
			   || TREE_CODE (TYPE_MAX_VALUE (d2)) != INTEGER_CST));
	d1_variable = d1_variable || (d1_zero && C_TYPE_VARIABLE_SIZE (t1));
	d2_variable = d2_variable || (d2_zero && C_TYPE_VARIABLE_SIZE (t2));

	if (d1_variable != d2_variable)
	  data->different_types_p = true;
	/* A variable-size dimension is compatible with anything.  */
	if (d1_variable || d2_variable)
	  return true;
	if (d1_zero && d2_zero)
	  return true;
	/* Constant dimensions must agree exactly.  */
	if (d1_zero || d2_zero
	    || !tree_int_cst_equal (TYPE_MIN_VALUE (d1), TYPE_MIN_VALUE (d2))
	    || !tree_int_cst_equal (TYPE_MAX_VALUE (d1), TYPE_MAX_VALUE (d2)))
	  return false;

	return true;
      }

    case ENUMERAL_TYPE:
    case RECORD_TYPE:
    case UNION_TYPE:

      /* Distinct tagged types are never compatible before C23; with
	 C23, compare tag and members.  */
      if (!flag_isoc23)
	return false;

      return tagged_types_tu_compatible_p (t1, t2, data);

    case VECTOR_TYPE:
      return known_eq (TYPE_VECTOR_SUBPARTS (t1), TYPE_VECTOR_SUBPARTS (t2))
	     && comptypes_internal (TREE_TYPE (t1), TREE_TYPE (t2), data);

    default:
      return false;
    }
  gcc_unreachable ();
}
1429 | |
1430 | /* Return true if TTL and TTR are pointers to types that are equivalent, ignoring |
1431 | their qualifiers, except for named address spaces. If the pointers point to |
1432 | different named addresses, then we must determine if one address space is a |
1433 | subset of the other. */ |
1434 | |
static bool
comp_target_types (location_t location, tree ttl, tree ttr)
{
  /* VAL holds the comptypes-style result (0 incompatible, 1 compatible,
     2 compatible-with-warning) for the pointer target types.  */
  int val;
  int val_ped;
  tree mvl = TREE_TYPE (ttl);
  tree mvr = TREE_TYPE (ttr);
  addr_space_t asl = TYPE_ADDR_SPACE (mvl);
  addr_space_t asr = TYPE_ADDR_SPACE (mvr);
  addr_space_t as_common;
  bool enum_and_int_p;

  /* Fail if pointers point to incompatible address spaces.  */
  if (!addr_space_superset (as1: asl, as2: asr, common: &as_common))
    return 0;

  /* For pedantic record result of comptypes on arrays before losing
     qualifiers on the element type below.  */
  val_ped = 1;

  if (TREE_CODE (mvl) == ARRAY_TYPE
      && TREE_CODE (mvr) == ARRAY_TYPE)
    val_ped = comptypes (type1: mvl, type2: mvr);

  /* Qualifiers on element types of array types that are
     pointer targets are lost by taking their TYPE_MAIN_VARIANT.  */

  /* Keep atomicity, which TYPE_MAIN_VARIANT would also strip.  */
  mvl = (TYPE_ATOMIC (strip_array_types (mvl))
	 ? c_build_qualified_type (TYPE_MAIN_VARIANT (mvl), TYPE_QUAL_ATOMIC)
	 : TYPE_MAIN_VARIANT (mvl));

  mvr = (TYPE_ATOMIC (strip_array_types (mvr))
	 ? c_build_qualified_type (TYPE_MAIN_VARIANT (mvr), TYPE_QUAL_ATOMIC)
	 : TYPE_MAIN_VARIANT (mvr));

  enum_and_int_p = false;
  val = comptypes_check_enum_int (type1: mvl, type2: mvr, enum_and_int_p: &enum_and_int_p);

  /* Compatible only after qualifier stripping: pedwarn pre-C23.  */
  if (val == 1 && val_ped != 1)
    pedwarn_c11 (location, opt: OPT_Wpedantic, "invalid use of pointers to arrays with different qualifiers "
		 "in ISO C before C23" );

  if (val == 2)
    pedwarn (location, OPT_Wpedantic, "types are not quite compatible" );

  if (val == 1 && enum_and_int_p && warn_cxx_compat)
    warning_at (location, OPT_Wc___compat,
		"pointer target types incompatible in C++" );

  return val;
}
1486 | |
1487 | /* Subroutines of `comptypes'. */ |
1488 | |
1489 | /* Return true if two 'struct', 'union', or 'enum' types T1 and T2 are |
1490 | compatible. The two types are not the same (which has been |
1491 | checked earlier in comptypes_internal). */ |
1492 | |
static bool
tagged_types_tu_compatible_p (const_tree t1, const_tree t2,
			      struct comptypes_data *data)
{
  tree s1, s2;

  /* We have to verify that the tags of the types are the same.  This
     is harder than it looks because this may be a typedef, so we have
     to go look at the original type.  It may even be a typedef of a
     typedef...
     In the case of compiler-created builtin structs the TYPE_DECL
     may be a dummy, with no DECL_ORIGINAL_TYPE.  Don't fault.  */
  while (TYPE_NAME (t1)
	 && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
	 && DECL_ORIGINAL_TYPE (TYPE_NAME (t1)))
    t1 = DECL_ORIGINAL_TYPE (TYPE_NAME (t1));

  while (TYPE_NAME (t2)
	 && TREE_CODE (TYPE_NAME (t2)) == TYPE_DECL
	 && DECL_ORIGINAL_TYPE (TYPE_NAME (t2)))
    t2 = DECL_ORIGINAL_TYPE (TYPE_NAME (t2));

  /* Different tags are never compatible.  */
  if (TYPE_NAME (t1) != TYPE_NAME (t2))
    return false;

  /* Untagged types only match as anonymous members.  */
  if (!data->anon_field && NULL_TREE == TYPE_NAME (t1))
    return false;

  if (!data->anon_field && TYPE_STUB_DECL (t1) != TYPE_STUB_DECL (t2))
    data->different_types_p = true;

  /* Incomplete types are incompatible inside a TU.  */
  if (TYPE_SIZE (t1) == NULL || TYPE_SIZE (t2) == NULL)
    return false;

  /* Storage order must agree for structs and unions (not meaningful
     for enums).  */
  if (ENUMERAL_TYPE != TREE_CODE (t1)
      && (TYPE_REVERSE_STORAGE_ORDER (t1)
	  != TYPE_REVERSE_STORAGE_ORDER (t2)))
    return false;

  /* For types already being looked at in some active
     invocation of this function, assume compatibility.
     The cache is built as a linked list on the stack
     with the head of the list passed downwards.  */
  for (const struct tagged_tu_seen_cache *t = data->cache;
       t != NULL; t = t->next)
    if (t->t1 == t1 && t->t2 == t2)
      return true;

  const struct tagged_tu_seen_cache entry = { .next: data->cache, .t1: t1, .t2: t2 };

  switch (TREE_CODE (t1))
    {
    case ENUMERAL_TYPE:
      {
	/* The underlying integer types must agree.  */
	if (!comptypes (ENUM_UNDERLYING_TYPE (t1), ENUM_UNDERLYING_TYPE (t2)))
	  return false;

	/* Speed up the case where the type values are in the same order.  */
	tree tv1 = TYPE_VALUES (t1);
	tree tv2 = TYPE_VALUES (t2);

	if (tv1 == tv2)
	  return true;

	/* Walk both value lists in lockstep, comparing enumerator names
	   and values; stop at the first mismatch.  */
	for (;tv1 && tv2; tv1 = TREE_CHAIN (tv1), tv2 = TREE_CHAIN (tv2))
	  {
	    if (TREE_PURPOSE (tv1) != TREE_PURPOSE (tv2))
	      break;

	    if (simple_cst_equal (DECL_INITIAL (TREE_VALUE (tv1)),
				  DECL_INITIAL (TREE_VALUE (tv2))) != 1)
	      break;
	  }

	/* Both lists exhausted: all enumerators matched in order.  */
	if (tv1 == NULL_TREE && tv2 == NULL_TREE)
	  return true;

	if (tv1 == NULL_TREE || tv2 == NULL_TREE)
	  return false;

	if (list_length (TYPE_VALUES (t1)) != list_length (TYPE_VALUES (t2)))
	  return false;

	/* Fall back to order-independent matching: every enumerator of
	   T1 must appear in T2 with an equal value.  */
	for (s1 = TYPE_VALUES (t1); s1; s1 = TREE_CHAIN (s1))
	  {
	    s2 = purpose_member (TREE_PURPOSE (s1), TYPE_VALUES (t2));

	    if (s2 == NULL
		|| simple_cst_equal (DECL_INITIAL (TREE_VALUE (s1)),
				     DECL_INITIAL (TREE_VALUE (s2))) != 1)
	      return false;
	  }

	return true;
      }

    case UNION_TYPE:
    case RECORD_TYPE:

      if (list_length (TYPE_FIELDS (t1)) != list_length (TYPE_FIELDS (t2)))
	return false;

      if (data->equiv && (C_TYPE_VARIABLE_SIZE (t1) || C_TYPE_VARIABLE_SIZE (t2)))
	return false;

      /* Compare members pairwise, in declaration order.  */
      for (s1 = TYPE_FIELDS (t1), s2 = TYPE_FIELDS (t2);
	   s1 && s2;
	   s1 = DECL_CHAIN (s1), s2 = DECL_CHAIN (s2))
	{
	  gcc_assert (TREE_CODE (s1) == FIELD_DECL);
	  gcc_assert (TREE_CODE (s2) == FIELD_DECL);

	  if (DECL_NAME (s1) != DECL_NAME (s2))
	    return false;

	  if (DECL_ALIGN (s1) != DECL_ALIGN (s2))
	    return false;

	  /* Let the recursive comparison know whether unnamed tagged
	     member types are allowed to match.  */
	  data->anon_field = !DECL_NAME (s1);

	  /* Push this pair onto the in-progress cache to cut cycles.  */
	  data->cache = &entry;
	  if (!comptypes_internal (TREE_TYPE (s1), TREE_TYPE (s2), data))
	    return false;

	  tree st1 = TYPE_SIZE (TREE_TYPE (s1));
	  tree st2 = TYPE_SIZE (TREE_TYPE (s2));

	  /* In aliasing-equivalence mode, constant member sizes must
	     match exactly.  */
	  if (data->equiv
	      && st1 && TREE_CODE (st1) == INTEGER_CST
	      && st2 && TREE_CODE (st2) == INTEGER_CST
	      && !tree_int_cst_equal (st1, st2))
	    return false;
	}
      return true;

    default:
      gcc_unreachable ();
    }
}
1633 | |
1634 | /* Return true if two function types F1 and F2 are compatible. |
1635 | If either type specifies no argument types, |
1636 | the other must specify a fixed number of self-promoting arg types. |
1637 | Otherwise, if one type specifies only the number of arguments, |
1638 | the other must specify that number of self-promoting arg types. |
1639 | Otherwise, the argument types must match. */ |
1640 | |
static bool
function_types_compatible_p (const_tree f1, const_tree f2,
			     struct comptypes_data *data)
{
  tree args1, args2;
  /* 1 if no need for warning yet, 2 if warning cause has been seen.  */
  int val = 1;
  int val1;
  tree ret1, ret2;

  ret1 = TREE_TYPE (f1);
  ret2 = TREE_TYPE (f2);

  /* 'volatile' qualifiers on a function's return type used to mean
     the function is noreturn.  */
  if (TYPE_VOLATILE (ret1) != TYPE_VOLATILE (ret2))
    pedwarn (input_location, 0, "function return types not compatible due to %<volatile%>" );
  /* Strip 'volatile' from both return types before comparing them.  */
  if (TYPE_VOLATILE (ret1))
    ret1 = build_qualified_type (TYPE_MAIN_VARIANT (ret1),
				 TYPE_QUALS (ret1) & ~TYPE_QUAL_VOLATILE);
  if (TYPE_VOLATILE (ret2))
    ret2 = build_qualified_type (TYPE_MAIN_VARIANT (ret2),
				 TYPE_QUALS (ret2) & ~TYPE_QUAL_VOLATILE);
  val = comptypes_internal (type1: ret1, type2: ret2, data);
  if (val == 0)
    return 0;

  args1 = TYPE_ARG_TYPES (f1);
  args2 = TYPE_ARG_TYPES (f2);

  /* Prototyped vs. unprototyped counts as "different" even when
     compatible.  */
  if ((args1 == NULL_TREE) != (args2 == NULL_TREE))
    data->different_types_p = true;

  /* An unspecified parmlist matches any specified parmlist
     whose argument types don't need default promotions.  */

  if (args1 == NULL_TREE)
    {
      /* (...) with no named arguments only matches another such type.  */
      if (TYPE_NO_NAMED_ARGS_STDARG_P (f1) != TYPE_NO_NAMED_ARGS_STDARG_P (f2))
	return 0;
      if (!self_promoting_args_p (args2))
	return 0;
      /* If one of these types comes from a non-prototype fn definition,
	 compare that with the other type's arglist.
	 If they don't match, ask for a warning (but no error).  */
      if (TYPE_ACTUAL_ARG_TYPES (f1)
	  && type_lists_compatible_p (args2, TYPE_ACTUAL_ARG_TYPES (f1),
				      data) != 1)
	{
	  val = 1;
	  data->warning_needed = true;
	}
      return val;
    }
  /* Mirror case: F2 is the unprototyped type.  */
  if (args2 == NULL_TREE)
    {
      if (TYPE_NO_NAMED_ARGS_STDARG_P (f1) != TYPE_NO_NAMED_ARGS_STDARG_P (f2))
	return 0;
      if (!self_promoting_args_p (args1))
	return 0;
      if (TYPE_ACTUAL_ARG_TYPES (f2)
	  && type_lists_compatible_p (args1, TYPE_ACTUAL_ARG_TYPES (f2),
				      data) != 1)
	{
	  val = 1;
	  data->warning_needed = true;
	}
      return val;
    }

  /* Both types have argument lists: compare them and propagate results.  */
  val1 = type_lists_compatible_p (args1, args2, data);
  return val1;
}
1715 | |
1716 | /* Check two lists of types for compatibility, returning false for |
1717 | incompatible, true for compatible. */ |
1718 | |
1719 | static bool |
1720 | type_lists_compatible_p (const_tree args1, const_tree args2, |
1721 | struct comptypes_data *data) |
1722 | { |
1723 | while (1) |
1724 | { |
1725 | tree a1, mv1, a2, mv2; |
1726 | if (args1 == NULL_TREE && args2 == NULL_TREE) |
1727 | return true; |
1728 | /* If one list is shorter than the other, |
1729 | they fail to match. */ |
1730 | if (args1 == NULL_TREE || args2 == NULL_TREE) |
1731 | return 0; |
1732 | mv1 = a1 = TREE_VALUE (args1); |
1733 | mv2 = a2 = TREE_VALUE (args2); |
1734 | if (mv1 && mv1 != error_mark_node && TREE_CODE (mv1) != ARRAY_TYPE) |
1735 | mv1 = (TYPE_ATOMIC (mv1) |
1736 | ? c_build_qualified_type (TYPE_MAIN_VARIANT (mv1), |
1737 | TYPE_QUAL_ATOMIC) |
1738 | : TYPE_MAIN_VARIANT (mv1)); |
1739 | if (mv2 && mv2 != error_mark_node && TREE_CODE (mv2) != ARRAY_TYPE) |
1740 | mv2 = (TYPE_ATOMIC (mv2) |
1741 | ? c_build_qualified_type (TYPE_MAIN_VARIANT (mv2), |
1742 | TYPE_QUAL_ATOMIC) |
1743 | : TYPE_MAIN_VARIANT (mv2)); |
1744 | /* A null pointer instead of a type |
1745 | means there is supposed to be an argument |
1746 | but nothing is specified about what type it has. |
1747 | So match anything that self-promotes. */ |
1748 | if ((a1 == NULL_TREE) != (a2 == NULL_TREE)) |
1749 | data->different_types_p = true; |
1750 | if (a1 == NULL_TREE) |
1751 | { |
1752 | if (c_type_promotes_to (type: a2) != a2) |
1753 | return 0; |
1754 | } |
1755 | else if (a2 == NULL_TREE) |
1756 | { |
1757 | if (c_type_promotes_to (type: a1) != a1) |
1758 | return 0; |
1759 | } |
1760 | /* If one of the lists has an error marker, ignore this arg. */ |
1761 | else if (TREE_CODE (a1) == ERROR_MARK |
1762 | || TREE_CODE (a2) == ERROR_MARK) |
1763 | ; |
1764 | else if (!comptypes_internal (type1: mv1, type2: mv2, data)) |
1765 | { |
1766 | data->different_types_p = true; |
1767 | /* Allow wait (union {union wait *u; int *i} *) |
1768 | and wait (union wait *) to be compatible. */ |
1769 | if (TREE_CODE (a1) == UNION_TYPE |
1770 | && (TYPE_NAME (a1) == NULL_TREE |
1771 | || TYPE_TRANSPARENT_AGGR (a1)) |
1772 | && TREE_CODE (TYPE_SIZE (a1)) == INTEGER_CST |
1773 | && tree_int_cst_equal (TYPE_SIZE (a1), |
1774 | TYPE_SIZE (a2))) |
1775 | { |
1776 | tree memb; |
1777 | for (memb = TYPE_FIELDS (a1); |
1778 | memb; memb = DECL_CHAIN (memb)) |
1779 | { |
1780 | tree mv3 = TREE_TYPE (memb); |
1781 | if (mv3 && mv3 != error_mark_node |
1782 | && TREE_CODE (mv3) != ARRAY_TYPE) |
1783 | mv3 = (TYPE_ATOMIC (mv3) |
1784 | ? c_build_qualified_type (TYPE_MAIN_VARIANT (mv3), |
1785 | TYPE_QUAL_ATOMIC) |
1786 | : TYPE_MAIN_VARIANT (mv3)); |
1787 | if (comptypes_internal (type1: mv3, type2: mv2, data)) |
1788 | break; |
1789 | } |
1790 | if (memb == NULL_TREE) |
1791 | return 0; |
1792 | } |
1793 | else if (TREE_CODE (a2) == UNION_TYPE |
1794 | && (TYPE_NAME (a2) == NULL_TREE |
1795 | || TYPE_TRANSPARENT_AGGR (a2)) |
1796 | && TREE_CODE (TYPE_SIZE (a2)) == INTEGER_CST |
1797 | && tree_int_cst_equal (TYPE_SIZE (a2), |
1798 | TYPE_SIZE (a1))) |
1799 | { |
1800 | tree memb; |
1801 | for (memb = TYPE_FIELDS (a2); |
1802 | memb; memb = DECL_CHAIN (memb)) |
1803 | { |
1804 | tree mv3 = TREE_TYPE (memb); |
1805 | if (mv3 && mv3 != error_mark_node |
1806 | && TREE_CODE (mv3) != ARRAY_TYPE) |
1807 | mv3 = (TYPE_ATOMIC (mv3) |
1808 | ? c_build_qualified_type (TYPE_MAIN_VARIANT (mv3), |
1809 | TYPE_QUAL_ATOMIC) |
1810 | : TYPE_MAIN_VARIANT (mv3)); |
1811 | if (comptypes_internal (type1: mv3, type2: mv1, data)) |
1812 | break; |
1813 | } |
1814 | if (memb == NULL_TREE) |
1815 | return 0; |
1816 | } |
1817 | else |
1818 | return 0; |
1819 | } |
1820 | |
1821 | args1 = TREE_CHAIN (args1); |
1822 | args2 = TREE_CHAIN (args2); |
1823 | } |
1824 | } |
1825 | |
1826 | /* Compute the size to increment a pointer by. When a function type or void |
1827 | type or incomplete type is passed, size_one_node is returned. |
1828 | This function does not emit any diagnostics; the caller is responsible |
1829 | for that. */ |
1830 | |
1831 | static tree |
1832 | c_size_in_bytes (const_tree type) |
1833 | { |
1834 | enum tree_code code = TREE_CODE (type); |
1835 | |
1836 | if (code == FUNCTION_TYPE || code == VOID_TYPE || code == ERROR_MARK |
1837 | || !COMPLETE_TYPE_P (type)) |
1838 | return size_one_node; |
1839 | |
1840 | /* Convert in case a char is more than one unit. */ |
1841 | return size_binop_loc (input_location, CEIL_DIV_EXPR, TYPE_SIZE_UNIT (type), |
1842 | size_int (TYPE_PRECISION (char_type_node) |
1843 | / BITS_PER_UNIT)); |
1844 | } |
1845 | |
1846 | /* Return either DECL or its known constant value (if it has one). */ |
1847 | |
1848 | tree |
1849 | decl_constant_value_1 (tree decl, bool in_init) |
1850 | { |
1851 | if (/* Note that DECL_INITIAL isn't valid for a PARM_DECL. */ |
1852 | TREE_CODE (decl) != PARM_DECL |
1853 | && !TREE_THIS_VOLATILE (decl) |
1854 | && TREE_READONLY (decl) |
1855 | && DECL_INITIAL (decl) != NULL_TREE |
1856 | && !error_operand_p (DECL_INITIAL (decl)) |
1857 | /* This is invalid if initial value is not constant. |
1858 | If it has either a function call, a memory reference, |
1859 | or a variable, then re-evaluating it could give different results. */ |
1860 | && TREE_CONSTANT (DECL_INITIAL (decl)) |
1861 | /* Check for cases where this is sub-optimal, even though valid. */ |
1862 | && (in_init || TREE_CODE (DECL_INITIAL (decl)) != CONSTRUCTOR)) |
1863 | return DECL_INITIAL (decl); |
1864 | return decl; |
1865 | } |
1866 | |
1867 | /* Return either DECL or its known constant value (if it has one). |
1868 | Like the above, but always return decl outside of functions. */ |
1869 | |
1870 | tree |
1871 | decl_constant_value (tree decl) |
1872 | { |
1873 | /* Don't change a variable array bound or initial value to a constant |
1874 | in a place where a variable is invalid. */ |
1875 | return current_function_decl ? decl_constant_value_1 (decl, in_init: false) : decl; |
1876 | } |
1877 | |
1878 | /* Convert the array expression EXP to a pointer. */ |
1879 | static tree |
1880 | array_to_pointer_conversion (location_t loc, tree exp) |
1881 | { |
1882 | tree orig_exp = exp; |
1883 | tree type = TREE_TYPE (exp); |
1884 | tree adr; |
1885 | tree restype = TREE_TYPE (type); |
1886 | tree ptrtype; |
1887 | |
1888 | gcc_assert (TREE_CODE (type) == ARRAY_TYPE); |
1889 | |
1890 | STRIP_TYPE_NOPS (exp); |
1891 | |
1892 | copy_warning (exp, orig_exp); |
1893 | |
1894 | ptrtype = build_pointer_type (restype); |
1895 | |
1896 | if (INDIRECT_REF_P (exp)) |
1897 | return convert (ptrtype, TREE_OPERAND (exp, 0)); |
1898 | |
1899 | /* In C++ array compound literals are temporary objects unless they are |
1900 | const or appear in namespace scope, so they are destroyed too soon |
1901 | to use them for much of anything (c++/53220). */ |
1902 | if (warn_cxx_compat && TREE_CODE (exp) == COMPOUND_LITERAL_EXPR) |
1903 | { |
1904 | tree decl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0); |
1905 | if (!TREE_READONLY (decl) && !TREE_STATIC (decl)) |
1906 | warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wc___compat, |
1907 | "converting an array compound literal to a pointer " |
1908 | "leads to a dangling pointer in C++" ); |
1909 | } |
1910 | |
1911 | adr = build_unary_op (loc, ADDR_EXPR, exp, true); |
1912 | return convert (ptrtype, adr); |
1913 | } |
1914 | |
1915 | /* Convert the function expression EXP to a pointer. */ |
1916 | static tree |
1917 | function_to_pointer_conversion (location_t loc, tree exp) |
1918 | { |
1919 | tree orig_exp = exp; |
1920 | |
1921 | gcc_assert (TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE); |
1922 | |
1923 | STRIP_TYPE_NOPS (exp); |
1924 | |
1925 | copy_warning (exp, orig_exp); |
1926 | |
1927 | return build_unary_op (loc, ADDR_EXPR, exp, false); |
1928 | } |
1929 | |
/* Mark EXP as read, not just set, for set but not used -Wunused
   warning purposes.  Recurses through wrapper expressions down to the
   underlying VAR_DECL/PARM_DECL and sets DECL_READ_P on it.  */

void
mark_exp_read (tree exp)
{
  switch (TREE_CODE (exp))
    {
    case VAR_DECL:
    case PARM_DECL:
      /* Base case: record that the declaration's value was used.  */
      DECL_READ_P (exp) = 1;
      break;
    case ARRAY_REF:
    case COMPONENT_REF:
    case MODIFY_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    CASE_CONVERT:
    case ADDR_EXPR:
    case VIEW_CONVERT_EXPR:
      /* Wrappers around the object being read: look through operand 0.  */
      mark_exp_read (TREE_OPERAND (exp, 0));
      break;
    case COMPOUND_EXPR:
      /* Pattern match what build_atomic_assign produces with modifycode
	 NOP_EXPR: ((TARGET_EXPR tmp, __atomic_store* (...)), tmp), so
	 that a simple atomic store still marks its source as read.  */
      if (VAR_P (TREE_OPERAND (exp, 1))
	  && DECL_ARTIFICIAL (TREE_OPERAND (exp, 1))
	  && TREE_CODE (TREE_OPERAND (exp, 0)) == COMPOUND_EXPR)
	{
	  tree t1 = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	  tree t2 = TREE_OPERAND (TREE_OPERAND (exp, 0), 1);
	  if (TREE_CODE (t1) == TARGET_EXPR
	      && TARGET_EXPR_SLOT (t1) == TREE_OPERAND (exp, 1)
	      && TREE_CODE (t2) == CALL_EXPR)
	    {
	      tree fndecl = get_callee_fndecl (t2);
	      tree arg = NULL_TREE;
	      if (fndecl
		  && TREE_CODE (fndecl) == FUNCTION_DECL
		  && fndecl_built_in_p (node: fndecl, klass: BUILT_IN_NORMAL)
		  && call_expr_nargs (t2) >= 2)
		switch (DECL_FUNCTION_CODE (decl: fndecl))
		  {
		  case BUILT_IN_ATOMIC_STORE:
		    /* Generic form: the stored value is argument 1.  */
		    arg = CALL_EXPR_ARG (t2, 1);
		    break;
		  case BUILT_IN_ATOMIC_STORE_1:
		  case BUILT_IN_ATOMIC_STORE_2:
		  case BUILT_IN_ATOMIC_STORE_4:
		  case BUILT_IN_ATOMIC_STORE_8:
		  case BUILT_IN_ATOMIC_STORE_16:
		    /* Sized forms: the target address is argument 0.  */
		    arg = CALL_EXPR_ARG (t2, 0);
		    break;
		  default:
		    break;
		  }
	      if (arg)
		{
		  STRIP_NOPS (arg);
		  /* Only mark if the address is of a declared _Atomic
		     object; otherwise there is nothing to record.  */
		  if (TREE_CODE (arg) == ADDR_EXPR
		      && DECL_P (TREE_OPERAND (arg, 0))
		      && TYPE_ATOMIC (TREE_TYPE (TREE_OPERAND (arg, 0))))
		    mark_exp_read (TREE_OPERAND (arg, 0));
		}
	    }
	}
      /* FALLTHRU */
    case C_MAYBE_CONST_EXPR:
      /* The value of a comma/maybe-const expression is operand 1.  */
      mark_exp_read (TREE_OPERAND (exp, 1));
      break;
    case OMP_ARRAY_SECTION:
      /* Base, low bound and length (when present) are all reads.  */
      mark_exp_read (TREE_OPERAND (exp, 0));
      if (TREE_OPERAND (exp, 1))
	mark_exp_read (TREE_OPERAND (exp, 1));
      if (TREE_OPERAND (exp, 2))
	mark_exp_read (TREE_OPERAND (exp, 2));
      break;
    default:
      break;
    }
}
2011 | |
/* Perform the default conversion of arrays and functions to pointers.
   Return the result of converting EXP.  For any other expression, just
   return EXP.

   LOC is the location of the expression.  */

struct c_expr
default_function_array_conversion (location_t loc, struct c_expr exp)
{
  tree orig_exp = exp.value;
  tree type = TREE_TYPE (exp.value);
  enum tree_code code = TREE_CODE (type);

  switch (code)
    {
    case ARRAY_TYPE:
      {
	bool not_lvalue = false;
	bool lvalue_array_p;

	/* Strip NON_LVALUE_EXPRs and no-op conversions wrapping the
	   array, remembering whether a NON_LVALUE_EXPR was seen (in
	   which case the array must not be treated as an lvalue).  */
	while ((TREE_CODE (exp.value) == NON_LVALUE_EXPR
		|| CONVERT_EXPR_P (exp.value))
	       && TREE_TYPE (TREE_OPERAND (exp.value, 0)) == type)
	  {
	    if (TREE_CODE (exp.value) == NON_LVALUE_EXPR)
	      not_lvalue = true;
	    exp.value = TREE_OPERAND (exp.value, 0);
	  }

	copy_warning (exp.value, orig_exp);

	lvalue_array_p = !not_lvalue && lvalue_p (exp.value);
	if (!flag_isoc99 && !lvalue_array_p)
	  {
	    /* Before C99, non-lvalue arrays do not decay to pointers.
	       Normally, using such an array would be invalid; but it can
	       be used correctly inside sizeof or as a statement expression.
	       Thus, do not give an error here; an error will result later.  */
	    return exp;
	  }

	exp.value = array_to_pointer_conversion (loc, exp: exp.value);
      }
      break;
    case FUNCTION_TYPE:
      /* A function designator decays to a pointer to the function.  */
      exp.value = function_to_pointer_conversion (loc, exp: exp.value);
      break;
    default:
      /* Any other type is returned unchanged.  */
      break;
    }

  return exp;
}
2065 | |
2066 | struct c_expr |
2067 | default_function_array_read_conversion (location_t loc, struct c_expr exp) |
2068 | { |
2069 | mark_exp_read (exp: exp.value); |
2070 | return default_function_array_conversion (loc, exp); |
2071 | } |
2072 | |
2073 | /* Return whether EXPR should be treated as an atomic lvalue for the |
2074 | purposes of load and store handling. */ |
2075 | |
2076 | static bool |
2077 | really_atomic_lvalue (tree expr) |
2078 | { |
2079 | if (error_operand_p (t: expr)) |
2080 | return false; |
2081 | if (!TYPE_ATOMIC (TREE_TYPE (expr))) |
2082 | return false; |
2083 | if (!lvalue_p (expr)) |
2084 | return false; |
2085 | |
2086 | /* Ignore _Atomic on register variables, since their addresses can't |
2087 | be taken so (a) atomicity is irrelevant and (b) the normal atomic |
2088 | sequences wouldn't work. Ignore _Atomic on structures containing |
2089 | bit-fields, since accessing elements of atomic structures or |
2090 | unions is undefined behavior (C11 6.5.2.3#5), but it's unclear if |
2091 | it's undefined at translation time or execution time, and the |
2092 | normal atomic sequences again wouldn't work. */ |
2093 | while (handled_component_p (t: expr)) |
2094 | { |
2095 | if (TREE_CODE (expr) == COMPONENT_REF |
2096 | && DECL_C_BIT_FIELD (TREE_OPERAND (expr, 1))) |
2097 | return false; |
2098 | expr = TREE_OPERAND (expr, 0); |
2099 | } |
2100 | if (DECL_P (expr) && C_DECL_REGISTER (expr)) |
2101 | return false; |
2102 | return true; |
2103 | } |
2104 | |
/* If EXPR is a named constant (C23) derived from a constexpr variable
   - that is, a reference to such a variable, or a member extracted by
   a sequence of structure and union (but not array) member accesses
   (where union member accesses must access the same member as
   initialized) - then return the corresponding initializer;
   otherwise, return NULL_TREE.  */

static tree
maybe_get_constexpr_init (tree expr)
{
  tree decl = NULL_TREE;
  /* Direct reference to a variable, or a compound literal's backing
     declaration.  */
  if (TREE_CODE (expr) == VAR_DECL)
    decl = expr;
  else if (TREE_CODE (expr) == COMPOUND_LITERAL_EXPR)
    decl = COMPOUND_LITERAL_EXPR_DECL (expr);
  if (decl
      && C_DECL_DECLARED_CONSTEXPR (decl)
      && DECL_INITIAL (decl) != NULL_TREE
      && !error_operand_p (DECL_INITIAL (decl)))
    return DECL_INITIAL (decl);
  /* Otherwise only a member access chain can yield a named constant.  */
  if (TREE_CODE (expr) != COMPONENT_REF)
    return NULL_TREE;
  /* Recurse on the object being accessed to get its initializer.  */
  tree inner = maybe_get_constexpr_init (TREE_OPERAND (expr, 0));
  if (inner == NULL_TREE)
    return NULL_TREE;
  /* Look through no-op conversions around the initializer.  */
  while ((CONVERT_EXPR_P (inner) || TREE_CODE (inner) == NON_LVALUE_EXPR)
	 && !error_operand_p (t: inner)
	 && (TYPE_MAIN_VARIANT (TREE_TYPE (inner))
	     == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (inner, 0)))))
    inner = TREE_OPERAND (inner, 0);
  if (TREE_CODE (inner) != CONSTRUCTOR)
    return NULL_TREE;
  /* Search the CONSTRUCTOR for the accessed field's value.  */
  tree field = TREE_OPERAND (expr, 1);
  unsigned HOST_WIDE_INT cidx;
  tree cfield, cvalue;
  bool have_other_init = false;
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (inner), cidx, cfield, cvalue)
    {
      if (cfield == field)
	return cvalue;
      have_other_init = true;
    }
  /* For a union, only the initialized member (or, with an empty
     initializer, the first member) has a defined value.  */
  if (TREE_CODE (TREE_TYPE (inner)) == UNION_TYPE
      && (have_other_init || field != TYPE_FIELDS (TREE_TYPE (inner))))
    return NULL_TREE;
  /* Return a default initializer.  */
  if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (expr)))
    return build_constructor (TREE_TYPE (expr), NULL);
  return build_zero_cst (TREE_TYPE (expr));
}
2155 | |
/* Convert expression EXP (location LOC) from lvalue to rvalue,
   including converting functions and arrays to pointers if CONVERT_P.
   If READ_P, also mark the expression as having been read.  If
   FOR_INIT, constexpr expressions of structure and union type should
   be replaced by the corresponding CONSTRUCTOR; otherwise, only
   constexpr scalars (including elements of structures and unions) are
   replaced by their initializers.  */

struct c_expr
convert_lvalue_to_rvalue (location_t loc, struct c_expr exp,
			  bool convert_p, bool read_p, bool for_init)
{
  bool force_non_npc = false;
  if (read_p)
    mark_exp_read (exp: exp.value);
  if (convert_p)
    exp = default_function_array_conversion (loc, exp);
  if (!VOID_TYPE_P (TREE_TYPE (exp.value)))
    exp.value = require_complete_type (loc, value: exp.value);
  /* Substitute a C23 named constant by its initializer, per the
     FOR_INIT rules described above.  */
  if (for_init || !RECORD_OR_UNION_TYPE_P (TREE_TYPE (exp.value)))
    {
      tree init = maybe_get_constexpr_init (expr: exp.value);
      if (init != NULL_TREE)
	{
	  /* A named constant of pointer type or type nullptr_t is not
	     a null pointer constant even if the initializer is
	     one.  */
	  if (TREE_CODE (init) == INTEGER_CST
	      && !INTEGRAL_TYPE_P (TREE_TYPE (init))
	      && integer_zerop (init))
	    force_non_npc = true;
	  exp.value = init;
	}
    }
  /* An _Atomic lvalue must be read with an atomic load into a
     non-atomic temporary.  */
  if (really_atomic_lvalue (expr: exp.value))
    {
      vec<tree, va_gc> *params;
      tree nonatomic_type, tmp, tmp_addr, fndecl, func_call;
      tree expr_type = TREE_TYPE (exp.value);
      tree expr_addr = build_unary_op (loc, ADDR_EXPR, exp.value, false);
      tree seq_cst = build_int_cst (integer_type_node, MEMMODEL_SEQ_CST);

      gcc_assert (TYPE_ATOMIC (expr_type));

      /* Expansion of a generic atomic load may require an addition
	 element, so allocate enough to prevent a resize.  */
      vec_alloc (v&: params, nelems: 4);

      /* Remove the qualifiers for the rest of the expressions and
	 create the VAL temp variable to hold the RHS.  */
      nonatomic_type = build_qualified_type (expr_type, TYPE_UNQUALIFIED);
      tmp = create_tmp_var_raw (nonatomic_type);
      tmp_addr = build_unary_op (loc, ADDR_EXPR, tmp, false);
      TREE_ADDRESSABLE (tmp) = 1;
      /* Do not disable warnings for TMP even though it's artificial.
	 -Winvalid-memory-model depends on it.  */

      /* Issue __atomic_load (&expr, &tmp, SEQ_CST);  */
      fndecl = builtin_decl_explicit (fncode: BUILT_IN_ATOMIC_LOAD);
      params->quick_push (obj: expr_addr);
      params->quick_push (obj: tmp_addr);
      params->quick_push (obj: seq_cst);
      func_call = c_build_function_call_vec (loc, vNULL, fndecl, params, NULL);

      /* EXPR is always read.  */
      mark_exp_read (exp: exp.value);

      /* Return tmp which contains the value loaded.  */
      exp.value = build4 (TARGET_EXPR, nonatomic_type, tmp, func_call,
			  NULL_TREE, NULL_TREE);
    }
  /* Drop qualifiers from the rvalue's type (arrays keep theirs on the
     element type and are left alone).  */
  if (convert_p && !error_operand_p (t: exp.value)
      && (TREE_CODE (TREE_TYPE (exp.value)) != ARRAY_TYPE))
    exp.value = convert (build_qualified_type (TREE_TYPE (exp.value), TYPE_UNQUALIFIED), exp.value);
  /* Wrap in a NOP_EXPR so the result is not a null pointer constant.  */
  if (force_non_npc)
    exp.value = build1 (NOP_EXPR, TREE_TYPE (exp.value), exp.value);

  {
    tree false_value, true_value;
    /* For a "hardened boolean" type (hardbool attribute), verify the
       loaded representation is one of the two sanctioned values and
       trap otherwise, mapping it to a plain boolean.  */
    if (convert_p && !error_operand_p (t: exp.value)
	&& c_hardbool_type_attr (TREE_TYPE (exp.value),
				 false_value: &false_value, true_value: &true_value))
      {
	tree t = save_expr (exp.value);

	mark_exp_read (exp: exp.value);

	/* (t != true_value ? (t != false_value ? trap(),true : false)
				: true)  */
	tree trapfn = builtin_decl_explicit (fncode: BUILT_IN_TRAP);
	tree expr = build_call_expr_loc (loc, trapfn, 0);
	expr = build_compound_expr (loc, expr, boolean_true_node);
	expr = fold_build3_loc (loc, COND_EXPR, boolean_type_node,
				fold_build2_loc (loc, NE_EXPR,
						 boolean_type_node,
						 t, true_value),
				expr, boolean_true_node);
	expr = fold_build3_loc (loc, COND_EXPR, boolean_type_node,
				fold_build2_loc (loc, NE_EXPR,
						 boolean_type_node,
						 t, false_value),
				expr, boolean_false_node);

	exp.value = expr;
      }
  }

  return exp;
}
2263 | |
/* EXP is an expression of integer type.  Apply the integer promotions
   to it and return the promoted value.  */

tree
perform_integral_promotions (tree exp)
{
  tree type = TREE_TYPE (exp);
  enum tree_code code = TREE_CODE (type);

  gcc_assert (INTEGRAL_TYPE_P (type));

  /* Convert enums to the result of applying the integer promotions to
     their underlying type.  */
  if (code == ENUMERAL_TYPE)
    {
      type = ENUM_UNDERLYING_TYPE (type);
      if (c_promoting_integer_type_p (type))
	{
	  /* Preserve unsignedness only when the promoted type would
	     be exactly as wide as int.  */
	  if (TYPE_UNSIGNED (type)
	      && TYPE_PRECISION (type) == TYPE_PRECISION (integer_type_node))
	    type = unsigned_type_node;
	  else
	    type = integer_type_node;
	}

      return convert (type, exp);
    }

  /* ??? This should no longer be needed now bit-fields have their
     proper types.  */
  if (TREE_CODE (exp) == COMPONENT_REF
      && DECL_C_BIT_FIELD (TREE_OPERAND (exp, 1)))
    {
      /* _BitInt bit-fields promote to their declared _BitInt type.  */
      if (TREE_CODE (DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1)))
	  == BITINT_TYPE)
	return convert (DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1)), exp);
      /* If it's thinner than an int, promote it like a
	 c_promoting_integer_type_p, otherwise leave it alone.  */
      if (compare_tree_int (DECL_SIZE (TREE_OPERAND (exp, 1)),
			    TYPE_PRECISION (integer_type_node)) < 0)
	return convert (integer_type_node, exp);
    }

  if (c_promoting_integer_type_p (type))
    {
      /* Preserve unsignedness if not really getting any wider.  */
      if (TYPE_UNSIGNED (type)
	  && TYPE_PRECISION (type) == TYPE_PRECISION (integer_type_node))
	return convert (unsigned_type_node, exp);

      return convert (integer_type_node, exp);
    }

  /* No promotion applies.  */
  return exp;
}
2319 | |
2320 | |
/* Perform default promotions for C data used in expressions.
   Enumeral types or short or char are converted to int.
   In addition, manifest constants symbols are replaced by their values.
   Returns error_mark_node (after emitting a diagnostic) for void values
   or incomplete types.  */

tree
default_conversion (tree exp)
{
  tree orig_exp;
  tree type = TREE_TYPE (exp);
  /* Note: CODE is captured from the original EXP; the CONST_DECL
     replacement below does not change it.  */
  enum tree_code code = TREE_CODE (type);
  tree promoted_type;

  mark_exp_read (exp);

  /* Functions and arrays have been converted during parsing.  */
  gcc_assert (code != FUNCTION_TYPE);
  if (code == ARRAY_TYPE)
    return exp;

  /* Constants can be used directly unless they're not loadable.  */
  if (TREE_CODE (exp) == CONST_DECL)
    exp = DECL_INITIAL (exp);

  /* Strip no-op conversions.  */
  orig_exp = exp;
  STRIP_TYPE_NOPS (exp);

  copy_warning (exp, orig_exp);

  if (code == VOID_TYPE)
    {
      error_at (EXPR_LOC_OR_LOC (exp, input_location),
		"void value not ignored as it ought to be" );
      return error_mark_node;
    }

  exp = require_complete_type (EXPR_LOC_OR_LOC (exp, input_location), value: exp);
  if (exp == error_mark_node)
    return error_mark_node;

  /* Let the target override the promotion (e.g. for float -> double).  */
  promoted_type = targetm.promoted_type (type);
  if (promoted_type)
    return convert (promoted_type, exp);

  if (INTEGRAL_TYPE_P (type))
    return perform_integral_promotions (exp);

  return exp;
}
2370 | |
/* Look up COMPONENT in a structure or union TYPE.

   If the component name is not found, returns NULL_TREE.  Otherwise,
   the return value is a TREE_LIST, with each TREE_VALUE a FIELD_DECL
   stepping down the chain to the component, which is in the last
   TREE_VALUE of the list.  Normally the list is of length one, but if
   the component is embedded within (nested) anonymous structures or
   unions, the list steps down the chain to the component.  */

static tree
lookup_field (tree type, tree component)
{
  tree field;

  /* If TYPE_LANG_SPECIFIC is set, then it is a sorted array of pointers
     to the field elements.  Use a binary search on this array to quickly
     find the element.  Otherwise, do a linear search.  TYPE_LANG_SPECIFIC
     will always be set for structures which have many elements.

     Duplicate field checking replaces duplicates with NULL_TREE so
     TYPE_LANG_SPECIFIC arrays are potentially no longer sorted.  In that
     case just iterate using DECL_CHAIN.  */

  if (TYPE_LANG_SPECIFIC (type) && TYPE_LANG_SPECIFIC (type)->s
      && !seen_error ())
    {
      int bot, top, half;
      tree *field_array = &TYPE_LANG_SPECIFIC (type)->s->elts[0];

      field = TYPE_FIELDS (type);
      bot = 0;
      top = TYPE_LANG_SPECIFIC (type)->s->len;
      /* Binary search over [bot, top); the array is sorted by the
	 pointer value of DECL_NAME (see the < comparison below).  */
      while (top - bot > 1)
	{
	  half = (top - bot + 1) >> 1;
	  field = field_array[bot+half];

	  if (DECL_NAME (field) == NULL_TREE)
	    {
	      /* Anonymous members sort before named ones; scan them
		 linearly from BOT, recursing into each anonymous
		 struct/union, then resume the binary search.  */
	      /* Step through all anon unions in linear fashion.  */
	      while (DECL_NAME (field_array[bot]) == NULL_TREE)
		{
		  field = field_array[bot++];
		  if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (field)))
		    {
		      tree anon = lookup_field (TREE_TYPE (field), component);

		      if (anon)
			return tree_cons (NULL_TREE, field, anon);

		      /* The Plan 9 compiler permits referring
			 directly to an anonymous struct/union field
			 using a typedef name.  */
		      if (flag_plan9_extensions
			  && TYPE_NAME (TREE_TYPE (field)) != NULL_TREE
			  && (TREE_CODE (TYPE_NAME (TREE_TYPE (field)))
			      == TYPE_DECL)
			  && (DECL_NAME (TYPE_NAME (TREE_TYPE (field)))
			      == component))
			break;
		    }
		}

	      /* Entire record is only anon unions.  */
	      if (bot > top)
		return NULL_TREE;

	      /* Restart the binary search, with new lower bound.  */
	      continue;
	    }

	  if (DECL_NAME (field) == component)
	    break;
	  /* Identifiers are interned, so ordering by pointer value is
	     consistent with how the array was sorted.  */
	  if (DECL_NAME (field) < component)
	    bot += half;
	  else
	    top = bot + half;
	}

      /* The loop may exit with FIELD one step away from the answer;
	 check the final candidate at BOT as well.  */
      if (DECL_NAME (field_array[bot]) == component)
	field = field_array[bot];
      else if (DECL_NAME (field) != component)
	return NULL_TREE;
    }
  else
    {
      /* Linear search over the field chain.  */
      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	{
	  /* Recurse into anonymous structs/unions.  */
	  if (DECL_NAME (field) == NULL_TREE
	      && RECORD_OR_UNION_TYPE_P (TREE_TYPE (field)))
	    {
	      tree anon = lookup_field (TREE_TYPE (field), component);

	      if (anon)
		return tree_cons (NULL_TREE, field, anon);

	      /* The Plan 9 compiler permits referring directly to an
		 anonymous struct/union field using a typedef
		 name.  */
	      if (flag_plan9_extensions
		  && TYPE_NAME (TREE_TYPE (field)) != NULL_TREE
		  && TREE_CODE (TYPE_NAME (TREE_TYPE (field))) == TYPE_DECL
		  && (DECL_NAME (TYPE_NAME (TREE_TYPE (field)))
		      == component))
		break;
	    }

	  if (DECL_NAME (field) == component)
	    break;
	}

      if (field == NULL_TREE)
	return NULL_TREE;
    }

  return tree_cons (NULL_TREE, field, NULL_TREE);
}
2488 | |
2489 | /* Recursively append candidate IDENTIFIER_NODEs to CANDIDATES. */ |
2490 | |
2491 | static void |
2492 | lookup_field_fuzzy_find_candidates (tree type, tree component, |
2493 | vec<tree> *candidates) |
2494 | { |
2495 | tree field; |
2496 | for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field)) |
2497 | { |
2498 | if (DECL_NAME (field) == NULL_TREE |
2499 | && RECORD_OR_UNION_TYPE_P (TREE_TYPE (field))) |
2500 | lookup_field_fuzzy_find_candidates (TREE_TYPE (field), component, |
2501 | candidates); |
2502 | |
2503 | if (DECL_NAME (field)) |
2504 | candidates->safe_push (DECL_NAME (field)); |
2505 | } |
2506 | } |
2507 | |
2508 | /* Like "lookup_field", but find the closest matching IDENTIFIER_NODE, |
2509 | rather than returning a TREE_LIST for an exact match. */ |
2510 | |
2511 | static tree |
2512 | lookup_field_fuzzy (tree type, tree component) |
2513 | { |
2514 | gcc_assert (TREE_CODE (component) == IDENTIFIER_NODE); |
2515 | |
2516 | /* First, gather a list of candidates. */ |
2517 | auto_vec <tree> candidates; |
2518 | |
2519 | lookup_field_fuzzy_find_candidates (type, component, |
2520 | candidates: &candidates); |
2521 | |
2522 | return find_closest_identifier (target: component, candidates: &candidates); |
2523 | } |
2524 | |
2525 | /* Support function for build_component_ref's error-handling. |
2526 | |
2527 | Given DATUM_TYPE, and "DATUM.COMPONENT", where DATUM is *not* a |
2528 | struct or union, should we suggest "DATUM->COMPONENT" as a hint? */ |
2529 | |
2530 | static bool |
2531 | should_suggest_deref_p (tree datum_type) |
2532 | { |
2533 | /* We don't do it for Objective-C, since Objective-C 2.0 dot-syntax |
2534 | allows "." for ptrs; we could be handling a failed attempt |
2535 | to access a property. */ |
2536 | if (c_dialect_objc ()) |
2537 | return false; |
2538 | |
2539 | /* Only suggest it for pointers... */ |
2540 | if (TREE_CODE (datum_type) != POINTER_TYPE) |
2541 | return false; |
2542 | |
2543 | /* ...to structs/unions. */ |
2544 | tree underlying_type = TREE_TYPE (datum_type); |
2545 | enum tree_code code = TREE_CODE (underlying_type); |
2546 | if (code == RECORD_TYPE || code == UNION_TYPE) |
2547 | return true; |
2548 | else |
2549 | return false; |
2550 | } |
2551 | |
2552 | /* Make an expression to refer to the COMPONENT field of structure or |
2553 | union value DATUM. COMPONENT is an IDENTIFIER_NODE. LOC is the |
2554 | location of the COMPONENT_REF. COMPONENT_LOC is the location |
2555 | of COMPONENT. ARROW_LOC is the location of the first -> operand if |
2556 | it is from -> operator. */ |
2557 | |
2558 | tree |
2559 | build_component_ref (location_t loc, tree datum, tree component, |
2560 | location_t component_loc, location_t arrow_loc) |
2561 | { |
2562 | tree type = TREE_TYPE (datum); |
2563 | enum tree_code code = TREE_CODE (type); |
2564 | tree field = NULL; |
2565 | tree ref; |
2566 | bool datum_lvalue = lvalue_p (datum); |
2567 | |
2568 | if (!objc_is_public (datum, component)) |
2569 | return error_mark_node; |
2570 | |
2571 | /* Detect Objective-C property syntax object.property. */ |
2572 | if (c_dialect_objc () |
2573 | && (ref = objc_maybe_build_component_ref (datum, component))) |
2574 | return ref; |
2575 | |
2576 | /* See if there is a field or component with name COMPONENT. */ |
2577 | |
2578 | if (code == RECORD_TYPE || code == UNION_TYPE) |
2579 | { |
2580 | if (!COMPLETE_TYPE_P (type)) |
2581 | { |
2582 | c_incomplete_type_error (loc, NULL_TREE, type); |
2583 | return error_mark_node; |
2584 | } |
2585 | |
2586 | field = lookup_field (type, component); |
2587 | |
2588 | if (!field) |
2589 | { |
2590 | tree guessed_id = lookup_field_fuzzy (type, component); |
2591 | if (guessed_id) |
2592 | { |
2593 | /* Attempt to provide a fixit replacement hint, if |
2594 | we have a valid range for the component. */ |
2595 | location_t reported_loc |
2596 | = (component_loc != UNKNOWN_LOCATION) ? component_loc : loc; |
2597 | gcc_rich_location rich_loc (reported_loc); |
2598 | if (component_loc != UNKNOWN_LOCATION) |
2599 | rich_loc.add_fixit_misspelled_id (misspelled_token_loc: component_loc, hint_id: guessed_id); |
2600 | error_at (&rich_loc, |
2601 | "%qT has no member named %qE; did you mean %qE?" , |
2602 | type, component, guessed_id); |
2603 | } |
2604 | else |
2605 | error_at (loc, "%qT has no member named %qE" , type, component); |
2606 | return error_mark_node; |
2607 | } |
2608 | |
2609 | /* Accessing elements of atomic structures or unions is undefined |
2610 | behavior (C11 6.5.2.3#5). */ |
2611 | if (TYPE_ATOMIC (type) && c_inhibit_evaluation_warnings == 0) |
2612 | { |
2613 | if (code == RECORD_TYPE) |
2614 | warning_at (loc, 0, "accessing a member %qE of an atomic " |
2615 | "structure %qE" , component, datum); |
2616 | else |
2617 | warning_at (loc, 0, "accessing a member %qE of an atomic " |
2618 | "union %qE" , component, datum); |
2619 | } |
2620 | |
2621 | /* Chain the COMPONENT_REFs if necessary down to the FIELD. |
2622 | This might be better solved in future the way the C++ front |
2623 | end does it - by giving the anonymous entities each a |
2624 | separate name and type, and then have build_component_ref |
2625 | recursively call itself. We can't do that here. */ |
2626 | do |
2627 | { |
2628 | tree subdatum = TREE_VALUE (field); |
2629 | int quals; |
2630 | tree subtype; |
2631 | bool use_datum_quals; |
2632 | |
2633 | if (TREE_TYPE (subdatum) == error_mark_node) |
2634 | return error_mark_node; |
2635 | |
2636 | /* If this is an rvalue, it does not have qualifiers in C |
2637 | standard terms and we must avoid propagating such |
2638 | qualifiers down to a non-lvalue array that is then |
2639 | converted to a pointer. */ |
2640 | use_datum_quals = (datum_lvalue |
2641 | || TREE_CODE (TREE_TYPE (subdatum)) != ARRAY_TYPE); |
2642 | |
2643 | quals = TYPE_QUALS (strip_array_types (TREE_TYPE (subdatum))); |
2644 | if (use_datum_quals) |
2645 | quals |= TYPE_QUALS (TREE_TYPE (datum)); |
2646 | subtype = c_build_qualified_type (TREE_TYPE (subdatum), quals); |
2647 | |
2648 | ref = build3 (COMPONENT_REF, subtype, datum, subdatum, |
2649 | NULL_TREE); |
2650 | SET_EXPR_LOCATION (ref, loc); |
2651 | if (TREE_READONLY (subdatum) |
2652 | || (use_datum_quals && TREE_READONLY (datum))) |
2653 | TREE_READONLY (ref) = 1; |
2654 | if (TREE_THIS_VOLATILE (subdatum) |
2655 | || (use_datum_quals && TREE_THIS_VOLATILE (datum))) |
2656 | TREE_THIS_VOLATILE (ref) = 1; |
2657 | |
2658 | if (TREE_UNAVAILABLE (subdatum)) |
2659 | error_unavailable_use (subdatum, NULL_TREE); |
2660 | else if (TREE_DEPRECATED (subdatum)) |
2661 | warn_deprecated_use (subdatum, NULL_TREE); |
2662 | |
2663 | datum = ref; |
2664 | |
2665 | field = TREE_CHAIN (field); |
2666 | } |
2667 | while (field); |
2668 | |
2669 | return ref; |
2670 | } |
2671 | else if (should_suggest_deref_p (datum_type: type)) |
2672 | { |
2673 | /* Special-case the error message for "ptr.field" for the case |
2674 | where the user has confused "." vs "->". */ |
2675 | rich_location richloc (line_table, loc); |
2676 | if (INDIRECT_REF_P (datum) && arrow_loc != UNKNOWN_LOCATION) |
2677 | { |
2678 | richloc.add_fixit_insert_before (where: arrow_loc, new_content: "(*" ); |
2679 | richloc.add_fixit_insert_after (where: arrow_loc, new_content: ")" ); |
2680 | error_at (&richloc, |
2681 | "%qE is a pointer to pointer; did you mean to dereference " |
2682 | "it before applying %<->%> to it?" , |
2683 | TREE_OPERAND (datum, 0)); |
2684 | } |
2685 | else |
2686 | { |
2687 | /* "loc" should be the "." token. */ |
2688 | richloc.add_fixit_replace (new_content: "->" ); |
2689 | error_at (&richloc, |
2690 | "%qE is a pointer; did you mean to use %<->%>?" , |
2691 | datum); |
2692 | } |
2693 | return error_mark_node; |
2694 | } |
2695 | else if (code != ERROR_MARK) |
2696 | error_at (loc, |
2697 | "request for member %qE in something not a structure or union" , |
2698 | component); |
2699 | |
2700 | return error_mark_node; |
2701 | } |
2702 | |
/* Given an expression PTR for a pointer, return an expression
   for the value pointed to.
   ERRORSTRING is the name of the operator to appear in error messages.

   LOC is the location to use for the generated tree.  */

tree
build_indirect_ref (location_t loc, tree ptr, ref_operator errstring)
{
  tree pointer = default_conversion (exp: ptr);
  tree type = TREE_TYPE (pointer);
  tree ref;

  if (TREE_CODE (type) == POINTER_TYPE)
    {
      if (CONVERT_EXPR_P (pointer)
	  || TREE_CODE (pointer) == VIEW_CONVERT_EXPR)
	{
	  /* If a warning is issued, mark it to avoid duplicates from
	     the backend.  This only needs to be done at
	     warn_strict_aliasing > 2.  */
	  if (warn_strict_aliasing > 2)
	    if (strict_aliasing_warning (EXPR_LOCATION (pointer),
					 type, TREE_OPERAND (pointer, 0)))
	      suppress_warning (pointer, OPT_Wstrict_aliasing_);
	}

      /* *&X is X itself when the operand and pointee types agree
	 exactly; just strip the ADDR_EXPR rather than building an
	 INDIRECT_REF around it.  */
      if (TREE_CODE (pointer) == ADDR_EXPR
	  && (TREE_TYPE (TREE_OPERAND (pointer, 0))
	      == TREE_TYPE (type)))
	{
	  ref = TREE_OPERAND (pointer, 0);
	  protected_set_expr_location (ref, loc);
	  return ref;
	}
      else
	{
	  tree t = TREE_TYPE (type);

	  ref = build1 (INDIRECT_REF, t, pointer);

	  if (VOID_TYPE_P (t) && c_inhibit_evaluation_warnings == 0)
	    warning_at (loc, 0, "dereferencing %<void *%> pointer" );

	  /* We *must* set TREE_READONLY when dereferencing a pointer to const,
	     so that we get the proper error message if the result is used
	     to assign to.  Also, &* is supposed to be a no-op.
	     And ANSI C seems to specify that the type of the result
	     should be the const type.  */
	  /* A de-reference of a pointer to const is not a const.  It is valid
	     to change it via some other pointer.  */
	  TREE_READONLY (ref) = TYPE_READONLY (t);
	  /* A volatile pointee makes the dereference itself an effect that
	     must not be optimized away.  */
	  TREE_SIDE_EFFECTS (ref)
	    = TYPE_VOLATILE (t) || TREE_SIDE_EFFECTS (pointer);
	  TREE_THIS_VOLATILE (ref) = TYPE_VOLATILE (t);
	  protected_set_expr_location (ref, loc);
	  return ref;
	}
    }
  else if (TREE_CODE (pointer) != ERROR_MARK)
    /* Dereferencing a non-pointer; ERRSTRING tells the diagnostic which
       operator (unary *, ->, [] ...) was applied.  */
    invalid_indirection_error (loc, type, errstring);

  return error_mark_node;
}
2767 | |
/* This handles expressions of the form "a[i]", which denotes
   an array reference.

   This is logically equivalent in C to *(a+i), but we may do it differently.
   If A is a variable or a member, we generate a primitive ARRAY_REF.
   This avoids forcing the array out of registers, and can work on
   arrays that are not lvalues (for example, members of structures returned
   by functions).

   For vector types, allow vector[i] but not i[vector], and create
   *(((type*)&vectortype) + i) for the expression.

   LOC is the location to use for the returned expression.  */

tree
build_array_ref (location_t loc, tree array, tree index)
{
  tree ret;
  bool swapped = false;
  if (TREE_TYPE (array) == error_mark_node
      || TREE_TYPE (index) == error_mark_node)
    return error_mark_node;

  /* C allows the commuted form index[array]; detect it and swap the
     operands so ARRAY is always the array/pointer.  Vectors are
     deliberately excluded from the commuted form.  */
  if (TREE_CODE (TREE_TYPE (array)) != ARRAY_TYPE
      && TREE_CODE (TREE_TYPE (array)) != POINTER_TYPE
      /* Allow vector[index] but not index[vector].  */
      && !gnu_vector_type_p (TREE_TYPE (array)))
    {
      if (TREE_CODE (TREE_TYPE (index)) != ARRAY_TYPE
	  && TREE_CODE (TREE_TYPE (index)) != POINTER_TYPE)
	{
	  error_at (loc,
		    "subscripted value is neither array nor pointer nor vector" );

	  return error_mark_node;
	}
      std::swap (a&: array, b&: index);
      swapped = true;
    }

  if (!INTEGRAL_TYPE_P (TREE_TYPE (index)))
    {
      error_at (loc, "array subscript is not an integer" );
      return error_mark_node;
    }

  if (TREE_CODE (TREE_TYPE (TREE_TYPE (array))) == FUNCTION_TYPE)
    {
      error_at (loc, "subscripted value is pointer to function" );
      return error_mark_node;
    }

  /* ??? Existing practice has been to warn only when the char
     index is syntactically the index, not for char[array].  */
  if (!swapped)
      warn_array_subscript_with_type_char (loc, index);

  /* Apply default promotions *after* noticing character types.  */
  index = default_conversion (exp: index);
  if (index == error_mark_node)
    return error_mark_node;

  gcc_assert (TREE_CODE (TREE_TYPE (index)) == INTEGER_TYPE
	      || TREE_CODE (TREE_TYPE (index)) == BITINT_TYPE);

  /* A subscripted vector is rewritten as an array access; the result of
     such an access is not an lvalue.  */
  bool was_vector = VECTOR_TYPE_P (TREE_TYPE (array));
  bool non_lvalue = convert_vector_to_array_for_subscript (loc, &array, index);

  if (TREE_CODE (TREE_TYPE (array)) == ARRAY_TYPE)
    {
      tree rval, type;

      /* An array that is indexed by a non-constant
	 cannot be stored in a register; we must be able to do
	 address arithmetic on its address.
	 Likewise an array of elements of variable size.  */
      if (TREE_CODE (index) != INTEGER_CST
	  || (COMPLETE_TYPE_P (TREE_TYPE (TREE_TYPE (array)))
	      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (array)))) != INTEGER_CST))
	{
	  if (!c_mark_addressable (array, true))
	    return error_mark_node;
	}
      /* An array that is indexed by a constant value which is not within
	 the array bounds cannot be stored in a register either; because we
	 would get a crash in store_bit_field/extract_bit_field when trying
	 to access a non-existent part of the register.  */
      if (TREE_CODE (index) == INTEGER_CST
	  && TYPE_DOMAIN (TREE_TYPE (array))
	  && !int_fits_type_p (index, TYPE_DOMAIN (TREE_TYPE (array))))
	{
	  if (!c_mark_addressable (array))
	    return error_mark_node;
	}

      /* Pedantic diagnostics for subscripting 'register' arrays and
	 (pre-C99) non-lvalue arrays.  Skipped for rewritten vector
	 accesses.  */
      if ((pedantic || warn_c90_c99_compat)
	  && ! was_vector)
	{
	  tree foo = array;
	  while (TREE_CODE (foo) == COMPONENT_REF)
	    foo = TREE_OPERAND (foo, 0);
	  if (VAR_P (foo) && C_DECL_REGISTER (foo))
	    pedwarn (loc, OPT_Wpedantic,
		     "ISO C forbids subscripting %<register%> array" );
	  else if (!lvalue_p (foo))
	    pedwarn_c90 (loc, opt: OPT_Wpedantic,
			 "ISO C90 forbids subscripting non-lvalue "
			 "array" );
	}

      /* A _BitInt index wider than sizetype must be narrowed before it
	 can appear in an ARRAY_REF.  */
      if (TREE_CODE (TREE_TYPE (index)) == BITINT_TYPE
	  && TYPE_PRECISION (TREE_TYPE (index)) > TYPE_PRECISION (sizetype))
	index = fold_convert (sizetype, index);

      type = TREE_TYPE (TREE_TYPE (array));
      rval = build4 (ARRAY_REF, type, array, index, NULL_TREE, NULL_TREE);
      /* Array ref is const/volatile if the array elements are
	 or if the array is.  */
      TREE_READONLY (rval)
	|= (TYPE_READONLY (TREE_TYPE (TREE_TYPE (array)))
	    | TREE_READONLY (array));
      TREE_SIDE_EFFECTS (rval)
	|= (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (array)))
	    | TREE_SIDE_EFFECTS (array));
      TREE_THIS_VOLATILE (rval)
	|= (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (array)))
	    /* This was added by rms on 16 Nov 91.
	       It fixes  vol struct foo *a;  a->elts[1]
	       in an inline function.
	       Hope it doesn't break something else.  */
	    | TREE_THIS_VOLATILE (array));
      ret = require_complete_type (loc, value: rval);
      protected_set_expr_location (ret, loc);
      if (non_lvalue)
	ret = non_lvalue_loc (loc, ret);
      return ret;
    }
  else
    {
      /* Pointer subscripting: fall back to the *(a + i) form.  */
      tree ar = default_conversion (exp: array);

      if (ar == error_mark_node)
	return ar;

      gcc_assert (TREE_CODE (TREE_TYPE (ar)) == POINTER_TYPE);
      gcc_assert (TREE_CODE (TREE_TYPE (TREE_TYPE (ar))) != FUNCTION_TYPE);

      ret = build_indirect_ref (loc, ptr: build_binary_op (loc, PLUS_EXPR, ar,
						     index, false),
				errstring: RO_ARRAY_INDEXING);
      if (non_lvalue)
	ret = non_lvalue_loc (loc, ret);
      return ret;
    }
}
2923 | |
2924 | /* Build an OpenMP array section reference, creating an exact type for the |
2925 | resulting expression based on the element type and bounds if possible. If |
2926 | we have variable bounds, create an incomplete array type for the result |
2927 | instead. */ |
2928 | |
2929 | tree |
2930 | build_omp_array_section (location_t loc, tree array, tree index, tree length) |
2931 | { |
2932 | tree type = TREE_TYPE (array); |
2933 | gcc_assert (type); |
2934 | |
2935 | tree sectype, eltype = TREE_TYPE (type); |
2936 | |
2937 | /* It's not an array or pointer type. Just reuse the type of the original |
2938 | expression as the type of the array section (an error will be raised |
2939 | anyway, later). */ |
2940 | if (eltype == NULL_TREE || error_operand_p (t: eltype)) |
2941 | sectype = TREE_TYPE (array); |
2942 | else |
2943 | { |
2944 | tree idxtype = NULL_TREE; |
2945 | |
2946 | if (index != NULL_TREE |
2947 | && length != NULL_TREE |
2948 | && INTEGRAL_TYPE_P (TREE_TYPE (index)) |
2949 | && INTEGRAL_TYPE_P (TREE_TYPE (length))) |
2950 | { |
2951 | tree low = fold_convert (sizetype, index); |
2952 | tree high = fold_convert (sizetype, length); |
2953 | high = size_binop (PLUS_EXPR, low, high); |
2954 | high = size_binop (MINUS_EXPR, high, size_one_node); |
2955 | idxtype = build_range_type (sizetype, low, high); |
2956 | } |
2957 | else if ((index == NULL_TREE || integer_zerop (index)) |
2958 | && length != NULL_TREE |
2959 | && INTEGRAL_TYPE_P (TREE_TYPE (length))) |
2960 | idxtype = build_index_type (length); |
2961 | |
2962 | gcc_assert (!error_operand_p (idxtype)); |
2963 | |
2964 | sectype = build_array_type (eltype, idxtype); |
2965 | } |
2966 | |
2967 | return build3_loc (loc, code: OMP_ARRAY_SECTION, type: sectype, arg0: array, arg1: index, arg2: length); |
2968 | } |
2969 | |
2970 | |
/* Build an external reference to identifier ID.  FUN indicates
   whether this will be used for a function call.  LOC is the source
   location of the identifier.  This sets *TYPE to the type of the
   identifier, which is not the same as the type of the returned value
   for CONST_DECLs defined as enum constants.  If the type of the
   identifier is not available, *TYPE is set to NULL.  */
tree
build_external_ref (location_t loc, tree id, bool fun, tree *type)
{
  tree ref;
  tree decl = lookup_name (id);

  /* In Objective-C, an instance variable (ivar) may be preferred to
     whatever lookup_name() found.  */
  decl = objc_lookup_ivar (decl, id);

  *type = NULL;
  if (decl && decl != error_mark_node)
    {
      ref = decl;
      *type = TREE_TYPE (ref);
      /* C23 auto-like underspecified declarations must not refer to
	 themselves in their own initializer.  */
      if (DECL_P (decl) && C_DECL_UNDERSPECIFIED (decl))
	error_at (loc, "underspecified %qD referenced in its initializer" ,
		  decl);
    }
  else if (fun)
    /* Implicit function declaration.  */
    ref = implicitly_declare (loc, id);
  else if (decl == error_mark_node)
    /* Don't complain about something that's already been
       complained about.  */
    return error_mark_node;
  else
    {
      undeclared_variable (loc, id);
      return error_mark_node;
    }

  /* For an OpenMP map clause, we can get better diagnostics for decls with
     unmappable types if we return the decl with an error_mark_node type,
     rather than returning error_mark_node for the decl itself.  */
  if (TREE_TYPE (ref) == error_mark_node
      && !c_omp_array_section_p)
    return error_mark_node;

  /* Diagnose use of unavailable/deprecated declarations.  */
  if (TREE_UNAVAILABLE (ref))
    error_unavailable_use (ref, NULL_TREE);
  else if (TREE_DEPRECATED (ref))
    warn_deprecated_use (ref, NULL_TREE);

  /* Recursive call does not count as usage.  */
  if (ref != current_function_decl)
    {
      TREE_USED (ref) = 1;
    }

  /* Track (potential) uses of functions inside sizeof/typeof so that
     undefined static functions referenced only there are handled
     correctly (see record_maybe_used_decl).  */
  if (TREE_CODE (ref) == FUNCTION_DECL && !in_alignof)
    {
      if (!in_sizeof && !in_typeof)
	C_DECL_USED (ref) = 1;
      else if (DECL_INITIAL (ref) == NULL_TREE
	       && DECL_EXTERNAL (ref)
	       && !TREE_PUBLIC (ref))
	record_maybe_used_decl (ref);
    }

  if (TREE_CODE (ref) == CONST_DECL)
    {
      used_types_insert (TREE_TYPE (ref));

      if (warn_cxx_compat
	  && TREE_CODE (TREE_TYPE (ref)) == ENUMERAL_TYPE
	  && C_TYPE_DEFINED_IN_STRUCT (TREE_TYPE (ref)))
	{
	  warning_at (loc, OPT_Wc___compat,
		      ("enum constant defined in struct or union "
		       "is not visible in C++" ));
	  inform (DECL_SOURCE_LOCATION (ref), "enum constant defined here" );
	}

      /* An enum constant denotes its value, not the CONST_DECL itself.  */
      ref = DECL_INITIAL (ref);
      TREE_CONSTANT (ref) = 1;
    }
  else if (current_function_decl != NULL_TREE
	   && !DECL_FILE_SCOPE_P (current_function_decl)
	   && (VAR_OR_FUNCTION_DECL_P (ref)
	       || TREE_CODE (ref) == PARM_DECL))
    {
      /* Mark references that cross a (nested-)function boundary.  */
      tree context = decl_function_context (ref);

      if (context != NULL_TREE && context != current_function_decl)
	DECL_NONLOCAL (ref) = 1;
    }
  /* C99 6.7.4p3: An inline definition of a function with external
     linkage ... shall not contain a reference to an identifier with
     internal linkage.  */
  else if (current_function_decl != NULL_TREE
	   && DECL_DECLARED_INLINE_P (current_function_decl)
	   && DECL_EXTERNAL (current_function_decl)
	   && VAR_OR_FUNCTION_DECL_P (ref)
	   && (!VAR_P (ref) || TREE_STATIC (ref))
	   && ! TREE_PUBLIC (ref)
	   && DECL_CONTEXT (ref) != current_function_decl)
    record_inline_static (loc, current_function_decl, ref,
			  csi_internal);

  return ref;
}
3079 | |
/* Record details of decls possibly used inside sizeof or typeof.  */
struct maybe_used_decl
{
  /* The decl.  */
  tree decl;
  /* The level seen at (in_sizeof + in_typeof).  */
  int level;
  /* The next one at this level or above, or NULL.  */
  struct maybe_used_decl *next;
};

/* Head of the stack of decls possibly used inside sizeof or typeof,
   most recently recorded first; maintained by record_maybe_used_decl
   and pop_maybe_used.  */
static struct maybe_used_decl *maybe_used_decls;
3092 | |
3093 | /* Record that DECL, an undefined static function reference seen |
3094 | inside sizeof or typeof, might be used if the operand of sizeof is |
3095 | a VLA type or the operand of typeof is a variably modified |
3096 | type. */ |
3097 | |
3098 | static void |
3099 | record_maybe_used_decl (tree decl) |
3100 | { |
3101 | struct maybe_used_decl *t = XOBNEW (&parser_obstack, struct maybe_used_decl); |
3102 | t->decl = decl; |
3103 | t->level = in_sizeof + in_typeof; |
3104 | t->next = maybe_used_decls; |
3105 | maybe_used_decls = t; |
3106 | } |
3107 | |
3108 | /* Pop the stack of decls possibly used inside sizeof or typeof. If |
3109 | USED is false, just discard them. If it is true, mark them used |
3110 | (if no longer inside sizeof or typeof) or move them to the next |
3111 | level up (if still inside sizeof or typeof). */ |
3112 | |
3113 | void |
3114 | pop_maybe_used (bool used) |
3115 | { |
3116 | struct maybe_used_decl *p = maybe_used_decls; |
3117 | int cur_level = in_sizeof + in_typeof; |
3118 | while (p && p->level > cur_level) |
3119 | { |
3120 | if (used) |
3121 | { |
3122 | if (cur_level == 0) |
3123 | C_DECL_USED (p->decl) = 1; |
3124 | else |
3125 | p->level = cur_level; |
3126 | } |
3127 | p = p->next; |
3128 | } |
3129 | if (!used || cur_level == 0) |
3130 | maybe_used_decls = p; |
3131 | } |
3132 | |
/* Return the result of sizeof applied to EXPR.  */

struct c_expr
c_expr_sizeof_expr (location_t loc, struct c_expr expr)
{
  struct c_expr ret;
  if (expr.value == error_mark_node)
    {
      /* Propagate an earlier error silently.  */
      ret.value = error_mark_node;
      ret.original_code = ERROR_MARK;
      ret.original_type = NULL;
      ret.m_decimal = 0;
      pop_maybe_used (used: false);
    }
  else
    {
      bool expr_const_operands = true;

      /* An array parameter has decayed to a pointer, so sizeof yields
	 the pointer's size; warn, as that is rarely intended.  */
      if (TREE_CODE (expr.value) == PARM_DECL
	  && C_ARRAY_PARAMETER (expr.value))
	{
	  auto_diagnostic_group d;
	  if (warning_at (loc, OPT_Wsizeof_array_argument,
			  "%<sizeof%> on array function parameter %qE will "
			  "return size of %qT" , expr.value,
			  TREE_TYPE (expr.value)))
	    inform (DECL_SOURCE_LOCATION (expr.value), "declared here" );
	}
      tree folded_expr = c_fully_fold (expr.value, require_constant_value,
				       &expr_const_operands);
      ret.value = c_sizeof (loc, TREE_TYPE (folded_expr));
      /* Record the most recent sizeof operand and location for later
	 sizeof-related diagnostics elsewhere in the front end.  */
      c_last_sizeof_arg = expr.value;
      c_last_sizeof_loc = loc;
      ret.original_code = SIZEOF_EXPR;
      ret.original_type = NULL;
      ret.m_decimal = 0;
      if (C_TYPE_VARIABLE_SIZE (TREE_TYPE (folded_expr)))
	{
	  /* sizeof is evaluated when given a vla (C99 6.5.3.4p2).  */
	  ret.value = build2 (C_MAYBE_CONST_EXPR, TREE_TYPE (ret.value),
			      folded_expr, ret.value);
	  C_MAYBE_CONST_EXPR_NON_CONST (ret.value) = !expr_const_operands;
	  SET_EXPR_LOCATION (ret.value, loc);
	}
      /* Decls seen only inside this sizeof count as used only if the
	 operand was a VLA (in which case sizeof evaluates it).  */
      pop_maybe_used (C_TYPE_VARIABLE_SIZE (TREE_TYPE (folded_expr)));
    }
  return ret;
}
3181 | |
/* Return the result of sizeof applied to T, a structure for the type
   name passed to sizeof (rather than the type itself).  LOC is the
   location of the original expression.  */

struct c_expr
c_expr_sizeof_type (location_t loc, struct c_type_name *t)
{
  tree type;
  struct c_expr ret;
  /* TYPE_EXPR collects any expressions that must be evaluated as part
     of the type name (e.g. VLA size expressions).  */
  tree type_expr = NULL_TREE;
  bool type_expr_const = true;
  type = groktypename (t, &type_expr, &type_expr_const);
  ret.value = c_sizeof (loc, type);
  /* Record the most recent sizeof operand and location for later
     sizeof-related diagnostics elsewhere in the front end.  */
  c_last_sizeof_arg = type;
  c_last_sizeof_loc = loc;
  ret.original_code = SIZEOF_EXPR;
  ret.original_type = NULL;
  ret.m_decimal = 0;
  if (type == error_mark_node)
    {
      ret.value = error_mark_node;
      ret.original_code = ERROR_MARK;
    }
  else
    if ((type_expr || TREE_CODE (ret.value) == INTEGER_CST)
	&& C_TYPE_VARIABLE_SIZE (type))
      {
	/* If the type is a [*] array, it is a VLA but is represented as
	   having a size of zero.  In such a case we must ensure that
	   the result of sizeof does not get folded to a constant by
	   c_fully_fold, because if the size is evaluated the result is
	   not constant and so constraints on zero or negative size
	   arrays must not be applied when this sizeof call is inside
	   another array declarator.  */
	if (!type_expr)
	  type_expr = integer_zero_node;
	ret.value = build2 (C_MAYBE_CONST_EXPR, TREE_TYPE (ret.value),
			    type_expr, ret.value);
	C_MAYBE_CONST_EXPR_NON_CONST (ret.value) = !type_expr_const;
      }
  /* Decls seen only inside this sizeof count as used only if the
     named type is variably sized (sizeof then evaluates the size).  */
  pop_maybe_used (used: type != error_mark_node
		  ? C_TYPE_VARIABLE_SIZE (type) : false);
  return ret;
}
3226 | |
3227 | /* Build a function call to function FUNCTION with parameters PARAMS. |
3228 | The function call is at LOC. |
3229 | PARAMS is a list--a chain of TREE_LIST nodes--in which the |
3230 | TREE_VALUE of each node is a parameter-expression. |
3231 | FUNCTION's data type may be a function type or a pointer-to-function. */ |
3232 | |
3233 | tree |
3234 | build_function_call (location_t loc, tree function, tree params) |
3235 | { |
3236 | vec<tree, va_gc> *v; |
3237 | tree ret; |
3238 | |
3239 | vec_alloc (v, nelems: list_length (params)); |
3240 | for (; params; params = TREE_CHAIN (params)) |
3241 | v->quick_push (TREE_VALUE (params)); |
3242 | ret = c_build_function_call_vec (loc, vNULL, function, v, NULL); |
3243 | vec_free (v); |
3244 | return ret; |
3245 | } |
3246 | |
3247 | /* Give a note about the location of the declaration of DECL. */ |
3248 | |
3249 | static void |
3250 | inform_declaration (tree decl) |
3251 | { |
3252 | if (decl && (TREE_CODE (decl) != FUNCTION_DECL |
3253 | || !DECL_IS_UNDECLARED_BUILTIN (decl))) |
3254 | inform (DECL_SOURCE_LOCATION (decl), "declared here" ); |
3255 | } |
3256 | |
/* Build a function call to function FUNCTION with parameters PARAMS.
   If FUNCTION is the result of resolving an overloaded target built-in,
   ORIG_FUNDECL is the original function decl, otherwise it is null.
   ORIGTYPES, if not NULL, is a vector of types; each element is
   either NULL or the original type of the corresponding element in
   PARAMS.  The original type may differ from TREE_TYPE of the
   parameter for enums.  FUNCTION's data type may be a function type
   or pointer-to-function.  This function changes the elements of
   PARAMS.  */

tree
build_function_call_vec (location_t loc, vec<location_t> arg_loc,
			 tree function, vec<tree, va_gc> *params,
			 vec<tree, va_gc> *origtypes, tree orig_fundecl)
{
  tree fntype, fundecl = NULL_TREE;
  tree name = NULL_TREE, result;
  tree tem;
  int nargs;
  tree *argarray;


  /* Strip NON_LVALUE_EXPRs, etc., since we aren't using as an lvalue.  */
  STRIP_TYPE_NOPS (function);

  /* Convert anything with function type to a pointer-to-function.  */
  if (TREE_CODE (function) == FUNCTION_DECL)
    {
      name = DECL_NAME (function);

      if (flag_tm)
	tm_malloc_replacement (function);
      fundecl = function;
      if (!orig_fundecl)
	orig_fundecl = fundecl;
      /* Atomic functions have type checking/casting already done.  They are
	 often rewritten and don't match the original parameter list.  */
      if (name && startswith (IDENTIFIER_POINTER (name), prefix: "__atomic_" ))
	origtypes = NULL;
    }
  if (TREE_CODE (TREE_TYPE (function)) == FUNCTION_TYPE)
    function = function_to_pointer_conversion (loc, exp: function);

  /* For Objective-C, convert any calls via a cast to OBJC_TYPE_REF
     expressions, like those used for ObjC messenger dispatches.  */
  if (params && !params->is_empty ())
    function = objc_rewrite_function_call (function, (*params)[0]);

  function = c_fully_fold (function, false, NULL);

  fntype = TREE_TYPE (function);

  if (TREE_CODE (fntype) == ERROR_MARK)
    return error_mark_node;

  /* By now FUNCTION must be a pointer to function; anything else is
     not callable.  */
  if (!(TREE_CODE (fntype) == POINTER_TYPE
	&& TREE_CODE (TREE_TYPE (fntype)) == FUNCTION_TYPE))
    {
      if (!flag_diagnostics_show_caret && !STATEMENT_CLASS_P (function))
	error_at (loc,
		  "called object %qE is not a function or function pointer" ,
		  function);
      else if (DECL_P (function))
	{
	  error_at (loc,
		    "called object %qD is not a function or function pointer" ,
		    function);
	  inform_declaration (decl: function);
	}
      else
	error_at (loc,
		  "called object is not a function or function pointer" );
      return error_mark_node;
    }

  /* A call to a noreturn-like function means control may leave the
     current function abnormally.  */
  if (fundecl && TREE_THIS_VOLATILE (fundecl))
    current_function_returns_abnormally = 1;

  /* fntype now gets the type of function pointed to.  */
  fntype = TREE_TYPE (fntype);
  tree return_type = TREE_TYPE (fntype);

  /* Convert the parameters to the types declared in the
     function prototype, or apply default promotions.  */

  nargs = convert_arguments (loc, arg_loc, TYPE_ARG_TYPES (fntype), params,
			     origtypes, function, fundecl);
  if (nargs < 0)
    return error_mark_node;

  /* Check that the function is called through a compatible prototype.
     If it is not, warn.  */
  if (CONVERT_EXPR_P (function)
      && TREE_CODE (tem = TREE_OPERAND (function, 0)) == ADDR_EXPR
      && TREE_CODE (tem = TREE_OPERAND (tem, 0)) == FUNCTION_DECL
      && !comptypes (type1: fntype, TREE_TYPE (tem)))
    {
      /* This situation leads to run-time undefined behavior.  We can't,
	 therefore, simply error unless we can prove that all possible
	 executions of the program must execute the code.  */
      warning_at (loc, 0, "function called through a non-compatible type" );

      if (VOID_TYPE_P (return_type)
	  && TYPE_QUALS (return_type) != TYPE_UNQUALIFIED)
	pedwarn (loc, 0,
		 "function with qualified void return type called" );
    }

  argarray = vec_safe_address (v: params);

  /* Check that arguments to builtin functions match the expectations.  */
  if (fundecl
      && fndecl_built_in_p (node: fundecl)
      && !check_builtin_function_arguments (loc, arg_loc, fundecl,
					    orig_fundecl, nargs, argarray))
    return error_mark_node;

  /* Check that the arguments to the function are valid.  */
  bool warned_p = check_function_arguments (loc, fundecl, fntype,
					    nargs, argarray, &arg_loc);

  /* The call expression itself gets the unqualified return type.  */
  if (TYPE_QUALS (return_type) != TYPE_UNQUALIFIED
      && !VOID_TYPE_P (return_type))
    return_type = c_build_qualified_type (return_type, TYPE_UNQUALIFIED);
  /* Builtin calls are folded immediately so constant results are
     available to the rest of the front end.  */
  if (name != NULL_TREE
      && startswith (IDENTIFIER_POINTER (name), prefix: "__builtin_" ))
    {
      if (require_constant_value)
	result
	  = fold_build_call_array_initializer_loc (loc, return_type,
						   function, nargs, argarray);
      else
	result = fold_build_call_array_loc (loc, return_type,
					    function, nargs, argarray);
      if (TREE_CODE (result) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (result, 0)) == INTEGER_CST)
	STRIP_TYPE_NOPS (result);
    }
  else
    result = build_call_array_loc (loc, return_type,
				   function, nargs, argarray);
  /* If -Wnonnull warning has been diagnosed, avoid diagnosing it again
     later.  */
  if (warned_p && TREE_CODE (result) == CALL_EXPR)
    suppress_warning (result, OPT_Wnonnull);

  /* In this improbable scenario, a nested function returns a VM type.
     Create a TARGET_EXPR so that the call always has a LHS, much as
     what the C++ FE does for functions returning non-PODs.  */
  if (C_TYPE_VARIABLY_MODIFIED (TREE_TYPE (fntype)))
    {
      tree tmp = create_tmp_var_raw (TREE_TYPE (fntype));
      result = build4 (TARGET_EXPR, TREE_TYPE (fntype), tmp, result,
		       NULL_TREE, NULL_TREE);
    }

  if (VOID_TYPE_P (TREE_TYPE (result)))
    {
      if (TYPE_QUALS (TREE_TYPE (result)) != TYPE_UNQUALIFIED)
	pedwarn (loc, 0,
		 "function with qualified void return type called" );
      return result;
    }
  return require_complete_type (loc, value: result);
}
3422 | |
3423 | /* Like build_function_call_vec, but call also resolve_overloaded_builtin. */ |
3424 | |
3425 | tree |
3426 | c_build_function_call_vec (location_t loc, const vec<location_t> &arg_loc, |
3427 | tree function, vec<tree, va_gc> *params, |
3428 | vec<tree, va_gc> *origtypes) |
3429 | { |
3430 | /* Strip NON_LVALUE_EXPRs, etc., since we aren't using as an lvalue. */ |
3431 | STRIP_TYPE_NOPS (function); |
3432 | |
3433 | /* Convert anything with function type to a pointer-to-function. */ |
3434 | if (TREE_CODE (function) == FUNCTION_DECL) |
3435 | { |
3436 | /* Implement type-directed function overloading for builtins. |
3437 | resolve_overloaded_builtin and targetm.resolve_overloaded_builtin |
3438 | handle all the type checking. The result is a complete expression |
3439 | that implements this function call. */ |
3440 | tree tem = resolve_overloaded_builtin (loc, function, params); |
3441 | if (tem) |
3442 | return tem; |
3443 | } |
3444 | return build_function_call_vec (loc, arg_loc, function, params, origtypes); |
3445 | } |
3446 | |
/* Helper for convert_arguments called to convert the VALue of argument
   number ARGNUM from ORIGTYPE to the corresponding parameter number
   PARMNUM and TYPE.
   PLOC is the location where the conversion is being performed.
   FUNCTION and FUNDECL are the same as in convert_arguments.
   VALTYPE is the original type of VAL before the conversion and,
   for EXCESS_PRECISION_EXPR, the operand of the expression.
   NPC is true if VAL represents the null pointer constant (VAL itself
   will have been folded to an integer constant).
   RNAME is the same as FUNCTION except in Objective C when it's
   the function selector.
   EXCESS_PRECISION is true when VAL was originally represented
   as EXCESS_PRECISION_EXPR.
   WARNOPT is the same as in convert_for_assignment.
   Returns the converted argument, or error_mark_node on failure.  */

static tree
convert_argument (location_t ploc, tree function, tree fundecl,
		  tree type, tree origtype, tree val, tree valtype,
		  bool npc, tree rname, int parmnum, int argnum,
		  bool excess_precision, int warnopt)
{
  /* Formal parm type is specified by a function prototype.  */

  if (type == error_mark_node || !COMPLETE_TYPE_P (type))
    {
      error_at (ploc, "type of formal parameter %d is incomplete" ,
		parmnum + 1);
      return error_mark_node;
    }

  /* Optionally warn about conversions that differ from the default
     conversions.  NOTE(review): the first two integral checks below are
     deliberately independent "if" statements; the else-if chain hangs
     off the second one, so an integral parameter receiving a floating
     argument is diagnosed independently of the rest of the chain.  */
  if (warn_traditional_conversion || warn_traditional)
    {
      if (INTEGRAL_TYPE_P (type)
	  && SCALAR_FLOAT_TYPE_P (valtype))
	warning_at (ploc, OPT_Wtraditional_conversion,
		    "passing argument %d of %qE as integer rather "
		    "than floating due to prototype" ,
		    argnum, rname);
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (valtype) == COMPLEX_TYPE)
	warning_at (ploc, OPT_Wtraditional_conversion,
		    "passing argument %d of %qE as integer rather "
		    "than complex due to prototype" ,
		    argnum, rname);
      else if (TREE_CODE (type) == COMPLEX_TYPE
	       && SCALAR_FLOAT_TYPE_P (valtype))
	warning_at (ploc, OPT_Wtraditional_conversion,
		    "passing argument %d of %qE as complex rather "
		    "than floating due to prototype" ,
		    argnum, rname);
      else if (SCALAR_FLOAT_TYPE_P (type)
	       && INTEGRAL_TYPE_P (valtype))
	warning_at (ploc, OPT_Wtraditional_conversion,
		    "passing argument %d of %qE as floating rather "
		    "than integer due to prototype" ,
		    argnum, rname);
      else if (TREE_CODE (type) == COMPLEX_TYPE
	       && INTEGRAL_TYPE_P (valtype))
	warning_at (ploc, OPT_Wtraditional_conversion,
		    "passing argument %d of %qE as complex rather "
		    "than integer due to prototype" ,
		    argnum, rname);
      else if (SCALAR_FLOAT_TYPE_P (type)
	       && TREE_CODE (valtype) == COMPLEX_TYPE)
	warning_at (ploc, OPT_Wtraditional_conversion,
		    "passing argument %d of %qE as floating rather "
		    "than complex due to prototype" ,
		    argnum, rname);
      /* ??? At some point, messages should be written about
	 conversions between complex types, but that's too messy
	 to do now.  */
      else if (SCALAR_FLOAT_TYPE_P (type)
	       && SCALAR_FLOAT_TYPE_P (valtype))
	{
	  unsigned int formal_prec = TYPE_PRECISION (type);

	  /* Warn if any argument is passed as `float',
	     since without a prototype it would be `double'.
	     (A decimal float of the same precision is exempt.)  */
	  if (formal_prec == TYPE_PRECISION (float_type_node)
	      && type != dfloat32_type_node)
	    warning_at (ploc, 0,
			"passing argument %d of %qE as %<float%> "
			"rather than %<double%> due to prototype" ,
			argnum, rname);

	  /* Warn if mismatch between argument and prototype
	     for decimal float types.  Warn of conversions with
	     binary float types and of precision narrowing due to
	     prototype.  */
	  else if (type != valtype
		   && (type == dfloat32_type_node
		       || type == dfloat64_type_node
		       || type == dfloat128_type_node
		       || valtype == dfloat32_type_node
		       || valtype == dfloat64_type_node
		       || valtype == dfloat128_type_node)
		   && (formal_prec
		       <= TYPE_PRECISION (valtype)
		       || (type == dfloat128_type_node
			   && (valtype
			       != dfloat64_type_node
			       && (valtype
				   != dfloat32_type_node)))
		       || (type == dfloat64_type_node
			   && (valtype
			       != dfloat32_type_node))))
	    warning_at (ploc, 0,
			"passing argument %d of %qE as %qT "
			"rather than %qT due to prototype" ,
			argnum, rname, type, valtype);

	}
      /* Detect integer changing in width or signedness.
	 These warnings are only activated with
	 -Wtraditional-conversion, not with -Wtraditional.  */
      else if (warn_traditional_conversion
	       && INTEGRAL_TYPE_P (type)
	       && INTEGRAL_TYPE_P (valtype))
	{
	  unsigned int formal_prec = TYPE_PRECISION (type);
	  /* TYPE1 is what VAL would become under the default
	     promotions, i.e. what an unprototyped call would pass.  */
	  tree would_have_been = default_conversion (exp: val);
	  tree type1 = TREE_TYPE (would_have_been);

	  if (val == error_mark_node)
	    /* VAL could have been of incomplete type.  */;
	  else if (TREE_CODE (type) == ENUMERAL_TYPE
		   && (TYPE_MAIN_VARIANT (type)
		       == TYPE_MAIN_VARIANT (valtype)))
	    /* No warning if function asks for enum
	       and the actual arg is that enum type.  */
	    ;
	  else if (formal_prec != TYPE_PRECISION (type1))
	    warning_at (ploc, OPT_Wtraditional_conversion,
			"passing argument %d of %qE "
			"with different width due to prototype" ,
			argnum, rname);
	  else if (TYPE_UNSIGNED (type) == TYPE_UNSIGNED (type1))
	    ;
	  /* Don't complain if the formal parameter type
	     is an enum, because we can't tell now whether
	     the value was an enum--even the same enum.  */
	  else if (TREE_CODE (type) == ENUMERAL_TYPE)
	    ;
	  else if (TREE_CODE (val) == INTEGER_CST
		   && int_fits_type_p (val, type))
	    /* Change in signedness doesn't matter
	       if a constant value is unaffected.  */
	    ;
	  /* If the value is extended from a narrower
	     unsigned type, it doesn't matter whether we
	     pass it as signed or unsigned; the value
	     certainly is the same either way.  */
	  else if (TYPE_PRECISION (valtype) < TYPE_PRECISION (type)
		   && TYPE_UNSIGNED (valtype))
	    ;
	  else if (TYPE_UNSIGNED (type))
	    warning_at (ploc, OPT_Wtraditional_conversion,
			"passing argument %d of %qE "
			"as unsigned due to prototype" ,
			argnum, rname);
	  else
	    warning_at (ploc, OPT_Wtraditional_conversion,
			"passing argument %d of %qE "
			"as signed due to prototype" ,
			argnum, rname);
	}
    }

  /* Possibly restore an EXCESS_PRECISION_EXPR for the
     sake of better warnings from convert_and_check.  */
  if (excess_precision)
    val = build1 (EXCESS_PRECISION_EXPR, valtype, val);

  /* Perform the conversion proper, with assignment-style diagnostics
     (ic_argpass selects the "passing argument N of ..." wording).  */
  tree parmval = convert_for_assignment (ploc, ploc, type,
					 val, origtype, ic_argpass,
					 npc, fundecl, function,
					 parmnum + 1, warnopt);

  /* Some targets promote small integer parameters even for prototyped
     calls; apply the same promotion here so the call matches what the
     target expects.  */
  if (targetm.calls.promote_prototypes (fundecl ? TREE_TYPE (fundecl) : 0)
      && INTEGRAL_TYPE_P (type)
      && (TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)))
    parmval = default_conversion (exp: parmval);

  return parmval;
}
3634 | |
/* Convert the argument expressions in the vector VALUES
   to the types in the list TYPELIST.

   If TYPELIST is exhausted, or when an element has NULL as its type,
   perform the default conversions.

   ORIGTYPES is the original types of the expressions in VALUES.  This
   holds the type of enum values which have been converted to integral
   types.  It may be NULL.

   FUNCTION is a tree for the called function.  It is used only for
   error messages, where it is formatted with %qE.

   This is also where warnings about wrong number of args are generated.

   ARG_LOC are locations of function arguments (if any).

   FUNDECL is the function's declaration, or NULL_TREE (e.g. for calls
   through a pointer); it is used for diagnostics and built-in handling.

   Returns the actual number of arguments processed (which may be less
   than the length of VALUES in some error situations), or -1 on
   failure.  */

static int
convert_arguments (location_t loc, vec<location_t> arg_loc, tree typelist,
		   vec<tree, va_gc> *values, vec<tree, va_gc> *origtypes,
		   tree function, tree fundecl)
{
  unsigned int parmnum;
  bool error_args = false;
  /* True for built-ins carrying the "type generic" attribute; their
     arguments are not subject to the usual promotions.  */
  const bool type_generic = fundecl
    && lookup_attribute (attr_name: "type generic" , TYPE_ATTRIBUTES (TREE_TYPE (fundecl)));
  bool type_generic_remove_excess_precision = false;
  bool type_generic_overflow_p = false;
  bool type_generic_bit_query = false;
  tree selector;

  /* Change pointer to function to the function itself for
     diagnostics.  */
  if (TREE_CODE (function) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (function, 0)) == FUNCTION_DECL)
    function = TREE_OPERAND (function, 0);

  /* Handle an ObjC selector specially for diagnostics.  */
  selector = objc_message_selector ();

  /* For a call to a built-in function declared without a prototype,
     set to the built-in function's argument list.  */
  tree builtin_typelist = NULL_TREE;

  /* For type-generic built-in functions, determine whether excess
     precision should be removed (classification) or not
     (comparison).  */
  if (fundecl
      && fndecl_built_in_p (node: fundecl, klass: BUILT_IN_NORMAL))
    {
      built_in_function code = DECL_FUNCTION_CODE (decl: fundecl);
      if (C_DECL_BUILTIN_PROTOTYPE (fundecl))
	{
	  /* For a call to a built-in function declared without a prototype
	     use the types of the parameters of the internal built-in to
	     match those of the arguments to.  */
	  if (tree bdecl = builtin_decl_explicit (fncode: code))
	    builtin_typelist = TYPE_ARG_TYPES (TREE_TYPE (bdecl));
	}

      /* For type-generic built-in functions, determine whether excess
	 precision should be removed (classification) or not
	 (comparison).  */
      if (type_generic)
	switch (code)
	  {
	  case BUILT_IN_ISFINITE:
	  case BUILT_IN_ISINF:
	  case BUILT_IN_ISINF_SIGN:
	  case BUILT_IN_ISNAN:
	  case BUILT_IN_ISNORMAL:
	  case BUILT_IN_ISSIGNALING:
	  case BUILT_IN_FPCLASSIFY:
	    type_generic_remove_excess_precision = true;
	    break;

	  case BUILT_IN_ADD_OVERFLOW_P:
	  case BUILT_IN_SUB_OVERFLOW_P:
	  case BUILT_IN_MUL_OVERFLOW_P:
	    /* The last argument of these type-generic builtins
	       should not be promoted.  */
	    type_generic_overflow_p = true;
	    break;

	  case BUILT_IN_CLZG:
	  case BUILT_IN_CTZG:
	  case BUILT_IN_CLRSBG:
	  case BUILT_IN_FFSG:
	  case BUILT_IN_PARITYG:
	  case BUILT_IN_POPCOUNTG:
	    /* The first argument of these type-generic builtins
	       should not be promoted.  */
	    type_generic_bit_query = true;
	    break;

	  default:
	    break;
	  }
    }

  /* Scan the given expressions (VALUES) and types (TYPELIST), producing
     individual converted arguments.  */

  tree typetail, builtin_typetail, val;
  for (typetail = typelist,
	 builtin_typetail = builtin_typelist,
	 parmnum = 0;
       values && values->iterate (ix: parmnum, ptr: &val);
       ++parmnum)
    {
      /* The type of the function parameter (if it was declared with one).  */
      tree type = typetail ? TREE_VALUE (typetail) : NULL_TREE;
      /* The type of the built-in function parameter (if the function
	 is a built-in).  Used to detect type incompatibilities in
	 calls to built-ins declared without a prototype.  */
      tree builtin_type = (builtin_typetail
			   ? TREE_VALUE (builtin_typetail) : NULL_TREE);
      /* The original type of the argument being passed to the function.  */
      tree valtype = TREE_TYPE (val);
      /* The called function (or function selector in Objective C).  */
      tree rname = function;
      int argnum = parmnum + 1;
      const char *invalid_func_diag;
      /* Set for EXCESS_PRECISION_EXPR arguments.  */
      bool excess_precision = false;
      /* The value of the argument after conversion to the type
	 of the function parameter it is passed to.  */
      tree parmval;
      /* Some __atomic_* builtins have additional hidden argument at
	 position 0, in which case ARG_LOC does not line up with VALUES
	 and we fall back to INPUT_LOCATION.  */
      location_t ploc
	= !arg_loc.is_empty () && values->length () == arg_loc.length ()
	  ? expansion_point_location_if_in_system_header (arg_loc[parmnum])
	  : input_location;

      /* The prototype's parameter list has ended: any further argument
	 is excess, which is a hard error.  */
      if (type == void_type_node)
	{
	  if (selector)
	    error_at (loc, "too many arguments to method %qE" , selector);
	  else
	    error_at (loc, "too many arguments to function %qE" , function);
	  inform_declaration (decl: fundecl);
	  return error_args ? -1 : (int) parmnum;
	}

      /* Likewise for the built-in's internal prototype, but only warn,
	 and stop consulting the built-in parameter list.  */
      if (builtin_type == void_type_node)
	{
	  if (warning_at (loc, OPT_Wbuiltin_declaration_mismatch,
			  "too many arguments to built-in function %qE "
			  "expecting %d" , function, parmnum))
	    inform_declaration (decl: fundecl);
	  builtin_typetail = NULL_TREE;
	}

      /* For ObjC message sends, diagnose positions relative to the
	 selector; presumably the first two slots hold the receiver and
	 selector -- TODO confirm against the ObjC front end.  */
      if (selector && argnum > 2)
	{
	  rname = selector;
	  argnum -= 2;
	}

      /* Determine if VAL is a null pointer constant before folding it.  */
      bool npc = null_pointer_constant_p (expr: val);

      /* If there is excess precision and a prototype, convert once to
	 the required type rather than converting via the semantic
	 type.  Likewise without a prototype a float value represented
	 as long double should be converted once to double.  But for
	 type-generic classification functions excess precision must
	 be removed here.  */
      if (TREE_CODE (val) == EXCESS_PRECISION_EXPR
	  && (type || !type_generic || !type_generic_remove_excess_precision))
	{
	  val = TREE_OPERAND (val, 0);
	  excess_precision = true;
	}
      val = c_fully_fold (val, false, NULL);
      STRIP_TYPE_NOPS (val);

      val = require_complete_type (loc: ploc, value: val);

      /* Some floating-point arguments must be promoted to double when
	 no type is specified by a prototype.  This applies to
	 arguments of type float, and to architecture-specific types
	 (ARM __fp16), but not to _FloatN or _FloatNx types.  */
      bool promote_float_arg = false;
      if (type == NULL_TREE
	  && TREE_CODE (valtype) == REAL_TYPE
	  && (TYPE_PRECISION (valtype)
	      <= TYPE_PRECISION (double_type_node))
	  && TYPE_MAIN_VARIANT (valtype) != double_type_node
	  && TYPE_MAIN_VARIANT (valtype) != long_double_type_node
	  && !DECIMAL_FLOAT_MODE_P (TYPE_MODE (valtype)))
	{
	  /* Promote this argument, unless it has a _FloatN or
	     _FloatNx type.  */
	  promote_float_arg = true;
	  for (int i = 0; i < NUM_FLOATN_NX_TYPES; i++)
	    if (TYPE_MAIN_VARIANT (valtype) == FLOATN_NX_TYPE_NODE (i))
	      {
		promote_float_arg = false;
		break;
	      }
	  /* Don't promote __bf16 either.  */
	  if (TYPE_MAIN_VARIANT (valtype) == bfloat16_type_node)
	    promote_float_arg = false;
	}

      if (type != NULL_TREE)
	{
	  /* A prototyped parameter: do the full checked conversion.  */
	  tree origtype = (!origtypes) ? NULL_TREE : (*origtypes)[parmnum];
	  parmval = convert_argument (ploc, function, fundecl, type, origtype,
				      val, valtype, npc, rname, parmnum, argnum,
				      excess_precision, warnopt: 0);
	}
      else if (promote_float_arg)
	{
	  if (type_generic)
	    parmval = val;
	  else
	    {
	      /* Convert `float' to `double'.  */
	      if (warn_double_promotion && !c_inhibit_evaluation_warnings)
		warning_at (ploc, OPT_Wdouble_promotion,
			    "implicit conversion from %qT to %qT when passing "
			    "argument to function" ,
			    valtype, double_type_node);
	      parmval = convert (double_type_node, val);
	    }
	}
      else if ((excess_precision && !type_generic)
	       || (type_generic_overflow_p && parmnum == 2)
	       || (type_generic_bit_query && parmnum == 0))
	/* A "double" argument with excess precision being passed
	   without a prototype or in variable arguments.
	   The last argument of __builtin_*_overflow_p should not be
	   promoted, similarly the first argument of
	   __builtin_{clz,ctz,clrsb,ffs,parity,popcount}g.  */
	parmval = convert (valtype, val);
      else if ((invalid_func_diag =
		targetm.calls.invalid_arg_for_unprototyped_fn (typelist, fundecl, val)))
	{
	  /* The target rejects this argument for an unprototyped call.  */
	  error (invalid_func_diag);
	  return -1;
	}
      else if (TREE_CODE (val) == ADDR_EXPR && reject_gcc_builtin (val))
	{
	  /* Taking the address of a built-in that has no library
	     fallback; reject_gcc_builtin already diagnosed it.  */
	  return -1;
	}
      else
	/* Convert `short' and `char' to full-size `int'.  */
	parmval = default_conversion (exp: val);

      (*values)[parmnum] = parmval;
      if (parmval == error_mark_node)
	error_args = true;

      if (!type && builtin_type && TREE_CODE (builtin_type) != VOID_TYPE)
	{
	  /* For a call to a built-in function declared without a prototype,
	     perform the conversions from the argument to the expected type
	     but issue warnings rather than errors for any mismatches.
	     Ignore the converted argument and use the PARMVAL obtained
	     above by applying default conversions instead.  */
	  tree origtype = (!origtypes) ? NULL_TREE : (*origtypes)[parmnum];
	  convert_argument (ploc, function, fundecl, type: builtin_type, origtype,
			    val, valtype, npc, rname, parmnum, argnum,
			    excess_precision,
			    warnopt: OPT_Wbuiltin_declaration_mismatch);
	}

      if (typetail)
	typetail = TREE_CHAIN (typetail);

      if (builtin_typetail)
	builtin_typetail = TREE_CHAIN (builtin_typetail);
    }

  gcc_assert (parmnum == vec_safe_length (values));

  /* Arguments ran out before the prototype's parameters did.  */
  if (typetail != NULL_TREE && TREE_VALUE (typetail) != void_type_node)
    {
      error_at (loc, "too few arguments to function %qE" , function);
      inform_declaration (decl: fundecl);
      return -1;
    }

  /* Likewise for the built-in's internal parameter list -- warn with
     the total count the built-in expects.  */
  if (builtin_typetail && TREE_VALUE (builtin_typetail) != void_type_node)
    {
      unsigned nargs = parmnum;
      for (tree t = builtin_typetail; t; t = TREE_CHAIN (t))
	++nargs;

      if (warning_at (loc, OPT_Wbuiltin_declaration_mismatch,
		      "too few arguments to built-in function %qE "
		      "expecting %u" , function, nargs - 1))
	inform_declaration (decl: fundecl);
    }

  return error_args ? -1 : (int) parmnum;
}
3939 | |
3940 | /* This is the entry point used by the parser to build unary operators |
3941 | in the input. CODE, a tree_code, specifies the unary operator, and |
3942 | ARG is the operand. For unary plus, the C parser currently uses |
3943 | CONVERT_EXPR for code. |
3944 | |
3945 | LOC is the location to use for the tree generated. |
3946 | */ |
3947 | |
3948 | struct c_expr |
3949 | parser_build_unary_op (location_t loc, enum tree_code code, struct c_expr arg) |
3950 | { |
3951 | struct c_expr result; |
3952 | |
3953 | result.original_code = code; |
3954 | result.original_type = NULL; |
3955 | result.m_decimal = 0; |
3956 | |
3957 | if (reject_gcc_builtin (arg.value)) |
3958 | { |
3959 | result.value = error_mark_node; |
3960 | } |
3961 | else |
3962 | { |
3963 | result.value = build_unary_op (loc, code, arg.value, false); |
3964 | |
3965 | if (TREE_OVERFLOW_P (result.value) && !TREE_OVERFLOW_P (arg.value)) |
3966 | overflow_warning (loc, result.value, arg.value); |
3967 | } |
3968 | |
3969 | /* We are typically called when parsing a prefix token at LOC acting on |
3970 | ARG. Reflect this by updating the source range of the result to |
3971 | start at LOC and end at the end of ARG. */ |
3972 | set_c_expr_source_range (expr: &result, |
3973 | start: loc, finish: arg.get_finish ()); |
3974 | |
3975 | return result; |
3976 | } |
3977 | |
3978 | /* Returns true if TYPE is a character type, *not* including wchar_t. */ |
3979 | |
3980 | bool |
3981 | char_type_p (tree type) |
3982 | { |
3983 | return (type == char_type_node |
3984 | || type == unsigned_char_type_node |
3985 | || type == signed_char_type_node |
3986 | || type == char16_type_node |
3987 | || type == char32_type_node); |
3988 | } |
3989 | |
/* This is the entry point used by the parser to build binary operators
   in the input.  CODE, a tree_code, specifies the binary operator, and
   ARG1 and ARG2 are the operands.  In addition to constructing the
   expression, we check for operands that were written with other binary
   operators in a way that is likely to confuse the user.

   LOCATION is the location of the binary operator.  */

struct c_expr
parser_build_binary_op (location_t location, enum tree_code code,
			struct c_expr arg1, struct c_expr arg2)
{
  struct c_expr result;
  result.m_decimal = 0;

  /* Capture the pre-folding operand codes and types; the warnings
     below key off the forms the user actually wrote.  */
  enum tree_code code1 = arg1.original_code;
  enum tree_code code2 = arg2.original_code;
  tree type1 = (arg1.original_type
		? arg1.original_type
		: TREE_TYPE (arg1.value));
  tree type2 = (arg2.original_type
		? arg2.original_type
		: TREE_TYPE (arg2.value));

  result.value = build_binary_op (location, code,
				  arg1.value, arg2.value, true);
  result.original_code = code;
  result.original_type = NULL;
  /* (Also cleared above; harmless.)  */
  result.m_decimal = 0;

  /* On error, just propagate the source range; none of the warnings
     below apply.  */
  if (TREE_CODE (result.value) == ERROR_MARK)
    {
      set_c_expr_source_range (expr: &result,
			       start: arg1.get_start (),
			       finish: arg2.get_finish ());
      return result;
    }

  if (location != UNKNOWN_LOCATION)
    protected_set_expr_location (result.value, location);

  set_c_expr_source_range (expr: &result,
			   start: arg1.get_start (),
			   finish: arg2.get_finish ());

  /* Check for cases such as x+y<<z which users are likely
     to misinterpret.  */
  if (warn_parentheses)
    warn_about_parentheses (location, code, code1, arg1.value, code2,
			    arg2.value);

  if (warn_logical_op)
    warn_logical_operator (location, code, TREE_TYPE (result.value),
			   code1, arg1.value, code2, arg2.value);

  /* For -Wtautological-compare, look through C_MAYBE_CONST_EXPR
     wrappers, but give up on an operand whose wrapper carries
     side effects.  */
  if (warn_tautological_compare)
    {
      tree lhs = arg1.value;
      tree rhs = arg2.value;
      if (TREE_CODE (lhs) == C_MAYBE_CONST_EXPR)
	{
	  if (C_MAYBE_CONST_EXPR_PRE (lhs) != NULL_TREE
	      && TREE_SIDE_EFFECTS (C_MAYBE_CONST_EXPR_PRE (lhs)))
	    lhs = NULL_TREE;
	  else
	    lhs = C_MAYBE_CONST_EXPR_EXPR (lhs);
	}
      if (TREE_CODE (rhs) == C_MAYBE_CONST_EXPR)
	{
	  if (C_MAYBE_CONST_EXPR_PRE (rhs) != NULL_TREE
	      && TREE_SIDE_EFFECTS (C_MAYBE_CONST_EXPR_PRE (rhs)))
	    rhs = NULL_TREE;
	  else
	    rhs = C_MAYBE_CONST_EXPR_EXPR (rhs);
	}
      if (lhs != NULL_TREE && rhs != NULL_TREE)
	warn_tautological_cmp (location, code, lhs, rhs);
    }

  /* Warn about !x == y, which is usually meant as !(x == y).  */
  if (warn_logical_not_paren
      && TREE_CODE_CLASS (code) == tcc_comparison
      && code1 == TRUTH_NOT_EXPR
      && code2 != TRUTH_NOT_EXPR
      /* Avoid warning for !!x == y.  */
      && (TREE_CODE (arg1.value) != NE_EXPR
	  || !integer_zerop (TREE_OPERAND (arg1.value, 1))))
    {
      /* Avoid warning for !b == y where b has _Bool type.  */
      tree t = integer_zero_node;
      if (TREE_CODE (arg1.value) == EQ_EXPR
	  && integer_zerop (TREE_OPERAND (arg1.value, 1))
	  && TREE_TYPE (TREE_OPERAND (arg1.value, 0)) == integer_type_node)
	{
	  /* Strip conversions and C_MAYBE_CONST_EXPR wrappers to find
	     the underlying operand's type.  */
	  t = TREE_OPERAND (arg1.value, 0);
	  do
	    {
	      if (TREE_TYPE (t) != integer_type_node)
		break;
	      if (TREE_CODE (t) == C_MAYBE_CONST_EXPR)
		t = C_MAYBE_CONST_EXPR_EXPR (t);
	      else if (CONVERT_EXPR_P (t))
		t = TREE_OPERAND (t, 0);
	      else
		break;
	    }
	  while (1);
	}
      if (!C_BOOLEAN_TYPE_P (TREE_TYPE (t)))
	warn_logical_not_parentheses (location, code, arg1.value, arg2.value);
    }

  /* Warn about comparisons against string literals, with the exception
     of testing for equality or inequality of a string literal with NULL.  */
  if (code == EQ_EXPR || code == NE_EXPR)
    {
      if ((code1 == STRING_CST
	   && !integer_zerop (tree_strip_nop_conversions (arg2.value)))
	  || (code2 == STRING_CST
	      && !integer_zerop (tree_strip_nop_conversions (arg1.value))))
	warning_at (location, OPT_Waddress,
		    "comparison with string literal results in unspecified behavior" );
      /* Warn for ptr == '\0', it's likely that it should've been ptr[0].  */
      if (POINTER_TYPE_P (type1)
	  && null_pointer_constant_p (expr: arg2.value)
	  && char_type_p (type: type2))
	{
	  auto_diagnostic_group d;
	  if (warning_at (location, OPT_Wpointer_compare,
			  "comparison between pointer and zero character "
			  "constant" ))
	    inform (arg1.get_start (),
		    "did you mean to dereference the pointer?" );
	}
      else if (POINTER_TYPE_P (type2)
	       && null_pointer_constant_p (expr: arg1.value)
	       && char_type_p (type: type1))
	{
	  auto_diagnostic_group d;
	  if (warning_at (location, OPT_Wpointer_compare,
			  "comparison between pointer and zero character "
			  "constant" ))
	    inform (arg2.get_start (),
		    "did you mean to dereference the pointer?" );
	}
    }
  else if (TREE_CODE_CLASS (code) == tcc_comparison
	   && (code1 == STRING_CST || code2 == STRING_CST))
    warning_at (location, OPT_Waddress,
		"comparison with string literal results in unspecified "
		"behavior" );

  if (warn_array_compare
      && TREE_CODE_CLASS (code) == tcc_comparison
      && TREE_CODE (type1) == ARRAY_TYPE
      && TREE_CODE (type2) == ARRAY_TYPE)
    do_warn_array_compare (location, code, arg1.value, arg2.value);

  /* Warn only for overflow introduced by this operation, not for
     overflow already present in either operand.  */
  if (TREE_OVERFLOW_P (result.value)
      && !TREE_OVERFLOW_P (arg1.value)
      && !TREE_OVERFLOW_P (arg2.value))
    overflow_warning (location, result.value);

  /* Warn about comparisons of different enum types.  */
  if (warn_enum_compare
      && TREE_CODE_CLASS (code) == tcc_comparison
      && TREE_CODE (type1) == ENUMERAL_TYPE
      && TREE_CODE (type2) == ENUMERAL_TYPE
      && TYPE_MAIN_VARIANT (type1) != TYPE_MAIN_VARIANT (type2))
    warning_at (location, OPT_Wenum_compare,
		"comparison between %qT and %qT" ,
		type1, type2);

  /* 2^8 written as an exponentiation: only relevant when both operands
     were decimal literals (m_decimal).  */
  if (warn_xor_used_as_pow
      && code == BIT_XOR_EXPR
      && arg1.m_decimal
      && arg2.m_decimal)
    check_for_xor_used_as_pow (lhs_loc: arg1.get_location (), lhs_val: arg1.value,
			       operator_loc: location,
			       rhs_loc: arg2.get_location (), rhs_val: arg2.value);

  return result;
}
4172 | |
/* Return a tree for the difference of pointers OP0 and OP1.
   The resulting tree has type ptrdiff_t.  If POINTER_SUBTRACT sanitization is
   enabled, assign to INSTRUMENT_EXPR call to libsanitizer.  */

static tree
pointer_diff (location_t loc, tree op0, tree op1, tree *instrument_expr)
{
  tree restype = ptrdiff_type_node;
  tree result, inttype;

  addr_space_t as0 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (op0)));
  addr_space_t as1 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (op1)));
  /* The pointed-to type; its size is the divisor at the end.  */
  tree target_type = TREE_TYPE (TREE_TYPE (op0));
  /* Keep the original operands for the diagnostics below, since OP0
     and OP1 may be rewritten along the way.  */
  tree orig_op0 = op0;
  tree orig_op1 = op1;

  /* If the operands point into different address spaces, we need to
     explicitly convert them to pointers into the common address space
     before we can subtract the numerical address values.  */
  if (as0 != as1)
    {
      addr_space_t as_common;
      tree common_type;

      /* Determine the common superset address space.  This is guaranteed
	 to exist because the caller verified that comp_target_types
	 returned non-zero.  */
      if (!addr_space_superset (as1: as0, as2: as1, common: &as_common))
	gcc_unreachable ();

      common_type = common_pointer_type (TREE_TYPE (op0), TREE_TYPE (op1));
      op0 = convert (common_type, op0);
      op1 = convert (common_type, op1);
    }

  /* Determine integer type result of the subtraction.  This will usually
     be the same as the result type (ptrdiff_t), but may need to be a wider
     type if pointers for the address space are wider than ptrdiff_t.  */
  if (TYPE_PRECISION (restype) < TYPE_PRECISION (TREE_TYPE (op0)))
    inttype = c_common_type_for_size (TYPE_PRECISION (TREE_TYPE (op0)), 0);
  else
    inttype = restype;

  /* Pedantically diagnose arithmetic on pointers whose target has no
     meaningful size.  */
  if (VOID_TYPE_P (target_type))
    pedwarn (loc, OPT_Wpointer_arith,
	     "pointer of type %<void *%> used in subtraction" );
  if (TREE_CODE (target_type) == FUNCTION_TYPE)
    pedwarn (loc, OPT_Wpointer_arith,
	     "pointer to a function used in subtraction" );

  /* When -fsanitize=pointer-subtract is active, hand both operands to
     the runtime check.  save_expr ensures each operand is evaluated
     only once despite appearing in both the check and the result.  */
  if (current_function_decl != NULL_TREE
      && sanitize_flags_p (flag: SANITIZE_POINTER_SUBTRACT))
    {
      op0 = save_expr (op0);
      op1 = save_expr (op1);

      tree tt = builtin_decl_explicit (fncode: BUILT_IN_ASAN_POINTER_SUBTRACT);
      *instrument_expr = build_call_expr_loc (loc, tt, 2, op0, op1);
    }

  /* First do the subtraction, then build the divide operator
     and only convert at the very end.
     Do not do default conversions in case restype is a short type.  */

  /* POINTER_DIFF_EXPR requires a signed integer type of the same size as
     pointers.  If some platform cannot provide that, or has a larger
     ptrdiff_type to support differences larger than half the address
     space, cast the pointers to some larger integer type and do the
     computations in that type.  */
  if (TYPE_PRECISION (inttype) > TYPE_PRECISION (TREE_TYPE (op0)))
    op0 = build_binary_op (loc, MINUS_EXPR, convert (inttype, op0),
			   convert (inttype, op1), false);
  else
    {
      /* Cast away qualifiers.  */
      op0 = convert (c_common_type (TREE_TYPE (op0), TREE_TYPE (op0)), op0);
      op1 = convert (c_common_type (TREE_TYPE (op1), TREE_TYPE (op1)), op1);
      op0 = build2_loc (loc, code: POINTER_DIFF_EXPR, type: inttype, arg0: op0, arg1: op1);
    }

  /* This generates an error if op1 is pointer to incomplete type.  */
  if (!COMPLETE_OR_VOID_TYPE_P (TREE_TYPE (TREE_TYPE (orig_op1))))
    error_at (loc, "arithmetic on pointer to an incomplete type" );
  else if (verify_type_context (loc, TCTX_POINTER_ARITH,
				TREE_TYPE (TREE_TYPE (orig_op0))))
    verify_type_context (loc, TCTX_POINTER_ARITH,
			 TREE_TYPE (TREE_TYPE (orig_op1)));

  /* OP1 now becomes the element size; dividing the byte difference by
     it below yields an element count.  */
  op1 = c_size_in_bytes (type: target_type);

  if (pointer_to_zero_sized_aggr_p (TREE_TYPE (orig_op1)))
    error_at (loc, "arithmetic on pointer to an empty aggregate" );

  /* Divide by the size, in easiest possible way.  */
  result = fold_build2_loc (loc, EXACT_DIV_EXPR, inttype,
			    op0, convert (inttype, op1));

  /* Convert to final result type if necessary.  */
  return convert (restype, result);
}
4273 | |
4274 | /* Expand atomic compound assignments into an appropriate sequence as |
4275 | specified by the C11 standard section 6.5.16.2. |
4276 | |
4277 | _Atomic T1 E1 |
4278 | T2 E2 |
4279 | E1 op= E2 |
4280 | |
4281 | This sequence is used for all types for which these operations are |
4282 | supported. |
4283 | |
4284 | In addition, built-in versions of the 'fe' prefixed routines may |
4285 | need to be invoked for floating point (real, complex or vector) when |
4286 | floating-point exceptions are supported. See 6.5.16.2 footnote 113. |
4287 | |
4288 | T1 newval; |
4289 | T1 old; |
4290 | T1 *addr |
4291 | T2 val |
4292 | fenv_t fenv |
4293 | |
4294 | addr = &E1; |
4295 | val = (E2); |
4296 | __atomic_load (addr, &old, SEQ_CST); |
4297 | feholdexcept (&fenv); |
4298 | loop: |
4299 | newval = old op val; |
4300 | if (__atomic_compare_exchange_strong (addr, &old, &newval, SEQ_CST, |
4301 | SEQ_CST)) |
4302 | goto done; |
4303 | feclearexcept (FE_ALL_EXCEPT); |
       goto loop;
4305 | done: |
4306 | feupdateenv (&fenv); |
4307 | |
4308 | The compiler will issue the __atomic_fetch_* built-in when possible, |
4309 | otherwise it will generate the generic form of the atomic operations. |
4310 | This requires temp(s) and has their address taken. The atomic processing |
4311 | is smart enough to figure out when the size of an object can utilize |
4312 | a lock-free version, and convert the built-in call to the appropriate |
4313 | lock-free routine. The optimizers will then dispose of any temps that |
4314 | are no longer required, and lock-free implementations are utilized as |
4315 | long as there is target support for the required size. |
4316 | |
4317 | If the operator is NOP_EXPR, then this is a simple assignment, and |
4318 | an __atomic_store is issued to perform the assignment rather than |
4319 | the above loop. */ |
4320 | |
4321 | /* Build an atomic assignment at LOC, expanding into the proper |
4322 | sequence to store LHS MODIFYCODE= RHS. Return a value representing |
4323 | the result of the operation, unless RETURN_OLD_P, in which case |
4324 | return the old value of LHS (this is only for postincrement and |
4325 | postdecrement). */ |
4326 | |
static tree
build_atomic_assign (location_t loc, tree lhs, enum tree_code modifycode,
		     tree rhs, bool return_old_p)
{
  tree fndecl, func_call;
  vec<tree, va_gc> *params;
  tree val, nonatomic_lhs_type, nonatomic_rhs_type, newval, newval_addr;
  tree old, old_addr;
  tree compound_stmt = NULL_TREE;
  tree stmt, goto_stmt;
  tree loop_label, loop_decl, done_label, done_decl;

  tree lhs_type = TREE_TYPE (lhs);
  /* All the __atomic_* built-ins operate through the object's address.  */
  tree lhs_addr = build_unary_op (loc, ADDR_EXPR, lhs, false);
  /* C11 6.5.16.2 mandates sequentially consistent ordering throughout.  */
  tree seq_cst = build_int_cst (integer_type_node, MEMMODEL_SEQ_CST);
  tree rhs_semantic_type = TREE_TYPE (rhs);
  tree nonatomic_rhs_semantic_type;
  tree rhs_type;

  gcc_assert (TYPE_ATOMIC (lhs_type));

  /* The old value is only requested for postincrement/postdecrement,
     which reach here as PLUS_EXPR/MINUS_EXPR with operand 1.  */
  if (return_old_p)
    gcc_assert (modifycode == PLUS_EXPR || modifycode == MINUS_EXPR);

  /* Allocate enough vector items for a compare_exchange.  */
  vec_alloc (v&: params, nelems: 6);

  /* Create a compound statement to hold the sequence of statements
     with a loop.  */
  if (modifycode != NOP_EXPR)
    {
      compound_stmt = c_begin_compound_stmt (false);

      /* For consistency with build_modify_expr on non-_Atomic,
	 mark the lhs as read.  Also, it would be very hard to match
	 such expressions in mark_exp_read.  */
      mark_exp_read (exp: lhs);
    }

  /* Remove any excess precision (which is only present here in the
     case of compound assignments).  */
  if (TREE_CODE (rhs) == EXCESS_PRECISION_EXPR)
    {
      gcc_assert (modifycode != NOP_EXPR);
      rhs = TREE_OPERAND (rhs, 0);
    }
  rhs_type = TREE_TYPE (rhs);

  /* Fold the RHS if it hasn't already been folded.  */
  if (modifycode != NOP_EXPR)
    rhs = c_fully_fold (rhs, false, NULL);

  /* Remove the qualifiers for the rest of the expressions and create
     the VAL temp variable to hold the RHS.  */
  nonatomic_lhs_type = build_qualified_type (lhs_type, TYPE_UNQUALIFIED);
  nonatomic_rhs_type = build_qualified_type (rhs_type, TYPE_UNQUALIFIED);
  nonatomic_rhs_semantic_type = build_qualified_type (rhs_semantic_type,
						      TYPE_UNQUALIFIED);
  val = create_tmp_var_raw (nonatomic_rhs_type);
  TREE_ADDRESSABLE (val) = 1;
  suppress_warning (val);
  /* Wrap the RHS in a TARGET_EXPR initializing VAL, so the RHS is
     evaluated exactly once, before any CAS loop below (it must not be
     re-evaluated on a compare-exchange retry).  */
  rhs = build4 (TARGET_EXPR, nonatomic_rhs_type, val, rhs, NULL_TREE,
		NULL_TREE);
  TREE_SIDE_EFFECTS (rhs) = 1;
  SET_EXPR_LOCATION (rhs, loc);
  if (modifycode != NOP_EXPR)
    add_stmt (rhs);

  /* NOP_EXPR indicates it's a straight store of the RHS.  Simply issue
     an atomic_store.  */
  if (modifycode == NOP_EXPR)
    {
      compound_stmt = rhs;
      /* Build __atomic_store (&lhs, &val, SEQ_CST) */
      rhs = build_unary_op (loc, ADDR_EXPR, val, false);
      fndecl = builtin_decl_explicit (fncode: BUILT_IN_ATOMIC_STORE);
      params->quick_push (obj: lhs_addr);
      params->quick_push (obj: rhs);
      params->quick_push (obj: seq_cst);
      func_call = c_build_function_call_vec (loc, arg_loc: vNULL, function: fndecl, params, NULL);

      compound_stmt = build2 (COMPOUND_EXPR, void_type_node,
			      compound_stmt, func_call);

      /* VAL is the value which was stored, return a COMPOUND_STMT of
	 the statement and that value.  */
      return build2 (COMPOUND_EXPR, nonatomic_lhs_type, compound_stmt, val);
    }

  /* Attempt to implement the atomic operation as an __atomic_fetch_* or
     __atomic_*_fetch built-in rather than a CAS loop.  atomic_bool type
     isn't applicable for such builtins.  ??? Do we want to handle enums?  */
  if ((TREE_CODE (lhs_type) == INTEGER_TYPE || POINTER_TYPE_P (lhs_type))
      && TREE_CODE (rhs_type) == INTEGER_TYPE)
    {
      built_in_function fncode;
      /* RETURN_OLD_P selects the fetch-then-op form; otherwise the
	 op-then-fetch form yields the updated value.  */
      switch (modifycode)
	{
	case PLUS_EXPR:
	case POINTER_PLUS_EXPR:
	  fncode = (return_old_p
		    ? BUILT_IN_ATOMIC_FETCH_ADD_N
		    : BUILT_IN_ATOMIC_ADD_FETCH_N);
	  break;
	case MINUS_EXPR:
	  fncode = (return_old_p
		    ? BUILT_IN_ATOMIC_FETCH_SUB_N
		    : BUILT_IN_ATOMIC_SUB_FETCH_N);
	  break;
	case BIT_AND_EXPR:
	  fncode = (return_old_p
		    ? BUILT_IN_ATOMIC_FETCH_AND_N
		    : BUILT_IN_ATOMIC_AND_FETCH_N);
	  break;
	case BIT_IOR_EXPR:
	  fncode = (return_old_p
		    ? BUILT_IN_ATOMIC_FETCH_OR_N
		    : BUILT_IN_ATOMIC_OR_FETCH_N);
	  break;
	case BIT_XOR_EXPR:
	  fncode = (return_old_p
		    ? BUILT_IN_ATOMIC_FETCH_XOR_N
		    : BUILT_IN_ATOMIC_XOR_FETCH_N);
	  break;
	default:
	  /* No fetch built-in for this operator (e.g. *=, /=, <<=);
	     fall back to the generic compare-exchange loop.  */
	  goto cas_loop;
	}

      /* We can only use "_1" through "_16" variants of the atomic fetch
	 built-ins.  */
      unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE_UNIT (lhs_type));
      if (size != 1 && size != 2 && size != 4 && size != 8 && size != 16)
	goto cas_loop;

      /* If this is a pointer type, we need to multiply by the size of
	 the pointer target type.  */
      if (POINTER_TYPE_P (lhs_type))
	{
	  if (!COMPLETE_TYPE_P (TREE_TYPE (lhs_type))
	      /* ??? This would introduce -Wdiscarded-qualifiers
		 warning: __atomic_fetch_* expect volatile void *
		 type as the first argument.  (Assignments between
		 atomic and non-atomic objects are OK.) */
	      || TYPE_RESTRICT (lhs_type))
	    goto cas_loop;
	  tree sz = TYPE_SIZE_UNIT (TREE_TYPE (lhs_type));
	  rhs = fold_build2_loc (loc, MULT_EXPR, ptrdiff_type_node,
				 convert (ptrdiff_type_node, rhs),
				 convert (ptrdiff_type_node, sz));
	}

      /* Build __atomic_fetch_* (&lhs, &val, SEQ_CST), or
	 __atomic_*_fetch (&lhs, &val, SEQ_CST).  */
      fndecl = builtin_decl_explicit (fncode);
      params->quick_push (obj: lhs_addr);
      params->quick_push (obj: rhs);
      params->quick_push (obj: seq_cst);
      func_call = c_build_function_call_vec (loc, arg_loc: vNULL, function: fndecl, params, NULL);

      /* Capture the built-in's result in the NEWVAL temporary.  */
      newval = create_tmp_var_raw (nonatomic_lhs_type);
      TREE_ADDRESSABLE (newval) = 1;
      suppress_warning (newval);
      rhs = build4 (TARGET_EXPR, nonatomic_lhs_type, newval, func_call,
		    NULL_TREE, NULL_TREE);
      SET_EXPR_LOCATION (rhs, loc);
      add_stmt (rhs);

      /* Finish the compound statement.  */
      compound_stmt = c_end_compound_stmt (loc, compound_stmt, false);

      /* NEWVAL is the value which was stored, return a COMPOUND_STMT of
	 the statement and that value.  */
      return build2 (COMPOUND_EXPR, nonatomic_lhs_type, compound_stmt, newval);
    }

cas_loop:
  /* Create the variables and labels required for the op= form.  */
  old = create_tmp_var_raw (nonatomic_lhs_type);
  old_addr = build_unary_op (loc, ADDR_EXPR, old, false);
  TREE_ADDRESSABLE (old) = 1;
  suppress_warning (old);

  newval = create_tmp_var_raw (nonatomic_lhs_type);
  newval_addr = build_unary_op (loc, ADDR_EXPR, newval, false);
  TREE_ADDRESSABLE (newval) = 1;
  suppress_warning (newval);

  loop_decl = create_artificial_label (loc);
  loop_label = build1 (LABEL_EXPR, void_type_node, loop_decl);

  done_decl = create_artificial_label (loc);
  done_label = build1 (LABEL_EXPR, void_type_node, done_decl);

  /* __atomic_load (addr, &old, SEQ_CST).  */
  fndecl = builtin_decl_explicit (fncode: BUILT_IN_ATOMIC_LOAD);
  params->quick_push (obj: lhs_addr);
  params->quick_push (obj: old_addr);
  params->quick_push (obj: seq_cst);
  func_call = c_build_function_call_vec (loc, arg_loc: vNULL, function: fndecl, params, NULL);
  old = build4 (TARGET_EXPR, nonatomic_lhs_type, old, func_call, NULL_TREE,
		NULL_TREE);
  add_stmt (old);
  /* Reuse the parameter vector for the compare_exchange call below.  */
  params->truncate (size: 0);

  /* Create the expressions for floating-point environment
     manipulation, if required.  */
  bool need_fenv = (flag_trapping_math
		    && (FLOAT_TYPE_P (lhs_type) || FLOAT_TYPE_P (rhs_type)));
  tree hold_call = NULL_TREE, clear_call = NULL_TREE, update_call = NULL_TREE;
  if (need_fenv)
    targetm.atomic_assign_expand_fenv (&hold_call, &clear_call, &update_call);

  if (hold_call)
    add_stmt (hold_call);

  /* loop:  */
  add_stmt (loop_label);

  /* newval = old + val;  */
  if (rhs_type != rhs_semantic_type)
    /* Re-wrap VAL in the excess-precision semantic type stripped
       earlier, so the operation is computed in that type.  */
    val = build1 (EXCESS_PRECISION_EXPR, nonatomic_rhs_semantic_type, val);
  rhs = build_binary_op (loc, modifycode, old, val, true);
  if (TREE_CODE (rhs) == EXCESS_PRECISION_EXPR)
    {
      tree eptype = TREE_TYPE (rhs);
      rhs = c_fully_fold (TREE_OPERAND (rhs, 0), false, NULL);
      rhs = build1 (EXCESS_PRECISION_EXPR, eptype, rhs);
    }
  else
    rhs = c_fully_fold (rhs, false, NULL);
  rhs = convert_for_assignment (loc, UNKNOWN_LOCATION, nonatomic_lhs_type,
				rhs, NULL_TREE, ic_assign, false, NULL_TREE,
				NULL_TREE, 0);
  if (rhs != error_mark_node)
    {
      rhs = build4 (TARGET_EXPR, nonatomic_lhs_type, newval, rhs, NULL_TREE,
		    NULL_TREE);
      SET_EXPR_LOCATION (rhs, loc);
      add_stmt (rhs);
    }

  /* if (__atomic_compare_exchange (addr, &old, &new, false, SEQ_CST, SEQ_CST))
       goto done;  */
  fndecl = builtin_decl_explicit (fncode: BUILT_IN_ATOMIC_COMPARE_EXCHANGE);
  params->quick_push (obj: lhs_addr);
  params->quick_push (obj: old_addr);
  params->quick_push (obj: newval_addr);
  /* false -> strong compare-exchange (no spurious failure).  */
  params->quick_push (integer_zero_node);
  params->quick_push (obj: seq_cst);
  params->quick_push (obj: seq_cst);
  func_call = c_build_function_call_vec (loc, arg_loc: vNULL, function: fndecl, params, NULL);

  goto_stmt = build1 (GOTO_EXPR, void_type_node, done_decl);
  SET_EXPR_LOCATION (goto_stmt, loc);

  stmt = build3 (COND_EXPR, void_type_node, func_call, goto_stmt, NULL_TREE);
  SET_EXPR_LOCATION (stmt, loc);
  add_stmt (stmt);

  /* Discard any FP exceptions raised by the failed attempt before
     retrying; see 6.5.16.2 footnote 113.  */
  if (clear_call)
    add_stmt (clear_call);

  /* goto loop;  */
  goto_stmt = build1 (GOTO_EXPR, void_type_node, loop_decl);
  SET_EXPR_LOCATION (goto_stmt, loc);
  add_stmt (goto_stmt);

  /* done:  */
  add_stmt (done_label);

  if (update_call)
    add_stmt (update_call);

  /* Finish the compound statement.  */
  compound_stmt = c_end_compound_stmt (loc, compound_stmt, false);

  /* NEWVAL is the value that was successfully stored, return a
     COMPOUND_EXPR of the statement and the appropriate value.  */
  return build2 (COMPOUND_EXPR, nonatomic_lhs_type, compound_stmt,
		 return_old_p ? old : newval);
}
4608 | |
4609 | /* Construct and perhaps optimize a tree representation |
4610 | for a unary operation. CODE, a tree_code, specifies the operation |
4611 | and XARG is the operand. |
4612 | For any CODE other than ADDR_EXPR, NOCONVERT suppresses the default |
4613 | promotions (such as from short to int). |
4614 | For ADDR_EXPR, the default promotions are not applied; NOCONVERT allows |
4615 | non-lvalues; this is only used to handle conversion of non-lvalue arrays |
4616 | to pointers in C99. |
4617 | |
4618 | LOCATION is the location of the operator. */ |
4619 | |
4620 | tree |
4621 | build_unary_op (location_t location, enum tree_code code, tree xarg, |
4622 | bool noconvert) |
4623 | { |
4624 | /* No default_conversion here. It causes trouble for ADDR_EXPR. */ |
4625 | tree arg = xarg; |
4626 | tree argtype = NULL_TREE; |
4627 | enum tree_code typecode; |
4628 | tree val; |
4629 | tree ret = error_mark_node; |
4630 | tree eptype = NULL_TREE; |
4631 | const char *invalid_op_diag; |
4632 | bool int_operands; |
4633 | |
4634 | int_operands = EXPR_INT_CONST_OPERANDS (xarg); |
4635 | if (int_operands) |
4636 | arg = remove_c_maybe_const_expr (expr: arg); |
4637 | |
4638 | if (code != ADDR_EXPR) |
4639 | arg = require_complete_type (loc: location, value: arg); |
4640 | |
4641 | typecode = TREE_CODE (TREE_TYPE (arg)); |
4642 | if (typecode == ERROR_MARK) |
4643 | return error_mark_node; |
4644 | if (typecode == ENUMERAL_TYPE || typecode == BOOLEAN_TYPE) |
4645 | typecode = INTEGER_TYPE; |
4646 | |
4647 | if ((invalid_op_diag |
4648 | = targetm.invalid_unary_op (code, TREE_TYPE (xarg)))) |
4649 | { |
4650 | error_at (location, invalid_op_diag); |
4651 | return error_mark_node; |
4652 | } |
4653 | |
4654 | if (TREE_CODE (arg) == EXCESS_PRECISION_EXPR) |
4655 | { |
4656 | eptype = TREE_TYPE (arg); |
4657 | arg = TREE_OPERAND (arg, 0); |
4658 | } |
4659 | |
4660 | switch (code) |
4661 | { |
4662 | case CONVERT_EXPR: |
4663 | /* This is used for unary plus, because a CONVERT_EXPR |
4664 | is enough to prevent anybody from looking inside for |
4665 | associativity, but won't generate any code. */ |
4666 | if (!(typecode == INTEGER_TYPE || typecode == REAL_TYPE |
4667 | || typecode == FIXED_POINT_TYPE || typecode == COMPLEX_TYPE |
4668 | || typecode == BITINT_TYPE |
4669 | || gnu_vector_type_p (TREE_TYPE (arg)))) |
4670 | { |
4671 | error_at (location, "wrong type argument to unary plus" ); |
4672 | return error_mark_node; |
4673 | } |
4674 | else if (!noconvert) |
4675 | arg = default_conversion (exp: arg); |
4676 | arg = non_lvalue_loc (location, arg); |
4677 | break; |
4678 | |
4679 | case NEGATE_EXPR: |
4680 | if (!(typecode == INTEGER_TYPE || typecode == REAL_TYPE |
4681 | || typecode == FIXED_POINT_TYPE || typecode == COMPLEX_TYPE |
4682 | || typecode == BITINT_TYPE |
4683 | || gnu_vector_type_p (TREE_TYPE (arg)))) |
4684 | { |
4685 | error_at (location, "wrong type argument to unary minus" ); |
4686 | return error_mark_node; |
4687 | } |
4688 | else if (!noconvert) |
4689 | arg = default_conversion (exp: arg); |
4690 | break; |
4691 | |
4692 | case BIT_NOT_EXPR: |
4693 | /* ~ works on integer types and non float vectors. */ |
4694 | if (typecode == INTEGER_TYPE |
4695 | || typecode == BITINT_TYPE |
4696 | || (gnu_vector_type_p (TREE_TYPE (arg)) |
4697 | && !VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg)))) |
4698 | { |
4699 | tree e = arg; |
4700 | |
4701 | /* Warn if the expression has boolean value. */ |
4702 | while (TREE_CODE (e) == COMPOUND_EXPR) |
4703 | e = TREE_OPERAND (e, 1); |
4704 | |
4705 | if ((C_BOOLEAN_TYPE_P (TREE_TYPE (arg)) |
4706 | || truth_value_p (TREE_CODE (e)))) |
4707 | { |
4708 | auto_diagnostic_group d; |
4709 | if (warning_at (location, OPT_Wbool_operation, |
4710 | "%<~%> on a boolean expression" )) |
4711 | { |
4712 | gcc_rich_location richloc (location); |
4713 | richloc.add_fixit_insert_before (where: location, new_content: "!" ); |
4714 | inform (&richloc, "did you mean to use logical not?" ); |
4715 | } |
4716 | } |
4717 | if (!noconvert) |
4718 | arg = default_conversion (exp: arg); |
4719 | } |
4720 | else if (typecode == COMPLEX_TYPE) |
4721 | { |
4722 | code = CONJ_EXPR; |
4723 | pedwarn (location, OPT_Wpedantic, |
4724 | "ISO C does not support %<~%> for complex conjugation" ); |
4725 | if (!noconvert) |
4726 | arg = default_conversion (exp: arg); |
4727 | } |
4728 | else |
4729 | { |
4730 | error_at (location, "wrong type argument to bit-complement" ); |
4731 | return error_mark_node; |
4732 | } |
4733 | break; |
4734 | |
4735 | case ABS_EXPR: |
4736 | if (!(typecode == INTEGER_TYPE || typecode == REAL_TYPE)) |
4737 | { |
4738 | error_at (location, "wrong type argument to abs" ); |
4739 | return error_mark_node; |
4740 | } |
4741 | else if (!noconvert) |
4742 | arg = default_conversion (exp: arg); |
4743 | break; |
4744 | |
4745 | case ABSU_EXPR: |
4746 | if (!(typecode == INTEGER_TYPE)) |
4747 | { |
4748 | error_at (location, "wrong type argument to absu" ); |
4749 | return error_mark_node; |
4750 | } |
4751 | else if (!noconvert) |
4752 | arg = default_conversion (exp: arg); |
4753 | break; |
4754 | |
4755 | case CONJ_EXPR: |
4756 | /* Conjugating a real value is a no-op, but allow it anyway. */ |
4757 | if (!(typecode == INTEGER_TYPE || typecode == REAL_TYPE |
4758 | || typecode == COMPLEX_TYPE)) |
4759 | { |
4760 | error_at (location, "wrong type argument to conjugation" ); |
4761 | return error_mark_node; |
4762 | } |
4763 | else if (!noconvert) |
4764 | arg = default_conversion (exp: arg); |
4765 | break; |
4766 | |
4767 | case TRUTH_NOT_EXPR: |
4768 | if (typecode != INTEGER_TYPE && typecode != FIXED_POINT_TYPE |
4769 | && typecode != REAL_TYPE && typecode != POINTER_TYPE |
4770 | && typecode != COMPLEX_TYPE && typecode != NULLPTR_TYPE |
4771 | && typecode != BITINT_TYPE) |
4772 | { |
4773 | error_at (location, |
4774 | "wrong type argument to unary exclamation mark" ); |
4775 | return error_mark_node; |
4776 | } |
4777 | if (int_operands) |
4778 | { |
4779 | arg = c_objc_common_truthvalue_conversion (location, xarg); |
4780 | arg = remove_c_maybe_const_expr (expr: arg); |
4781 | } |
4782 | else |
4783 | arg = c_objc_common_truthvalue_conversion (location, arg); |
4784 | ret = invert_truthvalue_loc (location, arg); |
4785 | /* If the TRUTH_NOT_EXPR has been folded, reset the location. */ |
4786 | if (EXPR_P (ret) && EXPR_HAS_LOCATION (ret)) |
4787 | location = EXPR_LOCATION (ret); |
4788 | goto return_build_unary_op; |
4789 | |
4790 | case REALPART_EXPR: |
4791 | case IMAGPART_EXPR: |
4792 | ret = build_real_imag_expr (location, code, arg); |
4793 | if (ret == error_mark_node) |
4794 | return error_mark_node; |
4795 | if (eptype && TREE_CODE (eptype) == COMPLEX_TYPE) |
4796 | eptype = TREE_TYPE (eptype); |
4797 | goto return_build_unary_op; |
4798 | |
4799 | case PREINCREMENT_EXPR: |
4800 | case POSTINCREMENT_EXPR: |
4801 | case PREDECREMENT_EXPR: |
4802 | case POSTDECREMENT_EXPR: |
4803 | |
4804 | if (TREE_CODE (arg) == C_MAYBE_CONST_EXPR) |
4805 | { |
4806 | tree inner = build_unary_op (location, code, |
4807 | C_MAYBE_CONST_EXPR_EXPR (arg), |
4808 | noconvert); |
4809 | if (inner == error_mark_node) |
4810 | return error_mark_node; |
4811 | ret = build2 (C_MAYBE_CONST_EXPR, TREE_TYPE (inner), |
4812 | C_MAYBE_CONST_EXPR_PRE (arg), inner); |
4813 | gcc_assert (!C_MAYBE_CONST_EXPR_INT_OPERANDS (arg)); |
4814 | C_MAYBE_CONST_EXPR_NON_CONST (ret) = 1; |
4815 | goto return_build_unary_op; |
4816 | } |
4817 | |
4818 | /* Complain about anything that is not a true lvalue. In |
4819 | Objective-C, skip this check for property_refs. */ |
4820 | if (!objc_is_property_ref (arg) |
4821 | && !lvalue_or_else (location, |
4822 | arg, ((code == PREINCREMENT_EXPR |
4823 | || code == POSTINCREMENT_EXPR) |
4824 | ? lv_increment |
4825 | : lv_decrement))) |
4826 | return error_mark_node; |
4827 | |
4828 | if (warn_cxx_compat && TREE_CODE (TREE_TYPE (arg)) == ENUMERAL_TYPE) |
4829 | { |
4830 | if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR) |
4831 | warning_at (location, OPT_Wc___compat, |
4832 | "increment of enumeration value is invalid in C++" ); |
4833 | else |
4834 | warning_at (location, OPT_Wc___compat, |
4835 | "decrement of enumeration value is invalid in C++" ); |
4836 | } |
4837 | |
4838 | if (C_BOOLEAN_TYPE_P (TREE_TYPE (arg))) |
4839 | { |
4840 | if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR) |
4841 | warning_at (location, OPT_Wbool_operation, |
4842 | "increment of a boolean expression" ); |
4843 | else |
4844 | warning_at (location, OPT_Wbool_operation, |
4845 | "decrement of a boolean expression" ); |
4846 | } |
4847 | |
4848 | /* Ensure the argument is fully folded inside any SAVE_EXPR. */ |
4849 | arg = c_fully_fold (arg, false, NULL, true); |
4850 | |
4851 | bool atomic_op; |
4852 | atomic_op = really_atomic_lvalue (expr: arg); |
4853 | |
4854 | /* Increment or decrement the real part of the value, |
4855 | and don't change the imaginary part. */ |
4856 | if (typecode == COMPLEX_TYPE) |
4857 | { |
4858 | tree real, imag; |
4859 | |
4860 | pedwarn (location, OPT_Wpedantic, |
4861 | "ISO C does not support %<++%> and %<--%> on complex types" ); |
4862 | |
4863 | if (!atomic_op) |
4864 | { |
4865 | arg = stabilize_reference (arg); |
4866 | real = build_unary_op (EXPR_LOCATION (arg), code: REALPART_EXPR, xarg: arg, |
4867 | noconvert: true); |
4868 | imag = build_unary_op (EXPR_LOCATION (arg), code: IMAGPART_EXPR, xarg: arg, |
4869 | noconvert: true); |
4870 | real = build_unary_op (EXPR_LOCATION (arg), code, xarg: real, noconvert: true); |
4871 | if (real == error_mark_node || imag == error_mark_node) |
4872 | return error_mark_node; |
4873 | ret = build2 (COMPLEX_EXPR, TREE_TYPE (arg), |
4874 | real, imag); |
4875 | goto return_build_unary_op; |
4876 | } |
4877 | } |
4878 | |
4879 | /* Report invalid types. */ |
4880 | |
4881 | if (typecode != POINTER_TYPE && typecode != FIXED_POINT_TYPE |
4882 | && typecode != INTEGER_TYPE && typecode != REAL_TYPE |
4883 | && typecode != COMPLEX_TYPE && typecode != BITINT_TYPE |
4884 | && !gnu_vector_type_p (TREE_TYPE (arg))) |
4885 | { |
4886 | if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR) |
4887 | error_at (location, "wrong type argument to increment" ); |
4888 | else |
4889 | error_at (location, "wrong type argument to decrement" ); |
4890 | |
4891 | return error_mark_node; |
4892 | } |
4893 | |
4894 | { |
4895 | tree inc; |
4896 | |
4897 | argtype = TREE_TYPE (arg); |
4898 | |
4899 | /* Compute the increment. */ |
4900 | |
4901 | if (typecode == POINTER_TYPE) |
4902 | { |
4903 | /* If pointer target is an incomplete type, |
4904 | we just cannot know how to do the arithmetic. */ |
4905 | if (!COMPLETE_OR_VOID_TYPE_P (TREE_TYPE (argtype))) |
4906 | { |
4907 | if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR) |
4908 | error_at (location, |
4909 | "increment of pointer to an incomplete type %qT" , |
4910 | TREE_TYPE (argtype)); |
4911 | else |
4912 | error_at (location, |
4913 | "decrement of pointer to an incomplete type %qT" , |
4914 | TREE_TYPE (argtype)); |
4915 | } |
4916 | else if (TREE_CODE (TREE_TYPE (argtype)) == FUNCTION_TYPE |
4917 | || VOID_TYPE_P (TREE_TYPE (argtype))) |
4918 | { |
4919 | if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR) |
4920 | pedwarn (location, OPT_Wpointer_arith, |
4921 | "wrong type argument to increment" ); |
4922 | else |
4923 | pedwarn (location, OPT_Wpointer_arith, |
4924 | "wrong type argument to decrement" ); |
4925 | } |
4926 | else |
4927 | verify_type_context (location, TCTX_POINTER_ARITH, |
4928 | TREE_TYPE (argtype)); |
4929 | |
4930 | inc = c_size_in_bytes (TREE_TYPE (argtype)); |
4931 | inc = convert_to_ptrofftype_loc (loc: location, off: inc); |
4932 | } |
4933 | else if (FRACT_MODE_P (TYPE_MODE (argtype))) |
4934 | { |
4935 | /* For signed fract types, we invert ++ to -- or |
4936 | -- to ++, and change inc from 1 to -1, because |
4937 | it is not possible to represent 1 in signed fract constants. |
4938 | For unsigned fract types, the result always overflows and |
4939 | we get an undefined (original) or the maximum value. */ |
4940 | if (code == PREINCREMENT_EXPR) |
4941 | code = PREDECREMENT_EXPR; |
4942 | else if (code == PREDECREMENT_EXPR) |
4943 | code = PREINCREMENT_EXPR; |
4944 | else if (code == POSTINCREMENT_EXPR) |
4945 | code = POSTDECREMENT_EXPR; |
4946 | else /* code == POSTDECREMENT_EXPR */ |
4947 | code = POSTINCREMENT_EXPR; |
4948 | |
4949 | inc = integer_minus_one_node; |
4950 | inc = convert (argtype, inc); |
4951 | } |
4952 | else |
4953 | { |
4954 | inc = VECTOR_TYPE_P (argtype) |
4955 | ? build_one_cst (argtype) |
4956 | : integer_one_node; |
4957 | inc = convert (argtype, inc); |
4958 | } |
4959 | |
4960 | /* If 'arg' is an Objective-C PROPERTY_REF expression, then we |
4961 | need to ask Objective-C to build the increment or decrement |
4962 | expression for it. */ |
4963 | if (objc_is_property_ref (arg)) |
4964 | return objc_build_incr_expr_for_property_ref (location, code, |
4965 | arg, inc); |
4966 | |
4967 | /* Report a read-only lvalue. */ |
4968 | if (TYPE_READONLY (argtype)) |
4969 | { |
4970 | readonly_error (location, arg, |
4971 | ((code == PREINCREMENT_EXPR |
4972 | || code == POSTINCREMENT_EXPR) |
4973 | ? lv_increment : lv_decrement)); |
4974 | return error_mark_node; |
4975 | } |
4976 | else if (TREE_READONLY (arg)) |
4977 | readonly_warning (arg, |
4978 | ((code == PREINCREMENT_EXPR |
4979 | || code == POSTINCREMENT_EXPR) |
4980 | ? lv_increment : lv_decrement)); |
4981 | |
4982 | /* If the argument is atomic, use the special code sequences for |
4983 | atomic compound assignment. */ |
4984 | if (atomic_op) |
4985 | { |
4986 | arg = stabilize_reference (arg); |
4987 | ret = build_atomic_assign (loc: location, lhs: arg, |
4988 | modifycode: ((code == PREINCREMENT_EXPR |
4989 | || code == POSTINCREMENT_EXPR) |
4990 | ? PLUS_EXPR |
4991 | : MINUS_EXPR), |
4992 | rhs: (FRACT_MODE_P (TYPE_MODE (argtype)) |
4993 | ? inc |
4994 | : integer_one_node), |
4995 | return_old_p: (code == POSTINCREMENT_EXPR |
4996 | || code == POSTDECREMENT_EXPR)); |
4997 | goto return_build_unary_op; |
4998 | } |
4999 | |
5000 | if (C_BOOLEAN_TYPE_P (TREE_TYPE (arg))) |
5001 | val = boolean_increment (code, arg); |
5002 | else |
5003 | val = build2 (code, TREE_TYPE (arg), arg, inc); |
5004 | TREE_SIDE_EFFECTS (val) = 1; |
5005 | if (TYPE_QUALS (TREE_TYPE (val)) != TYPE_UNQUALIFIED) |
5006 | TREE_TYPE (val) = c_build_qualified_type (TREE_TYPE (val), |
5007 | TYPE_UNQUALIFIED); |
5008 | ret = val; |
5009 | goto return_build_unary_op; |
5010 | } |
5011 | |
5012 | case ADDR_EXPR: |
5013 | /* Note that this operation never does default_conversion. */ |
5014 | |
5015 | /* The operand of unary '&' must be an lvalue (which excludes |
5016 | expressions of type void), or, in C99, the result of a [] or |
5017 | unary '*' operator. */ |
5018 | if (VOID_TYPE_P (TREE_TYPE (arg)) |
5019 | && TYPE_QUALS (TREE_TYPE (arg)) == TYPE_UNQUALIFIED |
5020 | && (!INDIRECT_REF_P (arg) || !flag_isoc99)) |
5021 | pedwarn (location, 0, "taking address of expression of type %<void%>" ); |
5022 | |
5023 | /* Let &* cancel out to simplify resulting code. */ |
5024 | if (INDIRECT_REF_P (arg)) |
5025 | { |
5026 | /* Don't let this be an lvalue. */ |
5027 | if (lvalue_p (TREE_OPERAND (arg, 0))) |
5028 | return non_lvalue_loc (location, TREE_OPERAND (arg, 0)); |
5029 | ret = TREE_OPERAND (arg, 0); |
5030 | goto return_build_unary_op; |
5031 | } |
5032 | |
5033 | /* Anything not already handled and not a true memory reference |
5034 | or a non-lvalue array is an error. */ |
5035 | if (typecode != FUNCTION_TYPE && !noconvert |
5036 | && !lvalue_or_else (location, arg, lv_addressof)) |
5037 | return error_mark_node; |
5038 | |
5039 | /* Move address operations inside C_MAYBE_CONST_EXPR to simplify |
5040 | folding later. */ |
5041 | if (TREE_CODE (arg) == C_MAYBE_CONST_EXPR) |
5042 | { |
5043 | tree inner = build_unary_op (location, code, |
5044 | C_MAYBE_CONST_EXPR_EXPR (arg), |
5045 | noconvert); |
5046 | ret = build2 (C_MAYBE_CONST_EXPR, TREE_TYPE (inner), |
5047 | C_MAYBE_CONST_EXPR_PRE (arg), inner); |
5048 | gcc_assert (!C_MAYBE_CONST_EXPR_INT_OPERANDS (arg)); |
5049 | C_MAYBE_CONST_EXPR_NON_CONST (ret) |
5050 | = C_MAYBE_CONST_EXPR_NON_CONST (arg); |
5051 | goto return_build_unary_op; |
5052 | } |
5053 | |
5054 | /* Ordinary case; arg is a COMPONENT_REF or a decl. */ |
5055 | argtype = TREE_TYPE (arg); |
5056 | |
5057 | /* If the lvalue is const or volatile, merge that into the type |
5058 | to which the address will point. This is only needed |
5059 | for function types. */ |
5060 | if ((DECL_P (arg) || REFERENCE_CLASS_P (arg)) |
5061 | && (TREE_READONLY (arg) || TREE_THIS_VOLATILE (arg)) |
5062 | && TREE_CODE (argtype) == FUNCTION_TYPE) |
5063 | { |
5064 | int orig_quals = TYPE_QUALS (strip_array_types (argtype)); |
5065 | int quals = orig_quals; |
5066 | |
5067 | if (TREE_READONLY (arg)) |
5068 | quals |= TYPE_QUAL_CONST; |
5069 | if (TREE_THIS_VOLATILE (arg)) |
5070 | quals |= TYPE_QUAL_VOLATILE; |
5071 | |
5072 | argtype = c_build_qualified_type (argtype, quals); |
5073 | } |
5074 | |
5075 | switch (TREE_CODE (arg)) |
5076 | { |
5077 | case COMPONENT_REF: |
5078 | if (DECL_C_BIT_FIELD (TREE_OPERAND (arg, 1))) |
5079 | { |
5080 | error_at (location, "cannot take address of bit-field %qD" , |
5081 | TREE_OPERAND (arg, 1)); |
5082 | return error_mark_node; |
5083 | } |
5084 | |
5085 | /* fall through */ |
5086 | |
5087 | case ARRAY_REF: |
5088 | if (TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_OPERAND (arg, 0)))) |
5089 | { |
5090 | if (!AGGREGATE_TYPE_P (TREE_TYPE (arg)) |
5091 | && !POINTER_TYPE_P (TREE_TYPE (arg)) |
5092 | && !VECTOR_TYPE_P (TREE_TYPE (arg))) |
5093 | { |
5094 | error_at (location, "cannot take address of scalar with " |
5095 | "reverse storage order" ); |
5096 | return error_mark_node; |
5097 | } |
5098 | |
5099 | if (TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE |
5100 | && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (arg))) |
5101 | warning_at (location, OPT_Wscalar_storage_order, |
5102 | "address of array with reverse scalar storage " |
5103 | "order requested" ); |
5104 | } |
5105 | |
5106 | default: |
5107 | break; |
5108 | } |
5109 | |
5110 | if (!c_mark_addressable (arg)) |
5111 | return error_mark_node; |
5112 | |
5113 | gcc_assert (TREE_CODE (arg) != COMPONENT_REF |
5114 | || !DECL_C_BIT_FIELD (TREE_OPERAND (arg, 1))); |
5115 | |
5116 | argtype = build_pointer_type (argtype); |
5117 | |
5118 | /* ??? Cope with user tricks that amount to offsetof. Delete this |
5119 | when we have proper support for integer constant expressions. */ |
5120 | val = get_base_address (t: arg); |
5121 | if (val && INDIRECT_REF_P (val) |
5122 | && TREE_CONSTANT (TREE_OPERAND (val, 0))) |
5123 | { |
5124 | ret = fold_offsetof (arg, argtype); |
5125 | goto return_build_unary_op; |
5126 | } |
5127 | |
5128 | val = build1 (ADDR_EXPR, argtype, arg); |
5129 | |
5130 | ret = val; |
5131 | goto return_build_unary_op; |
5132 | |
5133 | case PAREN_EXPR: |
5134 | ret = build1 (code, TREE_TYPE (arg), arg); |
5135 | goto return_build_unary_op; |
5136 | |
5137 | default: |
5138 | gcc_unreachable (); |
5139 | } |
5140 | |
5141 | if (argtype == NULL_TREE) |
5142 | argtype = TREE_TYPE (arg); |
5143 | if (TREE_CODE (arg) == INTEGER_CST) |
5144 | ret = (require_constant_value |
5145 | ? fold_build1_initializer_loc (location, code, argtype, arg) |
5146 | : fold_build1_loc (location, code, argtype, arg)); |
5147 | else |
5148 | ret = build1 (code, argtype, arg); |
5149 | return_build_unary_op: |
5150 | gcc_assert (ret != error_mark_node); |
5151 | if (TREE_CODE (ret) == INTEGER_CST && !TREE_OVERFLOW (ret) |
5152 | && !(TREE_CODE (xarg) == INTEGER_CST && !TREE_OVERFLOW (xarg))) |
5153 | ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret); |
5154 | else if (TREE_CODE (ret) != INTEGER_CST && int_operands) |
5155 | ret = note_integer_operands (expr: ret); |
5156 | if (eptype) |
5157 | ret = build1 (EXCESS_PRECISION_EXPR, eptype, ret); |
5158 | protected_set_expr_location (ret, location); |
5159 | return ret; |
5160 | } |
5161 | |
5162 | /* Return nonzero if REF is an lvalue valid for this language. |
5163 | Lvalues can be assigned, unless their type has TYPE_READONLY. |
5164 | Lvalues can have their address taken, unless they have C_DECL_REGISTER. */ |
5165 | |
5166 | bool |
5167 | lvalue_p (const_tree ref) |
5168 | { |
5169 | const enum tree_code code = TREE_CODE (ref); |
5170 | |
5171 | switch (code) |
5172 | { |
5173 | case REALPART_EXPR: |
5174 | case IMAGPART_EXPR: |
5175 | case COMPONENT_REF: |
5176 | return lvalue_p (TREE_OPERAND (ref, 0)); |
5177 | |
5178 | case C_MAYBE_CONST_EXPR: |
5179 | return lvalue_p (TREE_OPERAND (ref, 1)); |
5180 | |
5181 | case COMPOUND_LITERAL_EXPR: |
5182 | case STRING_CST: |
5183 | return true; |
5184 | |
5185 | case MEM_REF: |
5186 | case TARGET_MEM_REF: |
5187 | /* MEM_REFs can appear from -fgimple parsing or folding, so allow them |
5188 | here as well. */ |
5189 | case INDIRECT_REF: |
5190 | case ARRAY_REF: |
5191 | case VAR_DECL: |
5192 | case PARM_DECL: |
5193 | case RESULT_DECL: |
5194 | case ERROR_MARK: |
5195 | return (TREE_CODE (TREE_TYPE (ref)) != FUNCTION_TYPE |
5196 | && TREE_CODE (TREE_TYPE (ref)) != METHOD_TYPE); |
5197 | |
5198 | case BIND_EXPR: |
5199 | return TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE; |
5200 | |
5201 | default: |
5202 | return false; |
5203 | } |
5204 | } |
5205 | |
5206 | /* Give a warning for storing in something that is read-only in GCC |
5207 | terms but not const in ISO C terms. */ |
5208 | |
5209 | static void |
5210 | readonly_warning (tree arg, enum lvalue_use use) |
5211 | { |
5212 | switch (use) |
5213 | { |
5214 | case lv_assign: |
5215 | warning (0, "assignment of read-only location %qE" , arg); |
5216 | break; |
5217 | case lv_increment: |
5218 | warning (0, "increment of read-only location %qE" , arg); |
5219 | break; |
5220 | case lv_decrement: |
5221 | warning (0, "decrement of read-only location %qE" , arg); |
5222 | break; |
5223 | default: |
5224 | gcc_unreachable (); |
5225 | } |
5226 | return; |
5227 | } |
5228 | |
5229 | |
5230 | /* Return nonzero if REF is an lvalue valid for this language; |
5231 | otherwise, print an error message and return zero. USE says |
5232 | how the lvalue is being used and so selects the error message. |
5233 | LOCATION is the location at which any error should be reported. */ |
5234 | |
5235 | static int |
5236 | lvalue_or_else (location_t loc, const_tree ref, enum lvalue_use use) |
5237 | { |
5238 | int win = lvalue_p (ref); |
5239 | |
5240 | if (!win) |
5241 | lvalue_error (loc, use); |
5242 | |
5243 | return win; |
5244 | } |
5245 | |
5246 | /* Mark EXP saying that we need to be able to take the |
5247 | address of it; it should not be allocated in a register. |
5248 | Returns true if successful. ARRAY_REF_P is true if this |
5249 | is for ARRAY_REF construction - in that case we don't want |
5250 | to look through VIEW_CONVERT_EXPR from VECTOR_TYPE to ARRAY_TYPE, |
5251 | it is fine to use ARRAY_REFs for vector subscripts on vector |
5252 | register variables. */ |
5253 | |
bool
c_mark_addressable (tree exp, bool array_ref_p)
{
  tree x = exp;

  /* Peel off handled components (field selections, array indexing,
     real/imag parts, ...) until we reach the base object that must
     actually live in memory, then mark that base addressable.  */
  while (1)
    switch (TREE_CODE (x))
      {
      case VIEW_CONVERT_EXPR:
	/* For ARRAY_REF construction, a view of a vector as an array is
	   fine even on a register variable, so stop without marking.  */
	if (array_ref_p
	    && TREE_CODE (TREE_TYPE (x)) == ARRAY_TYPE
	    && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))))
	  return true;
	x = TREE_OPERAND (x, 0);
	break;

      case COMPONENT_REF:
	/* Bit-fields have no addressable storage of their own.  */
	if (DECL_C_BIT_FIELD (TREE_OPERAND (x, 1)))
	  {
	    error ("cannot take address of bit-field %qD",
		   TREE_OPERAND (x, 1));
	    return false;
	  }
	/* FALLTHRU */
      case ADDR_EXPR:
      case ARRAY_REF:
      case REALPART_EXPR:
      case IMAGPART_EXPR:
	/* Keep descending toward the base object.  */
	x = TREE_OPERAND (x, 0);
	break;

      case COMPOUND_LITERAL_EXPR:
	if (C_DECL_REGISTER (COMPOUND_LITERAL_EXPR_DECL (x)))
	  {
	    error ("address of register compound literal requested");
	    return false;
	  }
	/* Mark both the literal expression and its backing decl.  */
	TREE_ADDRESSABLE (x) = 1;
	TREE_ADDRESSABLE (COMPOUND_LITERAL_EXPR_DECL (x)) = 1;
	return true;

      case CONSTRUCTOR:
	TREE_ADDRESSABLE (x) = 1;
	return true;

      case VAR_DECL:
      case CONST_DECL:
      case PARM_DECL:
      case RESULT_DECL:
	/* A register variable referenced from a nested function: a hard
	   error for global register variables, otherwise a pedwarn (the
	   address is still taken in that case).  */
	if (C_DECL_REGISTER (x)
	    && DECL_NONLOCAL (x))
	  {
	    if (TREE_PUBLIC (x) || is_global_var (t: x))
	      {
		error
		  ("global register variable %qD used in nested function", x);
		return false;
	      }
	    pedwarn (input_location, 0, "register variable %qD used in nested function", x);
	  }
	else if (C_DECL_REGISTER (x))
	  {
	    /* Taking the address of a register variable is always an
	       error in C.  */
	    if (TREE_PUBLIC (x) || is_global_var (t: x))
	      error ("address of global register variable %qD requested", x);
	    else
	      error ("address of register variable %qD requested", x);
	    return false;
	  }

	/* FALLTHRU */
      case FUNCTION_DECL:
	TREE_ADDRESSABLE (x) = 1;
	/* FALLTHRU */
      default:
	/* Anything else (e.g. constants, temporaries) needs no mark.  */
	return true;
      }
}
5331 | |
5332 | /* Convert EXPR to TYPE, warning about conversion problems with |
5333 | constants. SEMANTIC_TYPE is the type this conversion would use |
5334 | without excess precision. If SEMANTIC_TYPE is NULL, this function |
5335 | is equivalent to convert_and_check. This function is a wrapper that |
5336 | handles conversions that may be different than |
5337 | the usual ones because of excess precision. */ |
5338 | |
5339 | static tree |
5340 | ep_convert_and_check (location_t loc, tree type, tree expr, |
5341 | tree semantic_type) |
5342 | { |
5343 | if (TREE_TYPE (expr) == type) |
5344 | return expr; |
5345 | |
5346 | /* For C11, integer conversions may have results with excess |
5347 | precision. */ |
5348 | if (flag_isoc11 || !semantic_type) |
5349 | return convert_and_check (loc, type, expr); |
5350 | |
5351 | if (TREE_CODE (TREE_TYPE (expr)) == INTEGER_TYPE |
5352 | && TREE_TYPE (expr) != semantic_type) |
5353 | { |
5354 | /* For integers, we need to check the real conversion, not |
5355 | the conversion to the excess precision type. */ |
5356 | expr = convert_and_check (loc, semantic_type, expr); |
5357 | } |
5358 | /* Result type is the excess precision type, which should be |
5359 | large enough, so do not check. */ |
5360 | return convert (type, expr); |
5361 | } |
5362 | |
5363 | /* If EXPR refers to a built-in declared without a prototype returns |
5364 | the actual type of the built-in and, if non-null, set *BLTIN to |
5365 | a pointer to the built-in. Otherwise return the type of EXPR |
5366 | and clear *BLTIN if non-null. */ |
5367 | |
5368 | static tree |
5369 | type_or_builtin_type (tree expr, tree *bltin = NULL) |
5370 | { |
5371 | tree dummy; |
5372 | if (!bltin) |
5373 | bltin = &dummy; |
5374 | |
5375 | *bltin = NULL_TREE; |
5376 | |
5377 | tree type = TREE_TYPE (expr); |
5378 | if (TREE_CODE (expr) != ADDR_EXPR) |
5379 | return type; |
5380 | |
5381 | tree oper = TREE_OPERAND (expr, 0); |
5382 | if (!DECL_P (oper) |
5383 | || TREE_CODE (oper) != FUNCTION_DECL |
5384 | || !fndecl_built_in_p (node: oper, klass: BUILT_IN_NORMAL)) |
5385 | return type; |
5386 | |
5387 | built_in_function code = DECL_FUNCTION_CODE (decl: oper); |
5388 | if (!C_DECL_BUILTIN_PROTOTYPE (oper)) |
5389 | return type; |
5390 | |
5391 | if ((*bltin = builtin_decl_implicit (fncode: code))) |
5392 | type = build_pointer_type (TREE_TYPE (*bltin)); |
5393 | |
5394 | return type; |
5395 | } |
5396 | |
5397 | /* Build and return a conditional expression IFEXP ? OP1 : OP2. If |
5398 | IFEXP_BCP then the condition is a call to __builtin_constant_p, and |
5399 | if folded to an integer constant then the unselected half may |
5400 | contain arbitrary operations not normally permitted in constant |
5401 | expressions. Set the location of the expression to LOC. */ |
5402 | |
tree
build_conditional_expr (location_t colon_loc, tree ifexp, bool ifexp_bcp,
			tree op1, tree op1_original_type, location_t op1_loc,
			tree op2, tree op2_original_type, location_t op2_loc)
{
  tree type1;
  tree type2;
  enum tree_code code1;
  enum tree_code code2;
  tree result_type = NULL;
  /* Type the result would have without excess precision; non-NULL only
     when an EXCESS_PRECISION_EXPR wrapper must be rebuilt at the end.  */
  tree semantic_result_type = NULL;
  tree orig_op1 = op1, orig_op2 = op2;
  bool int_const, op1_int_operands, op2_int_operands, int_operands;
  bool ifexp_int_operands;
  tree ret;

  /* Strip C_MAYBE_CONST_EXPR wrappers from operands made up of integer
     constant operands; whether the result is re-marked as such is
     decided at the end.  */
  op1_int_operands = EXPR_INT_CONST_OPERANDS (orig_op1);
  if (op1_int_operands)
    op1 = remove_c_maybe_const_expr (expr: op1);
  op2_int_operands = EXPR_INT_CONST_OPERANDS (orig_op2);
  if (op2_int_operands)
    op2 = remove_c_maybe_const_expr (expr: op2);
  ifexp_int_operands = EXPR_INT_CONST_OPERANDS (ifexp);
  if (ifexp_int_operands)
    ifexp = remove_c_maybe_const_expr (expr: ifexp);

  /* Promote both alternatives.  */

  if (TREE_CODE (TREE_TYPE (op1)) != VOID_TYPE)
    op1 = default_conversion (exp: op1);
  if (TREE_CODE (TREE_TYPE (op2)) != VOID_TYPE)
    op2 = default_conversion (exp: op2);

  if (TREE_CODE (ifexp) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (op1)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (op2)) == ERROR_MARK)
    return error_mark_node;

  /* For addresses of unprototyped built-ins, use the type of the
     implicit declaration (BLTIN1/BLTIN2 record the built-in decls).  */
  tree bltin1 = NULL_TREE;
  tree bltin2 = NULL_TREE;
  type1 = type_or_builtin_type (expr: op1, bltin: &bltin1);
  code1 = TREE_CODE (type1);
  type2 = type_or_builtin_type (expr: op2, bltin: &bltin2);
  code2 = TREE_CODE (type2);

  if (code1 == POINTER_TYPE && reject_gcc_builtin (op1))
    return error_mark_node;

  if (code2 == POINTER_TYPE && reject_gcc_builtin (op2))
    return error_mark_node;

  /* C90 does not permit non-lvalue arrays in conditional expressions.
     In C99 they will be pointers by now.  */
  if (code1 == ARRAY_TYPE || code2 == ARRAY_TYPE)
    {
      error_at (colon_loc, "non-lvalue array in conditional expression");
      return error_mark_node;
    }

  /* If either arm carries excess precision, strip the wrappers and
     remember the semantic common type for the final result.  */
  if ((TREE_CODE (op1) == EXCESS_PRECISION_EXPR
       || TREE_CODE (op2) == EXCESS_PRECISION_EXPR)
      && (code1 == INTEGER_TYPE || code1 == REAL_TYPE
	  || code1 == COMPLEX_TYPE || code1 == BITINT_TYPE)
      && (code2 == INTEGER_TYPE || code2 == REAL_TYPE
	  || code2 == COMPLEX_TYPE || code2 == BITINT_TYPE))
    {
      semantic_result_type = c_common_type (t1: type1, t2: type2);
      if (TREE_CODE (op1) == EXCESS_PRECISION_EXPR)
	{
	  op1 = TREE_OPERAND (op1, 0);
	  type1 = TREE_TYPE (op1);
	  gcc_assert (TREE_CODE (type1) == code1);
	}
      if (TREE_CODE (op2) == EXCESS_PRECISION_EXPR)
	{
	  op2 = TREE_OPERAND (op2, 0);
	  type2 = TREE_TYPE (op2);
	  gcc_assert (TREE_CODE (type2) == code2);
	}
    }

  if (warn_cxx_compat)
    {
      tree t1 = op1_original_type ? op1_original_type : TREE_TYPE (orig_op1);
      tree t2 = op2_original_type ? op2_original_type : TREE_TYPE (orig_op2);

      if (TREE_CODE (t1) == ENUMERAL_TYPE
	  && TREE_CODE (t2) == ENUMERAL_TYPE
	  && TYPE_MAIN_VARIANT (t1) != TYPE_MAIN_VARIANT (t2))
	warning_at (colon_loc, OPT_Wc___compat,
		    ("different enum types in conditional is "
		     "invalid in C++: %qT vs %qT"),
		    t1, t2);
    }

  /* Quickly detect the usual case where op1 and op2 have the same type
     after promotion.  */
  if (TYPE_MAIN_VARIANT (type1) == TYPE_MAIN_VARIANT (type2))
    {
      if (type1 == type2)
	result_type = type1;
      else
	result_type = TYPE_MAIN_VARIANT (type1);
    }
  else if ((code1 == INTEGER_TYPE || code1 == REAL_TYPE
	    || code1 == COMPLEX_TYPE || code1 == BITINT_TYPE)
	   && (code2 == INTEGER_TYPE || code2 == REAL_TYPE
	       || code2 == COMPLEX_TYPE || code2 == BITINT_TYPE))
    {
      /* In C11, a conditional expression between a floating-point
	 type and an integer type should convert the integer type to
	 the evaluation format of the floating-point type, with
	 possible excess precision.  */
      tree eptype1 = type1;
      tree eptype2 = type2;
      if (flag_isoc11)
	{
	  tree eptype;
	  if (ANY_INTEGRAL_TYPE_P (type1)
	      && (eptype = excess_precision_type (type2)) != NULL_TREE)
	    {
	      eptype2 = eptype;
	      if (!semantic_result_type)
		semantic_result_type = c_common_type (t1: type1, t2: type2);
	    }
	  else if (ANY_INTEGRAL_TYPE_P (type2)
		   && (eptype = excess_precision_type (type1)) != NULL_TREE)
	    {
	      eptype1 = eptype;
	      if (!semantic_result_type)
		semantic_result_type = c_common_type (t1: type1, t2: type2);
	    }
	}
      result_type = c_common_type (t1: eptype1, t2: eptype2);
      if (result_type == error_mark_node)
	return error_mark_node;
      do_warn_double_promotion (result_type, type1, type2,
				"implicit conversion from %qT to %qT to "
				"match other result of conditional",
				colon_loc);

      /* If -Wsign-compare, warn here if type1 and type2 have
	 different signedness.  We'll promote the signed to unsigned
	 and later code won't know it used to be different.
	 Do this check on the original types, so that explicit casts
	 will be considered, but default promotions won't.  */
      if (c_inhibit_evaluation_warnings == 0)
	{
	  int unsigned_op1 = TYPE_UNSIGNED (TREE_TYPE (orig_op1));
	  int unsigned_op2 = TYPE_UNSIGNED (TREE_TYPE (orig_op2));

	  if (unsigned_op1 ^ unsigned_op2)
	    {
	      bool ovf;

	      /* Do not warn if the result type is signed, since the
		 signed type will only be chosen if it can represent
		 all the values of the unsigned type.  */
	      if (!TYPE_UNSIGNED (result_type))
		/* OK */;
	      else
		{
		  bool op1_maybe_const = true;
		  bool op2_maybe_const = true;

		  /* Do not warn if the signed quantity is an
		     unsuffixed integer literal (or some static
		     constant expression involving such literals) and
		     it is non-negative.  This warning requires the
		     operands to be folded for best results, so do
		     that folding in this case even without
		     warn_sign_compare to avoid warning options
		     possibly affecting code generation.  */
		  c_inhibit_evaluation_warnings
		    += (ifexp == truthvalue_false_node);
		  op1 = c_fully_fold (op1, require_constant_value,
				      &op1_maybe_const);
		  c_inhibit_evaluation_warnings
		    -= (ifexp == truthvalue_false_node);

		  c_inhibit_evaluation_warnings
		    += (ifexp == truthvalue_true_node);
		  op2 = c_fully_fold (op2, require_constant_value,
				      &op2_maybe_const);
		  c_inhibit_evaluation_warnings
		    -= (ifexp == truthvalue_true_node);

		  if (warn_sign_compare)
		    {
		      if ((unsigned_op2
			   && tree_expr_nonnegative_warnv_p (op1, &ovf))
			  || (unsigned_op1
			      && tree_expr_nonnegative_warnv_p (op2, &ovf)))
			/* OK */;
		      else if (unsigned_op2)
			warning_at (op1_loc, OPT_Wsign_compare,
				    "operand of %<?:%> changes signedness from "
				    "%qT to %qT due to unsignedness of other "
				    "operand", TREE_TYPE (orig_op1),
				    TREE_TYPE (orig_op2));
		      else
			warning_at (op2_loc, OPT_Wsign_compare,
				    "operand of %<?:%> changes signedness from "
				    "%qT to %qT due to unsignedness of other "
				    "operand", TREE_TYPE (orig_op2),
				    TREE_TYPE (orig_op1));
		    }
		  /* Re-wrap folded operands that are not constants.  */
		  if (!op1_maybe_const || TREE_CODE (op1) != INTEGER_CST)
		    op1 = c_wrap_maybe_const (op1, !op1_maybe_const);
		  if (!op2_maybe_const || TREE_CODE (op2) != INTEGER_CST)
		    op2 = c_wrap_maybe_const (op2, !op2_maybe_const);
		}
	    }
	}
    }
  else if (code1 == VOID_TYPE || code2 == VOID_TYPE)
    {
      if (code1 != VOID_TYPE || code2 != VOID_TYPE)
	pedwarn (colon_loc, OPT_Wpedantic,
		 "ISO C forbids conditional expr with only one void side");
      result_type = void_type_node;
    }
  else if (code1 == POINTER_TYPE && code2 == POINTER_TYPE)
    {
      addr_space_t as1 = TYPE_ADDR_SPACE (TREE_TYPE (type1));
      addr_space_t as2 = TYPE_ADDR_SPACE (TREE_TYPE (type2));
      addr_space_t as_common;

      if (comp_target_types (location: colon_loc, ttl: type1, ttr: type2))
	result_type = common_pointer_type (t1: type1, t2: type2);
      else if (null_pointer_constant_p (expr: orig_op1))
	result_type = type2;
      else if (null_pointer_constant_p (expr: orig_op2))
	result_type = type1;
      else if (!addr_space_superset (as1, as2, common: &as_common))
	{
	  error_at (colon_loc, "pointers to disjoint address spaces "
		    "used in conditional expression");
	  return error_mark_node;
	}
      else if ((VOID_TYPE_P (TREE_TYPE (type1))
		&& !TYPE_ATOMIC (TREE_TYPE (type1)))
	       || (VOID_TYPE_P (TREE_TYPE (type2))
		   && !TYPE_ATOMIC (TREE_TYPE (type2))))
	{
	  /* One arm is (non-atomic) void *; the result is a pointer to
	     suitably-qualified void.  T1 is normalized to the void side.  */
	  tree t1 = TREE_TYPE (type1);
	  tree t2 = TREE_TYPE (type2);
	  if (!(VOID_TYPE_P (t1)
		&& !TYPE_ATOMIC (t1)))
	    {
	      /* roles are swapped */
	      t1 = t2;
	      t2 = TREE_TYPE (type1);
	    }
	  tree t2_stripped = strip_array_types (type: t2);
	  if ((TREE_CODE (t2) == ARRAY_TYPE)
	      && (TYPE_QUALS (t2_stripped) & ~TYPE_QUALS (t1)))
	    {
	      if (!flag_isoc23)
		warning_at (colon_loc, OPT_Wdiscarded_array_qualifiers,
			    "pointer to array loses qualifier "
			    "in conditional expression");
	      else if (warn_c11_c23_compat > 0)
		warning_at (colon_loc, OPT_Wc11_c23_compat,
			    "pointer to array loses qualifier "
			    "in conditional expression in ISO C before C23");
	    }
	  if (TREE_CODE (t2) == FUNCTION_TYPE)
	    pedwarn (colon_loc, OPT_Wpedantic,
		     "ISO C forbids conditional expr between "
		     "%<void *%> and function pointer");
	  /* for array, use qualifiers of element type */
	  if (flag_isoc23)
	    t2 = t2_stripped;
	  result_type = build_pointer_type (qualify_type (type: t1, like: t2));
	}
      /* Objective-C pointer comparisons are a bit more lenient.  */
      else if (objc_have_common_type (type1, type2, -3, NULL_TREE))
	result_type = objc_common_type (type1, type2);
      else
	{
	  /* Incompatible pointer types: diagnose and fall back to a
	     qualified void * in the common address space.  */
	  int qual = ENCODE_QUAL_ADDR_SPACE (as_common);
	  diagnostic_t kind = DK_PERMERROR;
	  if (!flag_isoc99)
	    /* This downgrade to a warning ensures that -std=gnu89
	       -pedantic-errors does not flag these mismatches between
	       builtins as errors (as DK_PERMERROR would).  ISO C99
	       and later do not have implicit function declarations,
	       so the mismatch cannot occur naturally there.  */
	    kind = bltin1 && bltin2 ? DK_WARNING : DK_PEDWARN;
	  if (emit_diagnostic (kind, colon_loc, OPT_Wincompatible_pointer_types,
			       "pointer type mismatch "
			       "in conditional expression"))
	    {
	      inform (op1_loc, "first expression has type %qT", type1);
	      inform (op2_loc, "second expression has type %qT", type2);
	    }
	  result_type = build_pointer_type
			  (build_qualified_type (void_type_node, qual));
	}
    }
  else if (code1 == POINTER_TYPE
	   && (code2 == INTEGER_TYPE || code2 == BITINT_TYPE))
    {
      /* Pointer vs integer: valid only if the integer is a null
	 pointer constant.  */
      if (!null_pointer_constant_p (expr: orig_op2))
	permerror_opt (colon_loc, OPT_Wint_conversion,
		       "pointer/integer type mismatch "
		       "in conditional expression");
      else
	{
	  op2 = null_pointer_node;
	}
      result_type = type1;
    }
  else if (code2 == POINTER_TYPE
	   && (code1 == INTEGER_TYPE || code1 == BITINT_TYPE))
    {
      if (!null_pointer_constant_p (expr: orig_op1))
	permerror_opt (colon_loc, OPT_Wint_conversion,
		       "pointer/integer type mismatch "
		       "in conditional expression");
      else
	{
	  op1 = null_pointer_node;
	}
      result_type = type2;
    }
  /* 6.5.15: "if one is a null pointer constant (other than a pointer) or has
     type nullptr_t and the other is a pointer, the result type is the pointer
     type."  */
  else if (code1 == NULLPTR_TYPE && code2 == POINTER_TYPE)
    result_type = type2;
  else if (code1 == POINTER_TYPE && code2 == NULLPTR_TYPE)
    result_type = type1;
  else if (RECORD_OR_UNION_TYPE_P (type1) && RECORD_OR_UNION_TYPE_P (type2)
	   && comptypes (TYPE_MAIN_VARIANT (type1),
			 TYPE_MAIN_VARIANT (type2)))
    result_type = composite_type (TYPE_MAIN_VARIANT (type1),
				  TYPE_MAIN_VARIANT (type2));

  if (!result_type)
    {
      if (flag_cond_mismatch)
	result_type = void_type_node;
      else
	{
	  error_at (colon_loc, "type mismatch in conditional expression");
	  return error_mark_node;
	}
    }

  /* Merge const and volatile flags of the incoming types.  */
  result_type
    = build_type_variant (result_type,
			  TYPE_READONLY (type1) || TYPE_READONLY (type2),
			  TYPE_VOLATILE (type1) || TYPE_VOLATILE (type2));

  op1 = ep_convert_and_check (loc: colon_loc, type: result_type, expr: op1,
			      semantic_type: semantic_result_type);
  op2 = ep_convert_and_check (loc: colon_loc, type: result_type, expr: op2,
			      semantic_type: semantic_result_type);

  /* For __builtin_constant_p conditions folded to a constant, the
     unselected arm is irrelevant: treat it as integer operands and
     fold the selected arm.  */
  if (ifexp_bcp && ifexp == truthvalue_true_node)
    {
      op2_int_operands = true;
      op1 = c_fully_fold (op1, require_constant_value, NULL);
    }
  if (ifexp_bcp && ifexp == truthvalue_false_node)
    {
      op1_int_operands = true;
      op2 = c_fully_fold (op2, require_constant_value, NULL);
    }
  int_const = int_operands = (ifexp_int_operands
			      && op1_int_operands
			      && op2_int_operands);
  if (int_operands)
    {
      int_const = ((ifexp == truthvalue_true_node
		    && TREE_CODE (orig_op1) == INTEGER_CST
		    && !TREE_OVERFLOW (orig_op1))
		   || (ifexp == truthvalue_false_node
		       && TREE_CODE (orig_op2) == INTEGER_CST
		       && !TREE_OVERFLOW (orig_op2)));
    }

  /* Need to convert condition operand into a vector mask.  */
  if (VECTOR_TYPE_P (TREE_TYPE (ifexp)))
    {
      tree vectype = TREE_TYPE (ifexp);
      tree elem_type = TREE_TYPE (vectype);
      tree zero = build_int_cst (elem_type, 0);
      tree zero_vec = build_vector_from_val (vectype, zero);
      tree cmp_type = truth_type_for (vectype);
      ifexp = build2 (NE_EXPR, cmp_type, ifexp, zero_vec);
    }

  if (int_const || (ifexp_bcp && TREE_CODE (ifexp) == INTEGER_CST))
    ret = fold_build3_loc (colon_loc, COND_EXPR, result_type, ifexp, op1, op2);
  else
    {
      if (int_operands)
	{
	  /* Use c_fully_fold here, since C_MAYBE_CONST_EXPR might be
	     nested inside of the expression.  */
	  op1 = c_fully_fold (op1, false, NULL);
	  op2 = c_fully_fold (op2, false, NULL);
	}
      ret = build3 (COND_EXPR, result_type, ifexp, op1, op2);
      if (int_operands)
	ret = note_integer_operands (expr: ret);
    }
  /* Restore any excess precision stripped earlier.  */
  if (semantic_result_type)
    ret = build1 (EXCESS_PRECISION_EXPR, semantic_result_type, ret);

  protected_set_expr_location (ret, colon_loc);

  /* If the OP1 and OP2 are the same and don't have side-effects,
     warn here, because the COND_EXPR will be turned into OP1.  */
  if (warn_duplicated_branches
      && TREE_CODE (ret) == COND_EXPR
      && (op1 == op2 || operand_equal_p (op1, op2, flags: OEP_ADDRESS_OF_SAME_FIELD)))
    warning_at (EXPR_LOCATION (ret), OPT_Wduplicated_branches,
		"this condition has identical branches");

  return ret;
}
5829 | |
5830 | /* EXPR is an expression, location LOC, whose result is discarded. |
5831 | Warn if it is a call to a nodiscard function (or a COMPOUND_EXPR |
5832 | whose right-hand operand is such a call, possibly recursively). */ |
5833 | |
static void
maybe_warn_nodiscard (location_t loc, tree expr)
{
  /* A void-typed result cannot be "discarded".  */
  if (VOID_TYPE_P (TREE_TYPE (expr)))
    return;
  /* For a comma expression, only the final operand's value matters;
     track its location for the diagnostic.  */
  while (TREE_CODE (expr) == COMPOUND_EXPR)
    {
      expr = TREE_OPERAND (expr, 1);
      if (EXPR_HAS_LOCATION (expr))
	loc = EXPR_LOCATION (expr);
    }
  if (TREE_CODE (expr) != CALL_EXPR)
    return;
  tree fn = CALL_EXPR_FN (expr);
  if (!fn)
    return;
  tree attr;
  /* Case 1: direct call to a function declared with the nodiscard
     attribute on the function itself.  */
  if (TREE_CODE (fn) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
      && (attr = lookup_attribute (attr_name: "nodiscard",
				   DECL_ATTRIBUTES (TREE_OPERAND (fn, 0)))))
    {
      fn = TREE_OPERAND (fn, 0);
      /* The optional attribute argument is the reason string.  */
      tree args = TREE_VALUE (attr);
      if (args)
	args = TREE_VALUE (args);
      auto_diagnostic_group d;
      int warned;
      if (args)
	warned = warning_at (loc, OPT_Wunused_result,
			     "ignoring return value of %qD, declared with "
			     "attribute %<nodiscard%>: %E", fn, args);
      else
	warned = warning_at (loc, OPT_Wunused_result,
			     "ignoring return value of %qD, declared with "
			     "attribute %<nodiscard%>", fn);
      if (warned)
	inform (DECL_SOURCE_LOCATION (fn), "declared here");
    }
  else
    {
      /* Case 2: the nodiscard attribute is on the call's return type
	 (covers indirect calls too).  */
      tree rettype = TREE_TYPE (TREE_TYPE (TREE_TYPE (fn)));
      attr = lookup_attribute (attr_name: "nodiscard", TYPE_ATTRIBUTES (rettype));
      if (!attr)
	return;
      tree args = TREE_VALUE (attr);
      if (args)
	args = TREE_VALUE (args);
      auto_diagnostic_group d;
      int warned;
      if (args)
	warned = warning_at (loc, OPT_Wunused_result,
			     "ignoring return value of type %qT, declared "
			     "with attribute %<nodiscard%>: %E",
			     rettype, args);
      else
	warned = warning_at (loc, OPT_Wunused_result,
			     "ignoring return value of type %qT, declared "
			     "with attribute %<nodiscard%>", rettype);
      if (warned)
	{
	  /* Point at the callee when it is statically known.  */
	  if (TREE_CODE (fn) == ADDR_EXPR)
	    {
	      fn = TREE_OPERAND (fn, 0);
	      if (TREE_CODE (fn) == FUNCTION_DECL)
		inform (DECL_SOURCE_LOCATION (fn),
			"in call to %qD, declared here", fn);
	    }
	}
    }
}
5905 | |
5906 | /* Return a compound expression that performs two expressions and |
5907 | returns the value of the second of them. |
5908 | |
5909 | LOC is the location of the COMPOUND_EXPR. */ |
5910 | |
tree
build_compound_expr (location_t loc, tree expr1, tree expr2)
{
  bool expr1_int_operands, expr2_int_operands;
  tree eptype = NULL_TREE;
  tree ret;

  /* Note whether each operand consists only of integer constant
     operands, and if so strip any C_MAYBE_CONST_EXPR wrapper; the
     property is reinstated on the result below.  */
  expr1_int_operands = EXPR_INT_CONST_OPERANDS (expr1);
  if (expr1_int_operands)
    expr1 = remove_c_maybe_const_expr (expr: expr1);
  expr2_int_operands = EXPR_INT_CONST_OPERANDS (expr2);
  if (expr2_int_operands)
    expr2 = remove_c_maybe_const_expr (expr: expr2);

  /* Excess precision on EXPR1 is irrelevant (its value is discarded);
     excess precision on EXPR2 determines the semantic type of the
     whole comma expression, so remember it in EPTYPE.  */
  if (TREE_CODE (expr1) == EXCESS_PRECISION_EXPR)
    expr1 = TREE_OPERAND (expr1, 0);
  if (TREE_CODE (expr2) == EXCESS_PRECISION_EXPR)
    {
      eptype = TREE_TYPE (expr2);
      expr2 = TREE_OPERAND (expr2, 0);
    }

  if (!TREE_SIDE_EFFECTS (expr1))
    {
      /* The left-hand operand of a comma expression is like an expression
	 statement: with -Wunused, we should warn if it doesn't have
	 any side-effects, unless it was explicitly cast to (void).  */
      if (warn_unused_value)
	{
	  if (VOID_TYPE_P (TREE_TYPE (expr1))
	      && CONVERT_EXPR_P (expr1))
	    ; /* (void) a, b */
	  else if (VOID_TYPE_P (TREE_TYPE (expr1))
		   && TREE_CODE (expr1) == COMPOUND_EXPR
		   && CONVERT_EXPR_P (TREE_OPERAND (expr1, 1)))
	    ; /* (void) a, (void) b, c */
	  else
	    warning_at (loc, OPT_Wunused_value,
			"left-hand operand of comma expression has no effect" );
	}
    }
  else if (TREE_CODE (expr1) == COMPOUND_EXPR
	   && warn_unused_value)
    {
      /* EXPR1 is itself a comma expression; walk to its final
	 (right-hand) operand and warn, using that operand's location
	 if it has one, when it computes an unused non-void value.  */
      tree r = expr1;
      location_t cloc = loc;
      while (TREE_CODE (r) == COMPOUND_EXPR)
	{
	  if (EXPR_HAS_LOCATION (r))
	    cloc = EXPR_LOCATION (r);
	  r = TREE_OPERAND (r, 1);
	}
      if (!TREE_SIDE_EFFECTS (r)
	  && !VOID_TYPE_P (TREE_TYPE (r))
	  && !CONVERT_EXPR_P (r))
	warning_at (cloc, OPT_Wunused_value,
		    "right-hand operand of comma expression has no effect" );
    }

  /* With -Wunused, we should also warn if the left-hand operand does have
     side-effects, but computes a value which is not used.  For example, in
     `foo() + bar(), baz()' the result of the `+' operator is not used,
     so we should issue a warning.  */
  else if (warn_unused_value)
    warn_if_unused_value (expr1, loc);

  maybe_warn_nodiscard (loc, expr: expr1);

  if (expr2 == error_mark_node)
    return error_mark_node;

  /* The result has the type and value of EXPR2.  */
  ret = build2 (COMPOUND_EXPR, TREE_TYPE (expr2), expr1, expr2);

  /* In C99 and later a comma expression whose operands are all integer
     constant operands may still appear in unevaluated parts of integer
     constant expressions; record that property on the result.  */
  if (flag_isoc99
      && expr1_int_operands
      && expr2_int_operands)
    ret = note_integer_operands (expr: ret);

  /* Restore any excess precision stripped from EXPR2 above.  */
  if (eptype)
    ret = build1 (EXCESS_PRECISION_EXPR, eptype, ret);

  protected_set_expr_location (ret, loc);
  return ret;
}
5995 | |
5996 | /* Issue -Wcast-qual warnings when appropriate. TYPE is the type to |
5997 | which we are casting. OTYPE is the type of the expression being |
5998 | cast. Both TYPE and OTYPE are pointer types. LOC is the location |
5999 | of the cast. -Wcast-qual appeared on the command line. Named |
6000 | address space qualifiers are not handled here, because they result |
6001 | in different warnings. */ |
6002 | |
static void
handle_warn_cast_qual (location_t loc, tree type, tree otype)
{
  tree in_type = type;
  tree in_otype = otype;
  /* Bitmask of qualifiers the cast adds to a function type.  */
  int added = 0;
  /* Bitmask of qualifiers the cast drops from a pointed-to type.  */
  int discarded = 0;
  bool is_const;

  /* Check that the qualifiers on IN_TYPE are a superset of the
     qualifiers of IN_OTYPE.  The outermost level of POINTER_TYPE
     nodes is uninteresting and we stop as soon as we hit a
     non-POINTER_TYPE node on either type.  */
  do
    {
      in_otype = TREE_TYPE (in_otype);
      in_type = TREE_TYPE (in_type);

      /* GNU C allows cv-qualified function types.  'const' means the
	 function is very pure, 'volatile' means it can't return.  We
	 need to warn when such qualifiers are added, not when they're
	 taken away.  */
      if (TREE_CODE (in_otype) == FUNCTION_TYPE
	  && TREE_CODE (in_type) == FUNCTION_TYPE)
	added |= (TYPE_QUALS_NO_ADDR_SPACE (in_type)
		  & ~TYPE_QUALS_NO_ADDR_SPACE (in_otype));
      else
	discarded |= (TYPE_QUALS_NO_ADDR_SPACE (in_otype)
		      & ~TYPE_QUALS_NO_ADDR_SPACE (in_type));
    }
  while (TREE_CODE (in_type) == POINTER_TYPE
	 && TREE_CODE (in_otype) == POINTER_TYPE);

  if (added)
    warning_at (loc, OPT_Wcast_qual,
		"cast adds %q#v qualifier to function type" , added);

  if (discarded)
    /* There are qualifiers present in IN_OTYPE that are not present
       in IN_TYPE.  */
    warning_at (loc, OPT_Wcast_qual,
		"cast discards %qv qualifier from pointer target type" ,
		discarded);

  /* If either warning above fired, the more subtle check below would
     be redundant noise.  */
  if (added || discarded)
    return;

  /* A cast from **T to const **T is unsafe, because it can cause a
     const value to be changed with no additional warning.  We only
     issue this warning if T is the same on both sides, and we only
     issue the warning if there are the same number of pointers on
     both sides, as otherwise the cast is clearly unsafe anyhow.  A
     cast is unsafe when a qualifier is added at one level and const
     is not present at all outer levels.

     To issue this warning, we check at each level whether the cast
     adds new qualifiers not already seen.  We don't need to special
     case function types, as they won't have the same
     TYPE_MAIN_VARIANT.  */

  if (TYPE_MAIN_VARIANT (in_type) != TYPE_MAIN_VARIANT (in_otype))
    return;
  if (TREE_CODE (TREE_TYPE (type)) != POINTER_TYPE)
    return;

  /* Walk the pointer chain again, tracking in IS_CONST whether every
     level seen so far was const-qualified.  */
  in_type = type;
  in_otype = otype;
  is_const = TYPE_READONLY (TREE_TYPE (in_type));
  do
    {
      in_type = TREE_TYPE (in_type);
      in_otype = TREE_TYPE (in_otype);
      if ((TYPE_QUALS (in_type) &~ TYPE_QUALS (in_otype)) != 0
	  && !is_const)
	{
	  warning_at (loc, OPT_Wcast_qual,
		      "to be safe all intermediate pointers in cast from "
		      "%qT to %qT must be %<const%> qualified" ,
		      otype, type);
	  break;
	}
      if (is_const)
	is_const = TYPE_READONLY (in_type);
    }
  while (TREE_CODE (in_type) == POINTER_TYPE);
}
6089 | |
6090 | /* Heuristic check if two parameter types can be considered ABI-equivalent. */ |
6091 | |
6092 | static bool |
6093 | c_safe_arg_type_equiv_p (tree t1, tree t2) |
6094 | { |
6095 | if (error_operand_p (t: t1) || error_operand_p (t: t2)) |
6096 | return true; |
6097 | |
6098 | t1 = TYPE_MAIN_VARIANT (t1); |
6099 | t2 = TYPE_MAIN_VARIANT (t2); |
6100 | |
6101 | if (TREE_CODE (t1) == POINTER_TYPE |
6102 | && TREE_CODE (t2) == POINTER_TYPE) |
6103 | return true; |
6104 | |
6105 | /* The signedness of the parameter matters only when an integral |
6106 | type smaller than int is promoted to int, otherwise only the |
6107 | precision of the parameter matters. |
6108 | This check should make sure that the callee does not see |
6109 | undefined values in argument registers. */ |
6110 | if (INTEGRAL_TYPE_P (t1) |
6111 | && INTEGRAL_TYPE_P (t2) |
6112 | && TYPE_PRECISION (t1) == TYPE_PRECISION (t2) |
6113 | && (TYPE_UNSIGNED (t1) == TYPE_UNSIGNED (t2) |
6114 | || !targetm.calls.promote_prototypes (NULL_TREE) |
6115 | || TYPE_PRECISION (t1) >= TYPE_PRECISION (integer_type_node))) |
6116 | return true; |
6117 | |
6118 | return comptypes (type1: t1, type2: t2); |
6119 | } |
6120 | |
6121 | /* Check if a type cast between two function types can be considered safe. */ |
6122 | |
6123 | static bool |
6124 | c_safe_function_type_cast_p (tree t1, tree t2) |
6125 | { |
6126 | if (TREE_TYPE (t1) == void_type_node && |
6127 | TYPE_ARG_TYPES (t1) == void_list_node) |
6128 | return true; |
6129 | |
6130 | if (TREE_TYPE (t2) == void_type_node && |
6131 | TYPE_ARG_TYPES (t2) == void_list_node) |
6132 | return true; |
6133 | |
6134 | if (!c_safe_arg_type_equiv_p (TREE_TYPE (t1), TREE_TYPE (t2))) |
6135 | return false; |
6136 | |
6137 | for (t1 = TYPE_ARG_TYPES (t1), t2 = TYPE_ARG_TYPES (t2); |
6138 | t1 && t2; |
6139 | t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2)) |
6140 | if (!c_safe_arg_type_equiv_p (TREE_VALUE (t1), TREE_VALUE (t2))) |
6141 | return false; |
6142 | |
6143 | return true; |
6144 | } |
6145 | |
6146 | /* Build an expression representing a cast to type TYPE of expression EXPR. |
   LOC is the location of the cast -- typically the open paren of the cast.  */
6148 | |
tree
build_c_cast (location_t loc, tree type, tree expr)
{
  tree value;

  /* Record whether EXPR has only integer constant operands before any
     wrappers are stripped, so the property can be restored at the end.  */
  bool int_operands = EXPR_INT_CONST_OPERANDS (expr);

  if (TREE_CODE (expr) == EXCESS_PRECISION_EXPR)
    expr = TREE_OPERAND (expr, 0);

  value = expr;
  if (int_operands)
    value = remove_c_maybe_const_expr (expr: value);

  if (type == error_mark_node || expr == error_mark_node)
    return error_mark_node;

  /* The ObjC front-end uses TYPE_MAIN_VARIANT to tie together types differing
     only in <protocol> qualifications.  But when constructing cast expressions,
     the protocols do matter and must be kept around.  */
  if (objc_is_object_ptr (type) && objc_is_object_ptr (TREE_TYPE (expr)))
    return build1 (NOP_EXPR, type, expr);

  type = TYPE_MAIN_VARIANT (type);

  /* Casts to array or function types are invalid.  */
  if (TREE_CODE (type) == ARRAY_TYPE)
    {
      error_at (loc, "cast specifies array type" );
      return error_mark_node;
    }

  if (TREE_CODE (type) == FUNCTION_TYPE)
    {
      error_at (loc, "cast specifies function type" );
      return error_mark_node;
    }

  if (!VOID_TYPE_P (type))
    {
      value = require_complete_type (loc, value);
      if (value == error_mark_node)
	return error_mark_node;
    }

  if (type == TYPE_MAIN_VARIANT (TREE_TYPE (value)))
    {
      /* Cast to the operand's own (unqualified) type: diagnose, then
	 just use convert to strip the qualifiers.  */
      if (RECORD_OR_UNION_TYPE_P (type)
	  && pedwarn (loc, OPT_Wpedantic,
		      "ISO C forbids casting nonscalar to the same type" ))
	;
      else if (warn_useless_cast)
	warning_at (loc, OPT_Wuseless_cast,
		    "useless cast to type %qT" , type);

      /* Convert to remove any qualifiers from VALUE's type.  */
      value = convert (type, value);
    }
  else if (TREE_CODE (type) == UNION_TYPE)
    {
      /* GNU C extension: a cast to union type is valid when the
	 operand's type matches some member of the union; the result is
	 a union constructor with that member initialized.  */
      tree field;

      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_TYPE (field) != error_mark_node
	    && comptypes (TYPE_MAIN_VARIANT (TREE_TYPE (field)),
			  TYPE_MAIN_VARIANT (TREE_TYPE (value))))
	  break;

      if (field)
	{
	  tree t;
	  bool maybe_const = true;

	  pedwarn (loc, OPT_Wpedantic, "ISO C forbids casts to union type" );
	  t = c_fully_fold (value, false, &maybe_const);
	  t = build_constructor_single (type, field, t);
	  if (!maybe_const)
	    t = c_wrap_maybe_const (t, true);
	  t = digest_init (loc, type, t,
			   NULL_TREE, false, false, false, true, false, false);
	  TREE_CONSTANT (t) = TREE_CONSTANT (value);
	  return t;
	}
      error_at (loc, "cast to union type from type not present in union" );
      return error_mark_node;
    }
  else
    {
      tree otype, ovalue;

      /* A cast to void discards the value entirely.  */
      if (type == void_type_node)
	{
	  tree t = build1 (CONVERT_EXPR, type, value);
	  SET_EXPR_LOCATION (t, loc);
	  return t;
	}

      otype = TREE_TYPE (value);

      /* Optionally warn about potentially worrisome casts.  */
      if (warn_cast_qual
	  && TREE_CODE (type) == POINTER_TYPE
	  && TREE_CODE (otype) == POINTER_TYPE)
	handle_warn_cast_qual (loc, type, otype);

      /* Warn about conversions between pointers to disjoint
	 address spaces.  */
      if (TREE_CODE (type) == POINTER_TYPE
	  && TREE_CODE (otype) == POINTER_TYPE
	  && !null_pointer_constant_p (expr: value))
	{
	  addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
	  addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (otype));
	  addr_space_t as_common;

	  if (!addr_space_superset (as1: as_to, as2: as_from, common: &as_common))
	    {
	      if (ADDR_SPACE_GENERIC_P (as_from))
		warning_at (loc, 0, "cast to %qs address space pointer "
			    "from disjoint generic address space pointer" ,
			    c_addr_space_name (as: as_to));

	      else if (ADDR_SPACE_GENERIC_P (as_to))
		warning_at (loc, 0, "cast to generic address space pointer "
			    "from disjoint %qs address space pointer" ,
			    c_addr_space_name (as: as_from));

	      else
		warning_at (loc, 0, "cast to %qs address space pointer "
			    "from disjoint %qs address space pointer" ,
			    c_addr_space_name (as: as_to),
			    c_addr_space_name (as: as_from));
	    }

	  /* Warn of new allocations that are not big enough for the target
	     type.  */
	  if (warn_alloc_size && TREE_CODE (value) == CALL_EXPR)
	    if (tree fndecl = get_callee_fndecl (value))
	      if (DECL_IS_MALLOC (fndecl))
		{
		  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (fndecl));
		  tree alloc_size = lookup_attribute (attr_name: "alloc_size" , list: attrs);
		  if (alloc_size)
		    warn_for_alloc_size (loc, TREE_TYPE (type), value,
					 alloc_size);
		}
	}

      /* Warn about possible alignment problems.  */
      if ((STRICT_ALIGNMENT || warn_cast_align == 2)
	  && TREE_CODE (type) == POINTER_TYPE
	  && TREE_CODE (otype) == POINTER_TYPE
	  && TREE_CODE (TREE_TYPE (otype)) != VOID_TYPE
	  && TREE_CODE (TREE_TYPE (otype)) != FUNCTION_TYPE
	  /* Don't warn about opaque types, where the actual alignment
	     restriction is unknown.  */
	  && !(RECORD_OR_UNION_TYPE_P (TREE_TYPE (otype))
	       && TYPE_MODE (TREE_TYPE (otype)) == VOIDmode)
	  && min_align_of_type (TREE_TYPE (type))
	     > min_align_of_type (TREE_TYPE (otype)))
	warning_at (loc, OPT_Wcast_align,
		    "cast increases required alignment of target type" );

      if ((TREE_CODE (type) == INTEGER_TYPE
	   || TREE_CODE (type) == BITINT_TYPE)
	  && TREE_CODE (otype) == POINTER_TYPE
	  && TYPE_PRECISION (type) != TYPE_PRECISION (otype))
	/* Unlike conversion of integers to pointers, where the
	   warning is disabled for converting constants because
	   of cases such as SIG_*, warn about converting constant
	   pointers to integers.  In some cases it may cause unwanted
	   sign extension, and a warning is appropriate.  */
	warning_at (loc, OPT_Wpointer_to_int_cast,
		    "cast from pointer to integer of different size" );

      if (TREE_CODE (value) == CALL_EXPR
	  && TREE_CODE (type) != TREE_CODE (otype))
	warning_at (loc, OPT_Wbad_function_cast,
		    "cast from function call of type %qT "
		    "to non-matching type %qT" , otype, type);

      if (TREE_CODE (type) == POINTER_TYPE
	  && (TREE_CODE (otype) == INTEGER_TYPE
	      || TREE_CODE (otype) == BITINT_TYPE)
	  && TYPE_PRECISION (type) != TYPE_PRECISION (otype)
	  /* Don't warn about converting any constant.  */
	  && !TREE_CONSTANT (value))
	warning_at (loc,
		    OPT_Wint_to_pointer_cast, "cast to pointer from integer "
		    "of different size" );

      if (warn_strict_aliasing <= 2)
	strict_aliasing_warning (EXPR_LOCATION (value), type, expr);

      /* If pedantic, warn for conversions between function and object
	 pointer types, except for converting a null pointer constant
	 to function pointer type.  */
      if (pedantic
	  && TREE_CODE (type) == POINTER_TYPE
	  && TREE_CODE (otype) == POINTER_TYPE
	  && TREE_CODE (TREE_TYPE (otype)) == FUNCTION_TYPE
	  && TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE)
	pedwarn (loc, OPT_Wpedantic, "ISO C forbids "
		 "conversion of function pointer to object pointer type" );

      if (pedantic
	  && TREE_CODE (type) == POINTER_TYPE
	  && TREE_CODE (otype) == POINTER_TYPE
	  && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE
	  && TREE_CODE (TREE_TYPE (otype)) != FUNCTION_TYPE
	  && !null_pointer_constant_p (expr: value))
	pedwarn (loc, OPT_Wpedantic, "ISO C forbids "
		 "conversion of object pointer to function pointer type" );

      /* Warn about function-pointer casts whose prototypes look
	 ABI-incompatible.  */
      if (TREE_CODE (type) == POINTER_TYPE
	  && TREE_CODE (otype) == POINTER_TYPE
	  && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE
	  && TREE_CODE (TREE_TYPE (otype)) == FUNCTION_TYPE
	  && !c_safe_function_type_cast_p (TREE_TYPE (type),
					   TREE_TYPE (otype)))
	warning_at (loc, OPT_Wcast_function_type,
		    "cast between incompatible function types"
		    " from %qT to %qT" , otype, type);

      ovalue = value;
      /* If converting to boolean a value with integer operands that
	 is not itself represented as an INTEGER_CST, the call below
	 to note_integer_operands may insert a C_MAYBE_CONST_EXPR, but
	 build_binary_op as called by c_common_truthvalue_conversion
	 may also insert a C_MAYBE_CONST_EXPR to indicate that a
	 subexpression has been fully folded.  To avoid nested
	 C_MAYBE_CONST_EXPR, ensure that
	 c_objc_common_truthvalue_conversion receives an argument
	 properly marked as having integer operands in that case.  */
      if (int_operands
	  && TREE_CODE (value) != INTEGER_CST
	  && (TREE_CODE (type) == BOOLEAN_TYPE
	      || (TREE_CODE (type) == ENUMERAL_TYPE
		  && ENUM_UNDERLYING_TYPE (type) != NULL_TREE
		  && TREE_CODE (ENUM_UNDERLYING_TYPE (type)) == BOOLEAN_TYPE)))
	value = note_integer_operands (expr: value);
      value = convert (type, value);

      /* Ignore any integer overflow caused by the cast.  */
      if (TREE_CODE (value) == INTEGER_CST && !FLOAT_TYPE_P (otype))
	{
	  if (TREE_OVERFLOW_P (ovalue))
	    {
	      if (!TREE_OVERFLOW (value))
		{
		  /* Avoid clobbering a shared constant.  */
		  value = copy_node (value);
		  TREE_OVERFLOW (value) = TREE_OVERFLOW (ovalue);
		}
	    }
	  else if (TREE_OVERFLOW (value))
	    /* Reset VALUE's overflow flags, ensuring constant sharing.  */
	    value = wide_int_to_tree (TREE_TYPE (value), cst: wi::to_wide (t: value));
	}
    }

  /* Don't let a cast be an lvalue.  */
  if (lvalue_p (ref: value))
    value = non_lvalue_loc (loc, value);

  /* Don't allow the results of casting to floating-point or complex
     types be confused with actual constants, or casts involving
     integer and pointer types other than direct integer-to-integer
     and integer-to-pointer be confused with integer constant
     expressions and null pointer constants.  */
  if (TREE_CODE (value) == REAL_CST
      || TREE_CODE (value) == COMPLEX_CST
      || (TREE_CODE (value) == INTEGER_CST
	  && !((TREE_CODE (expr) == INTEGER_CST
		&& INTEGRAL_TYPE_P (TREE_TYPE (expr)))
	       || TREE_CODE (expr) == REAL_CST
	       || TREE_CODE (expr) == COMPLEX_CST)))
    value = build1 (NOP_EXPR, type, value);

  /* If the expression has integer operands and so can occur in an
     unevaluated part of an integer constant expression, ensure the
     return value reflects this.  */
  if (int_operands
      && INTEGRAL_TYPE_P (type)
      && value != error_mark_node
      && !EXPR_INT_CONST_OPERANDS (value))
    value = note_integer_operands (expr: value);

  protected_set_expr_location (value, loc);
  return value;
}
6439 | |
6440 | /* Interpret a cast of expression EXPR to type TYPE. LOC is the |
6441 | location of the open paren of the cast, or the position of the cast |
6442 | expr. */ |
tree
c_cast_expr (location_t loc, struct c_type_name *type_name, tree expr)
{
  tree type;
  /* Set by groktypename if the type name contains an expression that
     must be evaluated (e.g. a VLA size).  */
  tree type_expr = NULL_TREE;
  bool type_expr_const = true;
  tree ret;
  /* Save the global flag so it can be restored after grokking the
     type name.  */
  int saved_wsp = warn_strict_prototypes;

  /* This avoids warnings about unprototyped casts on
     integers.  E.g. "#define SIG_DFL (void(*)())0".  */
  if (TREE_CODE (expr) == INTEGER_CST)
    warn_strict_prototypes = 0;
  type = groktypename (type_name, &type_expr, &type_expr_const);
  warn_strict_prototypes = saved_wsp;

  if (TREE_CODE (expr) == ADDR_EXPR && !VOID_TYPE_P (type)
      && reject_gcc_builtin (expr))
    return error_mark_node;

  ret = build_c_cast (loc, type, expr);
  if (type_expr)
    {
      /* Wrap the cast in a C_MAYBE_CONST_EXPR so the type name's
	 embedded expression is evaluated and constancy is tracked.  */
      bool inner_expr_const = true;
      ret = c_fully_fold (ret, require_constant_value, &inner_expr_const);
      ret = build2 (C_MAYBE_CONST_EXPR, TREE_TYPE (ret), type_expr, ret);
      C_MAYBE_CONST_EXPR_NON_CONST (ret) = !(type_expr_const
					     && inner_expr_const);
      SET_EXPR_LOCATION (ret, loc);
    }

  if (!EXPR_HAS_LOCATION (ret))
    protected_set_expr_location (ret, loc);

  /* C++ does not permit types to be defined in a cast, but it
     allows references to incomplete types.  */
  if (warn_cxx_compat && type_name->specs->typespec_kind == ctsk_tagdef)
    warning_at (loc, OPT_Wc___compat,
		"defining a type in a cast is invalid in C++" );

  return ret;
}
6485 | |
6486 | /* Build an assignment expression of lvalue LHS from value RHS. |
6487 | If LHS_ORIGTYPE is not NULL, it is the original type of LHS, which |
6488 | may differ from TREE_TYPE (LHS) for an enum bitfield. |
6489 | MODIFYCODE is the code for a binary operator that we use |
6490 | to combine the old value of LHS with RHS to get the new value. |
6491 | Or else MODIFYCODE is NOP_EXPR meaning do a simple assignment. |
6492 | If RHS_ORIGTYPE is not NULL_TREE, it is the original type of RHS, |
6493 | which may differ from TREE_TYPE (RHS) for an enum value. |
6494 | |
6495 | LOCATION is the location of the MODIFYCODE operator. |
6496 | RHS_LOC is the location of the RHS. */ |
6497 | |
tree
build_modify_expr (location_t location, tree lhs, tree lhs_origtype,
		   enum tree_code modifycode,
		   location_t rhs_loc, tree rhs, tree rhs_origtype)
{
  tree result;
  tree newrhs;
  /* Pre-evaluated RHS (via save_expr) for compound assignments whose
     RHS has side effects; sequenced before the assignment at the end.  */
  tree rhseval = NULL_TREE;
  tree lhstype = TREE_TYPE (lhs);
  tree olhstype = lhstype;
  bool npc;
  bool is_atomic_op;

  /* Types that aren't fully specified cannot be used in assignments.  */
  lhs = require_complete_type (loc: location, value: lhs);

  /* Avoid duplicate error messages from operands that had errors.  */
  if (TREE_CODE (lhs) == ERROR_MARK || TREE_CODE (rhs) == ERROR_MARK)
    return error_mark_node;

  /* Ensure an error for assigning a non-lvalue array to an array in
     C90.  */
  if (TREE_CODE (lhstype) == ARRAY_TYPE)
    {
      error_at (location, "assignment to expression with array type" );
      return error_mark_node;
    }

  /* For ObjC properties, defer this check.  */
  if (!objc_is_property_ref (lhs) && !lvalue_or_else (loc: location, ref: lhs, use: lv_assign))
    return error_mark_node;

  is_atomic_op = really_atomic_lvalue (expr: lhs);

  newrhs = rhs;

  if (TREE_CODE (lhs) == C_MAYBE_CONST_EXPR)
    {
      /* The LHS carries a pre-evaluated part; rebuild the assignment
	 around the inner expression and re-wrap the result.  */
      tree inner = build_modify_expr (location, C_MAYBE_CONST_EXPR_EXPR (lhs),
				      lhs_origtype, modifycode, rhs_loc, rhs,
				      rhs_origtype);
      if (inner == error_mark_node)
	return error_mark_node;
      result = build2 (C_MAYBE_CONST_EXPR, TREE_TYPE (inner),
		       C_MAYBE_CONST_EXPR_PRE (lhs), inner);
      gcc_assert (!C_MAYBE_CONST_EXPR_INT_OPERANDS (lhs));
      C_MAYBE_CONST_EXPR_NON_CONST (result) = 1;
      protected_set_expr_location (result, location);
      return result;
    }

  /* If a binary op has been requested, combine the old LHS value with the RHS
     producing the value we should actually store into the LHS.  */

  if (modifycode != NOP_EXPR)
    {
      lhs = c_fully_fold (lhs, false, NULL, true);
      lhs = stabilize_reference (lhs);

      /* Construct the RHS for any non-atomic compound assignment.  */
      if (!is_atomic_op)
	{
	  /* If in LHS op= RHS the RHS has side-effects, ensure they
	     are preevaluated before the rest of the assignment expression's
	     side-effects, because RHS could contain e.g. function calls
	     that modify LHS.  */
	  if (TREE_SIDE_EFFECTS (rhs))
	    {
	      if (TREE_CODE (rhs) == EXCESS_PRECISION_EXPR)
		newrhs = save_expr (TREE_OPERAND (rhs, 0));
	      else
		newrhs = save_expr (rhs);
	      rhseval = newrhs;
	      if (TREE_CODE (rhs) == EXCESS_PRECISION_EXPR)
		newrhs = build1 (EXCESS_PRECISION_EXPR, TREE_TYPE (rhs),
				 newrhs);
	    }
	  newrhs = build_binary_op (location,
				    modifycode, lhs, newrhs, true);

	  /* The original type of the right hand side is no longer
	     meaningful.  */
	  rhs_origtype = NULL_TREE;
	}
    }

  if (c_dialect_objc ())
    {
      /* Check if we are modifying an Objective-C property reference;
	 if so, we need to generate setter calls.  */
      if (TREE_CODE (newrhs) == EXCESS_PRECISION_EXPR)
	result = objc_maybe_build_modify_expr (lhs, TREE_OPERAND (newrhs, 0));
      else
	result = objc_maybe_build_modify_expr (lhs, newrhs);
      if (result)
	goto return_result;

      /* Else, do the check that we postponed for Objective-C.  */
      if (!lvalue_or_else (loc: location, ref: lhs, use: lv_assign))
	return error_mark_node;
    }

  /* Give an error for storing in something that is 'const'.  */

  if (TYPE_READONLY (lhstype)
      || (RECORD_OR_UNION_TYPE_P (lhstype)
	  && C_TYPE_FIELDS_READONLY (lhstype)))
    {
      readonly_error (location, lhs, lv_assign);
      return error_mark_node;
    }
  else if (TREE_READONLY (lhs))
    readonly_warning (arg: lhs, use: lv_assign);

  /* If storing into a structure or union member,
     it has probably been given type `int'.
     Compute the type that would go with
     the actual amount of storage the member occupies.  */

  if (TREE_CODE (lhs) == COMPONENT_REF
      && (TREE_CODE (lhstype) == INTEGER_TYPE
	  || TREE_CODE (lhstype) == BOOLEAN_TYPE
	  || SCALAR_FLOAT_TYPE_P (lhstype)
	  || TREE_CODE (lhstype) == ENUMERAL_TYPE))
    lhstype = TREE_TYPE (get_unwidened (lhs, 0));

  /* If storing in a field that is in actuality a short or narrower than one,
     we must store in the field in its actual type.  */

  if (lhstype != TREE_TYPE (lhs))
    {
      lhs = copy_node (lhs);
      TREE_TYPE (lhs) = lhstype;
    }

  /* Issue -Wc++-compat warnings about an assignment to an enum type
     when LHS does not have its original type.  This happens for,
     e.g., an enum bitfield in a struct.  */
  if (warn_cxx_compat
      && lhs_origtype != NULL_TREE
      && lhs_origtype != lhstype
      && TREE_CODE (lhs_origtype) == ENUMERAL_TYPE)
    {
      tree checktype = (rhs_origtype != NULL_TREE
			? rhs_origtype
			: TREE_TYPE (rhs));
      if (checktype != error_mark_node
	  && (TYPE_MAIN_VARIANT (checktype) != TYPE_MAIN_VARIANT (lhs_origtype)
	      || (is_atomic_op && modifycode != NOP_EXPR)))
	warning_at (location, OPT_Wc___compat,
		    "enum conversion in assignment is invalid in C++" );
    }

  /* Remove qualifiers.  */
  lhstype = build_qualified_type (lhstype, TYPE_UNQUALIFIED);
  olhstype = build_qualified_type (olhstype, TYPE_UNQUALIFIED);

  /* Convert new value to destination type.  Fold it first, then
     restore any excess precision information, for the sake of
     conversion warnings.  */

  if (!(is_atomic_op && modifycode != NOP_EXPR))
    {
      tree rhs_semantic_type = NULL_TREE;
      if (!c_in_omp_for)
	{
	  if (TREE_CODE (newrhs) == EXCESS_PRECISION_EXPR)
	    {
	      rhs_semantic_type = TREE_TYPE (newrhs);
	      newrhs = TREE_OPERAND (newrhs, 0);
	    }
	  npc = null_pointer_constant_p (expr: newrhs);
	  newrhs = c_fully_fold (newrhs, false, NULL);
	  if (rhs_semantic_type)
	    newrhs = build1 (EXCESS_PRECISION_EXPR, rhs_semantic_type, newrhs);
	}
      else
	npc = null_pointer_constant_p (expr: newrhs);
      newrhs = convert_for_assignment (location, rhs_loc, lhstype, newrhs,
				       rhs_origtype, ic_assign, npc,
				       NULL_TREE, NULL_TREE, 0);
      if (TREE_CODE (newrhs) == ERROR_MARK)
	return error_mark_node;
    }

  /* Emit ObjC write barrier, if necessary.  */
  if (c_dialect_objc () && flag_objc_gc)
    {
      result = objc_generate_write_barrier (lhs, modifycode, newrhs);
      if (result)
	{
	  protected_set_expr_location (result, location);
	  goto return_result;
	}
    }

  /* Scan operands.  */

  if (is_atomic_op)
    result = build_atomic_assign (loc: location, lhs, modifycode, rhs: newrhs, return_old_p: false);
  else
    {
      result = build2 (MODIFY_EXPR, lhstype, lhs, newrhs);
      TREE_SIDE_EFFECTS (result) = 1;
      protected_set_expr_location (result, location);
    }

  /* If we got the LHS in a different type for storing in,
     convert the result back to the nominal type of LHS
     so that the value we return always has the same type
     as the LHS argument.  */

  if (olhstype == TREE_TYPE (result))
    goto return_result;

  result = convert_for_assignment (location, rhs_loc, olhstype, result,
				   rhs_origtype, ic_assign, false, NULL_TREE,
				   NULL_TREE, 0);
  protected_set_expr_location (result, location);

 return_result:
  if (rhseval)
    result = build2 (COMPOUND_EXPR, TREE_TYPE (result), rhseval, result);
  return result;
}
6723 | |
6724 | /* Return whether STRUCT_TYPE has an anonymous field with type TYPE. |
6725 | This is used to implement -fplan9-extensions. */ |
6726 | |
6727 | static bool |
6728 | find_anonymous_field_with_type (tree struct_type, tree type) |
6729 | { |
6730 | tree field; |
6731 | bool found; |
6732 | |
6733 | gcc_assert (RECORD_OR_UNION_TYPE_P (struct_type)); |
6734 | found = false; |
6735 | for (field = TYPE_FIELDS (struct_type); |
6736 | field != NULL_TREE; |
6737 | field = TREE_CHAIN (field)) |
6738 | { |
6739 | tree fieldtype = (TYPE_ATOMIC (TREE_TYPE (field)) |
6740 | ? c_build_qualified_type (TREE_TYPE (field), |
6741 | TYPE_QUAL_ATOMIC) |
6742 | : TYPE_MAIN_VARIANT (TREE_TYPE (field))); |
6743 | if (DECL_NAME (field) == NULL |
6744 | && comptypes (type1: type, type2: fieldtype)) |
6745 | { |
6746 | if (found) |
6747 | return false; |
6748 | found = true; |
6749 | } |
6750 | else if (DECL_NAME (field) == NULL |
6751 | && RECORD_OR_UNION_TYPE_P (TREE_TYPE (field)) |
6752 | && find_anonymous_field_with_type (TREE_TYPE (field), type)) |
6753 | { |
6754 | if (found) |
6755 | return false; |
6756 | found = true; |
6757 | } |
6758 | } |
6759 | return found; |
6760 | } |
6761 | |
/* RHS is an expression whose type is pointer to struct.  If there is
   an anonymous field in RHS with type TYPE, then return a pointer to
   that field in RHS.  This is used with -fplan9-extensions.  This
   returns NULL if no conversion could be found.  */

static tree
convert_to_anonymous_field (location_t location, tree type, tree rhs)
{
  tree rhs_struct_type, lhs_main_type;
  tree field, found_field;
  /* True when the matching field lives inside a nested anonymous
     aggregate rather than being a direct member.  */
  bool found_sub_field;
  tree ret;

  gcc_assert (POINTER_TYPE_P (TREE_TYPE (rhs)));
  rhs_struct_type = TREE_TYPE (TREE_TYPE (rhs));
  gcc_assert (RECORD_OR_UNION_TYPE_P (rhs_struct_type));

  gcc_assert (POINTER_TYPE_P (type));
  /* Use the main variant of the LHS target type for comparison, but
     preserve the _Atomic qualifier, mirroring
     find_anonymous_field_with_type.  */
  lhs_main_type = (TYPE_ATOMIC (TREE_TYPE (type))
		   ? c_build_qualified_type (TREE_TYPE (type),
					     TYPE_QUAL_ATOMIC)
		   : TYPE_MAIN_VARIANT (TREE_TYPE (type)));

  found_field = NULL_TREE;
  found_sub_field = false;
  for (field = TYPE_FIELDS (rhs_struct_type);
       field != NULL_TREE;
       field = TREE_CHAIN (field))
    {
      /* Only anonymous struct/union members participate in this
	 conversion.  */
      if (DECL_NAME (field) != NULL_TREE
	  || !RECORD_OR_UNION_TYPE_P (TREE_TYPE (field)))
	continue;
      tree fieldtype = (TYPE_ATOMIC (TREE_TYPE (field))
			? c_build_qualified_type (TREE_TYPE (field),
						  TYPE_QUAL_ATOMIC)
			: TYPE_MAIN_VARIANT (TREE_TYPE (field)));
      if (comptypes (type1: lhs_main_type, type2: fieldtype))
	{
	  /* A second candidate makes the conversion ambiguous.  */
	  if (found_field != NULL_TREE)
	    return NULL_TREE;
	  found_field = field;
	}
      else if (find_anonymous_field_with_type (TREE_TYPE (field),
					       type: lhs_main_type))
	{
	  /* The match is somewhere inside this anonymous member.  */
	  if (found_field != NULL_TREE)
	    return NULL_TREE;
	  found_field = field;
	  found_sub_field = true;
	}
    }

  if (found_field == NULL_TREE)
    return NULL_TREE;

  /* Build &(*RHS).FOUND_FIELD.  */
  ret = fold_build3_loc (location, COMPONENT_REF, TREE_TYPE (found_field),
			 build_fold_indirect_ref (rhs), found_field,
			 NULL_TREE);
  ret = build_fold_addr_expr_loc (location, ret);

  if (found_sub_field)
    {
      /* Descend one anonymous level at a time until the direct match
	 is reached; the recursion must succeed since the search above
	 found an unambiguous path.  */
      ret = convert_to_anonymous_field (location, type, rhs: ret);
      gcc_assert (ret != NULL_TREE);
    }

  return ret;
}
6830 | |
/* Issue an error message for a bad initializer component.
   GMSGID identifies the message.
   The component name is taken from the spelling stack.  */

static void ATTRIBUTE_GCC_DIAG (2,0)
error_init (location_t loc, const char *gmsgid, ...)
{
  char *ofwhat;

  /* Keep the error and its follow-up note in one diagnostic group.  */
  auto_diagnostic_group d;

  /* The gmsgid may be a format string with %< and %>. */
  va_list ap;
  va_start (ap, gmsgid);
  bool warned = emit_diagnostic_valist (DK_ERROR, loc, -1, gmsgid, &ap);
  va_end (ap);

  /* Name the component being initialized, taken from the spelling
     stack; only add the note when the error was actually emitted.  */
  ofwhat = print_spelling ((char *) alloca (spelling_length () + 1));
  if (*ofwhat && warned)
    inform (loc, "(near initialization for %qs)" , ofwhat);
}
6852 | |
/* Used to implement pedwarn_init and permerror_init.  KIND selects
   between DK_PEDWARN and DK_PERMERROR; AP carries the caller's
   variadic arguments for GMSGID.  */

static void ATTRIBUTE_GCC_DIAG (3,0)
pedwarn_permerror_init (location_t loc, int opt, const char *gmsgid,
			va_list *ap, diagnostic_t kind)
{
  /* Use the location where a macro was expanded rather than where
     it was defined to make sure macros defined in system headers
     but used incorrectly elsewhere are diagnosed.  */
  location_t exploc = expansion_point_location_if_in_system_header (loc);
  /* Keep the diagnostic and its follow-up note in one group.  */
  auto_diagnostic_group d;
  bool warned = emit_diagnostic_valist (kind, exploc, opt, gmsgid, ap);
  /* Name the component being initialized, taken from the spelling
     stack; only add the note when the diagnostic was emitted.  */
  char *ofwhat = print_spelling ((char *) alloca (spelling_length () + 1));
  if (*ofwhat && warned)
    inform (exploc, "(near initialization for %qs)" , ofwhat);
}
6869 | |
/* Issue a pedantic warning for a bad initializer component.  OPT is
   the option OPT_* (from options.h) controlling this warning or 0 if
   it is unconditionally given.  GMSGID identifies the message.  The
   component name is taken from the spelling stack.  */

static void ATTRIBUTE_GCC_DIAG (3,0)
pedwarn_init (location_t loc, int opt, const char *gmsgid, ...)
{
  va_list ap;
  va_start (ap, gmsgid);
  /* Forward to the shared worker as a pedantic warning.  */
  pedwarn_permerror_init (loc, opt, gmsgid, ap: &ap, kind: DK_PEDWARN);
  va_end (ap);
}
6883 | |
/* Like pedwarn_init, but issue a permerror.  */

static void ATTRIBUTE_GCC_DIAG (3,0)
permerror_init (location_t loc, int opt, const char *gmsgid, ...)
{
  va_list ap;
  va_start (ap, gmsgid);
  /* Forward to the shared worker as a permerror.  */
  pedwarn_permerror_init (loc, opt, gmsgid, ap: &ap, kind: DK_PERMERROR);
  va_end (ap);
}
6894 | |
6895 | /* Issue a warning for a bad initializer component. |
6896 | |
6897 | OPT is the OPT_W* value corresponding to the warning option that |
6898 | controls this warning. GMSGID identifies the message. The |
6899 | component name is taken from the spelling stack. */ |
6900 | |
6901 | static void |
6902 | warning_init (location_t loc, int opt, const char *gmsgid) |
6903 | { |
6904 | char *ofwhat; |
6905 | bool warned; |
6906 | |
6907 | auto_diagnostic_group d; |
6908 | |
6909 | /* Use the location where a macro was expanded rather than where |
6910 | it was defined to make sure macros defined in system headers |
6911 | but used incorrectly elsewhere are diagnosed. */ |
6912 | location_t exploc = expansion_point_location_if_in_system_header (loc); |
6913 | |
6914 | /* The gmsgid may be a format string with %< and %>. */ |
6915 | warned = warning_at (exploc, opt, gmsgid); |
6916 | ofwhat = print_spelling ((char *) alloca (spelling_length () + 1)); |
6917 | if (*ofwhat && warned) |
6918 | inform (exploc, "(near initialization for %qs)" , ofwhat); |
6919 | } |
6920 | |
6921 | /* If TYPE is an array type and EXPR is a parenthesized string |
6922 | constant, warn if pedantic that EXPR is being used to initialize an |
6923 | object of type TYPE. */ |
6924 | |
6925 | void |
6926 | maybe_warn_string_init (location_t loc, tree type, struct c_expr expr) |
6927 | { |
6928 | if (pedantic |
6929 | && TREE_CODE (type) == ARRAY_TYPE |
6930 | && TREE_CODE (expr.value) == STRING_CST |
6931 | && expr.original_code != STRING_CST) |
6932 | pedwarn_init (loc, opt: OPT_Wpedantic, |
6933 | gmsgid: "array initialized from parenthesized string constant" ); |
6934 | } |
6935 | |
6936 | /* Attempt to locate the parameter with the given index within FNDECL, |
6937 | returning DECL_SOURCE_LOCATION (FNDECL) if it can't be found. */ |
6938 | |
6939 | static location_t |
6940 | get_fndecl_argument_location (tree fndecl, int argnum) |
6941 | { |
6942 | int i; |
6943 | tree param; |
6944 | |
6945 | /* Locate param by index within DECL_ARGUMENTS (fndecl). */ |
6946 | for (i = 0, param = DECL_ARGUMENTS (fndecl); |
6947 | i < argnum && param; |
6948 | i++, param = TREE_CHAIN (param)) |
6949 | ; |
6950 | |
6951 | /* If something went wrong (e.g. if we have a builtin and thus no arguments), |
6952 | return DECL_SOURCE_LOCATION (FNDECL). */ |
6953 | if (param == NULL) |
6954 | return DECL_SOURCE_LOCATION (fndecl); |
6955 | |
6956 | return DECL_SOURCE_LOCATION (param); |
6957 | } |
6958 | |
6959 | /* Issue a note about a mismatching argument for parameter PARMNUM |
6960 | to FUNDECL, for types EXPECTED_TYPE and ACTUAL_TYPE. |
6961 | Attempt to issue the note at the pertinent parameter of the decl; |
6962 | failing that issue it at the location of FUNDECL; failing that |
6963 | issue it at PLOC. */ |
6964 | |
6965 | static void |
6966 | inform_for_arg (tree fundecl, location_t ploc, int parmnum, |
6967 | tree expected_type, tree actual_type) |
6968 | { |
6969 | location_t loc; |
6970 | if (fundecl && !DECL_IS_UNDECLARED_BUILTIN (fundecl)) |
6971 | loc = get_fndecl_argument_location (fndecl: fundecl, argnum: parmnum - 1); |
6972 | else |
6973 | loc = ploc; |
6974 | |
6975 | inform (loc, |
6976 | "expected %qT but argument is of type %qT" , |
6977 | expected_type, actual_type); |
6978 | } |
6979 | |
/* Issue a warning when an argument of ARGTYPE is passed to a built-in
   function FUNDECL declared without prototype to parameter PARMNUM of
   PARMTYPE when ARGTYPE does not promote to PARMTYPE.  */

static void
maybe_warn_builtin_no_proto_arg (location_t loc, tree fundecl, int parmnum,
				 tree parmtype, tree argtype)
{
  tree_code parmcode = TREE_CODE (parmtype);
  tree_code argcode = TREE_CODE (argtype);
  /* The type the argument takes on under default argument promotion,
     which is what an unprototyped call actually passes.  */
  tree promoted = c_type_promotes_to (type: argtype);

  /* Avoid warning for enum arguments that promote to an integer type
     of the same size/mode.  */
  if (parmcode == INTEGER_TYPE
      && argcode == ENUMERAL_TYPE
      && TYPE_MODE (parmtype) == TYPE_MODE (argtype))
    return;

  /* No diagnostic when the promoted argument type agrees with the
     expected parameter type (treating enum like integer here too).  */
  if ((parmcode == argcode
       || (parmcode == INTEGER_TYPE
	   && argcode == ENUMERAL_TYPE))
      && TYPE_MAIN_VARIANT (parmtype) == TYPE_MAIN_VARIANT (promoted))
    return;

  /* This diagnoses even signed/unsigned mismatches.  Those might be
     safe in many cases but GCC may emit suboptimal code for them so
     warning on those cases drives efficiency improvements.  */
  if (warning_at (loc, OPT_Wbuiltin_declaration_mismatch,
		  TYPE_MAIN_VARIANT (promoted) == argtype
		  ? G_("%qD argument %d type is %qT where %qT is expected "
		       "in a call to built-in function declared without "
		       "prototype" )
		  : G_("%qD argument %d promotes to %qT where %qT is expected "
		       "in a call to built-in function declared without "
		       "prototype" ),
		  fundecl, parmnum, promoted, parmtype))
    inform (DECL_SOURCE_LOCATION (fundecl),
	    "built-in %qD declared here" ,
	    fundecl);
}
7021 | |
7022 | /* Convert value RHS to type TYPE as preparation for an assignment to |
7023 | an lvalue of type TYPE. If ORIGTYPE is not NULL_TREE, it is the |
7024 | original type of RHS; this differs from TREE_TYPE (RHS) for enum |
7025 | types. NULL_POINTER_CONSTANT says whether RHS was a null pointer |
7026 | constant before any folding. |
7027 | The real work of conversion is done by `convert'. |
7028 | The purpose of this function is to generate error messages |
7029 | for assignments that are not allowed in C. |
7030 | ERRTYPE says whether it is argument passing, assignment, |
7031 | initialization or return. |
7032 | |
7033 | In the following example, '~' denotes where EXPR_LOC and '^' where |
7034 | LOCATION point to: |
7035 | |
7036 | f (var); [ic_argpass] |
7037 | ^ ~~~ |
7038 | x = var; [ic_assign] |
7039 | ^ ~~~; |
7040 | int x = var; [ic_init] |
7041 | ^^^ |
7042 | return x; [ic_return] |
7043 | ^ |
7044 | |
7045 | FUNCTION is a tree for the function being called. |
7046 | PARMNUM is the number of the argument, for printing in error messages. |
7047 | WARNOPT may be set to a warning option to issue the corresponding warning |
7048 | rather than an error for invalid conversions. Used for calls to built-in |
7049 | functions declared without a prototype. */ |
7050 | |
7051 | static tree |
7052 | convert_for_assignment (location_t location, location_t expr_loc, tree type, |
7053 | tree rhs, tree origtype, enum impl_conv errtype, |
7054 | bool null_pointer_constant, tree fundecl, |
7055 | tree function, int parmnum, int warnopt /* = 0 */) |
7056 | { |
7057 | enum tree_code codel = TREE_CODE (type); |
7058 | tree orig_rhs = rhs; |
7059 | tree rhstype; |
7060 | enum tree_code coder; |
7061 | tree rname = NULL_TREE; |
7062 | bool objc_ok = false; |
7063 | |
7064 | /* Use the expansion point location to handle cases such as user's |
7065 | function returning a wrong-type macro defined in a system header. */ |
7066 | location = expansion_point_location_if_in_system_header (location); |
7067 | |
7068 | if (errtype == ic_argpass) |
7069 | { |
7070 | tree selector; |
7071 | /* Change pointer to function to the function itself for |
7072 | diagnostics. */ |
7073 | if (TREE_CODE (function) == ADDR_EXPR |
7074 | && TREE_CODE (TREE_OPERAND (function, 0)) == FUNCTION_DECL) |
7075 | function = TREE_OPERAND (function, 0); |
7076 | |
7077 | /* Handle an ObjC selector specially for diagnostics. */ |
7078 | selector = objc_message_selector (); |
7079 | rname = function; |
7080 | if (selector && parmnum > 2) |
7081 | { |
7082 | rname = selector; |
7083 | parmnum -= 2; |
7084 | } |
7085 | } |
7086 | |
7087 | /* This macro is used to emit diagnostics to ensure that all format |
7088 | strings are complete sentences, visible to gettext and checked at |
7089 | compile time. */ |
7090 | #define PEDWARN_FOR_ASSIGNMENT(LOCATION, PLOC, OPT, AR, AS, IN, RE) \ |
7091 | do { \ |
7092 | switch (errtype) \ |
7093 | { \ |
7094 | case ic_argpass: \ |
7095 | { \ |
7096 | auto_diagnostic_group d; \ |
7097 | if (pedwarn (PLOC, OPT, AR, parmnum, rname)) \ |
7098 | inform_for_arg (fundecl, (PLOC), parmnum, type, rhstype); \ |
7099 | } \ |
7100 | break; \ |
7101 | case ic_assign: \ |
7102 | pedwarn (LOCATION, OPT, AS); \ |
7103 | break; \ |
7104 | case ic_init: \ |
7105 | case ic_init_const: \ |
7106 | pedwarn_init (LOCATION, OPT, IN); \ |
7107 | break; \ |
7108 | case ic_return: \ |
7109 | pedwarn (LOCATION, OPT, RE); \ |
7110 | break; \ |
7111 | default: \ |
7112 | gcc_unreachable (); \ |
7113 | } \ |
7114 | } while (0) |
7115 | |
7116 | /* This macro is used to emit diagnostics to ensure that all format |
7117 | strings are complete sentences, visible to gettext and checked at |
7118 | compile time. It can be called with 'pedwarn' or 'warning_at'. */ |
7119 | #define WARNING_FOR_QUALIFIERS(PEDWARN, LOCATION, PLOC, OPT, AR, AS, IN, RE, QUALS) \ |
7120 | do { \ |
7121 | switch (errtype) \ |
7122 | { \ |
7123 | case ic_argpass: \ |
7124 | { \ |
7125 | auto_diagnostic_group d; \ |
7126 | if (PEDWARN) { \ |
7127 | if (pedwarn (PLOC, OPT, AR, parmnum, rname, QUALS)) \ |
7128 | inform_for_arg (fundecl, (PLOC), parmnum, type, rhstype); \ |
7129 | } else { \ |
7130 | if (warning_at (PLOC, OPT, AR, parmnum, rname, QUALS)) \ |
7131 | inform_for_arg (fundecl, (PLOC), parmnum, type, rhstype); \ |
7132 | } \ |
7133 | } \ |
7134 | break; \ |
7135 | case ic_assign: \ |
7136 | if (PEDWARN) \ |
7137 | pedwarn (LOCATION, OPT, AS, QUALS); \ |
7138 | else \ |
7139 | warning_at (LOCATION, OPT, AS, QUALS); \ |
7140 | break; \ |
7141 | case ic_init: \ |
7142 | case ic_init_const: \ |
7143 | if (PEDWARN) \ |
7144 | pedwarn (LOCATION, OPT, IN, QUALS); \ |
7145 | else \ |
7146 | warning_at (LOCATION, OPT, IN, QUALS); \ |
7147 | break; \ |
7148 | case ic_return: \ |
7149 | if (PEDWARN) \ |
7150 | pedwarn (LOCATION, OPT, RE, QUALS); \ |
7151 | else \ |
7152 | warning_at (LOCATION, OPT, RE, QUALS); \ |
7153 | break; \ |
7154 | default: \ |
7155 | gcc_unreachable (); \ |
7156 | } \ |
7157 | } while (0) |
7158 | |
7159 | /* This macro is used to emit diagnostics to ensure that all format |
7160 | strings are complete sentences, visible to gettext and checked at |
7161 | compile time. It is the same as PEDWARN_FOR_ASSIGNMENT but with an |
7162 | extra parameter to enumerate qualifiers. */ |
7163 | #define PEDWARN_FOR_QUALIFIERS(LOCATION, PLOC, OPT, AR, AS, IN, RE, QUALS) \ |
7164 | WARNING_FOR_QUALIFIERS (true, LOCATION, PLOC, OPT, AR, AS, IN, RE, QUALS) |
7165 | |
7166 | |
7167 | if (TREE_CODE (rhs) == EXCESS_PRECISION_EXPR) |
7168 | rhs = TREE_OPERAND (rhs, 0); |
7169 | |
7170 | rhstype = TREE_TYPE (rhs); |
7171 | coder = TREE_CODE (rhstype); |
7172 | |
7173 | if (coder == ERROR_MARK) |
7174 | return error_mark_node; |
7175 | |
7176 | if (c_dialect_objc ()) |
7177 | { |
7178 | int parmno; |
7179 | |
7180 | switch (errtype) |
7181 | { |
7182 | case ic_return: |
7183 | parmno = 0; |
7184 | break; |
7185 | |
7186 | case ic_assign: |
7187 | parmno = -1; |
7188 | break; |
7189 | |
7190 | case ic_init: |
7191 | case ic_init_const: |
7192 | parmno = -2; |
7193 | break; |
7194 | |
7195 | default: |
7196 | parmno = parmnum; |
7197 | break; |
7198 | } |
7199 | |
7200 | objc_ok = objc_compare_types (type, rhstype, parmno, rname); |
7201 | } |
7202 | |
7203 | if (warn_cxx_compat) |
7204 | { |
7205 | tree checktype = origtype != NULL_TREE ? origtype : rhstype; |
7206 | if (checktype != error_mark_node |
7207 | && TREE_CODE (type) == ENUMERAL_TYPE |
7208 | && TYPE_MAIN_VARIANT (checktype) != TYPE_MAIN_VARIANT (type)) |
7209 | switch (errtype) |
7210 | { |
7211 | case ic_argpass: |
7212 | if (pedwarn (expr_loc, OPT_Wc___compat, "enum conversion when " |
7213 | "passing argument %d of %qE is invalid in C++" , |
7214 | parmnum, rname)) |
7215 | inform ((fundecl && !DECL_IS_UNDECLARED_BUILTIN (fundecl)) |
7216 | ? DECL_SOURCE_LOCATION (fundecl) : expr_loc, |
7217 | "expected %qT but argument is of type %qT" , |
7218 | type, rhstype); |
7219 | break; |
7220 | case ic_assign: |
7221 | pedwarn (location, OPT_Wc___compat, "enum conversion from %qT to " |
7222 | "%qT in assignment is invalid in C++" , rhstype, type); |
7223 | break; |
7224 | case ic_init: |
7225 | case ic_init_const: |
7226 | pedwarn_init (loc: location, opt: OPT_Wc___compat, gmsgid: "enum conversion from " |
7227 | "%qT to %qT in initialization is invalid in C++" , |
7228 | rhstype, type); |
7229 | break; |
7230 | case ic_return: |
7231 | pedwarn (location, OPT_Wc___compat, "enum conversion from %qT to " |
7232 | "%qT in return is invalid in C++" , rhstype, type); |
7233 | break; |
7234 | default: |
7235 | gcc_unreachable (); |
7236 | } |
7237 | } |
7238 | |
7239 | if (warn_enum_conversion) |
7240 | { |
7241 | tree checktype = origtype != NULL_TREE ? origtype : rhstype; |
7242 | if (checktype != error_mark_node |
7243 | && TREE_CODE (checktype) == ENUMERAL_TYPE |
7244 | && TREE_CODE (type) == ENUMERAL_TYPE |
7245 | && !comptypes (TYPE_MAIN_VARIANT (checktype), TYPE_MAIN_VARIANT (type))) |
7246 | { |
7247 | gcc_rich_location loc (location); |
7248 | warning_at (&loc, OPT_Wenum_conversion, |
7249 | "implicit conversion from %qT to %qT" , |
7250 | checktype, type); |
7251 | } |
7252 | } |
7253 | |
7254 | if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (rhstype)) |
7255 | { |
7256 | warn_for_address_of_packed_member (type, orig_rhs); |
7257 | return rhs; |
7258 | } |
7259 | |
7260 | if (coder == VOID_TYPE) |
7261 | { |
7262 | /* Except for passing an argument to an unprototyped function, |
7263 | this is a constraint violation. When passing an argument to |
7264 | an unprototyped function, it is compile-time undefined; |
7265 | making it a constraint in that case was rejected in |
7266 | DR#252. */ |
7267 | const char msg[] = "void value not ignored as it ought to be" ; |
7268 | if (warnopt) |
7269 | warning_at (location, warnopt, msg); |
7270 | else |
7271 | error_at (location, msg); |
7272 | return error_mark_node; |
7273 | } |
7274 | rhs = require_complete_type (loc: location, value: rhs); |
7275 | if (rhs == error_mark_node) |
7276 | return error_mark_node; |
7277 | |
7278 | if (coder == POINTER_TYPE && reject_gcc_builtin (rhs)) |
7279 | return error_mark_node; |
7280 | |
7281 | /* A non-reference type can convert to a reference. This handles |
7282 | va_start, va_copy and possibly port built-ins. */ |
7283 | if (codel == REFERENCE_TYPE && coder != REFERENCE_TYPE) |
7284 | { |
7285 | if (!lvalue_p (ref: rhs)) |
7286 | { |
7287 | const char msg[] = "cannot pass rvalue to reference parameter" ; |
7288 | if (warnopt) |
7289 | warning_at (location, warnopt, msg); |
7290 | else |
7291 | error_at (location, msg); |
7292 | return error_mark_node; |
7293 | } |
7294 | if (!c_mark_addressable (exp: rhs)) |
7295 | return error_mark_node; |
7296 | rhs = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (rhs)), rhs); |
7297 | SET_EXPR_LOCATION (rhs, location); |
7298 | |
7299 | rhs = convert_for_assignment (location, expr_loc, |
7300 | type: build_pointer_type (TREE_TYPE (type)), |
7301 | rhs, origtype, errtype, |
7302 | null_pointer_constant, fundecl, function, |
7303 | parmnum, warnopt); |
7304 | if (rhs == error_mark_node) |
7305 | return error_mark_node; |
7306 | |
7307 | rhs = build1 (NOP_EXPR, type, rhs); |
7308 | SET_EXPR_LOCATION (rhs, location); |
7309 | return rhs; |
7310 | } |
7311 | /* Some types can interconvert without explicit casts. */ |
7312 | else if (codel == VECTOR_TYPE && coder == VECTOR_TYPE |
7313 | && vector_types_convertible_p (t1: type, TREE_TYPE (rhs), emit_lax_note: true)) |
7314 | return convert (type, rhs); |
7315 | /* Arithmetic types all interconvert, and enum is treated like int. */ |
7316 | else if ((codel == INTEGER_TYPE || codel == REAL_TYPE |
7317 | || codel == FIXED_POINT_TYPE |
7318 | || codel == ENUMERAL_TYPE || codel == COMPLEX_TYPE |
7319 | || codel == BOOLEAN_TYPE || codel == BITINT_TYPE) |
7320 | && (coder == INTEGER_TYPE || coder == REAL_TYPE |
7321 | || coder == FIXED_POINT_TYPE |
7322 | || coder == ENUMERAL_TYPE || coder == COMPLEX_TYPE |
7323 | || coder == BOOLEAN_TYPE || coder == BITINT_TYPE)) |
7324 | { |
7325 | if (warnopt && errtype == ic_argpass) |
7326 | maybe_warn_builtin_no_proto_arg (loc: expr_loc, fundecl, parmnum, parmtype: type, |
7327 | argtype: rhstype); |
7328 | |
7329 | bool save = in_late_binary_op; |
7330 | if (C_BOOLEAN_TYPE_P (type) || codel == COMPLEX_TYPE |
7331 | || (coder == REAL_TYPE |
7332 | && (codel == INTEGER_TYPE || codel == ENUMERAL_TYPE) |
7333 | && sanitize_flags_p (flag: SANITIZE_FLOAT_CAST))) |
7334 | in_late_binary_op = true; |
7335 | tree ret = convert_and_check (expr_loc != UNKNOWN_LOCATION |
7336 | ? expr_loc : location, type, orig_rhs, |
7337 | errtype == ic_init_const); |
7338 | in_late_binary_op = save; |
7339 | return ret; |
7340 | } |
7341 | |
7342 | /* Aggregates in different TUs might need conversion. */ |
7343 | if ((codel == RECORD_TYPE || codel == UNION_TYPE) |
7344 | && codel == coder |
7345 | && comptypes (TYPE_MAIN_VARIANT (type), TYPE_MAIN_VARIANT (rhstype))) |
7346 | return convert_and_check (expr_loc != UNKNOWN_LOCATION |
7347 | ? expr_loc : location, type, rhs); |
7348 | |
7349 | /* Conversion to a transparent union or record from its member types. |
7350 | This applies only to function arguments. */ |
7351 | if (((codel == UNION_TYPE || codel == RECORD_TYPE) |
7352 | && TYPE_TRANSPARENT_AGGR (type)) |
7353 | && errtype == ic_argpass) |
7354 | { |
7355 | tree memb, marginal_memb = NULL_TREE; |
7356 | |
7357 | for (memb = TYPE_FIELDS (type); memb ; memb = DECL_CHAIN (memb)) |
7358 | { |
7359 | tree memb_type = TREE_TYPE (memb); |
7360 | |
7361 | if (comptypes (TYPE_MAIN_VARIANT (memb_type), |
7362 | TYPE_MAIN_VARIANT (rhstype))) |
7363 | break; |
7364 | |
7365 | if (TREE_CODE (memb_type) != POINTER_TYPE) |
7366 | continue; |
7367 | |
7368 | if (coder == POINTER_TYPE) |
7369 | { |
7370 | tree ttl = TREE_TYPE (memb_type); |
7371 | tree ttr = TREE_TYPE (rhstype); |
7372 | |
7373 | /* Any non-function converts to a [const][volatile] void * |
7374 | and vice versa; otherwise, targets must be the same. |
7375 | Meanwhile, the lhs target must have all the qualifiers of |
7376 | the rhs. */ |
7377 | if ((VOID_TYPE_P (ttl) && !TYPE_ATOMIC (ttl)) |
7378 | || (VOID_TYPE_P (ttr) && !TYPE_ATOMIC (ttr)) |
7379 | || comp_target_types (location, ttl: memb_type, ttr: rhstype)) |
7380 | { |
7381 | int lquals = TYPE_QUALS (ttl) & ~TYPE_QUAL_ATOMIC; |
7382 | int rquals = TYPE_QUALS (ttr) & ~TYPE_QUAL_ATOMIC; |
7383 | /* If this type won't generate any warnings, use it. */ |
7384 | if (lquals == rquals |
7385 | || ((TREE_CODE (ttr) == FUNCTION_TYPE |
7386 | && TREE_CODE (ttl) == FUNCTION_TYPE) |
7387 | ? ((lquals | rquals) == rquals) |
7388 | : ((lquals | rquals) == lquals))) |
7389 | break; |
7390 | |
7391 | /* Keep looking for a better type, but remember this one. */ |
7392 | if (!marginal_memb) |
7393 | marginal_memb = memb; |
7394 | } |
7395 | } |
7396 | |
7397 | /* Can convert integer zero to any pointer type. */ |
7398 | if (null_pointer_constant) |
7399 | { |
7400 | rhs = null_pointer_node; |
7401 | break; |
7402 | } |
7403 | } |
7404 | |
7405 | if (memb || marginal_memb) |
7406 | { |
7407 | if (!memb) |
7408 | { |
7409 | /* We have only a marginally acceptable member type; |
7410 | it needs a warning. */ |
7411 | tree ttl = TREE_TYPE (TREE_TYPE (marginal_memb)); |
7412 | tree ttr = TREE_TYPE (rhstype); |
7413 | |
7414 | /* Const and volatile mean something different for function |
7415 | types, so the usual warnings are not appropriate. */ |
7416 | if (TREE_CODE (ttr) == FUNCTION_TYPE |
7417 | && TREE_CODE (ttl) == FUNCTION_TYPE) |
7418 | { |
7419 | /* Because const and volatile on functions are |
7420 | restrictions that say the function will not do |
7421 | certain things, it is okay to use a const or volatile |
7422 | function where an ordinary one is wanted, but not |
7423 | vice-versa. */ |
7424 | if (TYPE_QUALS_NO_ADDR_SPACE (ttl) |
7425 | & ~TYPE_QUALS_NO_ADDR_SPACE (ttr)) |
7426 | PEDWARN_FOR_QUALIFIERS (location, expr_loc, |
7427 | OPT_Wdiscarded_qualifiers, |
7428 | G_("passing argument %d of %qE " |
7429 | "makes %q#v qualified function " |
7430 | "pointer from unqualified" ), |
7431 | G_("assignment makes %q#v qualified " |
7432 | "function pointer from " |
7433 | "unqualified" ), |
7434 | G_("initialization makes %q#v qualified " |
7435 | "function pointer from " |
7436 | "unqualified" ), |
7437 | G_("return makes %q#v qualified function " |
7438 | "pointer from unqualified" ), |
7439 | TYPE_QUALS (ttl) & ~TYPE_QUALS (ttr)); |
7440 | } |
7441 | else if (TYPE_QUALS_NO_ADDR_SPACE (ttr) |
7442 | & ~TYPE_QUALS_NO_ADDR_SPACE (ttl)) |
7443 | PEDWARN_FOR_QUALIFIERS (location, expr_loc, |
7444 | OPT_Wdiscarded_qualifiers, |
7445 | G_("passing argument %d of %qE discards " |
7446 | "%qv qualifier from pointer target type" ), |
7447 | G_("assignment discards %qv qualifier " |
7448 | "from pointer target type" ), |
7449 | G_("initialization discards %qv qualifier " |
7450 | "from pointer target type" ), |
7451 | G_("return discards %qv qualifier from " |
7452 | "pointer target type" ), |
7453 | TYPE_QUALS (ttr) & ~TYPE_QUALS (ttl)); |
7454 | |
7455 | memb = marginal_memb; |
7456 | } |
7457 | |
7458 | if (!fundecl || !DECL_IN_SYSTEM_HEADER (fundecl)) |
7459 | pedwarn (location, OPT_Wpedantic, |
7460 | "ISO C prohibits argument conversion to union type" ); |
7461 | |
7462 | rhs = fold_convert_loc (location, TREE_TYPE (memb), rhs); |
7463 | return build_constructor_single (type, memb, rhs); |
7464 | } |
7465 | } |
7466 | |
7467 | /* Conversions among pointers */ |
7468 | else if ((codel == POINTER_TYPE || codel == REFERENCE_TYPE) |
7469 | && (coder == codel)) |
7470 | { |
7471 | /* If RHS refers to a built-in declared without a prototype |
7472 | BLTIN is the declaration of the built-in with a prototype |
7473 | and RHSTYPE is set to the actual type of the built-in. */ |
7474 | tree bltin; |
7475 | rhstype = type_or_builtin_type (expr: rhs, bltin: &bltin); |
7476 | |
7477 | tree ttl = TREE_TYPE (type); |
7478 | tree ttr = TREE_TYPE (rhstype); |
7479 | tree mvl = ttl; |
7480 | tree mvr = ttr; |
7481 | bool is_opaque_pointer; |
7482 | bool target_cmp = false; /* Cache comp_target_types () result. */ |
7483 | addr_space_t asl; |
7484 | addr_space_t asr; |
7485 | |
7486 | if (TREE_CODE (mvl) != ARRAY_TYPE) |
7487 | mvl = (TYPE_ATOMIC (mvl) |
7488 | ? c_build_qualified_type (TYPE_MAIN_VARIANT (mvl), |
7489 | TYPE_QUAL_ATOMIC) |
7490 | : TYPE_MAIN_VARIANT (mvl)); |
7491 | if (TREE_CODE (mvr) != ARRAY_TYPE) |
7492 | mvr = (TYPE_ATOMIC (mvr) |
7493 | ? c_build_qualified_type (TYPE_MAIN_VARIANT (mvr), |
7494 | TYPE_QUAL_ATOMIC) |
7495 | : TYPE_MAIN_VARIANT (mvr)); |
7496 | /* Opaque pointers are treated like void pointers. */ |
7497 | is_opaque_pointer = vector_targets_convertible_p (t1: ttl, t2: ttr); |
7498 | |
7499 | /* The Plan 9 compiler permits a pointer to a struct to be |
7500 | automatically converted into a pointer to an anonymous field |
7501 | within the struct. */ |
7502 | if (flag_plan9_extensions |
7503 | && RECORD_OR_UNION_TYPE_P (mvl) |
7504 | && RECORD_OR_UNION_TYPE_P (mvr) |
7505 | && mvl != mvr) |
7506 | { |
7507 | tree new_rhs = convert_to_anonymous_field (location, type, rhs); |
7508 | if (new_rhs != NULL_TREE) |
7509 | { |
7510 | rhs = new_rhs; |
7511 | rhstype = TREE_TYPE (rhs); |
7512 | coder = TREE_CODE (rhstype); |
7513 | ttr = TREE_TYPE (rhstype); |
7514 | mvr = TYPE_MAIN_VARIANT (ttr); |
7515 | } |
7516 | } |
7517 | |
7518 | /* C++ does not allow the implicit conversion void* -> T*. However, |
7519 | for the purpose of reducing the number of false positives, we |
7520 | tolerate the special case of |
7521 | |
7522 | int *p = NULL; |
7523 | |
7524 | where NULL is typically defined in C to be '(void *) 0'. */ |
7525 | if (VOID_TYPE_P (ttr) && rhs != null_pointer_node && !VOID_TYPE_P (ttl)) |
7526 | warning_at (errtype == ic_argpass ? expr_loc : location, |
7527 | OPT_Wc___compat, |
7528 | "request for implicit conversion " |
7529 | "from %qT to %qT not permitted in C++" , rhstype, type); |
7530 | |
7531 | /* Warn of new allocations that are not big enough for the target |
7532 | type. */ |
7533 | if (warn_alloc_size && TREE_CODE (rhs) == CALL_EXPR) |
7534 | if (tree fndecl = get_callee_fndecl (rhs)) |
7535 | if (DECL_IS_MALLOC (fndecl)) |
7536 | { |
7537 | tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (fndecl)); |
7538 | tree alloc_size = lookup_attribute (attr_name: "alloc_size" , list: attrs); |
7539 | if (alloc_size) |
7540 | warn_for_alloc_size (location, ttl, rhs, alloc_size); |
7541 | } |
7542 | |
7543 | /* See if the pointers point to incompatible address spaces. */ |
7544 | asl = TYPE_ADDR_SPACE (ttl); |
7545 | asr = TYPE_ADDR_SPACE (ttr); |
7546 | if (!null_pointer_constant_p (expr: rhs) |
7547 | && asr != asl && !targetm.addr_space.subset_p (asr, asl)) |
7548 | { |
7549 | auto_diagnostic_group d; |
7550 | bool diagnosed = true; |
7551 | switch (errtype) |
7552 | { |
7553 | case ic_argpass: |
7554 | { |
7555 | const char msg[] = G_("passing argument %d of %qE from " |
7556 | "pointer to non-enclosed address space" ); |
7557 | if (warnopt) |
7558 | diagnosed |
7559 | = warning_at (expr_loc, warnopt, msg, parmnum, rname); |
7560 | else |
7561 | error_at (expr_loc, msg, parmnum, rname); |
7562 | break; |
7563 | } |
7564 | case ic_assign: |
7565 | { |
7566 | const char msg[] = G_("assignment from pointer to " |
7567 | "non-enclosed address space" ); |
7568 | if (warnopt) |
7569 | diagnosed = warning_at (location, warnopt, msg); |
7570 | else |
7571 | error_at (location, msg); |
7572 | break; |
7573 | } |
7574 | case ic_init: |
7575 | case ic_init_const: |
7576 | { |
7577 | const char msg[] = G_("initialization from pointer to " |
7578 | "non-enclosed address space" ); |
7579 | if (warnopt) |
7580 | diagnosed = warning_at (location, warnopt, msg); |
7581 | else |
7582 | error_at (location, msg); |
7583 | break; |
7584 | } |
7585 | case ic_return: |
7586 | { |
7587 | const char msg[] = G_("return from pointer to " |
7588 | "non-enclosed address space" ); |
7589 | if (warnopt) |
7590 | diagnosed = warning_at (location, warnopt, msg); |
7591 | else |
7592 | error_at (location, msg); |
7593 | break; |
7594 | } |
7595 | default: |
7596 | gcc_unreachable (); |
7597 | } |
7598 | if (diagnosed) |
7599 | { |
7600 | if (errtype == ic_argpass) |
7601 | inform_for_arg (fundecl, ploc: expr_loc, parmnum, expected_type: type, actual_type: rhstype); |
7602 | else |
7603 | inform (location, "expected %qT but pointer is of type %qT" , |
7604 | type, rhstype); |
7605 | } |
7606 | return error_mark_node; |
7607 | } |
7608 | |
7609 | /* Check if the right-hand side has a format attribute but the |
7610 | left-hand side doesn't. */ |
7611 | if (warn_suggest_attribute_format |
7612 | && check_missing_format_attribute (type, rhstype)) |
7613 | { |
7614 | switch (errtype) |
7615 | { |
7616 | case ic_argpass: |
7617 | warning_at (expr_loc, OPT_Wsuggest_attribute_format, |
7618 | "argument %d of %qE might be " |
7619 | "a candidate for a format attribute" , |
7620 | parmnum, rname); |
7621 | break; |
7622 | case ic_assign: |
7623 | warning_at (location, OPT_Wsuggest_attribute_format, |
7624 | "assignment left-hand side might be " |
7625 | "a candidate for a format attribute" ); |
7626 | break; |
7627 | case ic_init: |
7628 | case ic_init_const: |
7629 | warning_at (location, OPT_Wsuggest_attribute_format, |
7630 | "initialization left-hand side might be " |
7631 | "a candidate for a format attribute" ); |
7632 | break; |
7633 | case ic_return: |
7634 | warning_at (location, OPT_Wsuggest_attribute_format, |
7635 | "return type might be " |
7636 | "a candidate for a format attribute" ); |
7637 | break; |
7638 | default: |
7639 | gcc_unreachable (); |
7640 | } |
7641 | } |
7642 | |
7643 | /* See if the pointers point to incompatible scalar storage orders. */ |
7644 | if (warn_scalar_storage_order |
7645 | && !null_pointer_constant_p (expr: rhs) |
7646 | && (AGGREGATE_TYPE_P (ttl) && TYPE_REVERSE_STORAGE_ORDER (ttl)) |
7647 | != (AGGREGATE_TYPE_P (ttr) && TYPE_REVERSE_STORAGE_ORDER (ttr))) |
7648 | { |
7649 | tree t; |
7650 | |
7651 | switch (errtype) |
7652 | { |
7653 | case ic_argpass: |
7654 | /* Do not warn for built-in functions, for example memcpy, since we |
7655 | control how they behave and they can be useful in this area. */ |
7656 | if (TREE_CODE (rname) != FUNCTION_DECL |
7657 | || !fndecl_built_in_p (node: rname)) |
7658 | warning_at (location, OPT_Wscalar_storage_order, |
7659 | "passing argument %d of %qE from incompatible " |
7660 | "scalar storage order" , parmnum, rname); |
7661 | break; |
7662 | case ic_assign: |
7663 | /* Do not warn if the RHS is a call to a function that returns a |
7664 | pointer that is not an alias. */ |
7665 | if (TREE_CODE (rhs) != CALL_EXPR |
7666 | || (t = get_callee_fndecl (rhs)) == NULL_TREE |
7667 | || !DECL_IS_MALLOC (t)) |
7668 | warning_at (location, OPT_Wscalar_storage_order, |
7669 | "assignment to %qT from pointer type %qT with " |
7670 | "incompatible scalar storage order" , type, rhstype); |
7671 | break; |
7672 | case ic_init: |
7673 | case ic_init_const: |
7674 | /* Likewise. */ |
7675 | if (TREE_CODE (rhs) != CALL_EXPR |
7676 | || (t = get_callee_fndecl (rhs)) == NULL_TREE |
7677 | || !DECL_IS_MALLOC (t)) |
7678 | warning_at (location, OPT_Wscalar_storage_order, |
7679 | "initialization of %qT from pointer type %qT with " |
7680 | "incompatible scalar storage order" , type, rhstype); |
7681 | break; |
7682 | case ic_return: |
7683 | warning_at (location, OPT_Wscalar_storage_order, |
7684 | "returning %qT from pointer type with incompatible " |
7685 | "scalar storage order %qT" , rhstype, type); |
7686 | break; |
7687 | default: |
7688 | gcc_unreachable (); |
7689 | } |
7690 | } |
7691 | |
7692 | /* Any non-function converts to a [const][volatile] void * |
7693 | and vice versa; otherwise, targets must be the same. |
7694 | Meanwhile, the lhs target must have all the qualifiers of the rhs. */ |
7695 | if ((VOID_TYPE_P (ttl) && !TYPE_ATOMIC (ttl)) |
7696 | || (VOID_TYPE_P (ttr) && !TYPE_ATOMIC (ttr)) |
7697 | || (target_cmp = comp_target_types (location, ttl: type, ttr: rhstype)) |
7698 | || is_opaque_pointer |
7699 | || ((c_common_unsigned_type (mvl) |
7700 | == c_common_unsigned_type (mvr)) |
7701 | && (c_common_signed_type (mvl) |
7702 | == c_common_signed_type (mvr)) |
7703 | && TYPE_ATOMIC (mvl) == TYPE_ATOMIC (mvr))) |
7704 | { |
7705 | /* Warn about loss of qualifers from pointers to arrays with |
7706 | qualifiers on the element type. */ |
7707 | if (TREE_CODE (ttr) == ARRAY_TYPE) |
7708 | { |
7709 | ttr = strip_array_types (type: ttr); |
7710 | ttl = strip_array_types (type: ttl); |
7711 | |
7712 | if (TYPE_QUALS_NO_ADDR_SPACE_NO_ATOMIC (ttr) |
7713 | & ~TYPE_QUALS_NO_ADDR_SPACE_NO_ATOMIC (ttl)) |
7714 | WARNING_FOR_QUALIFIERS (flag_isoc23, |
7715 | location, expr_loc, |
7716 | OPT_Wdiscarded_array_qualifiers, |
7717 | G_("passing argument %d of %qE discards " |
7718 | "%qv qualifier from pointer target type" ), |
7719 | G_("assignment discards %qv qualifier " |
7720 | "from pointer target type" ), |
7721 | G_("initialization discards %qv qualifier " |
7722 | "from pointer target type" ), |
7723 | G_("return discards %qv qualifier from " |
7724 | "pointer target type" ), |
7725 | TYPE_QUALS (ttr) & ~TYPE_QUALS (ttl)); |
7726 | } |
7727 | else if (pedantic |
7728 | && ((VOID_TYPE_P (ttl) && TREE_CODE (ttr) == FUNCTION_TYPE) |
7729 | || |
7730 | (VOID_TYPE_P (ttr) |
7731 | && !null_pointer_constant |
7732 | && TREE_CODE (ttl) == FUNCTION_TYPE))) |
7733 | PEDWARN_FOR_ASSIGNMENT (location, expr_loc, OPT_Wpedantic, |
7734 | G_("ISO C forbids passing argument %d of " |
7735 | "%qE between function pointer " |
7736 | "and %<void *%>" ), |
7737 | G_("ISO C forbids assignment between " |
7738 | "function pointer and %<void *%>" ), |
7739 | G_("ISO C forbids initialization between " |
7740 | "function pointer and %<void *%>" ), |
7741 | G_("ISO C forbids return between function " |
7742 | "pointer and %<void *%>" )); |
7743 | /* Const and volatile mean something different for function types, |
7744 | so the usual warnings are not appropriate. */ |
7745 | else if (TREE_CODE (ttr) != FUNCTION_TYPE |
7746 | && TREE_CODE (ttl) != FUNCTION_TYPE) |
7747 | { |
7748 | /* Assignments between atomic and non-atomic objects are OK. */ |
7749 | bool warn_quals_ped = TYPE_QUALS_NO_ADDR_SPACE_NO_ATOMIC (ttr) |
7750 | & ~TYPE_QUALS_NO_ADDR_SPACE_NO_ATOMIC (ttl); |
7751 | bool warn_quals = TYPE_QUALS_NO_ADDR_SPACE_NO_ATOMIC (ttr) |
7752 | & ~TYPE_QUALS_NO_ADDR_SPACE_NO_ATOMIC (strip_array_types (ttl)); |
7753 | |
7754 | /* Don't warn about loss of qualifier for conversions from |
7755 | qualified void* to pointers to arrays with corresponding |
7756 | qualifier on the element type (except for pedantic before C23). */ |
7757 | if (warn_quals || (warn_quals_ped && pedantic && !flag_isoc23)) |
7758 | PEDWARN_FOR_QUALIFIERS (location, expr_loc, |
7759 | OPT_Wdiscarded_qualifiers, |
7760 | G_("passing argument %d of %qE discards " |
7761 | "%qv qualifier from pointer target type" ), |
7762 | G_("assignment discards %qv qualifier " |
7763 | "from pointer target type" ), |
7764 | G_("initialization discards %qv qualifier " |
7765 | "from pointer target type" ), |
7766 | G_("return discards %qv qualifier from " |
7767 | "pointer target type" ), |
7768 | TYPE_QUALS (ttr) & ~TYPE_QUALS (ttl)); |
7769 | else if (warn_quals_ped) |
7770 | pedwarn_c11 (location, opt: OPT_Wc11_c23_compat, |
7771 | "array with qualifier on the element is not qualified before C23" ); |
7772 | |
7773 | /* If this is not a case of ignoring a mismatch in signedness, |
7774 | no warning. */ |
7775 | else if (VOID_TYPE_P (ttl) || VOID_TYPE_P (ttr) |
7776 | || target_cmp) |
7777 | ; |
7778 | /* If there is a mismatch, do warn. */ |
7779 | else if (warn_pointer_sign) |
7780 | switch (errtype) |
7781 | { |
7782 | case ic_argpass: |
7783 | { |
7784 | auto_diagnostic_group d; |
7785 | range_label_for_type_mismatch rhs_label (rhstype, type); |
7786 | gcc_rich_location richloc (expr_loc, &rhs_label); |
7787 | if (pedwarn (&richloc, OPT_Wpointer_sign, |
7788 | "pointer targets in passing argument %d of " |
7789 | "%qE differ in signedness" , parmnum, rname)) |
7790 | inform_for_arg (fundecl, ploc: expr_loc, parmnum, expected_type: type, |
7791 | actual_type: rhstype); |
7792 | } |
7793 | break; |
7794 | case ic_assign: |
7795 | pedwarn (location, OPT_Wpointer_sign, |
7796 | "pointer targets in assignment from %qT to %qT " |
7797 | "differ in signedness" , rhstype, type); |
7798 | break; |
7799 | case ic_init: |
7800 | case ic_init_const: |
7801 | pedwarn_init (loc: location, opt: OPT_Wpointer_sign, |
7802 | gmsgid: "pointer targets in initialization of %qT " |
7803 | "from %qT differ in signedness" , type, |
7804 | rhstype); |
7805 | break; |
7806 | case ic_return: |
7807 | pedwarn (location, OPT_Wpointer_sign, "pointer targets in " |
7808 | "returning %qT from a function with return type " |
7809 | "%qT differ in signedness" , rhstype, type); |
7810 | break; |
7811 | default: |
7812 | gcc_unreachable (); |
7813 | } |
7814 | } |
7815 | else if (TREE_CODE (ttl) == FUNCTION_TYPE |
7816 | && TREE_CODE (ttr) == FUNCTION_TYPE) |
7817 | { |
7818 | /* Because const and volatile on functions are restrictions |
7819 | that say the function will not do certain things, |
7820 | it is okay to use a const or volatile function |
7821 | where an ordinary one is wanted, but not vice-versa. */ |
7822 | if (TYPE_QUALS_NO_ADDR_SPACE (ttl) |
7823 | & ~TYPE_QUALS_NO_ADDR_SPACE (ttr)) |
7824 | PEDWARN_FOR_QUALIFIERS (location, expr_loc, |
7825 | OPT_Wdiscarded_qualifiers, |
7826 | G_("passing argument %d of %qE makes " |
7827 | "%q#v qualified function pointer " |
7828 | "from unqualified" ), |
7829 | G_("assignment makes %q#v qualified function " |
7830 | "pointer from unqualified" ), |
7831 | G_("initialization makes %q#v qualified " |
7832 | "function pointer from unqualified" ), |
7833 | G_("return makes %q#v qualified function " |
7834 | "pointer from unqualified" ), |
7835 | TYPE_QUALS (ttl) & ~TYPE_QUALS (ttr)); |
7836 | } |
7837 | } |
7838 | /* Avoid warning about the volatile ObjC EH puts on decls. */ |
7839 | else if (!objc_ok) |
7840 | { |
7841 | switch (errtype) |
7842 | { |
7843 | case ic_argpass: |
7844 | { |
7845 | auto_diagnostic_group d; |
7846 | range_label_for_type_mismatch rhs_label (rhstype, type); |
7847 | gcc_rich_location richloc (expr_loc, &rhs_label); |
7848 | if (permerror_opt (&richloc, OPT_Wincompatible_pointer_types, |
7849 | "passing argument %d of %qE from " |
7850 | "incompatible pointer type" , |
7851 | parmnum, rname)) |
7852 | inform_for_arg (fundecl, ploc: expr_loc, parmnum, expected_type: type, actual_type: rhstype); |
7853 | } |
7854 | break; |
7855 | case ic_assign: |
7856 | if (bltin) |
7857 | permerror_opt (location, OPT_Wincompatible_pointer_types, |
7858 | "assignment to %qT from pointer to " |
7859 | "%qD with incompatible type %qT" , |
7860 | type, bltin, rhstype); |
7861 | else |
7862 | permerror_opt (location, OPT_Wincompatible_pointer_types, |
7863 | "assignment to %qT from incompatible pointer " |
7864 | "type %qT" , type, rhstype); |
7865 | break; |
7866 | case ic_init: |
7867 | case ic_init_const: |
7868 | if (bltin) |
7869 | permerror_init (loc: location, opt: OPT_Wincompatible_pointer_types, |
7870 | gmsgid: "initialization of %qT from pointer to " |
7871 | "%qD with incompatible type %qT" , |
7872 | type, bltin, rhstype); |
7873 | else |
7874 | permerror_init (loc: location, opt: OPT_Wincompatible_pointer_types, |
7875 | gmsgid: "initialization of %qT from incompatible " |
7876 | "pointer type %qT" , |
7877 | type, rhstype); |
7878 | break; |
7879 | case ic_return: |
7880 | if (bltin) |
7881 | permerror_opt (location, OPT_Wincompatible_pointer_types, |
7882 | "returning pointer to %qD of type %qT from " |
7883 | "a function with incompatible type %qT" , |
7884 | bltin, rhstype, type); |
7885 | else |
7886 | permerror_opt (location, OPT_Wincompatible_pointer_types, |
7887 | "returning %qT from a function with " |
7888 | "incompatible return type %qT" , rhstype, type); |
7889 | break; |
7890 | default: |
7891 | gcc_unreachable (); |
7892 | } |
7893 | } |
7894 | |
7895 | /* If RHS isn't an address, check pointer or array of packed |
7896 | struct or union. */ |
7897 | warn_for_address_of_packed_member (type, orig_rhs); |
7898 | |
7899 | return convert (type, rhs); |
7900 | } |
7901 | else if (codel == POINTER_TYPE && coder == ARRAY_TYPE) |
7902 | { |
7903 | /* ??? This should not be an error when inlining calls to |
7904 | unprototyped functions. */ |
7905 | const char msg[] = "invalid use of non-lvalue array" ; |
7906 | if (warnopt) |
7907 | warning_at (location, warnopt, msg); |
7908 | else |
7909 | error_at (location, msg); |
7910 | return error_mark_node; |
7911 | } |
7912 | else if (codel == POINTER_TYPE |
7913 | && (coder == INTEGER_TYPE |
7914 | || coder == NULLPTR_TYPE |
7915 | || coder == BITINT_TYPE)) |
7916 | { |
7917 | /* An explicit constant 0 or type nullptr_t can convert to a pointer, |
7918 | or one that results from arithmetic, even including a cast to |
7919 | integer type. */ |
7920 | if (!null_pointer_constant && coder != NULLPTR_TYPE) |
7921 | switch (errtype) |
7922 | { |
7923 | case ic_argpass: |
7924 | { |
7925 | auto_diagnostic_group d; |
7926 | range_label_for_type_mismatch rhs_label (rhstype, type); |
7927 | gcc_rich_location richloc (expr_loc, &rhs_label); |
7928 | if (permerror_opt (&richloc, OPT_Wint_conversion, |
7929 | "passing argument %d of %qE makes pointer " |
7930 | "from integer without a cast" , parmnum, rname)) |
7931 | inform_for_arg (fundecl, ploc: expr_loc, parmnum, expected_type: type, actual_type: rhstype); |
7932 | } |
7933 | break; |
7934 | case ic_assign: |
7935 | permerror_opt (location, OPT_Wint_conversion, |
7936 | "assignment to %qT from %qT makes pointer from " |
7937 | "integer without a cast" , type, rhstype); |
7938 | break; |
7939 | case ic_init: |
7940 | case ic_init_const: |
7941 | permerror_init (loc: location, opt: OPT_Wint_conversion, |
7942 | gmsgid: "initialization of %qT from %qT makes pointer " |
7943 | "from integer without a cast" , type, rhstype); |
7944 | break; |
7945 | case ic_return: |
7946 | permerror_init (loc: location, opt: OPT_Wint_conversion, |
7947 | gmsgid: "returning %qT from a function with return type " |
7948 | "%qT makes pointer from integer without a cast" , |
7949 | rhstype, type); |
7950 | break; |
7951 | default: |
7952 | gcc_unreachable (); |
7953 | } |
7954 | |
7955 | return convert (type, rhs); |
7956 | } |
7957 | else if ((codel == INTEGER_TYPE || codel == BITINT_TYPE) |
7958 | && coder == POINTER_TYPE) |
7959 | { |
7960 | switch (errtype) |
7961 | { |
7962 | case ic_argpass: |
7963 | { |
7964 | auto_diagnostic_group d; |
7965 | range_label_for_type_mismatch rhs_label (rhstype, type); |
7966 | gcc_rich_location richloc (expr_loc, &rhs_label); |
7967 | if (permerror_opt (&richloc, OPT_Wint_conversion, |
7968 | "passing argument %d of %qE makes integer from " |
7969 | "pointer without a cast" , parmnum, rname)) |
7970 | inform_for_arg (fundecl, ploc: expr_loc, parmnum, expected_type: type, actual_type: rhstype); |
7971 | } |
7972 | break; |
7973 | case ic_assign: |
7974 | permerror_opt (location, OPT_Wint_conversion, |
7975 | "assignment to %qT from %qT makes integer from " |
7976 | "pointer without a cast" , type, rhstype); |
7977 | break; |
7978 | case ic_init: |
7979 | case ic_init_const: |
7980 | permerror_init (loc: location, opt: OPT_Wint_conversion, |
7981 | gmsgid: "initialization of %qT from %qT makes integer " |
7982 | "from pointer without a cast" , type, rhstype); |
7983 | break; |
7984 | case ic_return: |
7985 | permerror_opt (location, OPT_Wint_conversion, "returning %qT from a " |
7986 | "function with return type %qT makes integer from " |
7987 | "pointer without a cast" , rhstype, type); |
7988 | break; |
7989 | default: |
7990 | gcc_unreachable (); |
7991 | } |
7992 | |
7993 | return convert (type, rhs); |
7994 | } |
7995 | else if (C_BOOLEAN_TYPE_P (type) |
7996 | /* The type nullptr_t may be converted to bool. The |
7997 | result is false. */ |
7998 | && (coder == POINTER_TYPE || coder == NULLPTR_TYPE)) |
7999 | { |
8000 | tree ret; |
8001 | bool save = in_late_binary_op; |
8002 | in_late_binary_op = true; |
8003 | ret = convert (type, rhs); |
8004 | in_late_binary_op = save; |
8005 | return ret; |
8006 | } |
8007 | else if (codel == NULLPTR_TYPE && null_pointer_constant) |
8008 | return convert (type, rhs); |
8009 | |
8010 | switch (errtype) |
8011 | { |
8012 | case ic_argpass: |
8013 | { |
8014 | auto_diagnostic_group d; |
8015 | range_label_for_type_mismatch rhs_label (rhstype, type); |
8016 | gcc_rich_location richloc (expr_loc, &rhs_label); |
8017 | const char msg[] = G_("incompatible type for argument %d of %qE" ); |
8018 | if (warnopt) |
8019 | warning_at (expr_loc, warnopt, msg, parmnum, rname); |
8020 | else |
8021 | error_at (&richloc, msg, parmnum, rname); |
8022 | inform_for_arg (fundecl, ploc: expr_loc, parmnum, expected_type: type, actual_type: rhstype); |
8023 | } |
8024 | break; |
8025 | case ic_assign: |
8026 | { |
8027 | const char msg[] |
8028 | = G_("incompatible types when assigning to type %qT from type %qT" ); |
8029 | if (warnopt) |
8030 | warning_at (expr_loc, 0, msg, type, rhstype); |
8031 | else |
8032 | error_at (expr_loc, msg, type, rhstype); |
8033 | break; |
8034 | } |
8035 | case ic_init: |
8036 | case ic_init_const: |
8037 | { |
8038 | const char msg[] |
8039 | = G_("incompatible types when initializing type %qT using type %qT" ); |
8040 | if (warnopt) |
8041 | warning_at (location, 0, msg, type, rhstype); |
8042 | else |
8043 | error_at (location, msg, type, rhstype); |
8044 | break; |
8045 | } |
8046 | case ic_return: |
8047 | { |
8048 | const char msg[] |
8049 | = G_("incompatible types when returning type %qT but %qT was expected" ); |
8050 | if (warnopt) |
8051 | warning_at (location, 0, msg, rhstype, type); |
8052 | else |
8053 | error_at (location, msg, rhstype, type); |
8054 | break; |
8055 | } |
8056 | default: |
8057 | gcc_unreachable (); |
8058 | } |
8059 | |
8060 | return error_mark_node; |
8061 | } |
8062 | |
8063 | /* If VALUE is a compound expr all of whose expressions are constant, then |
8064 | return its value. Otherwise, return error_mark_node. |
8065 | |
8066 | This is for handling COMPOUND_EXPRs as initializer elements |
8067 | which is allowed with a warning when -pedantic is specified. */ |
8068 | |
8069 | static tree |
8070 | valid_compound_expr_initializer (tree value, tree endtype) |
8071 | { |
8072 | if (TREE_CODE (value) == COMPOUND_EXPR) |
8073 | { |
8074 | if (valid_compound_expr_initializer (TREE_OPERAND (value, 0), endtype) |
8075 | == error_mark_node) |
8076 | return error_mark_node; |
8077 | return valid_compound_expr_initializer (TREE_OPERAND (value, 1), |
8078 | endtype); |
8079 | } |
8080 | else if (!initializer_constant_valid_p (value, endtype)) |
8081 | return error_mark_node; |
8082 | else |
8083 | return value; |
8084 | } |
8085 | |
/* Perform appropriate conversions on the initial value of a variable,
   store it in the declaration DECL,
   and print any error messages that are appropriate.
   If ORIGTYPE is not NULL_TREE, it is the original type of INIT.
   If the init is invalid, store an ERROR_MARK.

   INIT_LOC is the location of the initial value.  */

void
store_init_value (location_t init_loc, tree decl, tree init, tree origtype)
{
  tree value, type;
  bool npc = false;
  bool int_const_expr = false;
  bool arith_const_expr = false;

  /* If variable's type was invalidly declared, just ignore it.  */

  type = TREE_TYPE (decl);
  if (TREE_CODE (type) == ERROR_MARK)
    return;

  /* Digest the specified initializer into an expression.  */

  if (init)
    {
      /* Classify INIT up front: null pointer constant and integer
	 constant expression status feed the constexpr checks done
	 inside digest_init.  */
      npc = null_pointer_constant_p (expr: init);
      int_const_expr = (TREE_CODE (init) == INTEGER_CST
			&& !TREE_OVERFLOW (init)
			&& INTEGRAL_TYPE_P (TREE_TYPE (init)));
      /* Not fully determined before folding.  */
      arith_const_expr = true;
    }
  /* 'constexpr' objects require an initializer that is constant in the
     stricter C23 sense; they are also treated like static objects for
     the require_constant argument below.  */
  bool constexpr_p = (VAR_P (decl)
		      && C_DECL_DECLARED_CONSTEXPR (decl));
  value = digest_init (init_loc, type, init, origtype, npc, int_const_expr,
		       arith_const_expr, true,
		       TREE_STATIC (decl) || constexpr_p, constexpr_p);

  /* Store the expression if valid; else report error.  */

  /* NOTE(review): the diagnostic machinery presumably gates this on
     -Wtraditional being enabled via the OPT_ code — confirm.  */
  if (!in_system_header_at (loc: input_location)
      && AGGREGATE_TYPE_P (TREE_TYPE (decl)) && !TREE_STATIC (decl))
    warning (OPT_Wtraditional, "traditional C rejects automatic "
	     "aggregate initialization" );

  /* Never replace a FUNCTION_DECL's initial value with an error mark;
     everything else records VALUE (possibly error_mark_node).  */
  if (value != error_mark_node || TREE_CODE (decl) != FUNCTION_DECL)
    DECL_INITIAL (decl) = value;

  /* ANSI wants warnings about out-of-range constant initializers.  */
  STRIP_TYPE_NOPS (value);
  if (TREE_STATIC (decl))
    constant_expression_warning (value);

  /* Check if we need to set array size from compound literal size.  */
  if (TREE_CODE (type) == ARRAY_TYPE
      && TYPE_DOMAIN (type) == NULL_TREE
      && value != error_mark_node)
    {
      tree inside_init = init;

      STRIP_TYPE_NOPS (inside_init);
      inside_init = fold (inside_init);

      if (TREE_CODE (inside_init) == COMPOUND_LITERAL_EXPR)
	{
	  tree cldecl = COMPOUND_LITERAL_EXPR_DECL (inside_init);

	  if (TYPE_DOMAIN (TREE_TYPE (cldecl)))
	    {
	      /* For int foo[] = (int [3]){1}; we need to set array size
		 now since later on array initializer will be just the
		 brace enclosed list of the compound literal.  */
	      tree etype = strip_array_types (TREE_TYPE (decl));
	      /* Copy the type so the shared incomplete array type is
		 not modified, then graft the compound literal's domain
		 onto the copy and lay it out.  */
	      type = build_distinct_type_copy (TYPE_MAIN_VARIANT (type));
	      TYPE_DOMAIN (type) = TYPE_DOMAIN (TREE_TYPE (cldecl));
	      layout_type (type);
	      layout_decl (cldecl, 0);
	      /* Reapply the element qualifiers stripped above.  */
	      TREE_TYPE (decl)
		= c_build_qualified_type (type, TYPE_QUALS (etype));
	    }
	}
    }
}
8170 | |
/* Methods for storing and printing names for error messages.  */

/* Implement a spelling stack that allows components of a name to be pushed
   and popped.  Each element on the stack is this structure.  */

struct spelling
{
  int kind;			/* One of the SPELLING_* codes below.  */
  union
    {
      unsigned HOST_WIDE_INT i;	/* Array index, for SPELLING_BOUNDS.  */
      const char *s;		/* Text (not owned), for STRING/MEMBER.  */
    } u;
};

#define SPELLING_STRING 1	/* Literal text, printed as-is.  */
#define SPELLING_MEMBER 2	/* Member name, printed as ".name".  */
#define SPELLING_BOUNDS 3	/* Array index, printed as "[i]".  */

static struct spelling *spelling;	/* Next stack element (unused).  */
static struct spelling *spelling_base;	/* Spelling stack base.  */
static int spelling_size;		/* Size of the spelling stack.  */

/* Macros to save and restore the spelling stack around push_... functions.
   Alternative to SAVE_SPELLING_STACK.  */

#define SPELLING_DEPTH() (spelling - spelling_base)
#define RESTORE_SPELLING_DEPTH(DEPTH) (spelling = spelling_base + (DEPTH))

/* Push an element on the spelling stack with type KIND and assign VALUE
   to MEMBER.  Grows the stack (by 10 elements at a time) on demand;
   depth is saved/restored across the reallocation because SPELLING_BASE
   may move.  */

#define PUSH_SPELLING(KIND, VALUE, MEMBER)				\
{									\
  int depth = SPELLING_DEPTH ();					\
									\
  if (depth >= spelling_size)						\
    {									\
      spelling_size += 10;						\
      spelling_base = XRESIZEVEC (struct spelling, spelling_base,	\
				  spelling_size);			\
      RESTORE_SPELLING_DEPTH (depth);					\
    }									\
									\
  spelling->kind = (KIND);						\
  spelling->MEMBER = (VALUE);						\
  spelling++;								\
}
8219 | |
/* Push STRING on the stack.  Printed literally.
   Note: the string is not copied — the caller must keep it live while
   this stack entry may still be printed.  */

static void
push_string (const char *string)
{
  PUSH_SPELLING (SPELLING_STRING, string, u.s);
}
8227 | |
8228 | /* Push a member name on the stack. Printed as '.' STRING. */ |
8229 | |
8230 | static void |
8231 | push_member_name (tree decl) |
8232 | { |
8233 | const char *const string |
8234 | = (DECL_NAME (decl) |
8235 | ? identifier_to_locale (IDENTIFIER_POINTER (DECL_NAME (decl))) |
8236 | : _("<anonymous>" )); |
8237 | PUSH_SPELLING (SPELLING_MEMBER, string, u.s); |
8238 | } |
8239 | |
/* Push an array bounds on the stack.  Printed as [BOUNDS].  */

static void
push_array_bounds (unsigned HOST_WIDE_INT bounds)
{
  PUSH_SPELLING (SPELLING_BOUNDS, bounds, u.i);
}
8247 | |
8248 | /* Compute the maximum size in bytes of the printed spelling. */ |
8249 | |
8250 | static int |
8251 | spelling_length (void) |
8252 | { |
8253 | int size = 0; |
8254 | struct spelling *p; |
8255 | |
8256 | for (p = spelling_base; p < spelling; p++) |
8257 | { |
8258 | if (p->kind == SPELLING_BOUNDS) |
8259 | size += 25; |
8260 | else |
8261 | size += strlen (s: p->u.s) + 1; |
8262 | } |
8263 | |
8264 | return size; |
8265 | } |
8266 | |
8267 | /* Print the spelling to BUFFER and return it. */ |
8268 | |
8269 | static char * |
8270 | print_spelling (char *buffer) |
8271 | { |
8272 | char *d = buffer; |
8273 | struct spelling *p; |
8274 | |
8275 | for (p = spelling_base; p < spelling; p++) |
8276 | if (p->kind == SPELLING_BOUNDS) |
8277 | { |
8278 | sprintf (s: d, format: "[" HOST_WIDE_INT_PRINT_UNSIGNED "]" , p->u.i); |
8279 | d += strlen (s: d); |
8280 | } |
8281 | else |
8282 | { |
8283 | const char *s; |
8284 | if (p->kind == SPELLING_MEMBER) |
8285 | *d++ = '.'; |
8286 | for (s = p->u.s; (*d = *s++); d++) |
8287 | ; |
8288 | } |
8289 | *d++ = '\0'; |
8290 | return buffer; |
8291 | } |
8292 | |
/* Check whether INIT, a floating or integer constant, is
   representable in TYPE, a real floating type with the same radix or
   a decimal floating type initialized with a binary floating
   constant.  Return true if OK, false if not.  */
static bool
constexpr_init_fits_real_type (tree type, tree init)
{
  gcc_assert (SCALAR_FLOAT_TYPE_P (type));
  gcc_assert (TREE_CODE (init) == INTEGER_CST || TREE_CODE (init) == REAL_CST);
  if (TREE_CODE (init) == REAL_CST
      && TYPE_MODE (TREE_TYPE (init)) == TYPE_MODE (type))
    {
      /* Same mode, no conversion required except for the case of
	 signaling NaNs if the types are incompatible (e.g. double and
	 long double with the same mode).  */
      if (REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (init))
	  && !comptypes (TYPE_MAIN_VARIANT (type),
			 TYPE_MAIN_VARIANT (TREE_TYPE (init))))
	return false;
      return true;
    }
  if (TREE_CODE (init) == INTEGER_CST)
    {
      /* Convert the integer to TYPE and back; the value fits exactly
	 iff the round trip reproduces the original integer.  */
      tree converted = build_real_from_int_cst (type, init);
      bool fail = false;
      wide_int w = real_to_integer (&TREE_REAL_CST (converted), &fail,
				    TYPE_PRECISION (TREE_TYPE (init)));
      return !fail && wi::eq_p (x: w, y: wi::to_wide (t: init));
    }
  /* Signaling NaNs in a different mode never "fit".  */
  if (REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (init)))
    return false;
  /* Quiet NaN and infinity initializations are considered to preserve
     the value whenever the target mode can represent them.  */
  if ((REAL_VALUE_ISINF (TREE_REAL_CST (init))
       && MODE_HAS_INFINITIES (TYPE_MODE (type)))
      || (REAL_VALUE_ISNAN (TREE_REAL_CST (init))
	  && MODE_HAS_NANS (TYPE_MODE (type))))
    return true;
  if (DECIMAL_FLOAT_TYPE_P (type)
      && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (init)))
    {
      /* This is valid if the real number represented by the
	 initializer can be exactly represented in the decimal
	 type.  Compare the values using MPFR.  */
      REAL_VALUE_TYPE t;
      real_convert (&t, TYPE_MODE (type), &TREE_REAL_CST (init));
      mpfr_t bin_val, dec_val;
      /* Both MPFR values use the binary source format's precision so
	 the comparison is exact.  */
      mpfr_init2 (bin_val, REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (init)))->p);
      mpfr_init2 (dec_val, REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (init)))->p);
      mpfr_from_real (bin_val, &TREE_REAL_CST (init), MPFR_RNDN);
      char string[256];
      /* Render the converted decimal value and re-read it with MPFR;
	 a nonzero ternary from mpfr_strtofr means rounding occurred,
	 i.e. the value is not exactly representable.  */
      real_to_decimal (string, &t, sizeof string, 0, 1);
      bool res = (mpfr_strtofr (dec_val, string, NULL, 10, MPFR_RNDN) == 0
		  && mpfr_equal_p (bin_val, dec_val));
      mpfr_clear (bin_val);
      mpfr_clear (dec_val);
      return res;
    }
  /* exact_real_truncate is not quite right here, since it doesn't
     allow even an exact conversion to subnormal values.  */
  REAL_VALUE_TYPE t;
  real_convert (&t, TYPE_MODE (type), &TREE_REAL_CST (init));
  return real_identical (&t, &TREE_REAL_CST (init));
}
8355 | |
/* Check whether INIT (location LOC) is valid as a 'constexpr'
   initializer for type TYPE, and give an error if not.  INIT has
   already been folded and verified to be constant.  INT_CONST_EXPR
   and ARITH_CONST_EXPR say whether it is an integer constant
   expression or arithmetic constant expression, respectively.  If
   TYPE is not a scalar type, this function does nothing.  */

static void
check_constexpr_init (location_t loc, tree type, tree init,
		      bool int_const_expr, bool arith_const_expr)
{
  if (POINTER_TYPE_P (type))
    {
      /* The initializer must be null.  */
      if (TREE_CODE (init) != INTEGER_CST || !integer_zerop (init))
	error_at (loc, "%<constexpr%> pointer initializer is not null" );
      return;
    }
  if (INTEGRAL_TYPE_P (type))
    {
      /* The initializer must be an integer constant expression,
	 representable in the target type.  */
      if (!int_const_expr)
	error_at (loc, "%<constexpr%> integer initializer is not an "
		  "integer constant expression" );
      if (!int_fits_type_p (init, type))
	error_at (loc, "%<constexpr%> initializer not representable in "
		  "type of object" );
      return;
    }
  /* We don't apply any extra checks to extension types such as vector
     or fixed-point types.  */
  if (TREE_CODE (type) != REAL_TYPE && TREE_CODE (type) != COMPLEX_TYPE)
    return;
  if (!arith_const_expr)
    {
      error_at (loc, "%<constexpr%> initializer is not an arithmetic "
		"constant expression" );
      return;
    }
  /* We don't apply any extra checks to complex integers.  */
  if (TREE_CODE (type) == COMPLEX_TYPE
      && TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
    return;
  /* Following N3082, a real type cannot be initialized from a complex
     type and a binary type cannot be initialized from a decimal type
     (but initializing a decimal type from a binary type is OK).
     Signaling NaN initializers are OK only if the types are
     compatible (not just the same mode); all quiet NaN and infinity
     initializations are considered to preserve the value.  */
  if (TREE_CODE (TREE_TYPE (init)) == COMPLEX_TYPE
      && SCALAR_FLOAT_TYPE_P (type))
    {
      error_at (loc, "%<constexpr%> initializer for a real type is of "
		"complex type" );
      return;
    }
  if (SCALAR_FLOAT_TYPE_P (type)
      && SCALAR_FLOAT_TYPE_P (TREE_TYPE (init))
      && DECIMAL_FLOAT_TYPE_P (TREE_TYPE (init))
      && !DECIMAL_FLOAT_TYPE_P (type))
    {
      error_at (loc, "%<constexpr%> initializer for a binary "
		"floating-point type is of decimal type" );
      return;
    }
  /* Finally verify the value itself is exactly representable in TYPE;
     for complex targets, each part is checked separately.  */
  bool fits;
  if (TREE_CODE (type) == COMPLEX_TYPE)
    {
      switch (TREE_CODE (init))
	{
	case INTEGER_CST:
	case REAL_CST:
	  /* A real constant initializes the real part; the imaginary
	     part is zero and always fits.  */
	  fits = constexpr_init_fits_real_type (TREE_TYPE (type), init);
	  break;
	case COMPLEX_CST:
	  fits = (constexpr_init_fits_real_type (TREE_TYPE (type),
						 TREE_REALPART (init))
		  && constexpr_init_fits_real_type (TREE_TYPE (type),
						    TREE_IMAGPART (init)));
	  break;
	default:
	  gcc_unreachable ();
	}
    }
  else
    fits = constexpr_init_fits_real_type (type, init);
  if (!fits)
    error_at (loc, "%<constexpr%> initializer not representable in "
	      "type of object" );
}
8447 | |
/* Digest the parser output INIT as an initializer for type TYPE.
   Return a C expression of type TYPE to represent the initial value.

   If ORIGTYPE is not NULL_TREE, it is the original type of INIT.

   NULL_POINTER_CONSTANT is true if INIT is a null pointer constant,
   INT_CONST_EXPR is true if INIT is an integer constant expression,
   and ARITH_CONST_EXPR is true if INIT is, or might be, an arithmetic
   constant expression, false if it has already been determined in the
   caller that it is not (but folding may have made the value passed here
   indistinguishable from an arithmetic constant expression).

   If INIT is a string constant, STRICT_STRING is true if it is
   unparenthesized or we should not warn here for it being parenthesized.
   For other types of INIT, STRICT_STRING is not used.

   INIT_LOC is the location of the INIT.

   REQUIRE_CONSTANT requests an error if non-constant initializers or
   elements are seen.  REQUIRE_CONSTEXPR means the stricter requirements
   on initializers for 'constexpr' objects apply.  */

static tree
digest_init (location_t init_loc, tree type, tree init, tree origtype,
	     bool null_pointer_constant, bool int_const_expr,
	     bool arith_const_expr, bool strict_string,
	     bool require_constant, bool require_constexpr)
{
  enum tree_code code = TREE_CODE (type);
  tree inside_init = init;
  tree semantic_type = NULL_TREE;
  bool maybe_const = true;

  if (type == error_mark_node
      || !init
      || error_operand_p (t: init))
    return error_mark_node;

  /* Strip no-op conversions so the checks below see the underlying
     expression.  */
  STRIP_TYPE_NOPS (inside_init);

  /* Except inside an OpenMP "for" initializer, peel off any
     excess-precision wrapper (remembering its semantic type so it can
     be re-applied below) and fold to a constant where possible.  */
  if (!c_in_omp_for)
    {
      if (TREE_CODE (inside_init) == EXCESS_PRECISION_EXPR)
	{
	  semantic_type = TREE_TYPE (inside_init);
	  inside_init = TREE_OPERAND (inside_init, 0);
	}
      inside_init = c_fully_fold (inside_init, require_constant, &maybe_const);
    }
  /* Downgrade ARITH_CONST_EXPR when the folded value demonstrably is
     not an arithmetic constant expression.
     TODO: this may not detect all cases of expressions folding to
     constants that are not arithmetic constant expressions.  */
  if (!maybe_const)
    arith_const_expr = false;
  else if (!INTEGRAL_TYPE_P (TREE_TYPE (inside_init))
      && TREE_CODE (TREE_TYPE (inside_init)) != REAL_TYPE
      && TREE_CODE (TREE_TYPE (inside_init)) != COMPLEX_TYPE)
    arith_const_expr = false;
  else if (TREE_CODE (inside_init) != INTEGER_CST
      && TREE_CODE (inside_init) != REAL_CST
      && TREE_CODE (inside_init) != COMPLEX_CST)
    arith_const_expr = false;
  else if (TREE_OVERFLOW (inside_init))
    arith_const_expr = false;

  /* Initialization of an array of chars from a string constant
     optionally enclosed in braces.  */

  if (code == ARRAY_TYPE && inside_init
      && TREE_CODE (inside_init) == STRING_CST)
    {
      /* Element type of the target array, with any _Atomic qualifier
	 preserved but other qualifiers stripped.  */
      tree typ1
	= (TYPE_ATOMIC (TREE_TYPE (type))
	   ? c_build_qualified_type (TYPE_MAIN_VARIANT (TREE_TYPE (type)),
				     TYPE_QUAL_ATOMIC)
	   : TYPE_MAIN_VARIANT (TREE_TYPE (type)));
      /* Note that an array could be both an array of character type
	 and an array of wchar_t if wchar_t is signed char or unsigned
	 char.  */
      bool char_array = (typ1 == char_type_node
			 || typ1 == signed_char_type_node
			 || typ1 == unsigned_char_type_node);
      bool wchar_array = !!comptypes (type1: typ1, wchar_type_node);
      bool char16_array = !!comptypes (type1: typ1, char16_type_node);
      bool char32_array = !!comptypes (type1: typ1, char32_type_node);

      if (char_array || wchar_array || char16_array || char32_array)
	{
	  struct c_expr expr;
	  /* Element type of the string constant itself.  */
	  tree typ2 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (inside_init)));
	  bool incompat_string_cst = false;
	  expr.value = inside_init;
	  expr.original_code = (strict_string ? STRING_CST : ERROR_MARK);
	  expr.original_type = NULL;
	  expr.m_decimal = 0;
	  maybe_warn_string_init (loc: init_loc, type, expr);

	  if (TYPE_DOMAIN (type) && !TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
	    pedwarn_init (loc: init_loc, opt: OPT_Wpedantic,
			  gmsgid: "initialization of a flexible array member");

	  if (comptypes (TYPE_MAIN_VARIANT (TREE_TYPE (inside_init)),
			 TYPE_MAIN_VARIANT (type)))
	    return inside_init;

	  if (char_array)
	    {
	      if (typ2 != char_type_node && typ2 != char8_type_node)
		incompat_string_cst = true;
	    }
	  else if (!comptypes (type1: typ1, type2: typ2))
	    incompat_string_cst = true;

          if (incompat_string_cst)
            {
	      error_init (loc: init_loc, gmsgid: "cannot initialize array of %qT from "
			  "a string literal with type array of %qT",
			  typ1, typ2);
	      return error_mark_node;
            }

	  if (require_constexpr
	      && TYPE_UNSIGNED (typ1) != TYPE_UNSIGNED (typ2))
	    {
	      /* Check if all characters of the string can be
		 represented in the type of the constexpr object being
		 initialized.  */
	      unsigned HOST_WIDE_INT len = TREE_STRING_LENGTH (inside_init);
	      const unsigned char *p =
		(const unsigned char *) TREE_STRING_POINTER (inside_init);
	      gcc_assert (CHAR_TYPE_SIZE == 8 && CHAR_BIT == 8);
	      for (unsigned i = 0; i < len; i++)
		if (p[i] > 127)
		  {
		    error_init (loc: init_loc, gmsgid: "%<constexpr%> initializer not "
				"representable in type of object");
		    break;
		  }
	    }

	  if (TYPE_DOMAIN (type) != NULL_TREE
	      && TYPE_SIZE (type) != NULL_TREE
	      && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
	    {
	      unsigned HOST_WIDE_INT len = TREE_STRING_LENGTH (inside_init);
	      unsigned unit = TYPE_PRECISION (typ1) / BITS_PER_UNIT;

	      /* Subtract the size of a single (possibly wide) character
		 because it's ok to ignore the terminating null char
		 that is counted in the length of the constant.  */
	      if (compare_tree_int (TYPE_SIZE_UNIT (type), len - unit) < 0)
		pedwarn_init (loc: init_loc, opt: 0,
			      gmsgid: ("initializer-string for array of %qT "
			       "is too long"), typ1);
	      else if (warn_cxx_compat
		       && compare_tree_int (TYPE_SIZE_UNIT (type), len) < 0)
		warning_at (init_loc, OPT_Wc___compat,
			    ("initializer-string for array of %qT "
			     "is too long for C++"), typ1);
	      /* Truncate the string constant to fit the array, dropping
		 the excess (including the terminating null).  */
	      if (compare_tree_int (TYPE_SIZE_UNIT (type), len) < 0)
		{
		  unsigned HOST_WIDE_INT size
		    = tree_to_uhwi (TYPE_SIZE_UNIT (type));
		  const char *p = TREE_STRING_POINTER (inside_init);

		  inside_init = build_string (size, p);
		}
	    }

	  TREE_TYPE (inside_init) = type;
	  return inside_init;
	}
      else if (INTEGRAL_TYPE_P (typ1))
	{
	  error_init (loc: init_loc, gmsgid: "array of inappropriate type initialized "
		      "from string constant");
	  return error_mark_node;
	}
    }

  /* Build a VECTOR_CST from a *constant* vector constructor.  If the
     vector constructor is not constant (e.g. {1,2,3,foo()}) then punt
     below and handle as a constructor.  */
  if (code == VECTOR_TYPE
      && VECTOR_TYPE_P (TREE_TYPE (inside_init))
      && vector_types_convertible_p (TREE_TYPE (inside_init), t2: type, emit_lax_note: true)
      && TREE_CONSTANT (inside_init))
    {
      if (TREE_CODE (inside_init) == VECTOR_CST
	  && comptypes (TYPE_MAIN_VARIANT (TREE_TYPE (inside_init)),
			TYPE_MAIN_VARIANT (type)))
	return inside_init;

      if (TREE_CODE (inside_init) == CONSTRUCTOR)
	{
	  unsigned HOST_WIDE_INT ix;
	  tree value;
	  bool constant_p = true;

	  /* Iterate through elements and check if all constructor
	     elements are *_CSTs.  */
	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (inside_init), ix, value)
	    if (!CONSTANT_CLASS_P (value))
	      {
		constant_p = false;
		break;
	      }

	  if (constant_p)
	    return build_vector_from_ctor (type,
					   CONSTRUCTOR_ELTS (inside_init));
	}
    }

  /* Diagnose unsequenced modifications in the initializer if
     requested.  */
  if (warn_sequence_point)
    verify_sequence_points (inside_init);

  /* Any type can be initialized
     from an expression of the same type, optionally with braces.  */

  if (inside_init && TREE_TYPE (inside_init) != NULL_TREE
      && (comptypes (TYPE_MAIN_VARIANT (TREE_TYPE (inside_init)),
		     TYPE_MAIN_VARIANT (type))
	  || (code == ARRAY_TYPE
	      && comptypes (TREE_TYPE (inside_init), type2: type))
	  || (gnu_vector_type_p (type)
	      && comptypes (TREE_TYPE (inside_init), type2: type))
	  || (code == POINTER_TYPE
	      && TREE_CODE (TREE_TYPE (inside_init)) == ARRAY_TYPE
	      && comptypes (TREE_TYPE (TREE_TYPE (inside_init)),
			    TREE_TYPE (type)))))
    {
      if (code == POINTER_TYPE)
	{
	  if (TREE_CODE (TREE_TYPE (inside_init)) == ARRAY_TYPE)
	    {
	      /* Only a string literal or a compound literal may decay
		 to a pointer here; any other array rvalue is invalid.  */
	      if (TREE_CODE (inside_init) == STRING_CST
		  || TREE_CODE (inside_init) == COMPOUND_LITERAL_EXPR)
		inside_init = array_to_pointer_conversion
		  (loc: init_loc, exp: inside_init);
	      else
		{
		  error_init (loc: init_loc, gmsgid: "invalid use of non-lvalue array");
		  return error_mark_node;
		}
	    }
	}

      if (code == VECTOR_TYPE || c_hardbool_type_attr (type))
	/* Although the types are compatible, we may require a
	   conversion.  */
	inside_init = convert (type, inside_init);

      if ((code == RECORD_TYPE || code == UNION_TYPE)
	  && !comptypes (TYPE_MAIN_VARIANT (type), TYPE_MAIN_VARIANT (TREE_TYPE (inside_init))))
	{
	  error_init (loc: init_loc, gmsgid: "invalid initializer");
	  return error_mark_node;
	}

      if (require_constant
	  && TREE_CODE (inside_init) == COMPOUND_LITERAL_EXPR)
	{
	  /* As an extension, allow initializing objects with static storage
	     duration with compound literals (which are then treated just as
	     the brace enclosed list they contain).  Also allow this for
	     vectors, as we can only assign them with compound literals.  */
	  if (flag_isoc99 && code != VECTOR_TYPE)
	    pedwarn_init (loc: init_loc, opt: OPT_Wpedantic, gmsgid: "initializer element "
			  "is not constant");
	  tree decl = COMPOUND_LITERAL_EXPR_DECL (inside_init);
	  inside_init = DECL_INITIAL (decl);
	}

      if (code == ARRAY_TYPE && TREE_CODE (inside_init) != STRING_CST
	  && TREE_CODE (inside_init) != CONSTRUCTOR)
	{
	  error_init (loc: init_loc, gmsgid: "array initialized from non-constant array "
		      "expression");
	  return error_mark_node;
	}

      /* Compound expressions can only occur here if -Wpedantic or
	 -pedantic-errors is specified.  In the later case, we always want
	 an error.  In the former case, we simply want a warning.  */
      if (require_constant && pedantic
	  && TREE_CODE (inside_init) == COMPOUND_EXPR)
	{
	  inside_init
	    = valid_compound_expr_initializer (value: inside_init,
					       TREE_TYPE (inside_init));
	  if (inside_init == error_mark_node)
	    error_init (loc: init_loc, gmsgid: "initializer element is not constant");
	  else
	    pedwarn_init (loc: init_loc, opt: OPT_Wpedantic,
			  gmsgid: "initializer element is not constant");
	  if (flag_pedantic_errors)
	    inside_init = error_mark_node;
	}
      else if (require_constant
	       && !initializer_constant_valid_p (inside_init,
						 TREE_TYPE (inside_init)))
	{
	  error_init (loc: init_loc, gmsgid: "initializer element is not constant");
	  inside_init = error_mark_node;
	}
      else if (require_constant && !maybe_const)
	pedwarn_init (loc: init_loc, opt: OPT_Wpedantic,
		      gmsgid: "initializer element is not a constant expression");
      else if (require_constexpr)
	check_constexpr_init (loc: init_loc, type, init: inside_init,
			      int_const_expr, arith_const_expr);

      /* Added to enable additional -Wsuggest-attribute=format warnings.  */
      if (TREE_CODE (TREE_TYPE (inside_init)) == POINTER_TYPE)
	inside_init = convert_for_assignment (location: init_loc, UNKNOWN_LOCATION,
					      type, rhs: inside_init, origtype,
					      errtype: (require_constant
					       ? ic_init_const
					       : ic_init), null_pointer_constant,
					      NULL_TREE, NULL_TREE, parmnum: 0);
      return inside_init;
    }

  /* Handle scalar types, including conversions.  */

  if (code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE
      || code == POINTER_TYPE || code == ENUMERAL_TYPE || code == BOOLEAN_TYPE
      || code == COMPLEX_TYPE || code == VECTOR_TYPE || code == NULLPTR_TYPE
      || code == BITINT_TYPE)
    {
      /* Remember the pre-conversion value for the constexpr checks
	 below.  */
      tree unconverted_init = inside_init;
      if (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE
	  && (TREE_CODE (init) == STRING_CST
	      || TREE_CODE (init) == COMPOUND_LITERAL_EXPR))
	inside_init = init = array_to_pointer_conversion (loc: init_loc, exp: init);
      /* Re-apply the excess-precision wrapper stripped above before
	 converting.  */
      if (semantic_type)
	inside_init = build1 (EXCESS_PRECISION_EXPR, semantic_type,
			      inside_init);
      inside_init
	= convert_for_assignment (location: init_loc, UNKNOWN_LOCATION, type,
				  rhs: inside_init, origtype,
				  errtype: require_constant ? ic_init_const : ic_init,
				  null_pointer_constant, NULL_TREE, NULL_TREE,
				  parmnum: 0);

      /* Check to see if we have already given an error message.  */
      if (inside_init == error_mark_node)
	;
      else if (require_constant && !TREE_CONSTANT (inside_init))
	{
	  error_init (loc: init_loc, gmsgid: "initializer element is not constant");
	  inside_init = error_mark_node;
	}
      else if (require_constant
	       && !initializer_constant_valid_p (inside_init,
						 TREE_TYPE (inside_init)))
	{
	  error_init (loc: init_loc, gmsgid: "initializer element is not computable at "
		      "load time");
	  inside_init = error_mark_node;
	}
      else if (require_constant && !maybe_const)
	pedwarn_init (loc: init_loc, opt: OPT_Wpedantic,
		      gmsgid: "initializer element is not a constant expression");
      else if (require_constexpr)
	check_constexpr_init (loc: init_loc, type, init: unconverted_init,
			      int_const_expr, arith_const_expr);

      return inside_init;
    }

  /* Come here only for records and arrays.  */

  if (COMPLETE_TYPE_P (type) && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    {
      error_init (loc: init_loc,
		  gmsgid: "variable-sized object may not be initialized except "
		  "with an empty initializer");
      return error_mark_node;
    }

  error_init (loc: init_loc, gmsgid: "invalid initializer");
  return error_mark_node;
}
8832 | |
/* Handle initializers that use braces.  */

/* The following file-scope variables describe the constructor level
   currently being built; they are saved and restored via the
   constructor/initializer stacks declared further below.  */

/* Type of object we are accumulating a constructor for.
   This type is always a RECORD_TYPE, UNION_TYPE or ARRAY_TYPE.  */
static tree constructor_type;

/* For a RECORD_TYPE or UNION_TYPE, this is the chain of fields
   left to fill.  */
static tree constructor_fields;

/* For an ARRAY_TYPE, this is the specified index
   at which to store the next element we get.  */
static tree constructor_index;

/* For an ARRAY_TYPE, this is the maximum index.  */
static tree constructor_max_index;

/* For a RECORD_TYPE, this is the first field not yet written out.  */
static tree constructor_unfilled_fields;

/* For an ARRAY_TYPE, this is the index of the first element
   not yet written out.  */
static tree constructor_unfilled_index;

/* In a RECORD_TYPE, the byte index of the next consecutive field.
   This is so we can generate gaps between fields, when appropriate.  */
static tree constructor_bit_index;

/* If we are saving up the elements rather than allocating them,
   this is the list of elements so far (in reverse order,
   most recent first).  */
static vec<constructor_elt, va_gc> *constructor_elements;

/* 1 if constructor should be incrementally stored into a constructor chain,
   0 if all the elements should be kept in AVL tree.  */
static int constructor_incremental;

/* 1 if so far this constructor's elements are all compile-time constants.  */
static int constructor_constant;

/* 1 if so far this constructor's elements are all valid address constants.  */
static int constructor_simple;

/* 1 if this constructor has an element that cannot be part of a
   constant expression.  */
static int constructor_nonconst;

/* 1 if this constructor is erroneous so far.  */
static int constructor_erroneous;

/* 1 if this constructor is the universal zero initializer { 0 }.  */
static int constructor_zeroinit;
8885 | |
/* Structure for managing pending initializer elements, organized as an
   AVL tree.  */

struct init_node
{
  /* AVL tree links.  */
  struct init_node *left, *right;
  struct init_node *parent;
  /* AVL balance factor of this node.  */
  int balance;
  /* The designator (array index or field) this element initializes.  */
  tree purpose;
  /* The initializer value for that designator.  */
  tree value;
  /* The original type of VALUE, if it differed.  */
  tree origtype;
};

/* Tree of pending elements at this constructor level.
   These are elements encountered out of order
   which belong at places we haven't reached yet in actually
   writing the output.
   Will never hold tree nodes across GC runs.  */
static struct init_node *constructor_pending_elts;
8905 | |
/* The SPELLING_DEPTH of this constructor, used when reporting the
   initializer "spelling" (path of member/index names) in diagnostics.  */
static int constructor_depth;

/* DECL node for which an initializer is being read.
   0 means we are reading a constructor expression
   such as (struct foo) {...}.  */
static tree constructor_decl;

/* Nonzero if there were any member designators in this initializer.  */
static int constructor_designated;

/* Nesting depth of designator list.  */
static int designator_depth;

/* Nonzero if there were diagnosed errors in this designator list.  */
static int designator_erroneous;
8922 | |
8923 | |
/* This stack has a level for each implicit or explicit level of
   structuring in the initializer, including the outermost one.  It
   saves the values of most of the variables above.  */

struct constructor_range_stack;

struct constructor_stack
{
  /* Link to the enclosing (outer) constructor level.  */
  struct constructor_stack *next;
  /* Saved copies of the constructor_* file-scope variables above.  */
  tree type;
  tree fields;
  tree index;
  tree max_index;
  tree unfilled_index;
  tree unfilled_fields;
  tree bit_index;
  vec<constructor_elt, va_gc> *elements;
  struct init_node *pending_elts;
  int offset;
  int depth;
  /* If value nonzero, this value should replace the entire
     constructor at this level.  */
  struct c_expr replacement_value;
  /* Saved designator range stack for this level.  */
  struct constructor_range_stack *range_stack;
  /* Saved constructor_constant / constructor_simple /
     constructor_nonconst flags.  */
  char constant;
  char simple;
  char nonconst;
  /* Nonzero if this level was opened implicitly (no explicit brace);
     2 if opened because of a designator list.  */
  char implicit;
  char erroneous;
  char outer;
  char incremental;
  char designated;
  int designator_depth;
};

static struct constructor_stack *constructor_stack;
8960 | |
/* This stack represents designators from some range designator up to
   the last designator in the list.  */

struct constructor_range_stack
{
  /* Doubly-linked chain of designator entries.  */
  struct constructor_range_stack *next, *prev;
  /* The constructor level this designator applies to.  */
  struct constructor_stack *stack;
  /* For a [lo ... hi] range designator: start bound, current index,
     and end bound.  */
  tree range_start;
  tree index;
  tree range_end;
  /* For a member designator, the designated field.  */
  tree fields;
};

static struct constructor_range_stack *constructor_range_stack;
8975 | |
/* This stack records separate initializers that are nested.
   Nested initializers can't happen in ANSI C, but GNU C allows them
   in cases like { ... (struct foo) { ... } ... }.  */

struct initializer_stack
{
  /* Link to the enclosing initializer's saved state.  */
  struct initializer_stack *next;
  /* Saved constructor_decl.  */
  tree decl;
  /* Saved constructor and designator-range stacks.  */
  struct constructor_stack *constructor_stack;
  struct constructor_range_stack *constructor_range_stack;
  /* Saved constructor_elements.  */
  vec<constructor_elt, va_gc> *elements;
  /* Saved diagnostic "spelling" state.  */
  struct spelling *spelling;
  struct spelling *spelling_base;
  int spelling_size;
  /* Saved require_constant_* / require_constexpr_value flags.  */
  char require_constant_value;
  char require_constant_elements;
  char require_constexpr_value;
  /* Saved constructor_designated.  */
  char designated;
  /* Location to which missing-brace fixit hints are attached.  */
  rich_location *missing_brace_richloc;
};

static struct initializer_stack *initializer_stack;
8998 | |
/* Prepare to parse and output the initializer for variable DECL.
   Saves the current initializer state on INITIALIZER_STACK (so nested
   initializers work), then resets the file-scope state for DECL.
   INIT_REQUIRE_CONSTANT and INIT_REQUIRE_CONSTEXPR request the
   constant / constexpr initializer checks; RICHLOC is where
   missing-brace fixit hints will be attached.  Matched by
   finish_init.  */

void
start_init (tree decl, tree asmspec_tree ATTRIBUTE_UNUSED,
	    bool init_require_constant, bool init_require_constexpr,
	    rich_location *richloc)
{
  const char *locus;
  struct initializer_stack *p = XNEW (struct initializer_stack);

  /* Save the current initializer state before overwriting it below.  */
  p->decl = constructor_decl;
  p->require_constant_value = require_constant_value;
  p->require_constant_elements = require_constant_elements;
  p->require_constexpr_value = require_constexpr_value;
  p->constructor_stack = constructor_stack;
  p->constructor_range_stack = constructor_range_stack;
  p->elements = constructor_elements;
  p->spelling = spelling;
  p->spelling_base = spelling_base;
  p->spelling_size = spelling_size;
  p->next = initializer_stack;
  p->missing_brace_richloc = richloc;
  p->designated = constructor_designated;
  initializer_stack = p;

  constructor_decl = decl;
  constructor_designated = 0;

  require_constant_value = init_require_constant;
  require_constexpr_value = init_require_constexpr;
  if (decl != NULL_TREE && decl != error_mark_node)
    {
      require_constant_elements
	= ((init_require_constant || (pedantic && !flag_isoc99))
	   /* For a scalar, you can always use any value to initialize,
	      even within braces.  */
	   && AGGREGATE_TYPE_P (TREE_TYPE (decl)));
      locus = identifier_to_locale (IDENTIFIER_POINTER (DECL_NAME (decl)));
    }
  else
    {
      require_constant_elements = false;
      locus = _("(anonymous)");
    }

  constructor_stack = 0;
  constructor_range_stack = 0;

  found_missing_braces = 0;

  /* Start a fresh diagnostic "spelling" with the object's name as its
     root.  */
  spelling_base = 0;
  spelling_size = 0;
  RESTORE_SPELLING_DEPTH (0);

  if (locus)
    push_string (string: locus);
}
9056 | |
/* Finish up parsing the initializer started by start_init: free the
   constructor stack built for it and restore the saved state of any
   enclosing initializer from INITIALIZER_STACK.  */

void
finish_init (void)
{
  struct initializer_stack *p = initializer_stack;

  /* Free the whole constructor stack of this initializer.  */
  while (constructor_stack)
    {
      struct constructor_stack *q = constructor_stack;
      constructor_stack = q->next;
      XDELETE (q);
    }

  /* All designator ranges must have been consumed by now.  */
  gcc_assert (!constructor_range_stack);

  /* Pop back to the data of the outer initializer (if any).  */
  XDELETE (spelling_base);

  constructor_decl = p->decl;
  require_constant_value = p->require_constant_value;
  require_constant_elements = p->require_constant_elements;
  require_constexpr_value = p->require_constexpr_value;
  constructor_stack = p->constructor_stack;
  constructor_designated = p->designated;
  constructor_range_stack = p->constructor_range_stack;
  constructor_elements = p->elements;
  spelling = p->spelling;
  spelling_base = p->spelling_base;
  spelling_size = p->spelling_size;
  initializer_stack = p->next;
  XDELETE (p);
}
9089 | |
/* Call here when we see the initializer is surrounded by braces.
   This is instead of a call to push_init_level;
   it is matched by a call to pop_init_level.

   TYPE is the type to initialize, for a constructor expression.
   For an initializer for a decl, TYPE is zero.  */

void
really_start_incremental_init (tree type)
{
  struct constructor_stack *p = XNEW (struct constructor_stack);

  if (type == NULL_TREE)
    type = TREE_TYPE (constructor_decl);

  if (VECTOR_TYPE_P (type)
      && TYPE_VECTOR_OPAQUE (type))
    error ("opaque vector types cannot be initialized");

  /* Save the current constructor state on the stack before
     reinitializing it for this brace level.  */
  p->type = constructor_type;
  p->fields = constructor_fields;
  p->index = constructor_index;
  p->max_index = constructor_max_index;
  p->unfilled_index = constructor_unfilled_index;
  p->unfilled_fields = constructor_unfilled_fields;
  p->bit_index = constructor_bit_index;
  p->elements = constructor_elements;
  p->constant = constructor_constant;
  p->simple = constructor_simple;
  p->nonconst = constructor_nonconst;
  p->erroneous = constructor_erroneous;
  p->pending_elts = constructor_pending_elts;
  p->depth = constructor_depth;
  p->replacement_value.value = 0;
  p->replacement_value.original_code = ERROR_MARK;
  p->replacement_value.original_type = NULL;
  p->implicit = 0;
  p->range_stack = 0;
  p->outer = 0;
  p->incremental = constructor_incremental;
  p->designated = constructor_designated;
  p->designator_depth = designator_depth;
  p->next = 0;
  constructor_stack = p;

  /* Fresh state for the new outermost brace level.  */
  constructor_constant = 1;
  constructor_simple = 1;
  constructor_nonconst = 0;
  constructor_depth = SPELLING_DEPTH ();
  constructor_elements = NULL;
  constructor_pending_elts = 0;
  constructor_type = type;
  constructor_incremental = 1;
  constructor_designated = 0;
  constructor_zeroinit = 1;
  designator_depth = 0;
  designator_erroneous = 0;

  if (RECORD_OR_UNION_TYPE_P (constructor_type))
    {
      constructor_fields = TYPE_FIELDS (constructor_type);
      /* Skip any nameless bit fields at the beginning.  */
      while (constructor_fields != NULL_TREE
	     && DECL_UNNAMED_BIT_FIELD (constructor_fields))
	constructor_fields = DECL_CHAIN (constructor_fields);

      constructor_unfilled_fields = constructor_fields;
      constructor_bit_index = bitsize_zero_node;
    }
  else if (TREE_CODE (constructor_type) == ARRAY_TYPE)
    {
      if (TYPE_DOMAIN (constructor_type))
	{
	  constructor_max_index
	    = TYPE_MAX_VALUE (TYPE_DOMAIN (constructor_type));

	  /* Detect non-empty initializations of zero-length arrays.  */
	  if (constructor_max_index == NULL_TREE
	      && TYPE_SIZE (constructor_type))
	    constructor_max_index = integer_minus_one_node;

	  /* constructor_max_index needs to be an INTEGER_CST.  Attempts
	     to initialize VLAs with a nonempty initializer will cause a
	     proper error; avoid tree checking errors as well by setting a
	     safe value.  */
	  if (constructor_max_index
	      && TREE_CODE (constructor_max_index) != INTEGER_CST)
	    constructor_max_index = integer_minus_one_node;

	  constructor_index
	    = convert (bitsizetype,
		       TYPE_MIN_VALUE (TYPE_DOMAIN (constructor_type)));
	}
      else
	{
	  /* Incomplete array type: start at index zero with no bound.  */
	  constructor_index = bitsize_zero_node;
	  constructor_max_index = NULL_TREE;
	}

      constructor_unfilled_index = constructor_index;
    }
  else if (gnu_vector_type_p (type: constructor_type))
    {
      /* Vectors are like simple fixed-size arrays.  */
      constructor_max_index =
	bitsize_int (TYPE_VECTOR_SUBPARTS (constructor_type) - 1);
      constructor_index = bitsize_zero_node;
      constructor_unfilled_index = constructor_index;
    }
  else
    {
      /* Handle the case of int x = {5}; */
      constructor_fields = constructor_type;
      constructor_unfilled_fields = constructor_type;
    }
}
9206 | |
9207 | extern location_t last_init_list_comma; |
9208 | |
9209 | /* Called when we see an open brace for a nested initializer. Finish |
9210 | off any pending levels with implicit braces. */ |
9211 | void |
9212 | finish_implicit_inits (location_t loc, struct obstack *braced_init_obstack) |
9213 | { |
9214 | while (constructor_stack->implicit) |
9215 | { |
9216 | if (RECORD_OR_UNION_TYPE_P (constructor_type) |
9217 | && constructor_fields == NULL_TREE) |
9218 | process_init_element (input_location, |
9219 | pop_init_level (loc, 1, braced_init_obstack, |
9220 | last_init_list_comma), |
9221 | true, braced_init_obstack); |
9222 | else if (TREE_CODE (constructor_type) == ARRAY_TYPE |
9223 | && constructor_max_index |
9224 | && tree_int_cst_lt (t1: constructor_max_index, |
9225 | t2: constructor_index)) |
9226 | process_init_element (input_location, |
9227 | pop_init_level (loc, 1, braced_init_obstack, |
9228 | last_init_list_comma), |
9229 | true, braced_init_obstack); |
9230 | else |
9231 | break; |
9232 | } |
9233 | } |
9234 | |
9235 | /* Push down into a subobject, for initialization. |
9236 | If this is for an explicit set of braces, IMPLICIT is 0. |
9237 | If it is because the next element belongs at a lower level, |
9238 | IMPLICIT is 1 (or 2 if the push is because of designator list). */ |
9239 | |
void
push_init_level (location_t loc, int implicit,
		 struct obstack *braced_init_obstack)
{
  struct constructor_stack *p;
  tree value = NULL_TREE;

  /* Unless this is an explicit brace, we need to preserve previous
     content if any.  */
  if (implicit)
    {
      if (RECORD_OR_UNION_TYPE_P (constructor_type) && constructor_fields)
	value = find_init_member (constructor_fields, braced_init_obstack);
      else if (TREE_CODE (constructor_type) == ARRAY_TYPE)
	value = find_init_member (constructor_index, braced_init_obstack);
    }

  /* Save the whole current constructor context on the stack so the
     matching pop_init_level can restore it.  */
  p = XNEW (struct constructor_stack);
  p->type = constructor_type;
  p->fields = constructor_fields;
  p->index = constructor_index;
  p->max_index = constructor_max_index;
  p->unfilled_index = constructor_unfilled_index;
  p->unfilled_fields = constructor_unfilled_fields;
  p->bit_index = constructor_bit_index;
  p->elements = constructor_elements;
  p->constant = constructor_constant;
  p->simple = constructor_simple;
  p->nonconst = constructor_nonconst;
  p->erroneous = constructor_erroneous;
  p->pending_elts = constructor_pending_elts;
  p->depth = constructor_depth;
  p->replacement_value.value = NULL_TREE;
  p->replacement_value.original_code = ERROR_MARK;
  p->replacement_value.original_type = NULL;
  p->implicit = implicit;
  p->outer = 0;
  p->incremental = constructor_incremental;
  p->designated = constructor_designated;
  p->designator_depth = designator_depth;
  p->next = constructor_stack;
  p->range_stack = 0;
  constructor_stack = p;

  /* Reset the per-level state for the new, inner level.  */
  constructor_constant = 1;
  constructor_simple = 1;
  constructor_nonconst = 0;
  constructor_depth = SPELLING_DEPTH ();
  constructor_elements = NULL;
  constructor_incremental = 1;
  /* If the upper initializer is designated, then mark this as
     designated too to prevent bogus warnings.  */
  constructor_designated = p->designated;
  constructor_pending_elts = 0;
  if (!implicit)
    {
      /* An explicit open brace starts a fresh designator context;
	 stash the range stack for restoration at the close brace.  */
      p->range_stack = constructor_range_stack;
      constructor_range_stack = 0;
      designator_depth = 0;
      designator_erroneous = 0;
    }

  /* Don't die if an entire brace-pair level is superfluous
     in the containing level.  */
  if (constructor_type == NULL_TREE)
    ;
  else if (RECORD_OR_UNION_TYPE_P (constructor_type))
    {
      /* Don't die if there are extra init elts at the end.  */
      if (constructor_fields == NULL_TREE)
	constructor_type = NULL_TREE;
      else
	{
	  /* Descend into the type of the next field.  */
	  constructor_type = TREE_TYPE (constructor_fields);
	  push_member_name (decl: constructor_fields);
	  constructor_depth++;
	}
    }
  else if (TREE_CODE (constructor_type) == ARRAY_TYPE)
    {
      /* Descend into the array's element type, recording the index
	 for diagnostics.  */
      constructor_type = TREE_TYPE (constructor_type);
      push_array_bounds (bounds: tree_to_uhwi (constructor_index));
      constructor_depth++;
    }

  if (constructor_type == NULL_TREE)
    {
      error_init (loc, gmsgid: "extra brace group at end of initializer" );
      constructor_fields = NULL_TREE;
      constructor_unfilled_fields = NULL_TREE;
      return;
    }

  /* If the subobject already had a constructor value (found above for
     an implicit push), adopt its elements so the new level merges with
     rather than replaces the previous initialization.  */
  if (value && TREE_CODE (value) == CONSTRUCTOR)
    {
      constructor_constant = TREE_CONSTANT (value);
      constructor_simple = TREE_STATIC (value);
      constructor_nonconst = CONSTRUCTOR_NON_CONST (value);
      constructor_elements = CONSTRUCTOR_ELTS (value);
      if (!vec_safe_is_empty (v: constructor_elements)
	  && (TREE_CODE (constructor_type) == RECORD_TYPE
	      || TREE_CODE (constructor_type) == ARRAY_TYPE))
	set_nonincremental_init (braced_init_obstack);
    }

  if (implicit == 1)
    {
      /* Record the omitted brace so -Wmissing-braces can emit a
	 "{" insertion fix-it hint at this spot.  */
      found_missing_braces = 1;
      if (initializer_stack->missing_brace_richloc)
	initializer_stack->missing_brace_richloc->add_fixit_insert_before
	  (where: loc, new_content: "{" );
    }

  /* Initialize the traversal state for the new level's type.  */
  if (RECORD_OR_UNION_TYPE_P (constructor_type))
    {
      constructor_fields = TYPE_FIELDS (constructor_type);
      /* Skip any nameless bit fields at the beginning.  */
      while (constructor_fields != NULL_TREE
	     && DECL_UNNAMED_BIT_FIELD (constructor_fields))
	constructor_fields = DECL_CHAIN (constructor_fields);

      constructor_unfilled_fields = constructor_fields;
      constructor_bit_index = bitsize_zero_node;
    }
  else if (gnu_vector_type_p (type: constructor_type))
    {
      /* Vectors are like simple fixed-size arrays.  */
      constructor_max_index =
	bitsize_int (TYPE_VECTOR_SUBPARTS (constructor_type) - 1);
      constructor_index = bitsize_int (0);
      constructor_unfilled_index = constructor_index;
    }
  else if (TREE_CODE (constructor_type) == ARRAY_TYPE)
    {
      if (TYPE_DOMAIN (constructor_type))
	{
	  constructor_max_index
	    = TYPE_MAX_VALUE (TYPE_DOMAIN (constructor_type));

	  /* Detect non-empty initializations of zero-length arrays.  */
	  if (constructor_max_index == NULL_TREE
	      && TYPE_SIZE (constructor_type))
	    constructor_max_index = integer_minus_one_node;

	  /* constructor_max_index needs to be an INTEGER_CST.  Attempts
	     to initialize VLAs will cause a proper error; avoid tree
	     checking errors as well by setting a safe value.  */
	  if (constructor_max_index
	      && TREE_CODE (constructor_max_index) != INTEGER_CST)
	    constructor_max_index = integer_minus_one_node;

	  constructor_index
	    = convert (bitsizetype,
		       TYPE_MIN_VALUE (TYPE_DOMAIN (constructor_type)));
	}
      else
	constructor_index = bitsize_zero_node;

      constructor_unfilled_index = constructor_index;
      if (value && TREE_CODE (value) == STRING_CST)
	{
	  /* We need to split the char/wchar array into individual
	     characters, so that we don't have to special case it
	     everywhere.  */
	  set_nonincremental_init_from_string (value, braced_init_obstack);
	}
    }
  else
    {
      /* Braces around a scalar: warn, then treat the scalar type
	 itself as the single "field" to fill.  */
      if (constructor_type != error_mark_node)
	warning_init (loc: input_location, opt: 0, gmsgid: "braces around scalar initializer" );
      constructor_fields = constructor_type;
      constructor_unfilled_fields = constructor_type;
    }
}
9415 | |
9416 | /* At the end of an implicit or explicit brace level, |
9417 | finish up that level of constructor. If a single expression |
9418 | with redundant braces initialized that level, return the |
9419 | c_expr structure for that expression. Otherwise, the original_code |
9420 | element is set to ERROR_MARK. |
9421 | If we were outputting the elements as they are read, return 0 as the value |
9422 | from inner levels (process_init_element ignores that), |
9423 | but return error_mark_node as the value from the outermost level |
9424 | (that's what we want to put in DECL_INITIAL). |
9425 | Otherwise, return a CONSTRUCTOR expression as the value. */ |
9426 | |
struct c_expr
pop_init_level (location_t loc, int implicit,
		struct obstack *braced_init_obstack,
		location_t insert_before)
{
  struct constructor_stack *p;
  struct c_expr ret;
  ret.value = NULL_TREE;
  ret.original_code = ERROR_MARK;
  ret.original_type = NULL;
  ret.m_decimal = 0;

  if (implicit == 0)
    {
      /* When we come to an explicit close brace,
	 pop any inner levels that didn't have explicit braces.  */
      while (constructor_stack->implicit)
	process_init_element (input_location,
			      pop_init_level (loc, implicit: 1, braced_init_obstack,
					      insert_before),
			      true, braced_init_obstack);
      gcc_assert (!constructor_range_stack);
    }
  else
    /* Closing an implicit level: record where a "}" fix-it hint
       would go for -Wmissing-braces.  */
    if (initializer_stack->missing_brace_richloc)
      initializer_stack->missing_brace_richloc->add_fixit_insert_before
	(where: insert_before, new_content: "}" );

  /* Now output all pending elements.  */
  constructor_incremental = 1;
  output_pending_init_elements (1, braced_init_obstack);

  p = constructor_stack;

  /* Error for initializing a flexible array member, or a zero-length
     array member in an inappropriate context.  */
  if (constructor_type && constructor_fields
      && TREE_CODE (constructor_type) == ARRAY_TYPE
      && TYPE_DOMAIN (constructor_type)
      && !TYPE_MAX_VALUE (TYPE_DOMAIN (constructor_type)))
    {
      /* Silently discard empty initializations.  The parser will
	 already have pedwarned for empty brackets.  */
      if (integer_zerop (constructor_unfilled_index))
	constructor_type = NULL_TREE;
      else
	{
	  gcc_assert (!TYPE_SIZE (constructor_type));

	  if (constructor_depth > 2)
	    error_init (loc, gmsgid: "initialization of flexible array member in a nested context" );
	  else
	    pedwarn_init (loc, opt: OPT_Wpedantic,
			  gmsgid: "initialization of a flexible array member" );

	  /* We have already issued an error message for the existence
	     of a flexible array member not at the end of the structure.
	     Discard the initializer so that we do not die later.  */
	  if (DECL_CHAIN (constructor_fields) != NULL_TREE)
	    constructor_type = NULL_TREE;
	}
    }

  /* Classify this level as zero-initialization ({ } or { 0 }) so the
     warnings below can skip the idiomatic forms.  */
  switch (vec_safe_length (v: constructor_elements))
    {
    case 0:
      /* Initialization with { } counts as zeroinit.  */
      constructor_zeroinit = 1;
      break;
    case 1:
      /* This might be zeroinit as well.  */
      if (integer_zerop ((*constructor_elements)[0].value))
	constructor_zeroinit = 1;
      break;
    default:
      /* If the constructor has more than one element, it can't be { 0 }.  */
      constructor_zeroinit = 0;
      break;
    }

  /* Warn when some structs are initialized with direct aggregation.  */
  if (!implicit && found_missing_braces && warn_missing_braces
      && !constructor_zeroinit)
    {
      gcc_assert (initializer_stack->missing_brace_richloc);
      warning_at (initializer_stack->missing_brace_richloc,
		  OPT_Wmissing_braces,
		  "missing braces around initializer" );
    }

  /* Warn when some struct elements are implicitly initialized to zero.  */
  if (warn_missing_field_initializers
      && constructor_type
      && TREE_CODE (constructor_type) == RECORD_TYPE
      && constructor_unfilled_fields)
    {
      /* Do not warn for flexible array members or zero-length arrays.  */
      while (constructor_unfilled_fields
	     && (!DECL_SIZE (constructor_unfilled_fields)
		 || integer_zerop (DECL_SIZE (constructor_unfilled_fields))))
	constructor_unfilled_fields = DECL_CHAIN (constructor_unfilled_fields);

      if (constructor_unfilled_fields
	  /* Do not warn if this level of the initializer uses member
	     designators; it is likely to be deliberate.  */
	  && !constructor_designated
	  /* Do not warn about initializing with { 0 } or with { }.  */
	  && !constructor_zeroinit)
	{
	  if (warning_at (input_location, OPT_Wmissing_field_initializers,
			  "missing initializer for field %qD of %qT" ,
			  constructor_unfilled_fields,
			  constructor_type))
	    inform (DECL_SOURCE_LOCATION (constructor_unfilled_fields),
		    "%qD declared here" , constructor_unfilled_fields);
	}
    }

  /* Pad out the end of the structure.  */
  if (p->replacement_value.value)
    /* If this closes a superfluous brace pair,
       just pass out the element between them.  */
    ret = p->replacement_value;
  else if (constructor_type == NULL_TREE)
    ;
  else if (!RECORD_OR_UNION_TYPE_P (constructor_type)
	   && TREE_CODE (constructor_type) != ARRAY_TYPE
	   && !gnu_vector_type_p (type: constructor_type))
    {
      /* A nonincremental scalar initializer--just return
	 the element, after verifying there is just one.
	 Empty scalar initializers are supported in C23.  */
      if (vec_safe_is_empty (v: constructor_elements))
	{
	  if (constructor_erroneous || constructor_type == error_mark_node)
	    ret.value = error_mark_node;
	  else if (TREE_CODE (constructor_type) == FUNCTION_TYPE)
	    {
	      error_init (loc, gmsgid: "invalid initializer" );
	      ret.value = error_mark_node;
	    }
	  else if (TREE_CODE (constructor_type) == POINTER_TYPE)
	    /* Ensure this is a null pointer constant in the case of a
	       'constexpr' object initialized with {}.  */
	    ret.value = build_zero_cst (ptr_type_node);
	  else
	    ret.value = build_zero_cst (constructor_type);
	}
      else if (vec_safe_length (v: constructor_elements) != 1)
	{
	  error_init (loc, gmsgid: "extra elements in scalar initializer" );
	  ret.value = (*constructor_elements)[0].value;
	}
      else
	ret.value = (*constructor_elements)[0].value;
    }
  else
    {
      if (constructor_erroneous)
	ret.value = error_mark_node;
      else
	{
	  /* Build the aggregate CONSTRUCTOR and propagate the
	     constness/staticness flags accumulated while the
	     elements were processed.  */
	  ret.value = build_constructor (constructor_type,
					 constructor_elements);
	  if (constructor_constant)
	    TREE_CONSTANT (ret.value) = 1;
	  if (constructor_constant && constructor_simple)
	    TREE_STATIC (ret.value) = 1;
	  if (constructor_nonconst)
	    CONSTRUCTOR_NON_CONST (ret.value) = 1;
	}
    }

  if (ret.value && TREE_CODE (ret.value) != CONSTRUCTOR)
    {
      if (constructor_nonconst)
	ret.original_code = C_MAYBE_CONST_EXPR;
      else if (ret.original_code == C_MAYBE_CONST_EXPR)
	ret.original_code = ERROR_MARK;
    }

  /* Restore the outer level's state saved by push_init_level.  */
  constructor_type = p->type;
  constructor_fields = p->fields;
  constructor_index = p->index;
  constructor_max_index = p->max_index;
  constructor_unfilled_index = p->unfilled_index;
  constructor_unfilled_fields = p->unfilled_fields;
  constructor_bit_index = p->bit_index;
  constructor_elements = p->elements;
  constructor_constant = p->constant;
  constructor_simple = p->simple;
  constructor_nonconst = p->nonconst;
  constructor_erroneous = p->erroneous;
  constructor_incremental = p->incremental;
  constructor_designated = p->designated;
  designator_depth = p->designator_depth;
  constructor_pending_elts = p->pending_elts;
  constructor_depth = p->depth;
  if (!p->implicit)
    constructor_range_stack = p->range_stack;
  RESTORE_SPELLING_DEPTH (constructor_depth);

  constructor_stack = p->next;
  XDELETE (p);

  /* At the outermost level, never hand back a null value; callers
     store the result in DECL_INITIAL (see the function comment).  */
  if (ret.value == NULL_TREE && constructor_stack == 0)
    ret.value = error_mark_node;
  return ret;
}
9636 | |
9637 | /* Common handling for both array range and field name designators. |
9638 | ARRAY argument is nonzero for array ranges. Returns false for success. */ |
9639 | |
static bool
set_designator (location_t loc, bool array,
		struct obstack *braced_init_obstack)
{
  tree subtype;
  enum tree_code subcode;

  /* Don't die if an entire brace-pair level is superfluous
     in the containing level, or for an erroneous type.  */
  if (constructor_type == NULL_TREE || constructor_type == error_mark_node)
    return true;

  /* If there were errors in this designator list already, bail out
     silently.  */
  if (designator_erroneous)
    return true;

  /* Likewise for an initializer for a variable-size type.  Those are
     diagnosed in the parser, except for empty initializer braces.  */
  if (COMPLETE_TYPE_P (constructor_type)
      && TREE_CODE (TYPE_SIZE (constructor_type)) != INTEGER_CST)
    return true;

  /* A fresh designator list (depth 0): rewind to the nearest
     explicitly-braced level before applying the first designator.  */
  if (!designator_depth)
    {
      gcc_assert (!constructor_range_stack);

      /* Designator list starts at the level of closest explicit
	 braces.  */
      while (constructor_stack->implicit)
	process_init_element (input_location,
			      pop_init_level (loc, implicit: 1, braced_init_obstack,
					      insert_before: last_init_list_comma),
			      true, braced_init_obstack);
      constructor_designated = 1;
      return false;
    }

  /* A subsequent designator in the list: check that the current
     subobject's type can take this kind of designator, then push an
     implicit level into it.  */
  switch (TREE_CODE (constructor_type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
      subtype = TREE_TYPE (constructor_fields);
      if (subtype != error_mark_node)
	subtype = TYPE_MAIN_VARIANT (subtype);
      break;
    case ARRAY_TYPE:
      subtype = TYPE_MAIN_VARIANT (TREE_TYPE (constructor_type));
      break;
    default:
      gcc_unreachable ();
    }

  subcode = TREE_CODE (subtype);
  if (array && subcode != ARRAY_TYPE)
    {
      error_init (loc, gmsgid: "array index in non-array initializer" );
      return true;
    }
  else if (!array && subcode != RECORD_TYPE && subcode != UNION_TYPE)
    {
      error_init (loc, gmsgid: "field name not in record or union initializer" );
      return true;
    }

  constructor_designated = 1;
  finish_implicit_inits (loc, braced_init_obstack);
  /* IMPLICIT == 2: push caused by a designator list.  */
  push_init_level (loc, implicit: 2, braced_init_obstack);
  return false;
}
9710 | |
9711 | /* If there are range designators in designator list, push a new designator |
9712 | to constructor_range_stack. RANGE_END is end of such stack range or |
9713 | NULL_TREE if there is no range designator at this level. */ |
9714 | |
9715 | static void |
9716 | push_range_stack (tree range_end, struct obstack * braced_init_obstack) |
9717 | { |
9718 | struct constructor_range_stack *p; |
9719 | |
9720 | p = (struct constructor_range_stack *) |
9721 | obstack_alloc (braced_init_obstack, |
9722 | sizeof (struct constructor_range_stack)); |
9723 | p->prev = constructor_range_stack; |
9724 | p->next = 0; |
9725 | p->fields = constructor_fields; |
9726 | p->range_start = constructor_index; |
9727 | p->index = constructor_index; |
9728 | p->stack = constructor_stack; |
9729 | p->range_end = range_end; |
9730 | if (constructor_range_stack) |
9731 | constructor_range_stack->next = p; |
9732 | constructor_range_stack = p; |
9733 | } |
9734 | |
9735 | /* Within an array initializer, specify the next index to be initialized. |
9736 | FIRST is that index. If LAST is nonzero, then initialize a range |
9737 | of indices, running from FIRST through LAST. */ |
9738 | |
void
set_init_index (location_t loc, tree first, tree last,
		struct obstack *braced_init_obstack)
{
  if (set_designator (loc, array: true, braced_init_obstack))
    return;

  /* Assume failure until all checks pass; cleared again below.  */
  designator_erroneous = 1;

  if (!INTEGRAL_TYPE_P (TREE_TYPE (first))
      || (last && !INTEGRAL_TYPE_P (TREE_TYPE (last))))
    {
      error_init (loc, gmsgid: "array index in initializer not of integer type" );
      return;
    }

  /* Try to fold non-constant indices; if folding yields a constant,
     it was not an integer constant expression, which is pedantically
     invalid but accepted.  */
  if (TREE_CODE (first) != INTEGER_CST)
    {
      first = c_fully_fold (first, false, NULL);
      if (TREE_CODE (first) == INTEGER_CST)
	pedwarn_init (loc, opt: OPT_Wpedantic,
		      gmsgid: "array index in initializer is not "
		      "an integer constant expression" );
    }

  if (last && TREE_CODE (last) != INTEGER_CST)
    {
      last = c_fully_fold (last, false, NULL);
      if (TREE_CODE (last) == INTEGER_CST)
	pedwarn_init (loc, opt: OPT_Wpedantic,
		      gmsgid: "array index in initializer is not "
		      "an integer constant expression" );
    }

  /* Validate the (possibly folded) bounds against the array type.  */
  if (TREE_CODE (first) != INTEGER_CST)
    error_init (loc, gmsgid: "nonconstant array index in initializer" );
  else if (last != NULL_TREE && TREE_CODE (last) != INTEGER_CST)
    error_init (loc, gmsgid: "nonconstant array index in initializer" );
  else if (TREE_CODE (constructor_type) != ARRAY_TYPE)
    error_init (loc, gmsgid: "array index in non-array initializer" );
  else if (tree_int_cst_sgn (first) == -1)
    error_init (loc, gmsgid: "array index in initializer exceeds array bounds" );
  else if (constructor_max_index
	   && tree_int_cst_lt (t1: constructor_max_index, t2: first))
    error_init (loc, gmsgid: "array index in initializer exceeds array bounds" );
  else
    {
      constant_expression_warning (first);
      if (last)
	constant_expression_warning (last);
      constructor_index = convert (bitsizetype, first);
      /* If the conversion to bitsizetype changed the value, the index
	 overflowed; mark the overflow on a copy of the node.  */
      if (tree_int_cst_lt (t1: constructor_index, t2: first))
	{
	  constructor_index = copy_node (constructor_index);
	  TREE_OVERFLOW (constructor_index) = 1;
	}

      if (last)
	{
	  /* Normalize the range: a one-element range [i ... i] is
	     treated as a plain index.  */
	  if (tree_int_cst_equal (first, last))
	    last = NULL_TREE;
	  else if (tree_int_cst_lt (t1: last, t2: first))
	    {
	      error_init (loc, gmsgid: "empty index range in initializer" );
	      last = NULL_TREE;
	    }
	  else
	    {
	      last = convert (bitsizetype, last);
	      if (constructor_max_index != NULL_TREE
		  && tree_int_cst_lt (t1: constructor_max_index, t2: last))
		{
		  error_init (loc, gmsgid: "array index range in initializer exceeds "
			      "array bounds" );
		  last = NULL_TREE;
		}
	    }
	}

      /* Success: one more designator consumed at this depth.  */
      designator_depth++;
      designator_erroneous = 0;
      if (constructor_range_stack || last)
	push_range_stack (range_end: last, braced_init_obstack);
    }
}
9824 | |
9825 | /* Within a struct initializer, specify the next field to be initialized. */ |
9826 | |
void
set_init_label (location_t loc, tree fieldname, location_t fieldname_loc,
		struct obstack *braced_init_obstack)
{
  tree field;

  if (set_designator (loc, array: false, braced_init_obstack))
    return;

  /* Assume failure until the field is found; cleared again below.  */
  designator_erroneous = 1;

  if (!RECORD_OR_UNION_TYPE_P (constructor_type))
    {
      error_init (loc, gmsgid: "field name not in record or union initializer" );
      return;
    }

  field = lookup_field (type: constructor_type, component: fieldname);

  if (field == NULL_TREE)
    {
      /* Unknown member: offer a spelling suggestion when a close
	 match exists in the type.  */
      tree guessed_id = lookup_field_fuzzy (type: constructor_type, component: fieldname);
      if (guessed_id)
	{
	  gcc_rich_location rich_loc (fieldname_loc);
	  rich_loc.add_fixit_misspelled_id (misspelled_token_loc: fieldname_loc, hint_id: guessed_id);
	  error_at (&rich_loc,
		    "%qT has no member named %qE; did you mean %qE?" ,
		    constructor_type, fieldname, guessed_id);
	}
      else
	error_at (fieldname_loc, "%qT has no member named %qE" ,
		  constructor_type, fieldname);
    }
  else
    /* Walk the chain returned by lookup_field, consuming one designator
       level per link.  NOTE(review): the multi-link case presumably
       corresponds to a member reached through anonymous structs/unions,
       where each intermediate member needs its own implicit designator
       — confirm against lookup_field.  */
    do
      {
	constructor_fields = TREE_VALUE (field);
	designator_depth++;
	designator_erroneous = 0;
	if (constructor_range_stack)
	  push_range_stack (NULL_TREE, braced_init_obstack);
	field = TREE_CHAIN (field);
	if (field)
	  {
	    if (set_designator (loc, array: false, braced_init_obstack))
	      return;
	  }
      }
    while (field != NULL_TREE);
}
9878 | |
9879 | /* Add a new initializer to the tree of pending initializers. PURPOSE |
9880 | identifies the initializer, either array index or field in a structure. |
9881 | VALUE is the value of that index or field. If ORIGTYPE is not |
9882 | NULL_TREE, it is the original type of VALUE. |
9883 | |
9884 | IMPLICIT is true if value comes from pop_init_level (1), |
9885 | the new initializer has been merged with the existing one |
9886 | and thus no warnings should be emitted about overriding an |
9887 | existing initializer. */ |
9888 | |
static void
add_pending_init (location_t loc, tree purpose, tree value, tree origtype,
		  bool implicit, struct obstack *braced_init_obstack)
{
  /* The pending inits form an AVL tree rooted at constructor_pending_elts.
     P walks down the tree, Q is the link through which the new node will
     be attached, R is the node inserted (and later the child followed
     back up during rebalancing).  */
  struct init_node *p, **q, *r;

  q = &constructor_pending_elts;
  p = 0;

  if (TREE_CODE (constructor_type) == ARRAY_TYPE)
    {
      /* Array: the tree is keyed by the integer index value.  */
      while (*q != 0)
	{
	  p = *q;
	  if (tree_int_cst_lt (t1: purpose, t2: p->purpose))
	    q = &p->left;
	  else if (tree_int_cst_lt (t1: p->purpose, t2: purpose))
	    q = &p->right;
	  else
	    {
	      /* Same index already initialized: overwrite in place.
		 Only warn when the override comes from the user, not
		 from merging done by pop_init_level (1).  */
	      if (!implicit)
		{
		  if (TREE_SIDE_EFFECTS (p->value))
		    warning_init (loc, opt: OPT_Woverride_init_side_effects,
				  gmsgid: "initialized field with side-effects "
				  "overwritten" );
		  else if (warn_override_init)
		    warning_init (loc, opt: OPT_Woverride_init,
				  gmsgid: "initialized field overwritten" );
		}
	      p->value = value;
	      p->origtype = origtype;
	      return;
	    }
	}
    }
  else
    {
      /* Record or union: the tree is ordered by the field's bit
	 position, with pointer identity deciding equality.  */
      tree bitpos;

      bitpos = bit_position (purpose);
      while (*q != NULL)
	{
	  p = *q;
	  if (tree_int_cst_lt (t1: bitpos, t2: bit_position (p->purpose)))
	    q = &p->left;
	  else if (p->purpose != purpose)
	    q = &p->right;
	  else
	    {
	      /* Same field already initialized: overwrite in place,
		 warning as in the array case.  */
	      if (!implicit)
		{
		  if (TREE_SIDE_EFFECTS (p->value))
		    warning_init (loc, opt: OPT_Woverride_init_side_effects,
				  gmsgid: "initialized field with side-effects "
				  "overwritten" );
		  else if (warn_override_init)
		    warning_init (loc, opt: OPT_Woverride_init,
				  gmsgid: "initialized field overwritten" );
		}
	      p->value = value;
	      p->origtype = origtype;
	      return;
	    }
	}
    }

  /* Not found: allocate a new leaf and attach it where the search
     stopped.  */
  r = (struct init_node *) obstack_alloc (braced_init_obstack,
					  sizeof (struct init_node));
  r->purpose = purpose;
  r->value = value;
  r->origtype = origtype;

  *q = r;
  r->parent = p;
  r->left = 0;
  r->right = 0;
  r->balance = 0;

  /* Walk back up from the new leaf, updating balance factors and
     performing at most one (possibly double) rotation to restore the
     AVL invariant.  */
  while (p)
    {
      struct init_node *s;

      if (r == p->left)
	{
	  if (p->balance == 0)
	    p->balance = -1;
	  else if (p->balance < 0)
	    {
	      if (r->balance < 0)
		{
		  /* L rotation.  */
		  p->left = r->right;
		  if (p->left)
		    p->left->parent = p;
		  r->right = p;

		  p->balance = 0;
		  r->balance = 0;

		  s = p->parent;
		  p->parent = r;
		  r->parent = s;
		  if (s)
		    {
		      if (s->left == p)
			s->left = r;
		      else
			s->right = r;
		    }
		  else
		    constructor_pending_elts = r;
		}
	      else
		{
		  /* LR rotation.  */
		  struct init_node *t = r->right;

		  r->right = t->left;
		  if (r->right)
		    r->right->parent = r;
		  t->left = r;

		  p->left = t->right;
		  if (p->left)
		    p->left->parent = p;
		  t->right = p;

		  p->balance = t->balance < 0;
		  r->balance = -(t->balance > 0);
		  t->balance = 0;

		  s = p->parent;
		  p->parent = t;
		  r->parent = t;
		  t->parent = s;
		  if (s)
		    {
		      if (s->left == p)
			s->left = t;
		      else
			s->right = t;
		    }
		  else
		    constructor_pending_elts = t;
		}
	      break;
	    }
	  else
	    {
	      /* p->balance == +1; growth of left side balances the node.  */
	      p->balance = 0;
	      break;
	    }
	}
      else /* r == p->right */
	{
	  if (p->balance == 0)
	    /* Growth propagation from right side.  */
	    p->balance++;
	  else if (p->balance > 0)
	    {
	      if (r->balance > 0)
		{
		  /* R rotation.  */
		  p->right = r->left;
		  if (p->right)
		    p->right->parent = p;
		  r->left = p;

		  p->balance = 0;
		  r->balance = 0;

		  s = p->parent;
		  p->parent = r;
		  r->parent = s;
		  if (s)
		    {
		      if (s->left == p)
			s->left = r;
		      else
			s->right = r;
		    }
		  else
		    constructor_pending_elts = r;
		}
	      else /* r->balance == -1 */
		{
		  /* RL rotation */
		  struct init_node *t = r->left;

		  r->left = t->right;
		  if (r->left)
		    r->left->parent = r;
		  t->right = r;

		  p->right = t->left;
		  if (p->right)
		    p->right->parent = p;
		  t->left = p;

		  r->balance = (t->balance < 0);
		  p->balance = -(t->balance > 0);
		  t->balance = 0;

		  s = p->parent;
		  p->parent = t;
		  r->parent = t;
		  t->parent = s;
		  if (s)
		    {
		      if (s->left == p)
			s->left = t;
		      else
			s->right = t;
		    }
		  else
		    constructor_pending_elts = t;
		}
	      break;
	    }
	  else
	    {
	      /* p->balance == -1; growth of right side balances the node.  */
	      p->balance = 0;
	      break;
	    }
	}

      r = p;
      p = p->parent;
    }
}
10122 | |
10123 | /* Build AVL tree from a sorted chain. */ |
10124 | |
10125 | static void |
10126 | set_nonincremental_init (struct obstack * braced_init_obstack) |
10127 | { |
10128 | unsigned HOST_WIDE_INT ix; |
10129 | tree index, value; |
10130 | |
10131 | if (TREE_CODE (constructor_type) != RECORD_TYPE |
10132 | && TREE_CODE (constructor_type) != ARRAY_TYPE) |
10133 | return; |
10134 | |
10135 | FOR_EACH_CONSTRUCTOR_ELT (constructor_elements, ix, index, value) |
10136 | add_pending_init (loc: input_location, purpose: index, value, NULL_TREE, implicit: true, |
10137 | braced_init_obstack); |
10138 | constructor_elements = NULL; |
10139 | if (TREE_CODE (constructor_type) == RECORD_TYPE) |
10140 | { |
10141 | constructor_unfilled_fields = TYPE_FIELDS (constructor_type); |
10142 | /* Skip any nameless bit fields at the beginning. */ |
10143 | while (constructor_unfilled_fields != NULL_TREE |
10144 | && DECL_UNNAMED_BIT_FIELD (constructor_unfilled_fields)) |
10145 | constructor_unfilled_fields = TREE_CHAIN (constructor_unfilled_fields); |
10146 | |
10147 | } |
10148 | else if (TREE_CODE (constructor_type) == ARRAY_TYPE) |
10149 | { |
10150 | if (TYPE_DOMAIN (constructor_type)) |
10151 | constructor_unfilled_index |
10152 | = convert (bitsizetype, |
10153 | TYPE_MIN_VALUE (TYPE_DOMAIN (constructor_type))); |
10154 | else |
10155 | constructor_unfilled_index = bitsize_zero_node; |
10156 | } |
10157 | constructor_incremental = 0; |
10158 | } |
10159 | |
/* Build AVL tree from a string constant.

   STR is a STRING_CST initializing the current array constructor.
   Expand it element by element (narrow or wide characters) into
   INTEGER_CST values and enter each into constructor_pending_elts via
   add_pending_init, so later designated initializers can override
   individual elements.  BRACED_INIT_OBSTACK supplies storage for the
   pending-init nodes.  */

static void
set_nonincremental_init_from_string (tree str,
				     struct obstack * braced_init_obstack)
{
  tree value, purpose, type;
  /* Bit buffer for one element; two HOST_WIDE_INTs suffice, as the
     assertion below checks.  */
  HOST_WIDE_INT val[2];
  const char *p, *end;
  int byte, wchar_bytes, charwidth, bitpos;

  gcc_assert (TREE_CODE (constructor_type) == ARRAY_TYPE);

  /* Bytes per string element, derived from the string's element type
     (1 for plain char strings, more for wide strings).  */
  wchar_bytes = TYPE_PRECISION (TREE_TYPE (TREE_TYPE (str))) / BITS_PER_UNIT;
  charwidth = TYPE_PRECISION (char_type_node);
  gcc_assert ((size_t) wchar_bytes * charwidth
	      <= ARRAY_SIZE (val) * HOST_BITS_PER_WIDE_INT);
  type = TREE_TYPE (constructor_type);
  p = TREE_STRING_POINTER (str);
  end = p + TREE_STRING_LENGTH (str);

  /* PURPOSE is the array index being filled; stop at the end of the
     string or once the array's maximum index has been passed.  */
  for (purpose = bitsize_zero_node;
       p < end
       && !(constructor_max_index
	    && tree_int_cst_lt (t1: constructor_max_index, t2: purpose));
       purpose = size_binop (PLUS_EXPR, purpose, bitsize_one_node))
    {
      if (wchar_bytes == 1)
	{
	  /* Narrow character: a single byte.  */
	  val[0] = (unsigned char) *p++;
	  val[1] = 0;
	}
      else
	{
	  /* Assemble one wide character from its bytes, honoring the
	     target byte order.  */
	  val[1] = 0;
	  val[0] = 0;
	  for (byte = 0; byte < wchar_bytes; byte++)
	    {
	      if (BYTES_BIG_ENDIAN)
		bitpos = (wchar_bytes - byte - 1) * charwidth;
	      else
		bitpos = byte * charwidth;
	      val[bitpos / HOST_BITS_PER_WIDE_INT]
		|= ((unsigned HOST_WIDE_INT) ((unsigned char) *p++))
		   << (bitpos % HOST_BITS_PER_WIDE_INT);
	    }
	}

      if (!TYPE_UNSIGNED (type))
	{
	  /* Sign-extend the element's value through the rest of the
	     two-HWI buffer.  BITPOS is the element width in bits.  */
	  bitpos = ((wchar_bytes - 1) * charwidth) + HOST_BITS_PER_CHAR;
	  if (bitpos < HOST_BITS_PER_WIDE_INT)
	    {
	      /* Sign bit inside val[0]: extend within it and fill
		 val[1].  */
	      if (val[0] & (HOST_WIDE_INT_1 << (bitpos - 1)))
		{
		  val[0] |= HOST_WIDE_INT_M1U << bitpos;
		  val[1] = -1;
		}
	    }
	  else if (bitpos == HOST_BITS_PER_WIDE_INT)
	    {
	      /* Element exactly fills val[0]; propagate its sign.  */
	      if (val[0] < 0)
		val[1] = -1;
	    }
	  else if (val[1] & (HOST_WIDE_INT_1
			     << (bitpos - 1 - HOST_BITS_PER_WIDE_INT)))
	    /* Sign bit lies in val[1]; extend within it.  */
	    val[1] |= HOST_WIDE_INT_M1U << (bitpos - HOST_BITS_PER_WIDE_INT);
	}

      /* Build the element constant and record it as pending for index
	 PURPOSE.  */
      value = wide_int_to_tree (type,
				cst: wide_int::from_array (val, len: 2,
						   HOST_BITS_PER_WIDE_INT * 2));
      add_pending_init (loc: input_location, purpose, value, NULL_TREE, implicit: true,
			braced_init_obstack);
    }

  /* Further elements must now go through the pending tree.  */
  constructor_incremental = 0;
}
10238 | |
10239 | /* Return value of FIELD in pending initializer or NULL_TREE if the field was |
10240 | not initialized yet. */ |
10241 | |
10242 | static tree |
10243 | find_init_member (tree field, struct obstack * braced_init_obstack) |
10244 | { |
10245 | struct init_node *p; |
10246 | |
10247 | if (TREE_CODE (constructor_type) == ARRAY_TYPE) |
10248 | { |
10249 | if (constructor_incremental |
10250 | && tree_int_cst_lt (t1: field, t2: constructor_unfilled_index)) |
10251 | set_nonincremental_init (braced_init_obstack); |
10252 | |
10253 | p = constructor_pending_elts; |
10254 | while (p) |
10255 | { |
10256 | if (tree_int_cst_lt (t1: field, t2: p->purpose)) |
10257 | p = p->left; |
10258 | else if (tree_int_cst_lt (t1: p->purpose, t2: field)) |
10259 | p = p->right; |
10260 | else |
10261 | return p->value; |
10262 | } |
10263 | } |
10264 | else if (TREE_CODE (constructor_type) == RECORD_TYPE) |
10265 | { |
10266 | tree bitpos = bit_position (field); |
10267 | |
10268 | if (constructor_incremental |
10269 | && (!constructor_unfilled_fields |
10270 | || tree_int_cst_lt (t1: bitpos, |
10271 | t2: bit_position (constructor_unfilled_fields)))) |
10272 | set_nonincremental_init (braced_init_obstack); |
10273 | |
10274 | p = constructor_pending_elts; |
10275 | while (p) |
10276 | { |
10277 | if (field == p->purpose) |
10278 | return p->value; |
10279 | else if (tree_int_cst_lt (t1: bitpos, t2: bit_position (p->purpose))) |
10280 | p = p->left; |
10281 | else |
10282 | p = p->right; |
10283 | } |
10284 | } |
10285 | else if (TREE_CODE (constructor_type) == UNION_TYPE) |
10286 | { |
10287 | if (!vec_safe_is_empty (v: constructor_elements) |
10288 | && (constructor_elements->last ().index == field)) |
10289 | return constructor_elements->last ().value; |
10290 | } |
10291 | return NULL_TREE; |
10292 | } |
10293 | |
/* "Output" the next constructor element.
   At top level, really output it to assembler code now.
   Otherwise, collect it in a list from which we will make a CONSTRUCTOR.
   If ORIGTYPE is not NULL_TREE, it is the original type of VALUE.
   TYPE is the data type that the containing data type wants here.
   FIELD is the field (a FIELD_DECL) or the index that this element fills.
   If VALUE is a string constant, STRICT_STRING is true if it is
   unparenthesized or we should not warn here for it being parenthesized.
   For other types of VALUE, STRICT_STRING is not used.

   PENDING if true means output pending elements that belong
   right after this element.  (PENDING is normally true;
   it is false while outputting pending elements, to avoid recursion.)

   IMPLICIT is true if value comes from pop_init_level (1),
   the new initializer has been merged with the existing one
   and thus no warnings should be emitted about overriding an
   existing initializer.

   BRACED_INIT_OBSTACK supplies storage for pending-initializer
   nodes.  */

static void
output_init_element (location_t loc, tree value, tree origtype,
		     bool strict_string, tree type, tree field, bool pending,
		     bool implicit, struct obstack * braced_init_obstack)
{
  tree semantic_type = NULL_TREE;
  bool maybe_const = true;
  bool npc, int_const_expr, arith_const_expr;

  if (type == error_mark_node || value == error_mark_node)
    {
      constructor_erroneous = 1;
      return;
    }
  /* Decay an array-valued initializer to a pointer, except for a
     string literal initializing an integer array, or when the value's
     type already matches the wanted type.  */
  if (TREE_CODE (TREE_TYPE (value)) == ARRAY_TYPE
      && (TREE_CODE (value) == STRING_CST
	  || TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
      && !(TREE_CODE (value) == STRING_CST
	   && TREE_CODE (type) == ARRAY_TYPE
	   && INTEGRAL_TYPE_P (TREE_TYPE (type)))
      && !comptypes (TYPE_MAIN_VARIANT (TREE_TYPE (value)),
		     TYPE_MAIN_VARIANT (type)))
    value = array_to_pointer_conversion (loc: input_location, exp: value);

  if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR
      && require_constant_value && pending)
    {
      /* As an extension, allow initializing objects with static storage
	 duration with compound literals (which are then treated just as
	 the brace enclosed list they contain).  */
      if (flag_isoc99)
	pedwarn_init (loc, opt: OPT_Wpedantic, gmsgid: "initializer element is not "
		      "constant");
      tree decl = COMPOUND_LITERAL_EXPR_DECL (value);
      value = DECL_INITIAL (decl);
    }

  /* Classify the unfolded expression: null pointer constant, integer
     constant expression, and (tentatively) arithmetic constant
     expression, for the constexpr checks in digest_init.  */
  npc = null_pointer_constant_p (expr: value);
  int_const_expr = (TREE_CODE (value) == INTEGER_CST
		    && !TREE_OVERFLOW (value)
		    && INTEGRAL_TYPE_P (TREE_TYPE (value)));
  /* Not fully determined before folding.  */
  arith_const_expr = true;
  if (TREE_CODE (value) == EXCESS_PRECISION_EXPR)
    {
      /* Strip the wrapper; remember the semantic type so it can be
	 reapplied before digest_init.  */
      semantic_type = TREE_TYPE (value);
      value = TREE_OPERAND (value, 0);
    }
  value = c_fully_fold (value, require_constant_value, &maybe_const);
  /* TODO: this may not detect all cases of expressions folding to
     constants that are not arithmetic constant expressions.  */
  if (!maybe_const)
    arith_const_expr = false;
  else if (!INTEGRAL_TYPE_P (TREE_TYPE (value))
	   && TREE_CODE (TREE_TYPE (value)) != REAL_TYPE
	   && TREE_CODE (TREE_TYPE (value)) != COMPLEX_TYPE)
    arith_const_expr = false;
  else if (TREE_CODE (value) != INTEGER_CST
	   && TREE_CODE (value) != REAL_CST
	   && TREE_CODE (value) != COMPLEX_CST)
    arith_const_expr = false;
  else if (TREE_OVERFLOW (value))
    arith_const_expr = false;

  /* Update the per-constructor flags: erroneous, constant, and
     "simple" (directly usable as a static initializer).  */
  if (value == error_mark_node)
    constructor_erroneous = 1;
  else if (!TREE_CONSTANT (value))
    constructor_constant = 0;
  else if (!initializer_constant_valid_p (value,
					  TREE_TYPE (value),
					  AGGREGATE_TYPE_P (constructor_type)
					  && TYPE_REVERSE_STORAGE_ORDER
					     (constructor_type))
	   || (RECORD_OR_UNION_TYPE_P (constructor_type)
	       && DECL_C_BIT_FIELD (field)
	       && TREE_CODE (value) != INTEGER_CST))
    constructor_simple = 0;
  if (!maybe_const)
    constructor_nonconst = 1;

  /* Digest the initializer and issue any errors about incompatible
     types before issuing errors about non-constant initializers.  */
  tree new_value = value;
  if (semantic_type)
    new_value = build1 (EXCESS_PRECISION_EXPR, semantic_type, value);
  /* In the case of braces around a scalar initializer, the result of
     this initializer processing goes through digest_init again at the
     outer level.  In the case of a constexpr initializer for a
     pointer, avoid converting a null pointer constant to something
     that is not a null pointer constant to avoid a spurious error
     from that second processing.  */
  if (!require_constexpr_value
      || !npc
      || TREE_CODE (constructor_type) != POINTER_TYPE)
    new_value = digest_init (init_loc: loc, type, init: new_value, origtype, null_pointer_constant: npc,
			     int_const_expr, arith_const_expr, strict_string,
			     require_constant: require_constant_value, require_constexpr: require_constexpr_value);
  if (new_value == error_mark_node)
    {
      constructor_erroneous = 1;
      return;
    }
  if (require_constant_value || require_constant_elements)
    constant_expression_warning (new_value);

  /* Proceed to check the constness of the original initializer.  */
  if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
    {
      if (require_constant_value)
	{
	  error_init (loc, gmsgid: "initializer element is not constant");
	  value = error_mark_node;
	}
      else if (require_constant_elements)
	pedwarn (loc, OPT_Wpedantic,
		 "initializer element is not computable at load time");
    }
  else if (!maybe_const
	   && (require_constant_value || require_constant_elements))
    pedwarn_init (loc, opt: OPT_Wpedantic,
		  gmsgid: "initializer element is not a constant expression");
  /* digest_init has already carried out the additional checks
     required for 'constexpr' initializers (using the information
     passed to it about whether the original initializer was certain
     kinds of constant expression), so that check does not need to be
     repeated here.  */

  /* Issue -Wc++-compat warnings about initializing a bitfield with
     enum type.  */
  if (warn_cxx_compat
      && field != NULL_TREE
      && TREE_CODE (field) == FIELD_DECL
      && DECL_BIT_FIELD_TYPE (field) != NULL_TREE
      && (TYPE_MAIN_VARIANT (DECL_BIT_FIELD_TYPE (field))
	  != TYPE_MAIN_VARIANT (type))
      && TREE_CODE (DECL_BIT_FIELD_TYPE (field)) == ENUMERAL_TYPE)
    {
      tree checktype = origtype != NULL_TREE ? origtype : TREE_TYPE (value);
      if (checktype != error_mark_node
	  && (TYPE_MAIN_VARIANT (checktype)
	      != TYPE_MAIN_VARIANT (DECL_BIT_FIELD_TYPE (field))))
	warning_init (loc, opt: OPT_Wc___compat,
		      gmsgid: "enum conversion in initialization is invalid in C++");
    }

  /* If this field is empty and does not have side effects (and is not at
     the end of structure), don't do anything other than checking the
     initializer.  */
  if (field
      && (TREE_TYPE (field) == error_mark_node
	  || (COMPLETE_TYPE_P (TREE_TYPE (field))
	      && integer_zerop (TYPE_SIZE (TREE_TYPE (field)))
	      && !TREE_SIDE_EFFECTS (new_value)
	      && (TREE_CODE (constructor_type) == ARRAY_TYPE
		  || DECL_CHAIN (field)))))
    return;

  /* Finally, set VALUE to the initializer value digested above.  */
  value = new_value;

  /* If this element doesn't come next in sequence,
     put it on constructor_pending_elts.  */
  if (TREE_CODE (constructor_type) == ARRAY_TYPE
      && (!constructor_incremental
	  || !tree_int_cst_equal (field, constructor_unfilled_index)))
    {
      /* Going back before an already-output index requires moving
	 everything into the pending tree first.  */
      if (constructor_incremental
	  && tree_int_cst_lt (t1: field, t2: constructor_unfilled_index))
	set_nonincremental_init (braced_init_obstack);

      add_pending_init (loc, purpose: field, value, origtype, implicit,
			braced_init_obstack);
      return;
    }
  else if (TREE_CODE (constructor_type) == RECORD_TYPE
	   && (!constructor_incremental
	       || field != constructor_unfilled_fields))
    {
      /* We do this for records but not for unions.  In a union,
	 no matter which field is specified, it can be initialized
	 right away since it starts at the beginning of the union.  */
      if (constructor_incremental)
	{
	  if (!constructor_unfilled_fields)
	    set_nonincremental_init (braced_init_obstack);
	  else
	    {
	      tree bitpos, unfillpos;

	      bitpos = bit_position (field);
	      unfillpos = bit_position (constructor_unfilled_fields);

	      /* Going back before the first unfilled field likewise
		 forces nonincremental mode.  */
	      if (tree_int_cst_lt (t1: bitpos, t2: unfillpos))
		set_nonincremental_init (braced_init_obstack);
	    }
	}

      add_pending_init (loc, purpose: field, value, origtype, implicit,
			braced_init_obstack);
      return;
    }
  else if (TREE_CODE (constructor_type) == UNION_TYPE
	   && !vec_safe_is_empty (v: constructor_elements))
    {
      /* Overwriting a previously initialized union member: warn unless
	 this comes from merging in pop_init_level.  */
      if (!implicit)
	{
	  if (TREE_SIDE_EFFECTS (constructor_elements->last ().value))
	    warning_init (loc, opt: OPT_Woverride_init_side_effects,
			  gmsgid: "initialized field with side-effects overwritten");
	  else if (warn_override_init)
	    warning_init (loc, opt: OPT_Woverride_init,
			  gmsgid: "initialized field overwritten");
	}

      /* We can have just one union field set.  */
      constructor_elements = NULL;
    }

  /* Otherwise, output this element either to
     constructor_elements or to the assembler file.  */

  constructor_elt celt = {.index: field, .value: value};
  vec_safe_push (v&: constructor_elements, obj: celt);

  /* Advance the variable that indicates sequential elements output.  */
  if (TREE_CODE (constructor_type) == ARRAY_TYPE)
    constructor_unfilled_index
      = size_binop_loc (input_location, PLUS_EXPR, constructor_unfilled_index,
			bitsize_one_node);
  else if (TREE_CODE (constructor_type) == RECORD_TYPE)
    {
      constructor_unfilled_fields
	= DECL_CHAIN (constructor_unfilled_fields);

      /* Skip any nameless bit fields.  */
      while (constructor_unfilled_fields != NULL_TREE
	     && DECL_UNNAMED_BIT_FIELD (constructor_unfilled_fields))
	constructor_unfilled_fields =
	  DECL_CHAIN (constructor_unfilled_fields);
    }
  else if (TREE_CODE (constructor_type) == UNION_TYPE)
    constructor_unfilled_fields = NULL_TREE;

  /* Now output any pending elements which have become next.  */
  if (pending)
    output_pending_init_elements (0, braced_init_obstack);
}
10560 | |
10561 | /* For two FIELD_DECLs in the same chain, return -1 if field1 |
10562 | comes before field2, 1 if field1 comes after field2 and |
10563 | 0 if field1 == field2. */ |
10564 | |
10565 | static int |
10566 | init_field_decl_cmp (tree field1, tree field2) |
10567 | { |
10568 | if (field1 == field2) |
10569 | return 0; |
10570 | |
10571 | tree bitpos1 = bit_position (field1); |
10572 | tree bitpos2 = bit_position (field2); |
10573 | if (tree_int_cst_equal (bitpos1, bitpos2)) |
10574 | { |
10575 | /* If one of the fields has non-zero bitsize, then that |
10576 | field must be the last one in a sequence of zero |
10577 | sized fields, fields after it will have bigger |
10578 | bit_position. */ |
10579 | if (TREE_TYPE (field1) != error_mark_node |
10580 | && COMPLETE_TYPE_P (TREE_TYPE (field1)) |
10581 | && integer_nonzerop (TREE_TYPE (field1))) |
10582 | return 1; |
10583 | if (TREE_TYPE (field2) != error_mark_node |
10584 | && COMPLETE_TYPE_P (TREE_TYPE (field2)) |
10585 | && integer_nonzerop (TREE_TYPE (field2))) |
10586 | return -1; |
10587 | /* Otherwise, fallback to DECL_CHAIN walk to find out |
10588 | which field comes earlier. Walk chains of both |
10589 | fields, so that if field1 and field2 are close to each |
10590 | other in either order, it is found soon even for large |
10591 | sequences of zero sized fields. */ |
10592 | tree f1 = field1, f2 = field2; |
10593 | while (1) |
10594 | { |
10595 | f1 = DECL_CHAIN (f1); |
10596 | f2 = DECL_CHAIN (f2); |
10597 | if (f1 == NULL_TREE) |
10598 | { |
10599 | gcc_assert (f2); |
10600 | return 1; |
10601 | } |
10602 | if (f2 == NULL_TREE) |
10603 | return -1; |
10604 | if (f1 == field2) |
10605 | return -1; |
10606 | if (f2 == field1) |
10607 | return 1; |
10608 | if (!tree_int_cst_equal (bit_position (f1), bitpos1)) |
10609 | return 1; |
10610 | if (!tree_int_cst_equal (bit_position (f2), bitpos1)) |
10611 | return -1; |
10612 | } |
10613 | } |
10614 | else if (tree_int_cst_lt (t1: bitpos1, t2: bitpos2)) |
10615 | return -1; |
10616 | else |
10617 | return 1; |
10618 | } |
10619 | |
/* Output any pending elements which have become next.
   As we output elements, constructor_unfilled_{fields,index}
   advances, which may cause other elements to become next;
   if so, they too are output.

   If ALL is 0, we return when there are
   no more pending elements to output now.

   If ALL is 1, we output space as necessary so that
   we can output all the pending elements.

   BRACED_INIT_OBSTACK supplies storage for pending-initializer
   nodes.  */
static void
output_pending_init_elements (int all, struct obstack * braced_init_obstack)
{
  struct init_node *elt = constructor_pending_elts;
  tree next;

 retry:

  /* Look through the whole pending tree.
     If we find an element that should be output now,
     output it.  Otherwise, set NEXT to the element
     that comes first among those still pending.  */

  next = NULL_TREE;
  while (elt)
    {
      if (TREE_CODE (constructor_type) == ARRAY_TYPE)
	{
	  /* Outputting the element advances constructor_unfilled_index,
	     so on the next iteration this same node compares as smaller
	     and we move on through its right subtree.  */
	  if (tree_int_cst_equal (elt->purpose,
				  constructor_unfilled_index))
	    output_init_element (loc: input_location, value: elt->value, origtype: elt->origtype,
				 strict_string: true, TREE_TYPE (constructor_type),
				 field: constructor_unfilled_index, pending: false, implicit: false,
				 braced_init_obstack);
	  else if (tree_int_cst_lt (t1: constructor_unfilled_index,
				    t2: elt->purpose))
	    {
	      /* Advance to the next smaller node.  */
	      if (elt->left)
		elt = elt->left;
	      else
		{
		  /* We have reached the smallest node bigger than the
		     current unfilled index.  Fill the space first.  */
		  next = elt->purpose;
		  break;
		}
	    }
	  else
	    {
	      /* Advance to the next bigger node.  */
	      if (elt->right)
		elt = elt->right;
	      else
		{
		  /* We have reached the biggest node in a subtree.  Find
		     the parent of it, which is the next bigger node.  */
		  while (elt->parent && elt->parent->right == elt)
		    elt = elt->parent;
		  elt = elt->parent;
		  if (elt && tree_int_cst_lt (t1: constructor_unfilled_index,
					      t2: elt->purpose))
		    {
		      next = elt->purpose;
		      break;
		    }
		}
	    }
	}
      else if (RECORD_OR_UNION_TYPE_P (constructor_type))
	{
	  /* If the current record is complete we are done.  */
	  if (constructor_unfilled_fields == NULL_TREE)
	    break;

	  /* Same traversal as the array case, but ordered by field
	     position via init_field_decl_cmp.  */
	  int cmp = init_field_decl_cmp (field1: constructor_unfilled_fields,
					 field2: elt->purpose);
	  if (cmp == 0)
	    output_init_element (loc: input_location, value: elt->value, origtype: elt->origtype,
				 strict_string: true, TREE_TYPE (elt->purpose),
				 field: elt->purpose, pending: false, implicit: false,
				 braced_init_obstack);
	  else if (cmp < 0)
	    {
	      /* Advance to the next smaller node.  */
	      if (elt->left)
		elt = elt->left;
	      else
		{
		  /* We have reached the smallest node bigger than the
		     current unfilled field.  Fill the space first.  */
		  next = elt->purpose;
		  break;
		}
	    }
	  else
	    {
	      /* Advance to the next bigger node.  */
	      if (elt->right)
		elt = elt->right;
	      else
		{
		  /* We have reached the biggest node in a subtree.  Find
		     the parent of it, which is the next bigger node.  */
		  while (elt->parent && elt->parent->right == elt)
		    elt = elt->parent;
		  elt = elt->parent;
		  if (elt
		      && init_field_decl_cmp (field1: constructor_unfilled_fields,
					      field2: elt->purpose) < 0)
		    {
		      next = elt->purpose;
		      break;
		    }
		}
	    }
	}
    }

  /* Ordinarily return, but not if we want to output all
     and there are elements left.  */
  if (!(all && next != NULL_TREE))
    return;

  /* If it's not incremental, just skip over the gap, so that after
     jumping to retry we will output the next successive element.  */
  if (RECORD_OR_UNION_TYPE_P (constructor_type))
    constructor_unfilled_fields = next;
  else if (TREE_CODE (constructor_type) == ARRAY_TYPE)
    constructor_unfilled_index = next;

  /* ELT now points to the node in the pending tree with the next
     initializer to output.  */
  goto retry;
}
10755 | |
10756 | /* Expression VALUE coincides with the start of type TYPE in a braced |
10757 | initializer. Return true if we should treat VALUE as initializing |
10758 | the first element of TYPE, false if we should treat it as initializing |
10759 | TYPE as a whole. |
10760 | |
10761 | If the initializer is clearly invalid, the question becomes: |
10762 | which choice gives the best error message? */ |
10763 | |
10764 | static bool |
10765 | initialize_elementwise_p (tree type, tree value) |
10766 | { |
10767 | if (type == error_mark_node || value == error_mark_node) |
10768 | return false; |
10769 | |
10770 | gcc_checking_assert (TYPE_MAIN_VARIANT (type) == type); |
10771 | |
10772 | tree value_type = TREE_TYPE (value); |
10773 | if (value_type == error_mark_node) |
10774 | return false; |
10775 | |
10776 | /* GNU vectors can be initialized elementwise. However, treat any |
10777 | kind of vector value as initializing the vector type as a whole, |
10778 | regardless of whether the value is a GNU vector. Such initializers |
10779 | are valid if and only if they would have been valid in a non-braced |
10780 | initializer like: |
10781 | |
10782 | TYPE foo = VALUE; |
10783 | |
10784 | so recursing into the vector type would be at best confusing or at |
10785 | worst wrong. For example, when -flax-vector-conversions is in effect, |
10786 | it's possible to initialize a V8HI from a V4SI, even though the vectors |
10787 | have different element types and different numbers of elements. */ |
10788 | if (gnu_vector_type_p (type)) |
10789 | return !VECTOR_TYPE_P (value_type); |
10790 | |
10791 | if (AGGREGATE_TYPE_P (type)) |
10792 | return !comptypes (type1: type, TYPE_MAIN_VARIANT (value_type)); |
10793 | |
10794 | return false; |
10795 | } |
10796 | |
10797 | /* Add one non-braced element to the current constructor level. |
10798 | This adjusts the current position within the constructor's type. |
10799 | This may also start or terminate implicit levels |
10800 | to handle a partly-braced initializer. |
10801 | |
10802 | Once this has found the correct level for the new element, |
10803 | it calls output_init_element. |
10804 | |
10805 | IMPLICIT is true if value comes from pop_init_level (1), |
10806 | the new initializer has been merged with the existing one |
10807 | and thus no warnings should be emitted about overriding an |
10808 | existing initializer. */ |
10809 | |
10810 | void |
10811 | process_init_element (location_t loc, struct c_expr value, bool implicit, |
10812 | struct obstack * braced_init_obstack) |
10813 | { |
10814 | tree orig_value = value.value; |
10815 | int string_flag |
10816 | = (orig_value != NULL_TREE && TREE_CODE (orig_value) == STRING_CST); |
10817 | bool strict_string = value.original_code == STRING_CST; |
10818 | bool was_designated = designator_depth != 0; |
10819 | |
10820 | designator_depth = 0; |
10821 | designator_erroneous = 0; |
10822 | |
10823 | if (!implicit && value.value && !integer_zerop (value.value)) |
10824 | constructor_zeroinit = 0; |
10825 | |
10826 | /* Handle superfluous braces around string cst as in |
10827 | char x[] = {"foo"}; */ |
10828 | if (constructor_type |
10829 | && !was_designated |
10830 | && TREE_CODE (constructor_type) == ARRAY_TYPE |
10831 | && INTEGRAL_TYPE_P (TREE_TYPE (constructor_type)) |
10832 | && integer_zerop (constructor_unfilled_index)) |
10833 | { |
10834 | if (constructor_stack->replacement_value.value) |
10835 | { |
10836 | error_init (loc, gmsgid: "excess elements in %qT initializer" , constructor_type); |
10837 | return; |
10838 | } |
10839 | else if (string_flag) |
10840 | { |
10841 | constructor_stack->replacement_value = value; |
10842 | return; |
10843 | } |
10844 | } |
10845 | |
10846 | if (constructor_stack->replacement_value.value != NULL_TREE) |
10847 | { |
10848 | error_init (loc, gmsgid: "excess elements in struct initializer" ); |
10849 | return; |
10850 | } |
10851 | |
10852 | /* Ignore elements of a brace group if it is entirely superfluous |
10853 | and has already been diagnosed, or if the type is erroneous. */ |
10854 | if (constructor_type == NULL_TREE || constructor_type == error_mark_node) |
10855 | return; |
10856 | |
10857 | /* Ignore elements of an initializer for a variable-size type. |
10858 | Those are diagnosed in the parser (empty initializer braces are OK). */ |
10859 | if (COMPLETE_TYPE_P (constructor_type) |
10860 | && !poly_int_tree_p (TYPE_SIZE (constructor_type))) |
10861 | return; |
10862 | |
10863 | if (!implicit && warn_designated_init && !was_designated |
10864 | && TREE_CODE (constructor_type) == RECORD_TYPE |
10865 | && lookup_attribute (attr_name: "designated_init" , |
10866 | TYPE_ATTRIBUTES (constructor_type))) |
10867 | warning_init (loc, |
10868 | opt: OPT_Wdesignated_init, |
10869 | gmsgid: "positional initialization of field " |
10870 | "in %<struct%> declared with %<designated_init%> attribute" ); |
10871 | |
10872 | /* If we've exhausted any levels that didn't have braces, |
10873 | pop them now. */ |
10874 | while (constructor_stack->implicit) |
10875 | { |
10876 | if (RECORD_OR_UNION_TYPE_P (constructor_type) |
10877 | && constructor_fields == NULL_TREE) |
10878 | process_init_element (loc, |
10879 | value: pop_init_level (loc, implicit: 1, braced_init_obstack, |
10880 | insert_before: last_init_list_comma), |
10881 | implicit: true, braced_init_obstack); |
10882 | else if ((TREE_CODE (constructor_type) == ARRAY_TYPE |
10883 | || gnu_vector_type_p (type: constructor_type)) |
10884 | && constructor_max_index |
10885 | && tree_int_cst_lt (t1: constructor_max_index, |
10886 | t2: constructor_index)) |
10887 | process_init_element (loc, |
10888 | value: pop_init_level (loc, implicit: 1, braced_init_obstack, |
10889 | insert_before: last_init_list_comma), |
10890 | implicit: true, braced_init_obstack); |
10891 | else |
10892 | break; |
10893 | } |
10894 | |
10895 | /* In the case of [LO ... HI] = VALUE, only evaluate VALUE once. */ |
10896 | if (constructor_range_stack) |
10897 | { |
10898 | /* If value is a compound literal and we'll be just using its |
10899 | content, don't put it into a SAVE_EXPR. */ |
10900 | if (TREE_CODE (value.value) != COMPOUND_LITERAL_EXPR |
10901 | || !require_constant_value) |
10902 | { |
10903 | tree semantic_type = NULL_TREE; |
10904 | if (TREE_CODE (value.value) == EXCESS_PRECISION_EXPR) |
10905 | { |
10906 | semantic_type = TREE_TYPE (value.value); |
10907 | value.value = TREE_OPERAND (value.value, 0); |
10908 | } |
10909 | value.value = save_expr (value.value); |
10910 | if (semantic_type) |
10911 | value.value = build1 (EXCESS_PRECISION_EXPR, semantic_type, |
10912 | value.value); |
10913 | } |
10914 | } |
10915 | |
10916 | while (1) |
10917 | { |
10918 | if (TREE_CODE (constructor_type) == RECORD_TYPE) |
10919 | { |
10920 | tree fieldtype; |
10921 | enum tree_code fieldcode; |
10922 | |
10923 | if (constructor_fields == NULL_TREE) |
10924 | { |
10925 | pedwarn_init (loc, opt: 0, gmsgid: "excess elements in struct initializer" ); |
10926 | break; |
10927 | } |
10928 | |
10929 | fieldtype = TREE_TYPE (constructor_fields); |
10930 | if (fieldtype != error_mark_node) |
10931 | fieldtype = TYPE_MAIN_VARIANT (fieldtype); |
10932 | fieldcode = TREE_CODE (fieldtype); |
10933 | |
10934 | /* Error for non-static initialization of a flexible array member. */ |
10935 | if (fieldcode == ARRAY_TYPE |
10936 | && !require_constant_value |
10937 | && TYPE_SIZE (fieldtype) == NULL_TREE |
10938 | && DECL_CHAIN (constructor_fields) == NULL_TREE) |
10939 | { |
10940 | error_init (loc, gmsgid: "non-static initialization of a flexible " |
10941 | "array member" ); |
10942 | break; |
10943 | } |
10944 | |
10945 | /* Error for initialization of a flexible array member with |
10946 | a string constant if the structure is in an array. E.g.: |
10947 | struct S { int x; char y[]; }; |
10948 | struct S s[] = { { 1, "foo" } }; |
10949 | is invalid. */ |
10950 | if (string_flag |
10951 | && fieldcode == ARRAY_TYPE |
10952 | && constructor_depth > 1 |
10953 | && TYPE_SIZE (fieldtype) == NULL_TREE |
10954 | && DECL_CHAIN (constructor_fields) == NULL_TREE) |
10955 | { |
10956 | bool in_array_p = false; |
10957 | for (struct constructor_stack *p = constructor_stack; |
10958 | p && p->type; p = p->next) |
10959 | if (TREE_CODE (p->type) == ARRAY_TYPE) |
10960 | { |
10961 | in_array_p = true; |
10962 | break; |
10963 | } |
10964 | if (in_array_p) |
10965 | { |
10966 | error_init (loc, gmsgid: "initialization of flexible array " |
10967 | "member in a nested context" ); |
10968 | break; |
10969 | } |
10970 | } |
10971 | |
10972 | /* Accept a string constant to initialize a subarray. */ |
10973 | if (value.value != NULL_TREE |
10974 | && fieldcode == ARRAY_TYPE |
10975 | && INTEGRAL_TYPE_P (TREE_TYPE (fieldtype)) |
10976 | && string_flag) |
10977 | value.value = orig_value; |
10978 | /* Otherwise, if we have come to a subaggregate, |
10979 | and we don't have an element of its type, push into it. */ |
10980 | else if (value.value != NULL_TREE |
10981 | && initialize_elementwise_p (type: fieldtype, value: value.value)) |
10982 | { |
10983 | push_init_level (loc, implicit: 1, braced_init_obstack); |
10984 | continue; |
10985 | } |
10986 | |
10987 | if (value.value) |
10988 | { |
10989 | push_member_name (decl: constructor_fields); |
10990 | output_init_element (loc, value: value.value, origtype: value.original_type, |
10991 | strict_string, type: fieldtype, |
10992 | field: constructor_fields, pending: true, implicit, |
10993 | braced_init_obstack); |
10994 | RESTORE_SPELLING_DEPTH (constructor_depth); |
10995 | } |
10996 | else |
10997 | /* Do the bookkeeping for an element that was |
10998 | directly output as a constructor. */ |
10999 | { |
11000 | /* For a record, keep track of end position of last field. */ |
11001 | if (DECL_SIZE (constructor_fields)) |
11002 | constructor_bit_index |
11003 | = size_binop_loc (input_location, PLUS_EXPR, |
11004 | bit_position (constructor_fields), |
11005 | DECL_SIZE (constructor_fields)); |
11006 | |
11007 | /* If the current field was the first one not yet written out, |
11008 | it isn't now, so update. */ |
11009 | if (constructor_unfilled_fields == constructor_fields) |
11010 | { |
11011 | constructor_unfilled_fields = DECL_CHAIN (constructor_fields); |
11012 | /* Skip any nameless bit fields. */ |
11013 | while (constructor_unfilled_fields != 0 |
11014 | && (DECL_UNNAMED_BIT_FIELD |
11015 | (constructor_unfilled_fields))) |
11016 | constructor_unfilled_fields = |
11017 | DECL_CHAIN (constructor_unfilled_fields); |
11018 | } |
11019 | } |
11020 | |
11021 | constructor_fields = DECL_CHAIN (constructor_fields); |
11022 | /* Skip any nameless bit fields at the beginning. */ |
11023 | while (constructor_fields != NULL_TREE |
11024 | && DECL_UNNAMED_BIT_FIELD (constructor_fields)) |
11025 | constructor_fields = DECL_CHAIN (constructor_fields); |
11026 | } |
11027 | else if (TREE_CODE (constructor_type) == UNION_TYPE) |
11028 | { |
11029 | tree fieldtype; |
11030 | enum tree_code fieldcode; |
11031 | |
11032 | if (constructor_fields == NULL_TREE) |
11033 | { |
11034 | pedwarn_init (loc, opt: 0, |
11035 | gmsgid: "excess elements in union initializer" ); |
11036 | break; |
11037 | } |
11038 | |
11039 | fieldtype = TREE_TYPE (constructor_fields); |
11040 | if (fieldtype != error_mark_node) |
11041 | fieldtype = TYPE_MAIN_VARIANT (fieldtype); |
11042 | fieldcode = TREE_CODE (fieldtype); |
11043 | |
11044 | /* Warn that traditional C rejects initialization of unions. |
11045 | We skip the warning if the value is zero. This is done |
11046 | under the assumption that the zero initializer in user |
11047 | code appears conditioned on e.g. __STDC__ to avoid |
11048 | "missing initializer" warnings and relies on default |
11049 | initialization to zero in the traditional C case. |
11050 | We also skip the warning if the initializer is designated, |
11051 | again on the assumption that this must be conditional on |
11052 | __STDC__ anyway (and we've already complained about the |
11053 | member-designator already). */ |
11054 | if (!in_system_header_at (loc: input_location) && !constructor_designated |
11055 | && !(value.value && (integer_zerop (value.value) |
11056 | || real_zerop (value.value)))) |
11057 | warning (OPT_Wtraditional, "traditional C rejects initialization " |
11058 | "of unions" ); |
11059 | |
11060 | /* Accept a string constant to initialize a subarray. */ |
11061 | if (value.value != NULL_TREE |
11062 | && fieldcode == ARRAY_TYPE |
11063 | && INTEGRAL_TYPE_P (TREE_TYPE (fieldtype)) |
11064 | && string_flag) |
11065 | value.value = orig_value; |
11066 | /* Otherwise, if we have come to a subaggregate, |
11067 | and we don't have an element of its type, push into it. */ |
11068 | else if (value.value != NULL_TREE |
11069 | && initialize_elementwise_p (type: fieldtype, value: value.value)) |
11070 | { |
11071 | push_init_level (loc, implicit: 1, braced_init_obstack); |
11072 | continue; |
11073 | } |
11074 | |
11075 | if (value.value) |
11076 | { |
11077 | push_member_name (decl: constructor_fields); |
11078 | output_init_element (loc, value: value.value, origtype: value.original_type, |
11079 | strict_string, type: fieldtype, |
11080 | field: constructor_fields, pending: true, implicit, |
11081 | braced_init_obstack); |
11082 | RESTORE_SPELLING_DEPTH (constructor_depth); |
11083 | } |
11084 | else |
11085 | /* Do the bookkeeping for an element that was |
11086 | directly output as a constructor. */ |
11087 | { |
11088 | constructor_bit_index = DECL_SIZE (constructor_fields); |
11089 | constructor_unfilled_fields = DECL_CHAIN (constructor_fields); |
11090 | } |
11091 | |
11092 | constructor_fields = NULL_TREE; |
11093 | } |
11094 | else if (TREE_CODE (constructor_type) == ARRAY_TYPE) |
11095 | { |
11096 | tree elttype = TYPE_MAIN_VARIANT (TREE_TYPE (constructor_type)); |
11097 | enum tree_code eltcode = TREE_CODE (elttype); |
11098 | |
11099 | /* Accept a string constant to initialize a subarray. */ |
11100 | if (value.value != NULL_TREE |
11101 | && eltcode == ARRAY_TYPE |
11102 | && INTEGRAL_TYPE_P (TREE_TYPE (elttype)) |
11103 | && string_flag) |
11104 | value.value = orig_value; |
11105 | /* Otherwise, if we have come to a subaggregate, |
11106 | and we don't have an element of its type, push into it. */ |
11107 | else if (value.value != NULL_TREE |
11108 | && initialize_elementwise_p (type: elttype, value: value.value)) |
11109 | { |
11110 | push_init_level (loc, implicit: 1, braced_init_obstack); |
11111 | continue; |
11112 | } |
11113 | |
11114 | if (constructor_max_index != NULL_TREE |
11115 | && (tree_int_cst_lt (t1: constructor_max_index, t2: constructor_index) |
11116 | || integer_all_onesp (constructor_max_index))) |
11117 | { |
11118 | pedwarn_init (loc, opt: 0, |
11119 | gmsgid: "excess elements in array initializer" ); |
11120 | break; |
11121 | } |
11122 | |
11123 | /* Now output the actual element. */ |
11124 | if (value.value) |
11125 | { |
11126 | push_array_bounds (bounds: tree_to_uhwi (constructor_index)); |
11127 | output_init_element (loc, value: value.value, origtype: value.original_type, |
11128 | strict_string, type: elttype, |
11129 | field: constructor_index, pending: true, implicit, |
11130 | braced_init_obstack); |
11131 | RESTORE_SPELLING_DEPTH (constructor_depth); |
11132 | } |
11133 | |
11134 | constructor_index |
11135 | = size_binop_loc (input_location, PLUS_EXPR, |
11136 | constructor_index, bitsize_one_node); |
11137 | |
11138 | if (!value.value) |
11139 | /* If we are doing the bookkeeping for an element that was |
11140 | directly output as a constructor, we must update |
11141 | constructor_unfilled_index. */ |
11142 | constructor_unfilled_index = constructor_index; |
11143 | } |
11144 | else if (gnu_vector_type_p (type: constructor_type)) |
11145 | { |
11146 | tree elttype = TYPE_MAIN_VARIANT (TREE_TYPE (constructor_type)); |
11147 | |
11148 | /* Do a basic check of initializer size. Note that vectors |
11149 | always have a fixed size derived from their type. */ |
11150 | if (tree_int_cst_lt (t1: constructor_max_index, t2: constructor_index)) |
11151 | { |
11152 | pedwarn_init (loc, opt: 0, |
11153 | gmsgid: "excess elements in vector initializer" ); |
11154 | break; |
11155 | } |
11156 | |
11157 | /* Now output the actual element. */ |
11158 | if (value.value) |
11159 | { |
11160 | if (TREE_CODE (value.value) == VECTOR_CST) |
11161 | elttype = TYPE_MAIN_VARIANT (constructor_type); |
11162 | output_init_element (loc, value: value.value, origtype: value.original_type, |
11163 | strict_string, type: elttype, |
11164 | field: constructor_index, pending: true, implicit, |
11165 | braced_init_obstack); |
11166 | } |
11167 | |
11168 | constructor_index |
11169 | = size_binop_loc (input_location, |
11170 | PLUS_EXPR, constructor_index, bitsize_one_node); |
11171 | |
11172 | if (!value.value) |
11173 | /* If we are doing the bookkeeping for an element that was |
11174 | directly output as a constructor, we must update |
11175 | constructor_unfilled_index. */ |
11176 | constructor_unfilled_index = constructor_index; |
11177 | } |
11178 | |
11179 | /* Handle the sole element allowed in a braced initializer |
11180 | for a scalar variable. */ |
11181 | else if (constructor_type != error_mark_node |
11182 | && constructor_fields == NULL_TREE) |
11183 | { |
11184 | pedwarn_init (loc, opt: 0, |
11185 | gmsgid: "excess elements in scalar initializer" ); |
11186 | break; |
11187 | } |
11188 | else |
11189 | { |
11190 | if (value.value) |
11191 | output_init_element (loc, value: value.value, origtype: value.original_type, |
11192 | strict_string, type: constructor_type, |
11193 | NULL_TREE, pending: true, implicit, |
11194 | braced_init_obstack); |
11195 | constructor_fields = NULL_TREE; |
11196 | } |
11197 | |
11198 | /* Handle range initializers either at this level or anywhere higher |
11199 | in the designator stack. */ |
11200 | if (constructor_range_stack) |
11201 | { |
11202 | struct constructor_range_stack *p, *range_stack; |
11203 | int finish = 0; |
11204 | |
11205 | range_stack = constructor_range_stack; |
11206 | constructor_range_stack = 0; |
11207 | while (constructor_stack != range_stack->stack) |
11208 | { |
11209 | gcc_assert (constructor_stack->implicit); |
11210 | process_init_element (loc, |
11211 | value: pop_init_level (loc, implicit: 1, |
11212 | braced_init_obstack, |
11213 | insert_before: last_init_list_comma), |
11214 | implicit: true, braced_init_obstack); |
11215 | } |
11216 | for (p = range_stack; |
11217 | !p->range_end || tree_int_cst_equal (p->index, p->range_end); |
11218 | p = p->prev) |
11219 | { |
11220 | gcc_assert (constructor_stack->implicit); |
11221 | process_init_element (loc, |
11222 | value: pop_init_level (loc, implicit: 1, |
11223 | braced_init_obstack, |
11224 | insert_before: last_init_list_comma), |
11225 | implicit: true, braced_init_obstack); |
11226 | } |
11227 | |
11228 | p->index = size_binop_loc (input_location, |
11229 | PLUS_EXPR, p->index, bitsize_one_node); |
11230 | if (tree_int_cst_equal (p->index, p->range_end) && !p->prev) |
11231 | finish = 1; |
11232 | |
11233 | while (1) |
11234 | { |
11235 | constructor_index = p->index; |
11236 | constructor_fields = p->fields; |
11237 | if (finish && p->range_end && p->index == p->range_start) |
11238 | { |
11239 | finish = 0; |
11240 | p->prev = 0; |
11241 | } |
11242 | p = p->next; |
11243 | if (!p) |
11244 | break; |
11245 | finish_implicit_inits (loc, braced_init_obstack); |
11246 | push_init_level (loc, implicit: 2, braced_init_obstack); |
11247 | p->stack = constructor_stack; |
11248 | if (p->range_end && tree_int_cst_equal (p->index, p->range_end)) |
11249 | p->index = p->range_start; |
11250 | } |
11251 | |
11252 | if (!finish) |
11253 | constructor_range_stack = range_stack; |
11254 | continue; |
11255 | } |
11256 | |
11257 | break; |
11258 | } |
11259 | |
11260 | constructor_range_stack = 0; |
11261 | } |
11262 | |
11263 | /* Build a complete asm-statement, whose components are a CV_QUALIFIER |
11264 | (guaranteed to be 'volatile' or null) and ARGS (represented using |
11265 | an ASM_EXPR node). */ |
11266 | tree |
11267 | build_asm_stmt (bool is_volatile, tree args) |
11268 | { |
11269 | if (is_volatile) |
11270 | ASM_VOLATILE_P (args) = 1; |
11271 | return add_stmt (args); |
11272 | } |
11273 | |
/* Build an asm-expr, whose components are a STRING, some OUTPUTS,
   some INPUTS, and some CLOBBERS.  The latter three may be NULL.
   SIMPLE indicates whether there was anything at all after the
   string in the asm expression -- asm("blah") and asm("blah" : )
   are subtly different.  We use a ASM_EXPR node to represent this.
   LOC is the location of the asm, and IS_INLINE says whether this
   is asm inline.  LABELS, if any, are the labels of an asm goto.

   Returns the new ASM_EXPR.  Invalid operands are diagnosed and
   replaced with error_mark_node in the operand lists rather than
   aborting the build.  */
tree
build_asm_expr (location_t loc, tree string, tree outputs, tree inputs,
		tree clobbers, tree labels, bool simple, bool is_inline)
{
  tree tail;
  tree args;
  int i;
  const char *constraint;
  const char **oconstraints;
  bool allows_mem, allows_reg, is_inout;
  int ninputs, noutputs;

  ninputs = list_length (inputs);
  noutputs = list_length (outputs);
  /* Collected output constraints, consulted later when parsing input
     constraints (for matching constraints like "0").  */
  oconstraints = (const char **) alloca (noutputs * sizeof (const char *));

  /* Resolve symbolic operand names ("%[name]") against the operand lists.  */
  string = resolve_asm_operand_names (string, outputs, inputs, labels);

  /* Remove output conversions that change the type but not the mode.  */
  for (i = 0, tail = outputs; tail; ++i, tail = TREE_CHAIN (tail))
    {
      tree output = TREE_VALUE (tail);

      output = c_fully_fold (output, false, NULL, true);

      /* ??? Really, this should not be here.  Users should be using a
	 proper lvalue, dammit.  But there's a long history of using casts
	 in the output operands.  In cases like longlong.h, this becomes a
	 primitive form of typechecking -- if the cast can be removed, then
	 the output operand had a type of the proper width; otherwise we'll
	 get an error.  Gross, but ...  */
      STRIP_NOPS (output);

      if (!lvalue_or_else (loc, ref: output, use: lv_asm))
	output = error_mark_node;

      /* Writing through an asm output into a const-qualified object is
	 an error, just as for an ordinary assignment.  */
      if (output != error_mark_node
	  && (TREE_READONLY (output)
	      || TYPE_READONLY (TREE_TYPE (output))
	      || (RECORD_OR_UNION_TYPE_P (TREE_TYPE (output))
		  && C_TYPE_FIELDS_READONLY (TREE_TYPE (output)))))
	readonly_error (loc, output, lv_asm);

      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tail)));
      oconstraints[i] = constraint;

      if (parse_output_constraint (&constraint, i, ninputs, noutputs,
				   &allows_mem, &allows_reg, &is_inout))
	{
	  /* If the operand is going to end up in memory,
	     mark it addressable.  */
	  if (!allows_reg && !c_mark_addressable (exp: output))
	    output = error_mark_node;
	  /* A void-typed output is only acceptable for a memory-only
	     constraint; otherwise diagnose it.  */
	  if (!(!allows_reg && allows_mem)
	      && output != error_mark_node
	      && VOID_TYPE_P (TREE_TYPE (output)))
	    {
	      error_at (loc, "invalid use of void expression");
	      output = error_mark_node;
	    }
	}
      else
	output = error_mark_node;

      TREE_VALUE (tail) = output;
    }

  /* Fold and validate the inputs, with the output constraints available
     for matching-constraint checks.  */
  for (i = 0, tail = inputs; tail; ++i, tail = TREE_CHAIN (tail))
    {
      tree input;

      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tail)));
      input = TREE_VALUE (tail);

      if (parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
				  oconstraints, &allows_mem, &allows_reg))
	{
	  /* If the operand is going to end up in memory,
	     mark it addressable.  */
	  if (!allows_reg && allows_mem)
	    {
	      input = c_fully_fold (input, false, NULL, true);

	      /* Strip the nops as we allow this case.  FIXME, this really
		 should be rejected or made deprecated.  */
	      STRIP_NOPS (input);
	      if (!c_mark_addressable (exp: input))
		input = error_mark_node;
	    }
	  else
	    {
	      /* Register-capable input: perform the ordinary
		 lvalue-to-rvalue conversion before folding.  */
	      struct c_expr expr;
	      memset (s: &expr, c: 0, n: sizeof (expr));
	      expr.value = input;
	      expr = convert_lvalue_to_rvalue (loc, exp: expr, convert_p: true, read_p: false);
	      input = c_fully_fold (expr.value, false, NULL);

	      if (input != error_mark_node && VOID_TYPE_P (TREE_TYPE (input)))
		{
		  error_at (loc, "invalid use of void expression");
		  input = error_mark_node;
		}
	    }
	}
      else
	input = error_mark_node;

      TREE_VALUE (tail) = input;
    }

  args = build_stmt (loc, ASM_EXPR, string, outputs, inputs, clobbers, labels);

  /* asm statements without outputs, including simple ones, are treated
     as volatile.  */
  ASM_INPUT_P (args) = simple;
  ASM_VOLATILE_P (args) = (noutputs == 0);
  ASM_INLINE_P (args) = is_inline;

  return args;
}
11401 | |
11402 | /* Generate a goto statement to LABEL. LOC is the location of the |
11403 | GOTO. */ |
11404 | |
11405 | tree |
11406 | c_finish_goto_label (location_t loc, tree label) |
11407 | { |
11408 | tree decl = lookup_label_for_goto (loc, label); |
11409 | if (!decl) |
11410 | return NULL_TREE; |
11411 | TREE_USED (decl) = 1; |
11412 | { |
11413 | add_stmt (build_predict_expr (PRED_GOTO, NOT_TAKEN)); |
11414 | tree t = build1 (GOTO_EXPR, void_type_node, decl); |
11415 | SET_EXPR_LOCATION (t, loc); |
11416 | return add_stmt (t); |
11417 | } |
11418 | } |
11419 | |
11420 | /* Generate a computed goto statement to EXPR. LOC is the location of |
11421 | the GOTO. */ |
11422 | |
11423 | tree |
11424 | c_finish_goto_ptr (location_t loc, c_expr val) |
11425 | { |
11426 | tree expr = val.value; |
11427 | tree t; |
11428 | pedwarn (loc, OPT_Wpedantic, "ISO C forbids %<goto *expr;%>" ); |
11429 | if (expr != error_mark_node |
11430 | && !POINTER_TYPE_P (TREE_TYPE (expr)) |
11431 | && !null_pointer_constant_p (expr)) |
11432 | { |
11433 | error_at (val.get_location (), |
11434 | "computed goto must be pointer type" ); |
11435 | expr = build_zero_cst (ptr_type_node); |
11436 | } |
11437 | expr = c_fully_fold (expr, false, NULL); |
11438 | expr = convert (ptr_type_node, expr); |
11439 | t = build1 (GOTO_EXPR, void_type_node, expr); |
11440 | SET_EXPR_LOCATION (t, loc); |
11441 | return add_stmt (t); |
11442 | } |
11443 | |
/* Generate a C `return' statement.  RETVAL is the expression for what
   to return, or a null pointer for `return;' with no value.  LOC is
   the location of the return statement, or the location of the expression,
   if the statement has any.  If ORIGTYPE is not NULL_TREE, it
   is the original type of RETVAL.

   Diagnoses mismatches between RETVAL and the function's declared
   return type, warns about returning the address of a local, and
   returns the RETURN_EXPR added to the current statement list
   (NULL_TREE if the conversion failed).  */

tree
c_finish_return (location_t loc, tree retval, tree origtype)
{
  /* Declared return type of the current function.  */
  tree valtype = TREE_TYPE (TREE_TYPE (current_function_decl)), ret_stmt;
  bool no_warning = false;
  /* Whether RETVAL is a null pointer constant.  */
  bool npc = false;

  /* Use the expansion point to handle cases such as returning NULL
     in a function returning void.  */
  location_t xloc = expansion_point_location_if_in_system_header (loc);

  if (TREE_THIS_VOLATILE (current_function_decl))
    warning_at (xloc, 0,
		"function declared %<noreturn%> has a %<return%> statement");

  if (retval)
    {
      tree semantic_type = NULL_TREE;
      /* Test for a null pointer constant before folding strips the
	 information needed to decide.  */
      npc = null_pointer_constant_p (expr: retval);
      /* Temporarily peel off any EXCESS_PRECISION_EXPR wrapper so the
	 operand can be folded, then re-wrap below unless the function
	 returns void.  */
      if (TREE_CODE (retval) == EXCESS_PRECISION_EXPR)
	{
	  semantic_type = TREE_TYPE (retval);
	  retval = TREE_OPERAND (retval, 0);
	}
      retval = c_fully_fold (retval, false, NULL);
      if (semantic_type
	  && valtype != NULL_TREE
	  && TREE_CODE (valtype) != VOID_TYPE)
	retval = build1 (EXCESS_PRECISION_EXPR, semantic_type, retval);
    }

  if (!retval)
    {
      /* `return;' with no value.  Diagnose it in a non-void function:
	 a permerror for C99 and later, otherwise a warning.  */
      current_function_returns_null = 1;
      if ((warn_return_type >= 0 || flag_isoc99)
	  && valtype != NULL_TREE && TREE_CODE (valtype) != VOID_TYPE)
	{
	  no_warning = true;
	  if (emit_diagnostic (flag_isoc99 ? DK_PERMERROR : DK_WARNING,
			       loc, OPT_Wreturn_mismatch,
			       "%<return%> with no value,"
			       " in function returning non-void"))
	    inform (DECL_SOURCE_LOCATION (current_function_decl),
		    "declared here");
	}
    }
  else if (valtype == NULL_TREE || VOID_TYPE_P (valtype))
    {
      /* `return expr;' in a void function.  A void-typed expression
	 only draws a pedantic warning; anything else is a permerror.  */
      current_function_returns_null = 1;
      bool warned_here;
      if (TREE_CODE (TREE_TYPE (retval)) != VOID_TYPE)
	warned_here = permerror_opt
	  (xloc, OPT_Wreturn_mismatch,
	   "%<return%> with a value, in function returning void");
      else
	warned_here = pedwarn
	  (xloc, OPT_Wpedantic, "ISO C forbids "
	   "%<return%> with expression, in function returning void");
      if (warned_here)
	inform (DECL_SOURCE_LOCATION (current_function_decl),
		"declared here");
    }
  else
    {
      /* Normal case: convert RETVAL to the declared return type with
	 assignment semantics.  */
      tree t = convert_for_assignment (location: loc, UNKNOWN_LOCATION, type: valtype,
				       rhs: retval, origtype, errtype: ic_return,
				       null_pointer_constant: npc, NULL_TREE, NULL_TREE, parmnum: 0);
      tree res = DECL_RESULT (current_function_decl);
      tree inner;
      bool save;

      current_function_returns_value = 1;
      if (t == error_mark_node)
	return NULL_TREE;

      /* Perform the conversion in "late binary op" mode for bool and
	 complex results, and for float-to-integer conversions when the
	 float-cast sanitizer is active, so the conversion is not folded
	 prematurely.  */
      save = in_late_binary_op;
      if (C_BOOLEAN_TYPE_P (TREE_TYPE (res))
	  || TREE_CODE (TREE_TYPE (res)) == COMPLEX_TYPE
	  || (SCALAR_FLOAT_TYPE_P (TREE_TYPE (t))
	      && (TREE_CODE (TREE_TYPE (res)) == INTEGER_TYPE
		  || TREE_CODE (TREE_TYPE (res)) == ENUMERAL_TYPE)
	      && sanitize_flags_p (flag: SANITIZE_FLOAT_CAST)))
	in_late_binary_op = true;
      inner = t = convert (TREE_TYPE (res), t);
      in_late_binary_op = save;

      /* Strip any conversions, additions, and subtractions, and see if
	 we are returning the address of a local variable.  Warn if so.  */
      while (1)
	{
	  switch (TREE_CODE (inner))
	    {
	    CASE_CONVERT:
	    case NON_LVALUE_EXPR:
	    case PLUS_EXPR:
	    case POINTER_PLUS_EXPR:
	      inner = TREE_OPERAND (inner, 0);
	      continue;

	    case MINUS_EXPR:
	      /* If the second operand of the MINUS_EXPR has a pointer
		 type (or is converted from it), this may be valid, so
		 don't give a warning.  */
	      {
		tree op1 = TREE_OPERAND (inner, 1);

		while (!POINTER_TYPE_P (TREE_TYPE (op1))
		       && (CONVERT_EXPR_P (op1)
			   || TREE_CODE (op1) == NON_LVALUE_EXPR))
		  op1 = TREE_OPERAND (op1, 0);

		if (POINTER_TYPE_P (TREE_TYPE (op1)))
		  break;

		inner = TREE_OPERAND (inner, 0);
		continue;
	      }

	    case ADDR_EXPR:
	      inner = TREE_OPERAND (inner, 0);

	      /* Step through handled components to the base object.  */
	      while (REFERENCE_CLASS_P (inner)
		     && !INDIRECT_REF_P (inner))
		inner = TREE_OPERAND (inner, 0);

	      if (DECL_P (inner)
		  && !DECL_EXTERNAL (inner)
		  && !TREE_STATIC (inner)
		  && DECL_CONTEXT (inner) == current_function_decl
		  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
		{
		  if (TREE_CODE (inner) == LABEL_DECL)
		    warning_at (loc, OPT_Wreturn_local_addr,
				"function returns address of label");
		  else
		    {
		      warning_at (loc, OPT_Wreturn_local_addr,
				  "function returns address of local variable");
		      /* Replace the dangling address with a null pointer,
			 keeping T for its side effects.  */
		      tree zero = build_zero_cst (TREE_TYPE (res));
		      t = build2 (COMPOUND_EXPR, TREE_TYPE (res), t, zero);
		    }
		}
	      break;

	    default:
	      break;
	    }

	  break;
	}

      retval = build2 (MODIFY_EXPR, TREE_TYPE (res), res, t);
      SET_EXPR_LOCATION (retval, loc);

      if (warn_sequence_point)
	verify_sequence_points (retval);
    }

  ret_stmt = build_stmt (loc, RETURN_EXPR, retval);
  if (no_warning)
    suppress_warning (ret_stmt, OPT_Wreturn_type);
  return add_stmt (ret_stmt);
}
11613 | |
/* Per-switch bookkeeping, kept on a stack (see c_switch_stack) for
   the duration of each switch statement's body.  */
struct c_switch {
  /* The SWITCH_STMT being built.  */
  tree switch_stmt;

  /* The original type of the testing expression, i.e. before the
     default conversion is applied.  */
  tree orig_type;

  /* A splay-tree mapping the low element of a case range to the high
     element, or NULL_TREE if there is no high element.  Used to
     determine whether or not a new case label duplicates an old case
     label.  We need a tree, rather than simply a hash table, because
     of the GNU case range extension.  */
  splay_tree cases;

  /* The bindings at the point of the switch.  This is used for
     warnings crossing decls when branching to a case label.  */
  struct c_spot_bindings *bindings;

  /* Whether the switch includes any break statements.  Set by
     c_finish_bc_stmt; consulted when finishing the switch.  */
  bool break_stmt_seen_p;

  /* The next node on the stack.  */
  struct c_switch *next;

  /* Remember whether the controlling expression had boolean type
     before integer promotions for the sake of -Wswitch-bool.  */
  bool bool_cond_p;
};

/* A stack of the currently active switch statements.  The innermost
   switch statement is on the top of the stack.  There is no need to
   mark the stack for garbage collection because it is only active
   during the processing of the body of a function, and we never
   collect at that point.  */

struct c_switch *c_switch_stack;
11651 | |
/* Start a C switch statement, testing expression EXP.  Return the new
   SWITCH_STMT.  SWITCH_LOC is the location of the `switch'.
   SWITCH_COND_LOC is the location of the switch's condition.
   EXPLICIT_CAST_P is true if the expression EXP has an explicit cast.

   Pushes a new entry on c_switch_stack; c_finish_switch pops it.  */

tree
c_start_switch (location_t switch_loc,
		location_t switch_cond_loc,
		tree exp, bool explicit_cast_p)
{
  tree orig_type = error_mark_node;
  bool bool_cond_p = false;
  struct c_switch *cs;

  if (exp != error_mark_node)
    {
      orig_type = TREE_TYPE (exp);

      if (!INTEGRAL_TYPE_P (orig_type))
	{
	  /* Non-integer condition: diagnose once and substitute zero
	     so parsing of the body can continue.  */
	  if (orig_type != error_mark_node)
	    {
	      error_at (switch_cond_loc, "switch quantity not an integer");
	      orig_type = error_mark_node;
	    }
	  exp = integer_zero_node;
	}
      else
	{
	  tree type = TYPE_MAIN_VARIANT (orig_type);
	  tree e = exp;

	  /* Warn if the condition has boolean value.  */
	  while (TREE_CODE (e) == COMPOUND_EXPR)
	    e = TREE_OPERAND (e, 1);

	  if ((C_BOOLEAN_TYPE_P (type)
	       || truth_value_p (TREE_CODE (e)))
	      /* Explicit cast to int suppresses this warning.  */
	      && !(TREE_CODE (type) == INTEGER_TYPE
		   && explicit_cast_p))
	    bool_cond_p = true;

	  /* -Wtraditional: traditional C converted switch expressions
	     differently for long types.  */
	  if (!in_system_header_at (loc: input_location)
	      && (type == long_integer_type_node
		  || type == long_unsigned_type_node))
	    warning_at (switch_cond_loc,
			OPT_Wtraditional, "%<long%> switch expression not "
			"converted to %<int%> in ISO C");

	  exp = c_fully_fold (exp, false, NULL);
	  exp = default_conversion (exp);

	  if (warn_sequence_point)
	    verify_sequence_points (exp);
	}
    }

  /* Add this new SWITCH_STMT to the stack.  */
  cs = XNEW (struct c_switch);
  cs->switch_stmt = build_stmt (switch_loc, SWITCH_STMT, exp,
				NULL_TREE, orig_type, NULL_TREE);
  cs->orig_type = orig_type;
  cs->cases = splay_tree_new (case_compare, NULL, NULL);
  cs->bindings = c_get_switch_bindings ();
  cs->break_stmt_seen_p = false;
  cs->bool_cond_p = bool_cond_p;
  cs->next = c_switch_stack;
  c_switch_stack = cs;

  return add_stmt (cs->switch_stmt);
}
11724 | |
11725 | /* Process a case label at location LOC, with attributes ATTRS. */ |
11726 | |
11727 | tree |
11728 | do_case (location_t loc, tree low_value, tree high_value, tree attrs) |
11729 | { |
11730 | tree label = NULL_TREE; |
11731 | |
11732 | if (low_value && TREE_CODE (low_value) != INTEGER_CST) |
11733 | { |
11734 | low_value = c_fully_fold (low_value, false, NULL); |
11735 | if (TREE_CODE (low_value) == INTEGER_CST) |
11736 | pedwarn (loc, OPT_Wpedantic, |
11737 | "case label is not an integer constant expression" ); |
11738 | } |
11739 | |
11740 | if (high_value && TREE_CODE (high_value) != INTEGER_CST) |
11741 | { |
11742 | high_value = c_fully_fold (high_value, false, NULL); |
11743 | if (TREE_CODE (high_value) == INTEGER_CST) |
11744 | pedwarn (input_location, OPT_Wpedantic, |
11745 | "case label is not an integer constant expression" ); |
11746 | } |
11747 | |
11748 | if (c_switch_stack == NULL) |
11749 | { |
11750 | if (low_value) |
11751 | error_at (loc, "case label not within a switch statement" ); |
11752 | else |
11753 | error_at (loc, "%<default%> label not within a switch statement" ); |
11754 | return NULL_TREE; |
11755 | } |
11756 | |
11757 | if (c_check_switch_jump_warnings (c_switch_stack->bindings, |
11758 | EXPR_LOCATION (c_switch_stack->switch_stmt), |
11759 | loc)) |
11760 | return NULL_TREE; |
11761 | |
11762 | label = c_add_case_label (loc, c_switch_stack->cases, |
11763 | SWITCH_STMT_COND (c_switch_stack->switch_stmt), |
11764 | low_value, high_value, attrs); |
11765 | if (label == error_mark_node) |
11766 | label = NULL_TREE; |
11767 | return label; |
11768 | } |
11769 | |
11770 | /* Finish the switch statement. TYPE is the original type of the |
11771 | controlling expression of the switch, or NULL_TREE. */ |
11772 | |
11773 | void |
11774 | c_finish_switch (tree body, tree type) |
11775 | { |
11776 | struct c_switch *cs = c_switch_stack; |
11777 | location_t switch_location; |
11778 | |
11779 | SWITCH_STMT_BODY (cs->switch_stmt) = body; |
11780 | |
11781 | /* Emit warnings as needed. */ |
11782 | switch_location = EXPR_LOCATION (cs->switch_stmt); |
11783 | c_do_switch_warnings (cs->cases, switch_location, |
11784 | type ? type : SWITCH_STMT_TYPE (cs->switch_stmt), |
11785 | SWITCH_STMT_COND (cs->switch_stmt), cs->bool_cond_p); |
11786 | if (c_switch_covers_all_cases_p (cs->cases, |
11787 | SWITCH_STMT_TYPE (cs->switch_stmt))) |
11788 | SWITCH_STMT_ALL_CASES_P (cs->switch_stmt) = 1; |
11789 | SWITCH_STMT_NO_BREAK_P (cs->switch_stmt) = !cs->break_stmt_seen_p; |
11790 | |
11791 | /* Pop the stack. */ |
11792 | c_switch_stack = cs->next; |
11793 | splay_tree_delete (cs->cases); |
11794 | c_release_switch_bindings (cs->bindings); |
11795 | XDELETE (cs); |
11796 | } |
11797 | |
11798 | /* Emit an if statement. IF_LOCUS is the location of the 'if'. COND, |
11799 | THEN_BLOCK and ELSE_BLOCK are expressions to be used; ELSE_BLOCK |
11800 | may be null. */ |
11801 | |
11802 | void |
11803 | c_finish_if_stmt (location_t if_locus, tree cond, tree then_block, |
11804 | tree else_block) |
11805 | { |
11806 | tree stmt; |
11807 | |
11808 | stmt = build3 (COND_EXPR, void_type_node, cond, then_block, else_block); |
11809 | SET_EXPR_LOCATION (stmt, if_locus); |
11810 | add_stmt (stmt); |
11811 | } |
11812 | |
/* Generate a break (IS_BREAK true) or continue (IS_BREAK false)
   statement at location LOC.  LABEL is the jump target used when
   expanding an Objective-C fast-enumeration loop, which is built from
   low-level gotos rather than BREAK_STMT/CONTINUE_STMT.  Diagnoses
   break/continue outside any enclosing construct that accepts them
   (tracked in the global in_statement bitmask) and returns the
   statement added, or NULL_TREE on error or when unreachable.  */
tree
c_finish_bc_stmt (location_t loc, tree label, bool is_break)
{
  /* In switch statements break is sometimes stylistically used after
     a return statement.  This can lead to spurious warnings about
     control reaching the end of a non-void function when it is
     inlined.  Note that we are calling block_may_fallthru with
     language specific tree nodes; this works because
     block_may_fallthru returns true when given something it does not
     understand.  */
  bool skip = !block_may_fallthru (cur_stmt_list);

  if (is_break)
    switch (in_statement)
      {
      case 0:
	error_at (loc, "break statement not within loop or switch");
	return NULL_TREE;
      case IN_OMP_BLOCK:
	error_at (loc, "invalid exit from OpenMP structured block");
	return NULL_TREE;
      case IN_OMP_FOR:
	error_at (loc, "break statement used with OpenMP for loop");
	return NULL_TREE;
      case IN_ITERATION_STMT:
      case IN_OBJC_FOREACH:
	break;
      default:
	/* Only a switch context remains; record that it contains a
	   break so c_finish_switch can clear SWITCH_STMT_NO_BREAK_P.  */
	gcc_assert (in_statement & IN_SWITCH_STMT);
	c_switch_stack->break_stmt_seen_p = true;
	break;
      }
  else
    /* continue ignores an enclosing switch, so mask that bit off
       before deciding which construct it binds to.  */
    switch (in_statement & ~IN_SWITCH_STMT)
      {
      case 0:
	error_at (loc, "continue statement not within a loop");
	return NULL_TREE;
      case IN_OMP_BLOCK:
	error_at (loc, "invalid exit from OpenMP structured block");
	return NULL_TREE;
      case IN_ITERATION_STMT:
      case IN_OMP_FOR:
      case IN_OBJC_FOREACH:
	break;
      default:
	gcc_unreachable ();
      }

  if (skip)
    return NULL_TREE;
  else if ((in_statement & IN_OBJC_FOREACH)
	   && !(is_break && (in_statement & IN_SWITCH_STMT)))
    {
      /* The foreach expander produces low-level code using gotos instead
	 of a structured loop construct.  */
      gcc_assert (label);
      return add_stmt (build_stmt (loc, GOTO_EXPR, label));
    }
  return add_stmt (build_stmt (loc, (is_break ? BREAK_STMT : CONTINUE_STMT)));
}
11874 | |
11875 | /* A helper routine for c_process_expr_stmt and c_finish_stmt_expr. */ |
11876 | |
11877 | static void |
11878 | emit_side_effect_warnings (location_t loc, tree expr) |
11879 | { |
11880 | maybe_warn_nodiscard (loc, expr); |
11881 | if (!warn_unused_value) |
11882 | return; |
11883 | if (expr == error_mark_node) |
11884 | ; |
11885 | else if (!TREE_SIDE_EFFECTS (expr)) |
11886 | { |
11887 | if (!VOID_TYPE_P (TREE_TYPE (expr)) |
11888 | && !warning_suppressed_p (expr, OPT_Wunused_value)) |
11889 | warning_at (loc, OPT_Wunused_value, "statement with no effect" ); |
11890 | } |
11891 | else if (TREE_CODE (expr) == COMPOUND_EXPR) |
11892 | { |
11893 | tree r = expr; |
11894 | location_t cloc = loc; |
11895 | while (TREE_CODE (r) == COMPOUND_EXPR) |
11896 | { |
11897 | if (EXPR_HAS_LOCATION (r)) |
11898 | cloc = EXPR_LOCATION (r); |
11899 | r = TREE_OPERAND (r, 1); |
11900 | } |
11901 | if (!TREE_SIDE_EFFECTS (r) |
11902 | && !VOID_TYPE_P (TREE_TYPE (r)) |
11903 | && !CONVERT_EXPR_P (r) |
11904 | && !warning_suppressed_p (r, OPT_Wunused_value) |
11905 | && !warning_suppressed_p (expr, OPT_Wunused_value)) |
11906 | warning_at (cloc, OPT_Wunused_value, |
11907 | "right-hand operand of comma expression has no effect" ); |
11908 | } |
11909 | else |
11910 | warn_if_unused_value (expr, loc); |
11911 | } |
11912 | |
/* Process an expression as if it were a complete statement.  Emit
   diagnostics, but do not call ADD_STMT.  LOC is the location of the
   statement.  Returns the (possibly wrapped) expression, or NULL_TREE
   if EXPR was null.  */

tree
c_process_expr_stmt (location_t loc, tree expr)
{
  tree exprv;

  if (!expr)
    return NULL_TREE;

  /* Fold the full expression now that it is complete.  */
  expr = c_fully_fold (expr, false, NULL);

  if (warn_sequence_point)
    verify_sequence_points (expr);

  /* An expression statement must have complete (or void, or array)
     type.  */
  if (TREE_TYPE (expr) != error_mark_node
      && !COMPLETE_OR_VOID_TYPE_P (TREE_TYPE (expr))
      && TREE_CODE (TREE_TYPE (expr)) != ARRAY_TYPE)
    error_at (loc, "expression statement has incomplete type" );

  /* If we're not processing a statement expression, warn about unused values.
     Warnings for statement expressions will be emitted later, once we figure
     out which is the result.  */
  if (!STATEMENT_LIST_STMT_EXPR (cur_stmt_list)
      && (warn_unused_value || warn_unused_result))
    emit_side_effect_warnings (EXPR_LOC_OR_LOC (expr, loc), expr);

  /* Strip comma expressions and conversions to find the value the
     statement ultimately reads, and mark it as read so -Wunused-but-set
     diagnostics see the use.  */
  exprv = expr;
  while (TREE_CODE (exprv) == COMPOUND_EXPR)
    exprv = TREE_OPERAND (exprv, 1);
  while (CONVERT_EXPR_P (exprv))
    exprv = TREE_OPERAND (exprv, 0);
  if (DECL_P (exprv)
      || handled_component_p (t: exprv)
      || TREE_CODE (exprv) == ADDR_EXPR)
    mark_exp_read (exp: exprv);

  /* Bare decls and constants cannot carry a location of their own:
     wrap them in a no-op NOP_EXPR so the statement gets a line
     number.  */
  if (DECL_P (expr) || CONSTANT_CLASS_P (expr))
    {
      expr = build1 (NOP_EXPR, TREE_TYPE (expr), expr);
      SET_EXPR_LOCATION (expr, loc);
    }

  return expr;
}
11962 | |
11963 | /* Emit an expression as a statement. LOC is the location of the |
11964 | expression. */ |
11965 | |
11966 | tree |
11967 | c_finish_expr_stmt (location_t loc, tree expr) |
11968 | { |
11969 | if (expr) |
11970 | return add_stmt (c_process_expr_stmt (loc, expr)); |
11971 | else |
11972 | return NULL; |
11973 | } |
11974 | |
11975 | /* Do the opposite and emit a statement as an expression. To begin, |
11976 | create a new binding level and return it. */ |
11977 | |
11978 | tree |
11979 | c_begin_stmt_expr (void) |
11980 | { |
11981 | tree ret; |
11982 | |
11983 | /* We must force a BLOCK for this level so that, if it is not expanded |
11984 | later, there is a way to turn off the entire subtree of blocks that |
11985 | are contained in it. */ |
11986 | keep_next_level (); |
11987 | ret = c_begin_compound_stmt (true); |
11988 | |
11989 | c_bindings_start_stmt_expr (c_switch_stack == NULL |
11990 | ? NULL |
11991 | : c_switch_stack->bindings); |
11992 | |
11993 | /* Mark the current statement list as belonging to a statement list. */ |
11994 | STATEMENT_LIST_STMT_EXPR (ret) = 1; |
11995 | |
11996 | return ret; |
11997 | } |
11998 | |
/* Finish a GNU statement expression.  BODY is the statement list begun
   by c_begin_stmt_expr; LOC is the location of the compound statement
   to which this body belongs.  Returns an expression whose value is
   that of the last statement in BODY (or a void BIND_EXPR when there
   is no value).  */

tree
c_finish_stmt_expr (location_t loc, tree body)
{
  tree last, type, tmp, val;
  tree *last_p;

  body = c_end_compound_stmt (loc, body, true);

  c_bindings_end_stmt_expr (c_switch_stack == NULL
			    ? NULL
			    : c_switch_stack->bindings);

  /* Locate the last statement in BODY.  See c_end_compound_stmt
     about always returning a BIND_EXPR.  LAST_P tracks the slot
     holding LAST so the statement can be rewritten in place below.  */
  last_p = &BIND_EXPR_BODY (body);
  last = BIND_EXPR_BODY (body);

 continue_searching:
  if (TREE_CODE (last) == STATEMENT_LIST)
    {
      tree_stmt_iterator l = tsi_last (t: last);

      /* Debug markers are not statements; step back past them.  */
      while (!tsi_end_p (i: l) && TREE_CODE (tsi_stmt (l)) == DEBUG_BEGIN_STMT)
	tsi_prev (i: &l);

      /* This can happen with degenerate cases like ({ }).  No value.  */
      if (tsi_end_p (i: l))
	return body;

      /* If we're supposed to generate side effects warnings, process
	 all of the statements except the last.  */
      if (warn_unused_value || warn_unused_result)
	{
	  for (tree_stmt_iterator i = tsi_start (t: last);
	       tsi_stmt (i) != tsi_stmt (i: l); tsi_next (i: &i))
	    {
	      location_t tloc;
	      tree t = tsi_stmt (i);

	      tloc = EXPR_HAS_LOCATION (t) ? EXPR_LOCATION (t) : loc;
	      emit_side_effect_warnings (loc: tloc, expr: t);
	    }
	}
      last_p = tsi_stmt_ptr (i: l);
      last = *last_p;
    }

  /* If the end of the list is exception related, then the list was split
     by a call to push_cleanup.  Continue searching.  */
  if (TREE_CODE (last) == TRY_FINALLY_EXPR
      || TREE_CODE (last) == TRY_CATCH_EXPR)
    {
      last_p = &TREE_OPERAND (last, 0);
      last = *last_p;
      goto continue_searching;
    }

  if (last == error_mark_node)
    return last;

  /* In the case that the BIND_EXPR is not necessary, return the
     expression out from inside it.  */
  if ((last == BIND_EXPR_BODY (body)
       /* Skip nested debug stmts.  */
       || last == expr_first (BIND_EXPR_BODY (body)))
      && BIND_EXPR_VARS (body) == NULL)
    {
      /* Even if this looks constant, do not allow it in a constant
	 expression.  */
      last = c_wrap_maybe_const (last, true);
      /* Do not warn if the return value of a statement expression is
	 unused.  */
      suppress_warning (last, OPT_Wunused);
      return last;
    }

  /* Extract the type of said expression.  */
  type = TREE_TYPE (last);

  /* If we're not returning a value at all, then the BIND_EXPR that
     we already have is a fine expression to return.  */
  if (!type || VOID_TYPE_P (type))
    return body;

  /* Now that we've located the expression containing the value, it seems
     silly to make voidify_wrapper_expr repeat the process.  Create a
     temporary of the appropriate type and stick it in a TARGET_EXPR.  */
  tmp = create_tmp_var_raw (type);

  /* Unwrap a no-op NOP_EXPR as added by c_finish_expr_stmt.  This avoids
     tree_expr_nonnegative_p giving up immediately.  */
  val = last;
  if (TREE_CODE (val) == NOP_EXPR
      && TREE_TYPE (val) == TREE_TYPE (TREE_OPERAND (val, 0)))
    val = TREE_OPERAND (val, 0);

  /* Replace the last statement with an assignment of its value into
     the temporary, then wrap the whole body in a TARGET_EXPR that
     yields the temporary.  */
  *last_p = build2 (MODIFY_EXPR, void_type_node, tmp, val);
  SET_EXPR_LOCATION (*last_p, EXPR_LOCATION (last));

  {
    tree t = build4 (TARGET_EXPR, type, tmp, body, NULL_TREE, NULL_TREE);
    SET_EXPR_LOCATION (t, loc);
    return t;
  }
}
12107 | |
12108 | /* Begin and end compound statements. This is as simple as pushing |
12109 | and popping new statement lists from the tree. */ |
12110 | |
12111 | tree |
12112 | c_begin_compound_stmt (bool do_scope) |
12113 | { |
12114 | tree stmt = push_stmt_list (); |
12115 | if (do_scope) |
12116 | push_scope (); |
12117 | return stmt; |
12118 | } |
12119 | |
12120 | /* End a compound statement. STMT is the statement. LOC is the |
12121 | location of the compound statement-- this is usually the location |
12122 | of the opening brace. */ |
12123 | |
12124 | tree |
12125 | c_end_compound_stmt (location_t loc, tree stmt, bool do_scope) |
12126 | { |
12127 | tree block = NULL; |
12128 | |
12129 | if (do_scope) |
12130 | { |
12131 | if (c_dialect_objc ()) |
12132 | objc_clear_super_receiver (); |
12133 | block = pop_scope (); |
12134 | } |
12135 | |
12136 | stmt = pop_stmt_list (stmt); |
12137 | stmt = c_build_bind_expr (loc, block, stmt); |
12138 | |
12139 | /* If this compound statement is nested immediately inside a statement |
12140 | expression, then force a BIND_EXPR to be created. Otherwise we'll |
12141 | do the wrong thing for ({ { 1; } }) or ({ 1; { } }). In particular, |
12142 | STATEMENT_LISTs merge, and thus we can lose track of what statement |
12143 | was really last. */ |
12144 | if (building_stmt_list_p () |
12145 | && STATEMENT_LIST_STMT_EXPR (cur_stmt_list) |
12146 | && TREE_CODE (stmt) != BIND_EXPR) |
12147 | { |
12148 | stmt = build3 (BIND_EXPR, void_type_node, NULL, stmt, NULL); |
12149 | TREE_SIDE_EFFECTS (stmt) = 1; |
12150 | SET_EXPR_LOCATION (stmt, loc); |
12151 | } |
12152 | |
12153 | return stmt; |
12154 | } |
12155 | |
/* Queue a cleanup.  CLEANUP is an expression/statement to be executed
   when the current scope is exited.  DECL is the declaration being
   protected; its source location is used for the new statement.
   EH_ONLY is true when this is not meant to apply to normal control
   flow transfer.  */

void
push_cleanup (tree decl, tree cleanup, bool eh_only)
{
  enum tree_code code;
  tree stmt, list;
  bool stmt_expr;

  /* TRY_CATCH_EXPR restricts the cleanup to exceptional exits;
     TRY_FINALLY_EXPR runs it on every exit from the scope.  */
  code = eh_only ? TRY_CATCH_EXPR : TRY_FINALLY_EXPR;
  stmt = build_stmt (DECL_SOURCE_LOCATION (decl), code, NULL, cleanup);
  add_stmt (stmt);
  stmt_expr = STATEMENT_LIST_STMT_EXPR (cur_stmt_list);
  /* Everything added to the scope from here on becomes the protected
     body: push a fresh statement list and splice it in as operand 0
     of the TRY_* node already emitted above.  */
  list = push_stmt_list ();
  TREE_OPERAND (stmt, 0) = list;
  /* Propagate the statement-expression flag so the split list is
     still recognized as part of a statement expression.  */
  STATEMENT_LIST_STMT_EXPR (list) = stmt_expr;
}
12175 | |
12176 | /* Build a vector comparison of ARG0 and ARG1 using CODE opcode |
12177 | into a value of TYPE type. Comparison is done via VEC_COND_EXPR. */ |
12178 | |
12179 | static tree |
12180 | build_vec_cmp (tree_code code, tree type, |
12181 | tree arg0, tree arg1) |
12182 | { |
12183 | tree zero_vec = build_zero_cst (type); |
12184 | tree minus_one_vec = build_minus_one_cst (type); |
12185 | tree cmp_type = truth_type_for (TREE_TYPE (arg0)); |
12186 | tree cmp = build2 (code, cmp_type, arg0, arg1); |
12187 | return build3 (VEC_COND_EXPR, type, cmp, minus_one_vec, zero_vec); |
12188 | } |
12189 | |
/* Possibly warn about an address of OP never being NULL in a comparison
   operation CODE (EQ_EXPR or its negation) involving null.  LOC is the
   location of the comparison.  */

static void
maybe_warn_for_null_address (location_t loc, tree op, tree_code code)
{
  /* Prevent warnings issued for macro expansion.  */
  if (!warn_address
      || warning_suppressed_p (op, OPT_Waddress)
      || from_macro_expansion_at (loc))
    return;

  if (TREE_CODE (op) == NOP_EXPR)
    {
      /* Allow casts to intptr_t to suppress the warning.  */
      tree type = TREE_TYPE (op);
      if (TREE_CODE (type) == INTEGER_TYPE)
	return;
      /* Otherwise look through the conversion.  */
      op = TREE_OPERAND (op, 0);
    }

  if (TREE_CODE (op) == POINTER_PLUS_EXPR)
    {
      /* Allow a cast to void* to suppress the warning.  */
      tree type = TREE_TYPE (TREE_TYPE (op));
      if (VOID_TYPE_P (type))
	return;

      /* Adding any value to a null pointer, including zero, is undefined
	 in C.  This includes the expression &p[0] where p is the null
	 pointer, although &p[0] will have been folded to p by this point
	 and so not diagnosed.  */
      if (code == EQ_EXPR)
	warning_at (loc, OPT_Waddress,
		    "the comparison will always evaluate as %<false%> "
		    "for the pointer operand in %qE must not be NULL" ,
		    op);
      else
	warning_at (loc, OPT_Waddress,
		    "the comparison will always evaluate as %<true%> "
		    "for the pointer operand in %qE must not be NULL" ,
		    op);

      return;
    }

  /* Only an address-of expression can be known non-null below.  */
  if (TREE_CODE (op) != ADDR_EXPR)
    return;

  op = TREE_OPERAND (op, 0);

  if (TREE_CODE (op) == IMAGPART_EXPR
      || TREE_CODE (op) == REALPART_EXPR)
    {
      /* The address of either complex part may not be null.  */
      if (code == EQ_EXPR)
	warning_at (loc, OPT_Waddress,
		    "the comparison will always evaluate as %<false%> "
		    "for the address of %qE will never be NULL" ,
		    op);
      else
	warning_at (loc, OPT_Waddress,
		    "the comparison will always evaluate as %<true%> "
		    "for the address of %qE will never be NULL" ,
		    op);
      return;
    }

  /* Set to true in the loop below if OP dereferences its operand.
     In such a case the ultimate target need not be a decl for
     the null [in]equality test to be constant.  */
  bool deref = false;

  /* Get the outermost array or object, or member.  */
  while (handled_component_p (t: op))
    {
      if (TREE_CODE (op) == COMPONENT_REF)
	{
	  /* Get the member (its address is never null).  */
	  op = TREE_OPERAND (op, 1);
	  break;
	}

      /* Get the outer array/object to refer to in the warning.  */
      op = TREE_OPERAND (op, 0);
      deref = true;
    }

  /* Without a dereference, only a decl known to have a non-null
     address makes the comparison constant.  */
  if ((!deref && !decl_with_nonnull_addr_p (op))
      || from_macro_expansion_at (loc))
    return;

  bool w;
  if (code == EQ_EXPR)
    w = warning_at (loc, OPT_Waddress,
		    "the comparison will always evaluate as %<false%> "
		    "for the address of %qE will never be NULL" ,
		    op);
  else
    w = warning_at (loc, OPT_Waddress,
		    "the comparison will always evaluate as %<true%> "
		    "for the address of %qE will never be NULL" ,
		    op);

  /* Point at the declaration whose address was taken.  */
  if (w && DECL_P (op))
    inform (DECL_SOURCE_LOCATION (op), "%qD declared here" , op);
}
12297 | |
12298 | /* Build a binary-operation expression without default conversions. |
12299 | CODE is the kind of expression to build. |
12300 | LOCATION is the operator's location. |
12301 | This function differs from `build' in several ways: |
12302 | the data type of the result is computed and recorded in it, |
12303 | warnings are generated if arg data types are invalid, |
12304 | special handling for addition and subtraction of pointers is known, |
12305 | and some optimization is done (operations on narrow ints |
12306 | are done in the narrower type when that gives the same result). |
12307 | Constant folding is also done before the result is returned. |
12308 | |
12309 | Note that the operands will never have enumeral types, or function |
12310 | or array types, because either they will have the default conversions |
12311 | performed or they have both just been converted to some other type in which |
12312 | the arithmetic is to be done. */ |
12313 | |
12314 | tree |
12315 | build_binary_op (location_t location, enum tree_code code, |
12316 | tree orig_op0, tree orig_op1, bool convert_p) |
12317 | { |
12318 | tree type0, type1, orig_type0, orig_type1; |
12319 | tree eptype; |
12320 | enum tree_code code0, code1; |
12321 | tree op0, op1; |
12322 | tree ret = error_mark_node; |
12323 | const char *invalid_op_diag; |
12324 | bool op0_int_operands, op1_int_operands; |
12325 | bool int_const, int_const_or_overflow, int_operands; |
12326 | |
12327 | /* Expression code to give to the expression when it is built. |
12328 | Normally this is CODE, which is what the caller asked for, |
12329 | but in some special cases we change it. */ |
12330 | enum tree_code resultcode = code; |
12331 | |
12332 | /* Data type in which the computation is to be performed. |
12333 | In the simplest cases this is the common type of the arguments. */ |
12334 | tree result_type = NULL; |
12335 | |
12336 | /* When the computation is in excess precision, the type of the |
12337 | final EXCESS_PRECISION_EXPR. */ |
12338 | tree semantic_result_type = NULL; |
12339 | |
12340 | /* Nonzero means operands have already been type-converted |
12341 | in whatever way is necessary. |
12342 | Zero means they need to be converted to RESULT_TYPE. */ |
12343 | int converted = 0; |
12344 | |
12345 | /* Nonzero means create the expression with this type, rather than |
12346 | RESULT_TYPE. */ |
12347 | tree build_type = NULL_TREE; |
12348 | |
12349 | /* Nonzero means after finally constructing the expression |
12350 | convert it to this type. */ |
12351 | tree final_type = NULL_TREE; |
12352 | |
12353 | /* Nonzero if this is an operation like MIN or MAX which can |
12354 | safely be computed in short if both args are promoted shorts. |
12355 | Also implies COMMON. |
12356 | -1 indicates a bitwise operation; this makes a difference |
12357 | in the exact conditions for when it is safe to do the operation |
12358 | in a narrower mode. */ |
12359 | int shorten = 0; |
12360 | |
12361 | /* Nonzero if this is a comparison operation; |
12362 | if both args are promoted shorts, compare the original shorts. |
12363 | Also implies COMMON. */ |
12364 | int short_compare = 0; |
12365 | |
12366 | /* Nonzero if this is a right-shift operation, which can be computed on the |
12367 | original short and then promoted if the operand is a promoted short. */ |
12368 | int short_shift = 0; |
12369 | |
12370 | /* Nonzero means set RESULT_TYPE to the common type of the args. */ |
12371 | int common = 0; |
12372 | |
12373 | /* True means types are compatible as far as ObjC is concerned. */ |
12374 | bool objc_ok; |
12375 | |
12376 | /* True means this is an arithmetic operation that may need excess |
12377 | precision. */ |
12378 | bool may_need_excess_precision; |
12379 | |
12380 | /* True means this is a boolean operation that converts both its |
12381 | operands to truth-values. */ |
12382 | bool boolean_op = false; |
12383 | |
12384 | /* Remember whether we're doing / or %. */ |
12385 | bool doing_div_or_mod = false; |
12386 | |
12387 | /* Remember whether we're doing << or >>. */ |
12388 | bool doing_shift = false; |
12389 | |
12390 | /* Tree holding instrumentation expression. */ |
12391 | tree instrument_expr = NULL; |
12392 | |
12393 | if (location == UNKNOWN_LOCATION) |
12394 | location = input_location; |
12395 | |
12396 | op0 = orig_op0; |
12397 | op1 = orig_op1; |
12398 | |
12399 | op0_int_operands = EXPR_INT_CONST_OPERANDS (orig_op0); |
12400 | if (op0_int_operands) |
12401 | op0 = remove_c_maybe_const_expr (expr: op0); |
12402 | op1_int_operands = EXPR_INT_CONST_OPERANDS (orig_op1); |
12403 | if (op1_int_operands) |
12404 | op1 = remove_c_maybe_const_expr (expr: op1); |
12405 | int_operands = (op0_int_operands && op1_int_operands); |
12406 | if (int_operands) |
12407 | { |
12408 | int_const_or_overflow = (TREE_CODE (orig_op0) == INTEGER_CST |
12409 | && TREE_CODE (orig_op1) == INTEGER_CST); |
12410 | int_const = (int_const_or_overflow |
12411 | && !TREE_OVERFLOW (orig_op0) |
12412 | && !TREE_OVERFLOW (orig_op1)); |
12413 | } |
12414 | else |
12415 | int_const = int_const_or_overflow = false; |
12416 | |
12417 | /* Do not apply default conversion in mixed vector/scalar expression. */ |
12418 | if (convert_p |
12419 | && VECTOR_TYPE_P (TREE_TYPE (op0)) == VECTOR_TYPE_P (TREE_TYPE (op1))) |
12420 | { |
12421 | op0 = default_conversion (exp: op0); |
12422 | op1 = default_conversion (exp: op1); |
12423 | } |
12424 | |
12425 | orig_type0 = type0 = TREE_TYPE (op0); |
12426 | |
12427 | orig_type1 = type1 = TREE_TYPE (op1); |
12428 | |
12429 | /* The expression codes of the data types of the arguments tell us |
12430 | whether the arguments are integers, floating, pointers, etc. */ |
12431 | code0 = TREE_CODE (type0); |
12432 | code1 = TREE_CODE (type1); |
12433 | |
12434 | /* Strip NON_LVALUE_EXPRs, etc., since we aren't using as an lvalue. */ |
12435 | STRIP_TYPE_NOPS (op0); |
12436 | STRIP_TYPE_NOPS (op1); |
12437 | |
12438 | /* If an error was already reported for one of the arguments, |
12439 | avoid reporting another error. */ |
12440 | |
12441 | if (code0 == ERROR_MARK || code1 == ERROR_MARK) |
12442 | return error_mark_node; |
12443 | |
12444 | if (code0 == POINTER_TYPE |
12445 | && reject_gcc_builtin (op0, EXPR_LOCATION (orig_op0))) |
12446 | return error_mark_node; |
12447 | |
12448 | if (code1 == POINTER_TYPE |
12449 | && reject_gcc_builtin (op1, EXPR_LOCATION (orig_op1))) |
12450 | return error_mark_node; |
12451 | |
12452 | if ((invalid_op_diag |
12453 | = targetm.invalid_binary_op (code, type0, type1))) |
12454 | { |
12455 | error_at (location, invalid_op_diag); |
12456 | return error_mark_node; |
12457 | } |
12458 | |
12459 | switch (code) |
12460 | { |
12461 | case PLUS_EXPR: |
12462 | case MINUS_EXPR: |
12463 | case MULT_EXPR: |
12464 | case TRUNC_DIV_EXPR: |
12465 | case CEIL_DIV_EXPR: |
12466 | case FLOOR_DIV_EXPR: |
12467 | case ROUND_DIV_EXPR: |
12468 | case EXACT_DIV_EXPR: |
12469 | may_need_excess_precision = true; |
12470 | break; |
12471 | |
12472 | case EQ_EXPR: |
12473 | case NE_EXPR: |
12474 | case LE_EXPR: |
12475 | case GE_EXPR: |
12476 | case LT_EXPR: |
12477 | case GT_EXPR: |
12478 | /* Excess precision for implicit conversions of integers to |
12479 | floating point in C11 and later. */ |
12480 | may_need_excess_precision = (flag_isoc11 |
12481 | && (ANY_INTEGRAL_TYPE_P (type0) |
12482 | || ANY_INTEGRAL_TYPE_P (type1))); |
12483 | break; |
12484 | |
12485 | default: |
12486 | may_need_excess_precision = false; |
12487 | break; |
12488 | } |
12489 | if (TREE_CODE (op0) == EXCESS_PRECISION_EXPR) |
12490 | { |
12491 | op0 = TREE_OPERAND (op0, 0); |
12492 | type0 = TREE_TYPE (op0); |
12493 | } |
12494 | else if (may_need_excess_precision |
12495 | && (eptype = excess_precision_type (type0)) != NULL_TREE) |
12496 | { |
12497 | type0 = eptype; |
12498 | op0 = convert (eptype, op0); |
12499 | } |
12500 | if (TREE_CODE (op1) == EXCESS_PRECISION_EXPR) |
12501 | { |
12502 | op1 = TREE_OPERAND (op1, 0); |
12503 | type1 = TREE_TYPE (op1); |
12504 | } |
12505 | else if (may_need_excess_precision |
12506 | && (eptype = excess_precision_type (type1)) != NULL_TREE) |
12507 | { |
12508 | type1 = eptype; |
12509 | op1 = convert (eptype, op1); |
12510 | } |
12511 | |
12512 | objc_ok = objc_compare_types (type0, type1, -3, NULL_TREE); |
12513 | |
12514 | /* In case when one of the operands of the binary operation is |
12515 | a vector and another is a scalar -- convert scalar to vector. */ |
12516 | if ((gnu_vector_type_p (type: type0) && code1 != VECTOR_TYPE) |
12517 | || (gnu_vector_type_p (type: type1) && code0 != VECTOR_TYPE)) |
12518 | { |
12519 | enum stv_conv convert_flag = scalar_to_vector (loc: location, code, op0: orig_op0, |
12520 | op1: orig_op1, true); |
12521 | |
12522 | switch (convert_flag) |
12523 | { |
12524 | case stv_error: |
12525 | return error_mark_node; |
12526 | case stv_firstarg: |
12527 | { |
12528 | bool maybe_const = true; |
12529 | tree sc; |
12530 | sc = c_fully_fold (op0, false, &maybe_const); |
12531 | sc = save_expr (sc); |
12532 | sc = convert (TREE_TYPE (type1), sc); |
12533 | op0 = build_vector_from_val (type1, sc); |
12534 | if (!maybe_const) |
12535 | op0 = c_wrap_maybe_const (op0, true); |
12536 | orig_type0 = type0 = TREE_TYPE (op0); |
12537 | code0 = TREE_CODE (type0); |
12538 | converted = 1; |
12539 | break; |
12540 | } |
12541 | case stv_secondarg: |
12542 | { |
12543 | bool maybe_const = true; |
12544 | tree sc; |
12545 | sc = c_fully_fold (op1, false, &maybe_const); |
12546 | sc = save_expr (sc); |
12547 | sc = convert (TREE_TYPE (type0), sc); |
12548 | op1 = build_vector_from_val (type0, sc); |
12549 | if (!maybe_const) |
12550 | op1 = c_wrap_maybe_const (op1, true); |
12551 | orig_type1 = type1 = TREE_TYPE (op1); |
12552 | code1 = TREE_CODE (type1); |
12553 | converted = 1; |
12554 | break; |
12555 | } |
12556 | default: |
12557 | break; |
12558 | } |
12559 | } |
12560 | |
12561 | switch (code) |
12562 | { |
12563 | case PLUS_EXPR: |
12564 | /* Handle the pointer + int case. */ |
12565 | if (code0 == POINTER_TYPE |
12566 | && (code1 == INTEGER_TYPE || code1 == BITINT_TYPE)) |
12567 | { |
12568 | ret = pointer_int_sum (location, PLUS_EXPR, op0, op1); |
12569 | goto return_build_binary_op; |
12570 | } |
12571 | else if (code1 == POINTER_TYPE |
12572 | && (code0 == INTEGER_TYPE || code0 == BITINT_TYPE)) |
12573 | { |
12574 | ret = pointer_int_sum (location, PLUS_EXPR, op1, op0); |
12575 | goto return_build_binary_op; |
12576 | } |
12577 | else |
12578 | common = 1; |
12579 | break; |
12580 | |
12581 | case MINUS_EXPR: |
12582 | /* Subtraction of two similar pointers. |
12583 | We must subtract them as integers, then divide by object size. */ |
12584 | if (code0 == POINTER_TYPE && code1 == POINTER_TYPE |
12585 | && comp_target_types (location, ttl: type0, ttr: type1)) |
12586 | { |
12587 | ret = pointer_diff (loc: location, op0, op1, instrument_expr: &instrument_expr); |
12588 | goto return_build_binary_op; |
12589 | } |
12590 | /* Handle pointer minus int. Just like pointer plus int. */ |
12591 | else if (code0 == POINTER_TYPE |
12592 | && (code1 == INTEGER_TYPE || code1 == BITINT_TYPE)) |
12593 | { |
12594 | ret = pointer_int_sum (location, MINUS_EXPR, op0, op1); |
12595 | goto return_build_binary_op; |
12596 | } |
12597 | else |
12598 | common = 1; |
12599 | break; |
12600 | |
12601 | case MULT_EXPR: |
12602 | common = 1; |
12603 | break; |
12604 | |
12605 | case TRUNC_DIV_EXPR: |
12606 | case CEIL_DIV_EXPR: |
12607 | case FLOOR_DIV_EXPR: |
12608 | case ROUND_DIV_EXPR: |
12609 | case EXACT_DIV_EXPR: |
12610 | doing_div_or_mod = true; |
12611 | warn_for_div_by_zero (location, divisor: op1); |
12612 | |
12613 | if ((code0 == INTEGER_TYPE || code0 == REAL_TYPE |
12614 | || code0 == FIXED_POINT_TYPE || code0 == BITINT_TYPE |
12615 | || code0 == COMPLEX_TYPE |
12616 | || gnu_vector_type_p (type: type0)) |
12617 | && (code1 == INTEGER_TYPE || code1 == REAL_TYPE |
12618 | || code1 == FIXED_POINT_TYPE || code1 == BITINT_TYPE |
12619 | || code1 == COMPLEX_TYPE |
12620 | || gnu_vector_type_p (type: type1))) |
12621 | { |
12622 | enum tree_code tcode0 = code0, tcode1 = code1; |
12623 | |
12624 | if (code0 == COMPLEX_TYPE || code0 == VECTOR_TYPE) |
12625 | tcode0 = TREE_CODE (TREE_TYPE (TREE_TYPE (op0))); |
12626 | if (code1 == COMPLEX_TYPE || code1 == VECTOR_TYPE) |
12627 | tcode1 = TREE_CODE (TREE_TYPE (TREE_TYPE (op1))); |
12628 | |
12629 | if (!(((tcode0 == INTEGER_TYPE || tcode0 == BITINT_TYPE) |
12630 | && (tcode1 == INTEGER_TYPE || tcode1 == BITINT_TYPE)) |
12631 | || (tcode0 == FIXED_POINT_TYPE && tcode1 == FIXED_POINT_TYPE))) |
12632 | resultcode = RDIV_EXPR; |
12633 | else |
12634 | /* Although it would be tempting to shorten always here, that |
12635 | loses on some targets, since the modulo instruction is |
12636 | undefined if the quotient can't be represented in the |
12637 | computation mode. We shorten only if unsigned or if |
12638 | dividing by something we know != -1. */ |
12639 | shorten = may_shorten_divmod (op0, op1); |
12640 | common = 1; |
12641 | } |
12642 | break; |
12643 | |
12644 | case BIT_AND_EXPR: |
12645 | case BIT_IOR_EXPR: |
12646 | case BIT_XOR_EXPR: |
12647 | if ((code0 == INTEGER_TYPE || code0 == BITINT_TYPE) |
12648 | && (code1 == INTEGER_TYPE || code1 == BITINT_TYPE)) |
12649 | shorten = -1; |
12650 | /* Allow vector types which are not floating point types. */ |
12651 | else if (gnu_vector_type_p (type: type0) |
12652 | && gnu_vector_type_p (type: type1) |
12653 | && !VECTOR_FLOAT_TYPE_P (type0) |
12654 | && !VECTOR_FLOAT_TYPE_P (type1)) |
12655 | common = 1; |
12656 | break; |
12657 | |
12658 | case TRUNC_MOD_EXPR: |
12659 | case FLOOR_MOD_EXPR: |
12660 | doing_div_or_mod = true; |
12661 | warn_for_div_by_zero (location, divisor: op1); |
12662 | |
12663 | if (gnu_vector_type_p (type: type0) |
12664 | && gnu_vector_type_p (type: type1) |
12665 | && TREE_CODE (TREE_TYPE (type0)) == INTEGER_TYPE |
12666 | && TREE_CODE (TREE_TYPE (type1)) == INTEGER_TYPE) |
12667 | common = 1; |
12668 | else if ((code0 == INTEGER_TYPE || code0 == BITINT_TYPE) |
12669 | && (code1 == INTEGER_TYPE || code1 == BITINT_TYPE)) |
12670 | { |
12671 | /* Although it would be tempting to shorten always here, that loses |
12672 | on some targets, since the modulo instruction is undefined if the |
12673 | quotient can't be represented in the computation mode. We shorten |
12674 | only if unsigned or if dividing by something we know != -1. */ |
12675 | shorten = may_shorten_divmod (op0, op1); |
12676 | common = 1; |
12677 | } |
12678 | break; |
12679 | |
12680 | case TRUTH_ANDIF_EXPR: |
12681 | case TRUTH_ORIF_EXPR: |
12682 | case TRUTH_AND_EXPR: |
12683 | case TRUTH_OR_EXPR: |
12684 | case TRUTH_XOR_EXPR: |
12685 | if ((code0 == INTEGER_TYPE || code0 == POINTER_TYPE |
12686 | || code0 == REAL_TYPE || code0 == COMPLEX_TYPE |
12687 | || code0 == FIXED_POINT_TYPE || code0 == NULLPTR_TYPE |
12688 | || code0 == BITINT_TYPE) |
12689 | && (code1 == INTEGER_TYPE || code1 == POINTER_TYPE |
12690 | || code1 == REAL_TYPE || code1 == COMPLEX_TYPE |
12691 | || code1 == FIXED_POINT_TYPE || code1 == NULLPTR_TYPE |
12692 | || code1 == BITINT_TYPE)) |
12693 | { |
12694 | /* Result of these operations is always an int, |
12695 | but that does not mean the operands should be |
12696 | converted to ints! */ |
12697 | result_type = integer_type_node; |
12698 | if (op0_int_operands) |
12699 | { |
12700 | op0 = c_objc_common_truthvalue_conversion (location, orig_op0); |
12701 | op0 = remove_c_maybe_const_expr (expr: op0); |
12702 | } |
12703 | else |
12704 | op0 = c_objc_common_truthvalue_conversion (location, op0); |
12705 | if (op1_int_operands) |
12706 | { |
12707 | op1 = c_objc_common_truthvalue_conversion (location, orig_op1); |
12708 | op1 = remove_c_maybe_const_expr (expr: op1); |
12709 | } |
12710 | else |
12711 | op1 = c_objc_common_truthvalue_conversion (location, op1); |
12712 | converted = 1; |
12713 | boolean_op = true; |
12714 | } |
12715 | if (code == TRUTH_ANDIF_EXPR) |
12716 | { |
12717 | int_const_or_overflow = (int_operands |
12718 | && TREE_CODE (orig_op0) == INTEGER_CST |
12719 | && (op0 == truthvalue_false_node |
12720 | || TREE_CODE (orig_op1) == INTEGER_CST)); |
12721 | int_const = (int_const_or_overflow |
12722 | && !TREE_OVERFLOW (orig_op0) |
12723 | && (op0 == truthvalue_false_node |
12724 | || !TREE_OVERFLOW (orig_op1))); |
12725 | } |
12726 | else if (code == TRUTH_ORIF_EXPR) |
12727 | { |
12728 | int_const_or_overflow = (int_operands |
12729 | && TREE_CODE (orig_op0) == INTEGER_CST |
12730 | && (op0 == truthvalue_true_node |
12731 | || TREE_CODE (orig_op1) == INTEGER_CST)); |
12732 | int_const = (int_const_or_overflow |
12733 | && !TREE_OVERFLOW (orig_op0) |
12734 | && (op0 == truthvalue_true_node |
12735 | || !TREE_OVERFLOW (orig_op1))); |
12736 | } |
12737 | break; |
12738 | |
12739 | /* Shift operations: result has same type as first operand; |
12740 | always convert second operand to int. |
12741 | Also set SHORT_SHIFT if shifting rightward. */ |
12742 | |
12743 | case RSHIFT_EXPR: |
12744 | if (gnu_vector_type_p (type: type0) |
12745 | && gnu_vector_type_p (type: type1) |
12746 | && TREE_CODE (TREE_TYPE (type0)) == INTEGER_TYPE |
12747 | && TREE_CODE (TREE_TYPE (type1)) == INTEGER_TYPE |
12748 | && known_eq (TYPE_VECTOR_SUBPARTS (type0), |
12749 | TYPE_VECTOR_SUBPARTS (type1))) |
12750 | { |
12751 | result_type = type0; |
12752 | converted = 1; |
12753 | } |
12754 | else if ((code0 == INTEGER_TYPE || code0 == FIXED_POINT_TYPE |
12755 | || code0 == BITINT_TYPE |
12756 | || (gnu_vector_type_p (type: type0) |
12757 | && TREE_CODE (TREE_TYPE (type0)) == INTEGER_TYPE)) |
12758 | && (code1 == INTEGER_TYPE || code1 == BITINT_TYPE)) |
12759 | { |
12760 | doing_shift = true; |
12761 | if (TREE_CODE (op1) == INTEGER_CST) |
12762 | { |
12763 | if (tree_int_cst_sgn (op1) < 0) |
12764 | { |
12765 | int_const = false; |
12766 | if (c_inhibit_evaluation_warnings == 0) |
12767 | warning_at (location, OPT_Wshift_count_negative, |
12768 | "right shift count is negative" ); |
12769 | } |
12770 | else if (code0 == VECTOR_TYPE) |
12771 | { |
12772 | if (compare_tree_int (op1, |
12773 | TYPE_PRECISION (TREE_TYPE (type0))) |
12774 | >= 0) |
12775 | { |
12776 | int_const = false; |
12777 | if (c_inhibit_evaluation_warnings == 0) |
12778 | warning_at (location, OPT_Wshift_count_overflow, |
12779 | "right shift count >= width of vector element" ); |
12780 | } |
12781 | } |
12782 | else |
12783 | { |
12784 | if (!integer_zerop (op1)) |
12785 | short_shift = 1; |
12786 | |
12787 | if (compare_tree_int (op1, TYPE_PRECISION (type0)) >= 0) |
12788 | { |
12789 | int_const = false; |
12790 | if (c_inhibit_evaluation_warnings == 0) |
12791 | warning_at (location, OPT_Wshift_count_overflow, |
12792 | "right shift count >= width of type" ); |
12793 | } |
12794 | } |
12795 | } |
12796 | |
12797 | /* Use the type of the value to be shifted. */ |
12798 | result_type = type0; |
12799 | /* Avoid converting op1 to result_type later. */ |
12800 | converted = 1; |
12801 | } |
12802 | break; |
12803 | |
12804 | case LSHIFT_EXPR: |
12805 | if (gnu_vector_type_p (type: type0) |
12806 | && gnu_vector_type_p (type: type1) |
12807 | && TREE_CODE (TREE_TYPE (type0)) == INTEGER_TYPE |
12808 | && TREE_CODE (TREE_TYPE (type1)) == INTEGER_TYPE |
12809 | && known_eq (TYPE_VECTOR_SUBPARTS (type0), |
12810 | TYPE_VECTOR_SUBPARTS (type1))) |
12811 | { |
12812 | result_type = type0; |
12813 | converted = 1; |
12814 | } |
12815 | else if ((code0 == INTEGER_TYPE || code0 == FIXED_POINT_TYPE |
12816 | || code0 == BITINT_TYPE |
12817 | || (gnu_vector_type_p (type: type0) |
12818 | && TREE_CODE (TREE_TYPE (type0)) == INTEGER_TYPE)) |
12819 | && (code1 == INTEGER_TYPE || code1 == BITINT_TYPE)) |
12820 | { |
12821 | doing_shift = true; |
12822 | if (TREE_CODE (op0) == INTEGER_CST |
12823 | && tree_int_cst_sgn (op0) < 0 |
12824 | && !TYPE_OVERFLOW_WRAPS (type0)) |
12825 | { |
12826 | /* Don't reject a left shift of a negative value in a context |
12827 | where a constant expression is needed in C90. */ |
12828 | if (flag_isoc99) |
12829 | int_const = false; |
12830 | if (c_inhibit_evaluation_warnings == 0) |
12831 | warning_at (location, OPT_Wshift_negative_value, |
12832 | "left shift of negative value" ); |
12833 | } |
12834 | if (TREE_CODE (op1) == INTEGER_CST) |
12835 | { |
12836 | if (tree_int_cst_sgn (op1) < 0) |
12837 | { |
12838 | int_const = false; |
12839 | if (c_inhibit_evaluation_warnings == 0) |
12840 | warning_at (location, OPT_Wshift_count_negative, |
12841 | "left shift count is negative" ); |
12842 | } |
12843 | else if (code0 == VECTOR_TYPE) |
12844 | { |
12845 | if (compare_tree_int (op1, |
12846 | TYPE_PRECISION (TREE_TYPE (type0))) |
12847 | >= 0) |
12848 | { |
12849 | int_const = false; |
12850 | if (c_inhibit_evaluation_warnings == 0) |
12851 | warning_at (location, OPT_Wshift_count_overflow, |
12852 | "left shift count >= width of vector element" ); |
12853 | } |
12854 | } |
12855 | else if (compare_tree_int (op1, TYPE_PRECISION (type0)) >= 0) |
12856 | { |
12857 | int_const = false; |
12858 | if (c_inhibit_evaluation_warnings == 0) |
12859 | warning_at (location, OPT_Wshift_count_overflow, |
12860 | "left shift count >= width of type" ); |
12861 | } |
12862 | else if (TREE_CODE (op0) == INTEGER_CST |
12863 | && maybe_warn_shift_overflow (location, op0, op1) |
12864 | && flag_isoc99) |
12865 | int_const = false; |
12866 | } |
12867 | |
12868 | /* Use the type of the value to be shifted. */ |
12869 | result_type = type0; |
12870 | /* Avoid converting op1 to result_type later. */ |
12871 | converted = 1; |
12872 | } |
12873 | break; |
12874 | |
12875 | case EQ_EXPR: |
12876 | case NE_EXPR: |
12877 | if (gnu_vector_type_p (type: type0) && gnu_vector_type_p (type: type1)) |
12878 | { |
12879 | tree intt; |
12880 | if (!vector_types_compatible_elements_p (type0, type1)) |
12881 | { |
12882 | error_at (location, "comparing vectors with different " |
12883 | "element types" ); |
12884 | return error_mark_node; |
12885 | } |
12886 | |
12887 | if (maybe_ne (a: TYPE_VECTOR_SUBPARTS (node: type0), |
12888 | b: TYPE_VECTOR_SUBPARTS (node: type1))) |
12889 | { |
12890 | error_at (location, "comparing vectors with different " |
12891 | "number of elements" ); |
12892 | return error_mark_node; |
12893 | } |
12894 | |
12895 | /* It's not precisely specified how the usual arithmetic |
12896 | conversions apply to the vector types. Here, we use |
12897 | the unsigned type if one of the operands is signed and |
12898 | the other one is unsigned. */ |
12899 | if (TYPE_UNSIGNED (type0) != TYPE_UNSIGNED (type1)) |
12900 | { |
12901 | if (!TYPE_UNSIGNED (type0)) |
12902 | op0 = build1 (VIEW_CONVERT_EXPR, type1, op0); |
12903 | else |
12904 | op1 = build1 (VIEW_CONVERT_EXPR, type0, op1); |
12905 | warning_at (location, OPT_Wsign_compare, "comparison between " |
12906 | "types %qT and %qT" , type0, type1); |
12907 | } |
12908 | |
12909 | /* Always construct signed integer vector type. */ |
12910 | intt = c_common_type_for_size (GET_MODE_BITSIZE |
12911 | (SCALAR_TYPE_MODE |
12912 | (TREE_TYPE (type0))), 0); |
12913 | if (!intt) |
12914 | { |
12915 | error_at (location, "could not find an integer type " |
12916 | "of the same size as %qT" , |
12917 | TREE_TYPE (type0)); |
12918 | return error_mark_node; |
12919 | } |
12920 | result_type = build_opaque_vector_type (intt, |
12921 | TYPE_VECTOR_SUBPARTS (node: type0)); |
12922 | converted = 1; |
12923 | ret = build_vec_cmp (code: resultcode, type: result_type, arg0: op0, arg1: op1); |
12924 | goto return_build_binary_op; |
12925 | } |
12926 | if (FLOAT_TYPE_P (type0) || FLOAT_TYPE_P (type1)) |
12927 | warning_at (location, |
12928 | OPT_Wfloat_equal, |
12929 | "comparing floating-point with %<==%> or %<!=%> is unsafe" ); |
12930 | /* Result of comparison is always int, |
12931 | but don't convert the args to int! */ |
12932 | build_type = integer_type_node; |
12933 | if ((code0 == INTEGER_TYPE || code0 == REAL_TYPE || code0 == BITINT_TYPE |
12934 | || code0 == FIXED_POINT_TYPE || code0 == COMPLEX_TYPE) |
12935 | && (code1 == INTEGER_TYPE || code1 == REAL_TYPE |
12936 | || code1 == BITINT_TYPE |
12937 | || code1 == FIXED_POINT_TYPE || code1 == COMPLEX_TYPE)) |
12938 | short_compare = 1; |
12939 | else if (code0 == POINTER_TYPE |
12940 | && (code1 == NULLPTR_TYPE |
12941 | || null_pointer_constant_p (expr: orig_op1))) |
12942 | { |
12943 | maybe_warn_for_null_address (loc: location, op: op0, code); |
12944 | result_type = type0; |
12945 | } |
12946 | else if (code1 == POINTER_TYPE |
12947 | && (code0 == NULLPTR_TYPE |
12948 | || null_pointer_constant_p (expr: orig_op0))) |
12949 | { |
12950 | maybe_warn_for_null_address (loc: location, op: op1, code); |
12951 | result_type = type1; |
12952 | } |
12953 | else if (code0 == POINTER_TYPE && code1 == POINTER_TYPE) |
12954 | { |
12955 | tree tt0 = TREE_TYPE (type0); |
12956 | tree tt1 = TREE_TYPE (type1); |
12957 | addr_space_t as0 = TYPE_ADDR_SPACE (tt0); |
12958 | addr_space_t as1 = TYPE_ADDR_SPACE (tt1); |
12959 | addr_space_t as_common = ADDR_SPACE_GENERIC; |
12960 | |
12961 | /* Anything compares with void *. void * compares with anything. |
12962 | Otherwise, the targets must be compatible |
12963 | and both must be object or both incomplete. */ |
12964 | if (comp_target_types (location, ttl: type0, ttr: type1)) |
12965 | result_type = common_pointer_type (t1: type0, t2: type1); |
12966 | else if (!addr_space_superset (as1: as0, as2: as1, common: &as_common)) |
12967 | { |
12968 | error_at (location, "comparison of pointers to " |
12969 | "disjoint address spaces" ); |
12970 | return error_mark_node; |
12971 | } |
12972 | else if (VOID_TYPE_P (tt0) && !TYPE_ATOMIC (tt0)) |
12973 | { |
12974 | if (pedantic && TREE_CODE (tt1) == FUNCTION_TYPE) |
12975 | pedwarn (location, OPT_Wpedantic, "ISO C forbids " |
12976 | "comparison of %<void *%> with function pointer" ); |
12977 | } |
12978 | else if (VOID_TYPE_P (tt1) && !TYPE_ATOMIC (tt1)) |
12979 | { |
12980 | if (pedantic && TREE_CODE (tt0) == FUNCTION_TYPE) |
12981 | pedwarn (location, OPT_Wpedantic, "ISO C forbids " |
12982 | "comparison of %<void *%> with function pointer" ); |
12983 | } |
12984 | else |
12985 | /* Avoid warning about the volatile ObjC EH puts on decls. */ |
12986 | if (!objc_ok) |
12987 | pedwarn (location, OPT_Wcompare_distinct_pointer_types, |
12988 | "comparison of distinct pointer types lacks a cast" ); |
12989 | |
12990 | if (result_type == NULL_TREE) |
12991 | { |
12992 | int qual = ENCODE_QUAL_ADDR_SPACE (as_common); |
12993 | result_type = build_pointer_type |
12994 | (build_qualified_type (void_type_node, qual)); |
12995 | } |
12996 | } |
12997 | else if (code0 == POINTER_TYPE |
12998 | && (code1 == INTEGER_TYPE || code1 == BITINT_TYPE)) |
12999 | { |
13000 | result_type = type0; |
13001 | pedwarn (location, 0, "comparison between pointer and integer" ); |
13002 | } |
13003 | else if ((code0 == INTEGER_TYPE || code0 == BITINT_TYPE) |
13004 | && code1 == POINTER_TYPE) |
13005 | { |
13006 | result_type = type1; |
13007 | pedwarn (location, 0, "comparison between pointer and integer" ); |
13008 | } |
13009 | /* 6.5.9: One of the following shall hold: |
13010 | -- both operands have type nullptr_t; */ |
13011 | else if (code0 == NULLPTR_TYPE && code1 == NULLPTR_TYPE) |
13012 | { |
13013 | result_type = nullptr_type_node; |
13014 | /* No need to convert the operands to result_type later. */ |
13015 | converted = 1; |
13016 | } |
13017 | /* -- one operand has type nullptr_t and the other is a null pointer |
13018 | constant. We will have to convert the former to the type of the |
13019 | latter, because during gimplification we can't have mismatching |
13020 | comparison operand type. We convert from nullptr_t to the other |
13021 | type, since only nullptr_t can be converted to nullptr_t. Also, |
13022 | even a constant 0 is a null pointer constant, so we may have to |
13023 | create a pointer type from its type. */ |
13024 | else if (code0 == NULLPTR_TYPE && null_pointer_constant_p (expr: orig_op1)) |
13025 | result_type = (INTEGRAL_TYPE_P (type1) |
13026 | ? build_pointer_type (type1) : type1); |
13027 | else if (code1 == NULLPTR_TYPE && null_pointer_constant_p (expr: orig_op0)) |
13028 | result_type = (INTEGRAL_TYPE_P (type0) |
13029 | ? build_pointer_type (type0) : type0); |
13030 | if ((C_BOOLEAN_TYPE_P (TREE_TYPE (orig_op0)) |
13031 | || truth_value_p (TREE_CODE (orig_op0))) |
13032 | ^ (C_BOOLEAN_TYPE_P (TREE_TYPE (orig_op1)) |
13033 | || truth_value_p (TREE_CODE (orig_op1)))) |
13034 | maybe_warn_bool_compare (location, code, orig_op0, orig_op1); |
13035 | break; |
13036 | |
13037 | case LE_EXPR: |
13038 | case GE_EXPR: |
13039 | case LT_EXPR: |
13040 | case GT_EXPR: |
13041 | if (gnu_vector_type_p (type: type0) && gnu_vector_type_p (type: type1)) |
13042 | { |
13043 | tree intt; |
13044 | if (!vector_types_compatible_elements_p (type0, type1)) |
13045 | { |
13046 | error_at (location, "comparing vectors with different " |
13047 | "element types" ); |
13048 | return error_mark_node; |
13049 | } |
13050 | |
13051 | if (maybe_ne (a: TYPE_VECTOR_SUBPARTS (node: type0), |
13052 | b: TYPE_VECTOR_SUBPARTS (node: type1))) |
13053 | { |
13054 | error_at (location, "comparing vectors with different " |
13055 | "number of elements" ); |
13056 | return error_mark_node; |
13057 | } |
13058 | |
13059 | /* It's not precisely specified how the usual arithmetic |
13060 | conversions apply to the vector types. Here, we use |
13061 | the unsigned type if one of the operands is signed and |
13062 | the other one is unsigned. */ |
13063 | if (TYPE_UNSIGNED (type0) != TYPE_UNSIGNED (type1)) |
13064 | { |
13065 | if (!TYPE_UNSIGNED (type0)) |
13066 | op0 = build1 (VIEW_CONVERT_EXPR, type1, op0); |
13067 | else |
13068 | op1 = build1 (VIEW_CONVERT_EXPR, type0, op1); |
13069 | warning_at (location, OPT_Wsign_compare, "comparison between " |
13070 | "types %qT and %qT" , type0, type1); |
13071 | } |
13072 | |
13073 | /* Always construct signed integer vector type. */ |
13074 | intt = c_common_type_for_size (GET_MODE_BITSIZE |
13075 | (SCALAR_TYPE_MODE |
13076 | (TREE_TYPE (type0))), 0); |
13077 | if (!intt) |
13078 | { |
13079 | error_at (location, "could not find an integer type " |
13080 | "of the same size as %qT" , |
13081 | TREE_TYPE (type0)); |
13082 | return error_mark_node; |
13083 | } |
13084 | result_type = build_opaque_vector_type (intt, |
13085 | TYPE_VECTOR_SUBPARTS (node: type0)); |
13086 | converted = 1; |
13087 | ret = build_vec_cmp (code: resultcode, type: result_type, arg0: op0, arg1: op1); |
13088 | goto return_build_binary_op; |
13089 | } |
13090 | build_type = integer_type_node; |
13091 | if ((code0 == INTEGER_TYPE || code0 == REAL_TYPE |
13092 | || code0 == BITINT_TYPE || code0 == FIXED_POINT_TYPE) |
13093 | && (code1 == INTEGER_TYPE || code1 == REAL_TYPE |
13094 | || code1 == BITINT_TYPE || code1 == FIXED_POINT_TYPE)) |
13095 | short_compare = 1; |
13096 | else if (code0 == POINTER_TYPE && code1 == POINTER_TYPE) |
13097 | { |
13098 | addr_space_t as0 = TYPE_ADDR_SPACE (TREE_TYPE (type0)); |
13099 | addr_space_t as1 = TYPE_ADDR_SPACE (TREE_TYPE (type1)); |
13100 | addr_space_t as_common; |
13101 | |
13102 | if (comp_target_types (location, ttl: type0, ttr: type1)) |
13103 | { |
13104 | result_type = common_pointer_type (t1: type0, t2: type1); |
13105 | if (!COMPLETE_TYPE_P (TREE_TYPE (type0)) |
13106 | != !COMPLETE_TYPE_P (TREE_TYPE (type1))) |
13107 | pedwarn_c99 (location, opt: OPT_Wpedantic, |
13108 | "comparison of complete and incomplete pointers" ); |
13109 | else if (TREE_CODE (TREE_TYPE (type0)) == FUNCTION_TYPE) |
13110 | pedwarn (location, OPT_Wpedantic, "ISO C forbids " |
13111 | "ordered comparisons of pointers to functions" ); |
13112 | else if (null_pointer_constant_p (expr: orig_op0) |
13113 | || null_pointer_constant_p (expr: orig_op1)) |
13114 | warning_at (location, OPT_Wextra, |
13115 | "ordered comparison of pointer with null pointer" ); |
13116 | |
13117 | } |
13118 | else if (!addr_space_superset (as1: as0, as2: as1, common: &as_common)) |
13119 | { |
13120 | error_at (location, "comparison of pointers to " |
13121 | "disjoint address spaces" ); |
13122 | return error_mark_node; |
13123 | } |
13124 | else |
13125 | { |
13126 | int qual = ENCODE_QUAL_ADDR_SPACE (as_common); |
13127 | result_type = build_pointer_type |
13128 | (build_qualified_type (void_type_node, qual)); |
13129 | pedwarn (location, OPT_Wcompare_distinct_pointer_types, |
13130 | "comparison of distinct pointer types lacks a cast" ); |
13131 | } |
13132 | } |
13133 | else if (code0 == POINTER_TYPE && null_pointer_constant_p (expr: orig_op1)) |
13134 | { |
13135 | result_type = type0; |
13136 | if (pedantic) |
13137 | pedwarn (location, OPT_Wpedantic, |
13138 | "ordered comparison of pointer with integer zero" ); |
13139 | else if (extra_warnings) |
13140 | warning_at (location, OPT_Wextra, |
13141 | "ordered comparison of pointer with integer zero" ); |
13142 | } |
13143 | else if (code1 == POINTER_TYPE && null_pointer_constant_p (expr: orig_op0)) |
13144 | { |
13145 | result_type = type1; |
13146 | if (pedantic) |
13147 | pedwarn (location, OPT_Wpedantic, |
13148 | "ordered comparison of pointer with integer zero" ); |
13149 | else if (extra_warnings) |
13150 | warning_at (location, OPT_Wextra, |
13151 | "ordered comparison of pointer with integer zero" ); |
13152 | } |
13153 | else if (code0 == POINTER_TYPE |
13154 | && (code1 == INTEGER_TYPE || code1 == BITINT_TYPE)) |
13155 | { |
13156 | result_type = type0; |
13157 | pedwarn (location, 0, "comparison between pointer and integer" ); |
13158 | } |
13159 | else if ((code0 == INTEGER_TYPE || code0 == BITINT_TYPE) |
13160 | && code1 == POINTER_TYPE) |
13161 | { |
13162 | result_type = type1; |
13163 | pedwarn (location, 0, "comparison between pointer and integer" ); |
13164 | } |
13165 | |
13166 | if ((code0 == POINTER_TYPE || code1 == POINTER_TYPE) |
13167 | && current_function_decl != NULL_TREE |
13168 | && sanitize_flags_p (flag: SANITIZE_POINTER_COMPARE)) |
13169 | { |
13170 | op0 = save_expr (op0); |
13171 | op1 = save_expr (op1); |
13172 | |
13173 | tree tt = builtin_decl_explicit (fncode: BUILT_IN_ASAN_POINTER_COMPARE); |
13174 | instrument_expr = build_call_expr_loc (location, tt, 2, op0, op1); |
13175 | } |
13176 | |
13177 | if ((C_BOOLEAN_TYPE_P (TREE_TYPE (orig_op0)) |
13178 | || truth_value_p (TREE_CODE (orig_op0))) |
13179 | ^ (C_BOOLEAN_TYPE_P (TREE_TYPE (orig_op1)) |
13180 | || truth_value_p (TREE_CODE (orig_op1)))) |
13181 | maybe_warn_bool_compare (location, code, orig_op0, orig_op1); |
13182 | break; |
13183 | |
13184 | case MIN_EXPR: |
13185 | case MAX_EXPR: |
13186 | /* Used for OpenMP atomics. */ |
13187 | gcc_assert (flag_openmp); |
13188 | common = 1; |
13189 | break; |
13190 | |
13191 | default: |
13192 | gcc_unreachable (); |
13193 | } |
13194 | |
13195 | if (code0 == ERROR_MARK || code1 == ERROR_MARK) |
13196 | return error_mark_node; |
13197 | |
13198 | if (gnu_vector_type_p (type: type0) |
13199 | && gnu_vector_type_p (type: type1) |
13200 | && (!tree_int_cst_equal (TYPE_SIZE (type0), TYPE_SIZE (type1)) |
13201 | || !vector_types_compatible_elements_p (type0, type1))) |
13202 | { |
13203 | gcc_rich_location richloc (location); |
13204 | maybe_range_label_for_tree_type_mismatch |
13205 | label_for_op0 (orig_op0, orig_op1), |
13206 | label_for_op1 (orig_op1, orig_op0); |
13207 | richloc.maybe_add_expr (t: orig_op0, label: &label_for_op0); |
13208 | richloc.maybe_add_expr (t: orig_op1, label: &label_for_op1); |
13209 | binary_op_error (&richloc, code, type0, type1); |
13210 | return error_mark_node; |
13211 | } |
13212 | |
13213 | if ((code0 == INTEGER_TYPE || code0 == REAL_TYPE || code0 == COMPLEX_TYPE |
13214 | || code0 == FIXED_POINT_TYPE || code0 == BITINT_TYPE |
13215 | || gnu_vector_type_p (type: type0)) |
13216 | && (code1 == INTEGER_TYPE || code1 == REAL_TYPE || code1 == COMPLEX_TYPE |
13217 | || code1 == FIXED_POINT_TYPE || code1 == BITINT_TYPE |
13218 | || gnu_vector_type_p (type: type1))) |
13219 | { |
13220 | bool first_complex = (code0 == COMPLEX_TYPE); |
13221 | bool second_complex = (code1 == COMPLEX_TYPE); |
13222 | int none_complex = (!first_complex && !second_complex); |
13223 | |
13224 | if (shorten || common || short_compare) |
13225 | { |
13226 | result_type = c_common_type (t1: type0, t2: type1); |
13227 | do_warn_double_promotion (result_type, type0, type1, |
13228 | "implicit conversion from %qT to %qT " |
13229 | "to match other operand of binary " |
13230 | "expression" , |
13231 | location); |
13232 | if (result_type == error_mark_node) |
13233 | return error_mark_node; |
13234 | } |
13235 | |
13236 | if (first_complex != second_complex |
13237 | && (code == PLUS_EXPR |
13238 | || code == MINUS_EXPR |
13239 | || code == MULT_EXPR |
13240 | || (code == TRUNC_DIV_EXPR && first_complex)) |
13241 | && TREE_CODE (TREE_TYPE (result_type)) == REAL_TYPE |
13242 | && flag_signed_zeros) |
13243 | { |
13244 | /* An operation on mixed real/complex operands must be |
13245 | handled specially, but the language-independent code can |
13246 | more easily optimize the plain complex arithmetic if |
13247 | -fno-signed-zeros. */ |
13248 | tree real_type = TREE_TYPE (result_type); |
13249 | tree real, imag; |
13250 | if (type0 != orig_type0 || type1 != orig_type1) |
13251 | { |
13252 | gcc_assert (may_need_excess_precision && common); |
13253 | semantic_result_type = c_common_type (t1: orig_type0, t2: orig_type1); |
13254 | } |
13255 | if (first_complex) |
13256 | { |
13257 | if (TREE_TYPE (op0) != result_type) |
13258 | op0 = convert_and_check (location, result_type, op0); |
13259 | if (TREE_TYPE (op1) != real_type) |
13260 | op1 = convert_and_check (location, real_type, op1); |
13261 | } |
13262 | else |
13263 | { |
13264 | if (TREE_TYPE (op0) != real_type) |
13265 | op0 = convert_and_check (location, real_type, op0); |
13266 | if (TREE_TYPE (op1) != result_type) |
13267 | op1 = convert_and_check (location, result_type, op1); |
13268 | } |
13269 | if (TREE_CODE (op0) == ERROR_MARK || TREE_CODE (op1) == ERROR_MARK) |
13270 | return error_mark_node; |
13271 | if (first_complex) |
13272 | { |
13273 | op0 = save_expr (op0); |
13274 | real = build_unary_op (EXPR_LOCATION (orig_op0), code: REALPART_EXPR, |
13275 | xarg: op0, noconvert: true); |
13276 | imag = build_unary_op (EXPR_LOCATION (orig_op0), code: IMAGPART_EXPR, |
13277 | xarg: op0, noconvert: true); |
13278 | switch (code) |
13279 | { |
13280 | case MULT_EXPR: |
13281 | case TRUNC_DIV_EXPR: |
13282 | op1 = save_expr (op1); |
13283 | imag = build2 (resultcode, real_type, imag, op1); |
13284 | /* Fall through. */ |
13285 | case PLUS_EXPR: |
13286 | case MINUS_EXPR: |
13287 | real = build2 (resultcode, real_type, real, op1); |
13288 | break; |
13289 | default: |
13290 | gcc_unreachable(); |
13291 | } |
13292 | } |
13293 | else |
13294 | { |
13295 | op1 = save_expr (op1); |
13296 | real = build_unary_op (EXPR_LOCATION (orig_op1), code: REALPART_EXPR, |
13297 | xarg: op1, noconvert: true); |
13298 | imag = build_unary_op (EXPR_LOCATION (orig_op1), code: IMAGPART_EXPR, |
13299 | xarg: op1, noconvert: true); |
13300 | switch (code) |
13301 | { |
13302 | case MULT_EXPR: |
13303 | op0 = save_expr (op0); |
13304 | imag = build2 (resultcode, real_type, op0, imag); |
13305 | /* Fall through. */ |
13306 | case PLUS_EXPR: |
13307 | real = build2 (resultcode, real_type, op0, real); |
13308 | break; |
13309 | case MINUS_EXPR: |
13310 | real = build2 (resultcode, real_type, op0, real); |
13311 | imag = build1 (NEGATE_EXPR, real_type, imag); |
13312 | break; |
13313 | default: |
13314 | gcc_unreachable(); |
13315 | } |
13316 | } |
13317 | ret = build2 (COMPLEX_EXPR, result_type, real, imag); |
13318 | goto return_build_binary_op; |
13319 | } |
13320 | |
13321 | /* For certain operations (which identify themselves by shorten != 0) |
13322 | if both args were extended from the same smaller type, |
13323 | do the arithmetic in that type and then extend. |
13324 | |
13325 | shorten !=0 and !=1 indicates a bitwise operation. |
13326 | For them, this optimization is safe only if |
13327 | both args are zero-extended or both are sign-extended. |
13328 | Otherwise, we might change the result. |
13329 | Eg, (short)-1 | (unsigned short)-1 is (int)-1 |
13330 | but calculated in (unsigned short) it would be (unsigned short)-1. */ |
13331 | |
13332 | if (shorten && none_complex) |
13333 | { |
13334 | final_type = result_type; |
13335 | result_type = shorten_binary_op (result_type, op0, op1, |
13336 | bitwise: shorten == -1); |
13337 | } |
13338 | |
13339 | /* Shifts can be shortened if shifting right. */ |
13340 | |
13341 | if (short_shift) |
13342 | { |
13343 | int unsigned_arg; |
13344 | tree arg0 = get_narrower (op0, &unsigned_arg); |
13345 | |
13346 | final_type = result_type; |
13347 | |
13348 | if (arg0 == op0 && final_type == TREE_TYPE (op0)) |
13349 | unsigned_arg = TYPE_UNSIGNED (TREE_TYPE (op0)); |
13350 | |
13351 | if (TYPE_PRECISION (TREE_TYPE (arg0)) < TYPE_PRECISION (result_type) |
13352 | && tree_int_cst_sgn (op1) > 0 |
13353 | /* We can shorten only if the shift count is less than the |
13354 | number of bits in the smaller type size. */ |
13355 | && compare_tree_int (op1, TYPE_PRECISION (TREE_TYPE (arg0))) < 0 |
13356 | /* We cannot drop an unsigned shift after sign-extension. */ |
13357 | && (!TYPE_UNSIGNED (final_type) || unsigned_arg)) |
13358 | { |
13359 | /* Do an unsigned shift if the operand was zero-extended. */ |
13360 | result_type |
13361 | = c_common_signed_or_unsigned_type (unsigned_arg, |
13362 | TREE_TYPE (arg0)); |
13363 | /* Convert value-to-be-shifted to that type. */ |
13364 | if (TREE_TYPE (op0) != result_type) |
13365 | op0 = convert (result_type, op0); |
13366 | converted = 1; |
13367 | } |
13368 | } |
13369 | |
13370 | /* Comparison operations are shortened too but differently. |
13371 | They identify themselves by setting short_compare = 1. */ |
13372 | |
13373 | if (short_compare) |
13374 | { |
13375 | /* Don't write &op0, etc., because that would prevent op0 |
13376 | from being kept in a register. |
13377 | Instead, make copies of the our local variables and |
13378 | pass the copies by reference, then copy them back afterward. */ |
13379 | tree xop0 = op0, xop1 = op1, xresult_type = result_type; |
13380 | enum tree_code xresultcode = resultcode; |
13381 | tree val |
13382 | = shorten_compare (location, &xop0, &xop1, &xresult_type, |
13383 | &xresultcode); |
13384 | |
13385 | if (val != NULL_TREE) |
13386 | { |
13387 | ret = val; |
13388 | goto return_build_binary_op; |
13389 | } |
13390 | |
13391 | op0 = xop0, op1 = xop1; |
13392 | converted = 1; |
13393 | resultcode = xresultcode; |
13394 | |
13395 | if (c_inhibit_evaluation_warnings == 0 && !c_in_omp_for) |
13396 | { |
13397 | bool op0_maybe_const = true; |
13398 | bool op1_maybe_const = true; |
13399 | tree orig_op0_folded, orig_op1_folded; |
13400 | |
13401 | if (in_late_binary_op) |
13402 | { |
13403 | orig_op0_folded = orig_op0; |
13404 | orig_op1_folded = orig_op1; |
13405 | } |
13406 | else |
13407 | { |
13408 | /* Fold for the sake of possible warnings, as in |
13409 | build_conditional_expr. This requires the |
13410 | "original" values to be folded, not just op0 and |
13411 | op1. */ |
13412 | c_inhibit_evaluation_warnings++; |
13413 | op0 = c_fully_fold (op0, require_constant_value, |
13414 | &op0_maybe_const); |
13415 | op1 = c_fully_fold (op1, require_constant_value, |
13416 | &op1_maybe_const); |
13417 | c_inhibit_evaluation_warnings--; |
13418 | orig_op0_folded = c_fully_fold (orig_op0, |
13419 | require_constant_value, |
13420 | NULL); |
13421 | orig_op1_folded = c_fully_fold (orig_op1, |
13422 | require_constant_value, |
13423 | NULL); |
13424 | } |
13425 | |
13426 | if (warn_sign_compare) |
13427 | warn_for_sign_compare (location, orig_op0: orig_op0_folded, |
13428 | orig_op1: orig_op1_folded, op0, op1, |
13429 | result_type, resultcode); |
13430 | if (!in_late_binary_op && !int_operands) |
13431 | { |
13432 | if (!op0_maybe_const || TREE_CODE (op0) != INTEGER_CST) |
13433 | op0 = c_wrap_maybe_const (op0, !op0_maybe_const); |
13434 | if (!op1_maybe_const || TREE_CODE (op1) != INTEGER_CST) |
13435 | op1 = c_wrap_maybe_const (op1, !op1_maybe_const); |
13436 | } |
13437 | } |
13438 | } |
13439 | } |
13440 | |
13441 | /* At this point, RESULT_TYPE must be nonzero to avoid an error message. |
13442 | If CONVERTED is zero, both args will be converted to type RESULT_TYPE. |
13443 | Then the expression will be built. |
13444 | It will be given type FINAL_TYPE if that is nonzero; |
13445 | otherwise, it will be given type RESULT_TYPE. */ |
13446 | |
13447 | if (!result_type) |
13448 | { |
13449 | /* Favor showing any expression locations that are available. */ |
13450 | op_location_t oploc (location, UNKNOWN_LOCATION); |
13451 | binary_op_rich_location richloc (oploc, orig_op0, orig_op1, true); |
13452 | binary_op_error (&richloc, code, TREE_TYPE (op0), TREE_TYPE (op1)); |
13453 | return error_mark_node; |
13454 | } |
13455 | |
13456 | if (build_type == NULL_TREE) |
13457 | { |
13458 | build_type = result_type; |
13459 | if ((type0 != orig_type0 || type1 != orig_type1) |
13460 | && !boolean_op) |
13461 | { |
13462 | gcc_assert (may_need_excess_precision && common); |
13463 | semantic_result_type = c_common_type (t1: orig_type0, t2: orig_type1); |
13464 | } |
13465 | } |
13466 | |
13467 | if (!converted) |
13468 | { |
13469 | op0 = ep_convert_and_check (loc: location, type: result_type, expr: op0, |
13470 | semantic_type: semantic_result_type); |
13471 | op1 = ep_convert_and_check (loc: location, type: result_type, expr: op1, |
13472 | semantic_type: semantic_result_type); |
13473 | |
13474 | /* This can happen if one operand has a vector type, and the other |
13475 | has a different type. */ |
13476 | if (TREE_CODE (op0) == ERROR_MARK || TREE_CODE (op1) == ERROR_MARK) |
13477 | return error_mark_node; |
13478 | } |
13479 | |
13480 | if (sanitize_flags_p (flag: (SANITIZE_SHIFT |
13481 | | SANITIZE_DIVIDE |
13482 | | SANITIZE_FLOAT_DIVIDE |
13483 | | SANITIZE_SI_OVERFLOW)) |
13484 | && current_function_decl != NULL_TREE |
13485 | && (doing_div_or_mod || doing_shift) |
13486 | && !require_constant_value) |
13487 | { |
13488 | /* OP0 and/or OP1 might have side-effects. */ |
13489 | op0 = save_expr (op0); |
13490 | op1 = save_expr (op1); |
13491 | op0 = c_fully_fold (op0, false, NULL); |
13492 | op1 = c_fully_fold (op1, false, NULL); |
13493 | if (doing_div_or_mod && (sanitize_flags_p (flag: (SANITIZE_DIVIDE |
13494 | | SANITIZE_FLOAT_DIVIDE |
13495 | | SANITIZE_SI_OVERFLOW)))) |
13496 | instrument_expr = ubsan_instrument_division (location, op0, op1); |
13497 | else if (doing_shift && sanitize_flags_p (flag: SANITIZE_SHIFT)) |
13498 | instrument_expr = ubsan_instrument_shift (location, code, op0, op1); |
13499 | } |
13500 | |
13501 | /* Treat expressions in initializers specially as they can't trap. */ |
13502 | if (int_const_or_overflow) |
13503 | ret = (require_constant_value |
13504 | ? fold_build2_initializer_loc (location, resultcode, build_type, |
13505 | op0, op1) |
13506 | : fold_build2_loc (location, resultcode, build_type, op0, op1)); |
13507 | else |
13508 | ret = build2 (resultcode, build_type, op0, op1); |
13509 | if (final_type != NULL_TREE) |
13510 | ret = convert (final_type, ret); |
13511 | |
13512 | return_build_binary_op: |
13513 | gcc_assert (ret != error_mark_node); |
13514 | if (TREE_CODE (ret) == INTEGER_CST && !TREE_OVERFLOW (ret) && !int_const) |
13515 | ret = (int_operands |
13516 | ? note_integer_operands (expr: ret) |
13517 | : build1 (NOP_EXPR, TREE_TYPE (ret), ret)); |
13518 | else if (TREE_CODE (ret) != INTEGER_CST && int_operands |
13519 | && !in_late_binary_op) |
13520 | ret = note_integer_operands (expr: ret); |
13521 | protected_set_expr_location (ret, location); |
13522 | |
13523 | if (instrument_expr != NULL) |
13524 | ret = fold_build2 (COMPOUND_EXPR, TREE_TYPE (ret), |
13525 | instrument_expr, ret); |
13526 | |
13527 | if (semantic_result_type) |
13528 | ret = build1_loc (loc: location, code: EXCESS_PRECISION_EXPR, |
13529 | type: semantic_result_type, arg1: ret); |
13530 | |
13531 | return ret; |
13532 | } |
13533 | |
13534 | |
13535 | /* Convert EXPR to be a truth-value (type TYPE), validating its type for this |
13536 | purpose. LOCATION is the source location for the expression. */ |
13537 | |
tree
c_objc_common_truthvalue_conversion (location_t location, tree expr, tree type)
{
  bool int_const, int_operands;

  /* First reject operand types that have no scalar (truth-value)
     interpretation at all.  */
  switch (TREE_CODE (TREE_TYPE (expr)))
    {
    case ARRAY_TYPE:
      error_at (location, "used array that cannot be converted to pointer where scalar is required" );
      return error_mark_node;

    case RECORD_TYPE:
      error_at (location, "used struct type value where scalar is required" );
      return error_mark_node;

    case UNION_TYPE:
      error_at (location, "used union type value where scalar is required" );
      return error_mark_node;

    case VOID_TYPE:
      error_at (location, "void value not ignored as it ought to be" );
      return error_mark_node;

    case POINTER_TYPE:
      /* Certain built-in functions may not have their address taken;
	 reject_gcc_builtin diagnoses that case itself.  */
      if (reject_gcc_builtin (expr))
	return error_mark_node;
      break;

    case FUNCTION_TYPE:
      /* Presumably function designators have been converted to pointers
	 before this point -- TODO confirm against callers.  */
      gcc_unreachable ();

    case VECTOR_TYPE:
      error_at (location, "used vector type where scalar is required" );
      return error_mark_node;

    default:
      break;
    }

  /* Conversion of a floating constant to boolean goes through here
     and yields an integer constant expression.  Otherwise, the result
     is only an integer constant expression if the argument is.  */
  int_const = ((TREE_CODE (expr) == INTEGER_CST && !TREE_OVERFLOW (expr))
	       || ((TREE_CODE (expr) == REAL_CST
		    || TREE_CODE (expr) == COMPLEX_CST)
		   && (TREE_CODE (type) == BOOLEAN_TYPE
		       || (TREE_CODE (type) == ENUMERAL_TYPE
			   && ENUM_UNDERLYING_TYPE (type) != NULL_TREE
			   && (TREE_CODE (ENUM_UNDERLYING_TYPE (type))
			       == BOOLEAN_TYPE)))));
  int_operands = EXPR_INT_CONST_OPERANDS (expr);
  if (int_operands && TREE_CODE (expr) != INTEGER_CST)
    {
      /* All operands are integer constant expressions but the whole has
	 not folded to a constant: build the != 0 test directly and keep
	 the integer-operands marker on the result.  */
      expr = remove_c_maybe_const_expr (expr);
      expr = build2 (NE_EXPR, type, expr,
		     convert (TREE_TYPE (expr), integer_zero_node));
      expr = note_integer_operands (expr);
    }
  else
    {
      /* ??? Should we also give an error for vectors rather than leaving
	 those to give errors later?  */
      expr = c_common_truthvalue_conversion (location, expr);
      expr = fold_convert_loc (location, type, expr);
    }

  /* Preserve the C front end's integer-constant-expression bookkeeping:
     a constant result computed from constant operands that was not
     itself an integer constant expression must stay marked as such.  */
  if (TREE_CODE (expr) == INTEGER_CST && int_operands && !int_const)
    {
      if (TREE_OVERFLOW (expr))
	return expr;
      else
	return note_integer_operands (expr);
    }
  /* Wrap in a NOP_EXPR so later checks do not mistake the result for an
     integer constant expression.  */
  if (TREE_CODE (expr) == INTEGER_CST && !int_const)
    return build1 (NOP_EXPR, TREE_TYPE (expr), expr);
  return expr;
}
13615 | |
13616 | |
13617 | /* Convert EXPR to a contained DECL, updating *TC, *TI and *SE as |
13618 | required. */ |
13619 | |
13620 | tree |
13621 | c_expr_to_decl (tree expr, bool *tc ATTRIBUTE_UNUSED, bool *se) |
13622 | { |
13623 | if (TREE_CODE (expr) == COMPOUND_LITERAL_EXPR) |
13624 | { |
13625 | tree decl = COMPOUND_LITERAL_EXPR_DECL (expr); |
13626 | /* Executing a compound literal inside a function reinitializes |
13627 | it. */ |
13628 | if (!TREE_STATIC (decl)) |
13629 | *se = true; |
13630 | return decl; |
13631 | } |
13632 | else |
13633 | return expr; |
13634 | } |
13635 | |
13636 | /* Generate OMP construct CODE, with BODY and CLAUSES as its compound |
13637 | statement. LOC is the location of the construct. */ |
13638 | |
13639 | tree |
13640 | c_finish_omp_construct (location_t loc, enum tree_code code, tree body, |
13641 | tree clauses) |
13642 | { |
13643 | body = c_end_compound_stmt (loc, stmt: body, do_scope: true); |
13644 | |
13645 | tree stmt = make_node (code); |
13646 | TREE_TYPE (stmt) = void_type_node; |
13647 | OMP_BODY (stmt) = body; |
13648 | OMP_CLAUSES (stmt) = clauses; |
13649 | SET_EXPR_LOCATION (stmt, loc); |
13650 | |
13651 | return add_stmt (stmt); |
13652 | } |
13653 | |
13654 | /* Generate OACC_DATA, with CLAUSES and BLOCK as its compound |
13655 | statement. LOC is the location of the OACC_DATA. */ |
13656 | |
13657 | tree |
13658 | c_finish_oacc_data (location_t loc, tree clauses, tree block) |
13659 | { |
13660 | tree stmt; |
13661 | |
13662 | block = c_end_compound_stmt (loc, stmt: block, do_scope: true); |
13663 | |
13664 | stmt = make_node (OACC_DATA); |
13665 | TREE_TYPE (stmt) = void_type_node; |
13666 | OACC_DATA_CLAUSES (stmt) = clauses; |
13667 | OACC_DATA_BODY (stmt) = block; |
13668 | SET_EXPR_LOCATION (stmt, loc); |
13669 | |
13670 | return add_stmt (stmt); |
13671 | } |
13672 | |
13673 | /* Generate OACC_HOST_DATA, with CLAUSES and BLOCK as its compound |
13674 | statement. LOC is the location of the OACC_HOST_DATA. */ |
13675 | |
13676 | tree |
13677 | c_finish_oacc_host_data (location_t loc, tree clauses, tree block) |
13678 | { |
13679 | tree stmt; |
13680 | |
13681 | block = c_end_compound_stmt (loc, stmt: block, do_scope: true); |
13682 | |
13683 | stmt = make_node (OACC_HOST_DATA); |
13684 | TREE_TYPE (stmt) = void_type_node; |
13685 | OACC_HOST_DATA_CLAUSES (stmt) = clauses; |
13686 | OACC_HOST_DATA_BODY (stmt) = block; |
13687 | SET_EXPR_LOCATION (stmt, loc); |
13688 | |
13689 | return add_stmt (stmt); |
13690 | } |
13691 | |
13692 | /* Like c_begin_compound_stmt, except force the retention of the BLOCK. */ |
13693 | |
13694 | tree |
13695 | c_begin_omp_parallel (void) |
13696 | { |
13697 | tree block; |
13698 | |
13699 | keep_next_level (); |
13700 | block = c_begin_compound_stmt (do_scope: true); |
13701 | |
13702 | return block; |
13703 | } |
13704 | |
13705 | /* Generate OMP_PARALLEL, with CLAUSES and BLOCK as its compound |
13706 | statement. LOC is the location of the OMP_PARALLEL. */ |
13707 | |
13708 | tree |
13709 | c_finish_omp_parallel (location_t loc, tree clauses, tree block) |
13710 | { |
13711 | tree stmt; |
13712 | |
13713 | block = c_end_compound_stmt (loc, stmt: block, do_scope: true); |
13714 | |
13715 | stmt = make_node (OMP_PARALLEL); |
13716 | TREE_TYPE (stmt) = void_type_node; |
13717 | OMP_PARALLEL_CLAUSES (stmt) = clauses; |
13718 | OMP_PARALLEL_BODY (stmt) = block; |
13719 | SET_EXPR_LOCATION (stmt, loc); |
13720 | |
13721 | return add_stmt (stmt); |
13722 | } |
13723 | |
13724 | /* Like c_begin_compound_stmt, except force the retention of the BLOCK. */ |
13725 | |
13726 | tree |
13727 | c_begin_omp_task (void) |
13728 | { |
13729 | tree block; |
13730 | |
13731 | keep_next_level (); |
13732 | block = c_begin_compound_stmt (do_scope: true); |
13733 | |
13734 | return block; |
13735 | } |
13736 | |
13737 | /* Generate OMP_TASK, with CLAUSES and BLOCK as its compound |
13738 | statement. LOC is the location of the #pragma. */ |
13739 | |
13740 | tree |
13741 | c_finish_omp_task (location_t loc, tree clauses, tree block) |
13742 | { |
13743 | tree stmt; |
13744 | |
13745 | block = c_end_compound_stmt (loc, stmt: block, do_scope: true); |
13746 | |
13747 | stmt = make_node (OMP_TASK); |
13748 | TREE_TYPE (stmt) = void_type_node; |
13749 | OMP_TASK_CLAUSES (stmt) = clauses; |
13750 | OMP_TASK_BODY (stmt) = block; |
13751 | SET_EXPR_LOCATION (stmt, loc); |
13752 | |
13753 | return add_stmt (stmt); |
13754 | } |
13755 | |
13756 | /* Generate GOMP_cancel call for #pragma omp cancel. */ |
13757 | |
void
c_finish_omp_cancel (location_t loc, tree clauses)
{
  tree fn = builtin_decl_explicit (fncode: BUILT_IN_GOMP_CANCEL);
  /* Encode the cancelled construct kind as the bitmask the GOMP_cancel
     runtime call expects: 1 parallel, 2 for, 4 sections, 8 taskgroup.  */
  int mask = 0;
  if (omp_find_clause (clauses, kind: OMP_CLAUSE_PARALLEL))
    mask = 1;
  else if (omp_find_clause (clauses, kind: OMP_CLAUSE_FOR))
    mask = 2;
  else if (omp_find_clause (clauses, kind: OMP_CLAUSE_SECTIONS))
    mask = 4;
  else if (omp_find_clause (clauses, kind: OMP_CLAUSE_TASKGROUP))
    mask = 8;
  else
    {
      error_at (loc, "%<#pragma omp cancel%> must specify one of "
		"%<parallel%>, %<for%>, %<sections%> or %<taskgroup%> "
		"clauses" );
      return;
    }
  tree ifc = omp_find_clause (clauses, kind: OMP_CLAUSE_IF);
  if (ifc != NULL_TREE)
    {
      /* NOTE(review): the modifier check treats ERROR_MARK as "no
	 modifier" and VOID_CST as the %<cancel%> modifier; anything else
	 is some other directive-name modifier and invalid here --
	 confirm against the clause parser.  */
      if (OMP_CLAUSE_IF_MODIFIER (ifc) != ERROR_MARK
	  && OMP_CLAUSE_IF_MODIFIER (ifc) != VOID_CST)
	error_at (OMP_CLAUSE_LOCATION (ifc),
		  "expected %<cancel%> %<if%> clause modifier" );
      else
	{
	  /* A second %<if%> clause on the chain can only carry a wrong
	     modifier (the assert documents the invariant); diagnose it.  */
	  tree ifc2 = omp_find_clause (OMP_CLAUSE_CHAIN (ifc), kind: OMP_CLAUSE_IF);
	  if (ifc2 != NULL_TREE)
	    {
	      gcc_assert (OMP_CLAUSE_IF_MODIFIER (ifc) == VOID_CST
			  && OMP_CLAUSE_IF_MODIFIER (ifc2) != ERROR_MARK
			  && OMP_CLAUSE_IF_MODIFIER (ifc2) != VOID_CST);
	      error_at (OMP_CLAUSE_LOCATION (ifc2),
			"expected %<cancel%> %<if%> clause modifier" );
	    }
	}

      /* Lower the if-expression to the boolean test EXPR != 0 so it can
	 be passed straight to the runtime call.  */
      tree type = TREE_TYPE (OMP_CLAUSE_IF_EXPR (ifc));
      ifc = fold_build2_loc (OMP_CLAUSE_LOCATION (ifc), NE_EXPR,
			     boolean_type_node, OMP_CLAUSE_IF_EXPR (ifc),
			     build_zero_cst (type));
    }
  else
    /* No %<if%> clause: cancellation is unconditional.  */
    ifc = boolean_true_node;
  tree stmt = build_call_expr_loc (loc, fn, 2,
				   build_int_cst (integer_type_node, mask),
				   ifc);
  add_stmt (stmt);
}
13810 | |
13811 | /* Generate GOMP_cancellation_point call for |
13812 | #pragma omp cancellation point. */ |
13813 | |
13814 | void |
13815 | c_finish_omp_cancellation_point (location_t loc, tree clauses) |
13816 | { |
13817 | tree fn = builtin_decl_explicit (fncode: BUILT_IN_GOMP_CANCELLATION_POINT); |
13818 | int mask = 0; |
13819 | if (omp_find_clause (clauses, kind: OMP_CLAUSE_PARALLEL)) |
13820 | mask = 1; |
13821 | else if (omp_find_clause (clauses, kind: OMP_CLAUSE_FOR)) |
13822 | mask = 2; |
13823 | else if (omp_find_clause (clauses, kind: OMP_CLAUSE_SECTIONS)) |
13824 | mask = 4; |
13825 | else if (omp_find_clause (clauses, kind: OMP_CLAUSE_TASKGROUP)) |
13826 | mask = 8; |
13827 | else |
13828 | { |
13829 | error_at (loc, "%<#pragma omp cancellation point%> must specify one of " |
13830 | "%<parallel%>, %<for%>, %<sections%> or %<taskgroup%> " |
13831 | "clauses" ); |
13832 | return; |
13833 | } |
13834 | tree stmt = build_call_expr_loc (loc, fn, 1, |
13835 | build_int_cst (integer_type_node, mask)); |
13836 | add_stmt (stmt); |
13837 | } |
13838 | |
13839 | /* Helper function for handle_omp_array_sections. Called recursively |
13840 | to handle multiple array-section-subscripts. C is the clause, |
13841 | T current expression (initially OMP_CLAUSE_DECL), which is either |
13842 | a TREE_LIST for array-section-subscript (TREE_PURPOSE is low-bound |
13843 | expression if specified, TREE_VALUE length expression if specified, |
13844 | TREE_CHAIN is what it has been specified after, or some decl. |
13845 | TYPES vector is populated with array section types, MAYBE_ZERO_LEN |
13846 | set to true if any of the array-section-subscript could have length |
13847 | of zero (explicit or implicit), FIRST_NON_ONE is the index of the |
13848 | first array-section-subscript which is known not to have length |
13849 | of one. Given say: |
13850 | map(a[:b][2:1][:c][:2][:d][e:f][2:5]) |
13851 | FIRST_NON_ONE will be 3, array-section-subscript [:b], [2:1] and [:c] |
13852 | all are or may have length of 1, array-section-subscript [:2] is the |
13853 | first one known not to have length 1. For array-section-subscript |
13854 | <= FIRST_NON_ONE we diagnose non-contiguous arrays if low bound isn't |
13855 | 0 or length isn't the array domain max + 1, for > FIRST_NON_ONE we |
13856 | can if MAYBE_ZERO_LEN is false. MAYBE_ZERO_LEN will be true in the above |
13857 | case though, as some lengths could be zero. */ |
13858 | |
static tree
handle_omp_array_sections_1 (tree c, tree t, vec<tree> &types,
			     bool &maybe_zero_len, unsigned int &first_non_one,
			     enum c_omp_region_type ort)
{
  tree ret, low_bound, length, type;
  bool openacc = (ort & C_ORT_ACC) != 0;
  /* Base case of the recursion: T is no longer an OMP_ARRAY_SECTION, so
     it is the underlying decl/expression the section subscripts apply
     to.  Validate it and return it unchanged (modulo the _Atomic pointer
     load below).  */
  if (TREE_CODE (t) != OMP_ARRAY_SECTION)
    {
      if (error_operand_p (t))
	return error_mark_node;
      c_omp_address_inspector ai (OMP_CLAUSE_LOCATION (c), t);
      ret = t;
      /* _Atomic element types are rejected for all clauses except
	 affinity/depend, which only name the storage.  */
      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_AFFINITY
	  && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
	  && TYPE_ATOMIC (strip_array_types (TREE_TYPE (t))))
	{
	  error_at (OMP_CLAUSE_LOCATION (c), "%<_Atomic%> %qE in %qs clause" ,
		    t, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
	  return error_mark_node;
	}
      if (!ai.check_clause (c))
	return error_mark_node;
      else if (ai.component_access_p ()
	       && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM))
	/* For mapping clauses on component accesses, validate the root
	   term of the access; otherwise strip conversions/REFs back to
	   the origin expression.  */
	t = ai.get_root_term (true);
      else
	t = ai.unconverted_ref_origin ();
      if (t == error_mark_node)
	return error_mark_node;
      /* Outside OpenACC, arbitrary expressions are tolerated here;
	 otherwise only variables and parameters may be sectioned.  */
      if (!VAR_P (t)
	  && (ort == C_ORT_ACC || !EXPR_P (t))
	  && TREE_CODE (t) != PARM_DECL)
	{
	  if (DECL_P (t))
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%qD is not a variable in %qs clause" , t,
		      omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
	  else
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%qE is not a variable in %qs clause" , t,
		      omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
	  return error_mark_node;
	}
      else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_AFFINITY
	       && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
	       && TYPE_ATOMIC (TREE_TYPE (t)))
	{
	  error_at (OMP_CLAUSE_LOCATION (c), "%<_Atomic%> %qD in %qs clause" ,
		    t, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
	  return error_mark_node;
	}
      else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_AFFINITY
	       && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
	       && VAR_P (t)
	       && DECL_THREAD_LOCAL_P (t))
	{
	  error_at (OMP_CLAUSE_LOCATION (c),
		    "%qD is threadprivate variable in %qs clause" , t,
		    omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
	  return error_mark_node;
	}
      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY
	   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
	  && TYPE_ATOMIC (TREE_TYPE (t))
	  && POINTER_TYPE_P (TREE_TYPE (t)))
	{
	  /* If the array section is pointer based and the pointer
	     itself is _Atomic qualified, we need to atomically load
	     the pointer.  */
	  c_expr expr;
	  memset (s: &expr, c: 0, n: sizeof (expr));
	  expr.value = ret;
	  expr = convert_lvalue_to_rvalue (OMP_CLAUSE_LOCATION (c),
					   exp: expr, convert_p: false, read_p: false);
	  ret = expr.value;
	}
      return ret;
    }

  /* Recurse on the inner (outermost-first) subscripts; RET is the array
     ref built so far for them.  */
  ret = handle_omp_array_sections_1 (c, TREE_OPERAND (t, 0), types,
				     maybe_zero_len, first_non_one, ort);
  if (ret == error_mark_node || ret == NULL_TREE)
    return ret;

  type = TREE_TYPE (ret);
  /* Operand 1 is the low bound, operand 2 the length; either may be
     NULL_TREE when omitted in the source.  */
  low_bound = TREE_OPERAND (t, 1);
  length = TREE_OPERAND (t, 2);

  if (low_bound == error_mark_node || length == error_mark_node)
    return error_mark_node;

  if (low_bound && !INTEGRAL_TYPE_P (TREE_TYPE (low_bound)))
    {
      error_at (OMP_CLAUSE_LOCATION (c),
		"low bound %qE of array section does not have integral type" ,
		low_bound);
      return error_mark_node;
    }
  if (length && !INTEGRAL_TYPE_P (TREE_TYPE (length)))
    {
      error_at (OMP_CLAUSE_LOCATION (c),
		"length %qE of array section does not have integral type" ,
		length);
      return error_mark_node;
    }
  /* Narrow over-wide constants to sizetype so the size arithmetic
     below does not overflow in a wider type.  */
  if (low_bound
      && TREE_CODE (low_bound) == INTEGER_CST
      && TYPE_PRECISION (TREE_TYPE (low_bound))
	 > TYPE_PRECISION (sizetype))
    low_bound = fold_convert (sizetype, low_bound);
  if (length
      && TREE_CODE (length) == INTEGER_CST
      && TYPE_PRECISION (TREE_TYPE (length))
	 > TYPE_PRECISION (sizetype))
    length = fold_convert (sizetype, length);
  /* An omitted low bound defaults to zero.  */
  if (low_bound == NULL_TREE)
    low_bound = integer_zero_node;
  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
	  || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH))
    {
      /* NOTE(review): this is a pointer comparison against the shared
	 integer_one_node, so it relies on constant-node sharing; a
	 non-canonical length of 1 would also be rejected -- confirm the
	 parser always produces the canonical node here.  */
      if (length != integer_one_node)
	{
	  error_at (OMP_CLAUSE_LOCATION (c),
		    "expected single pointer in %qs clause" ,
		    user_omp_clause_code_name (c, openacc));
	  return error_mark_node;
	}
    }
  if (length != NULL_TREE)
    {
      if (!integer_nonzerop (length))
	{
	  /* Zero-length sections are hard errors for affinity, depend
	     and the reduction clauses; elsewhere they are merely noted
	     via MAYBE_ZERO_LEN.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY
	      || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	      || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
	      || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
	    {
	      if (integer_zerop (length))
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "zero length array section in %qs clause" ,
			    omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
		  return error_mark_node;
		}
	    }
	  else
	    maybe_zero_len = true;
	}
      /* TYPES.length () is the index of the current dimension; advance
	 FIRST_NON_ONE while each dimension may still have length 1.  */
      if (first_non_one == types.length ()
	  && (TREE_CODE (length) != INTEGER_CST || integer_onep (length)))
	first_non_one++;
    }
  if (TREE_CODE (type) == ARRAY_TYPE)
    {
      /* Array-typed base: check the bounds against the array domain
	 where it is known at compile time.  */
      if (length == NULL_TREE
	  && (TYPE_DOMAIN (type) == NULL_TREE
	      || TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE))
	{
	  error_at (OMP_CLAUSE_LOCATION (c),
		    "for unknown bound array type length expression must "
		    "be specified" );
	  return error_mark_node;
	}
      if (TREE_CODE (low_bound) == INTEGER_CST
	  && tree_int_cst_sgn (low_bound) == -1)
	{
	  error_at (OMP_CLAUSE_LOCATION (c),
		    "negative low bound in array section in %qs clause" ,
		    omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
	  return error_mark_node;
	}
      if (length != NULL_TREE
	  && TREE_CODE (length) == INTEGER_CST
	  && tree_int_cst_sgn (length) == -1)
	{
	  error_at (OMP_CLAUSE_LOCATION (c),
		    "negative length in array section in %qs clause" ,
		    omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
	  return error_mark_node;
	}
      if (TYPE_DOMAIN (type)
	  && TYPE_MAX_VALUE (TYPE_DOMAIN (type))
	  && TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
			== INTEGER_CST)
	{
	  /* SIZE is the number of elements: domain max + 1.  */
	  tree size
	    = fold_convert (sizetype, TYPE_MAX_VALUE (TYPE_DOMAIN (type)));
	  size = size_binop (PLUS_EXPR, size, size_one_node);
	  if (TREE_CODE (low_bound) == INTEGER_CST)
	    {
	      if (tree_int_cst_lt (t1: size, t2: low_bound))
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "low bound %qE above array section size "
			    "in %qs clause" , low_bound,
			    omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
		  return error_mark_node;
		}
	      /* low_bound == size means [size:] -- an implicit zero
		 length section.  */
	      if (tree_int_cst_equal (size, low_bound))
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY
		      || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
		      || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		      || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
		      || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
		    {
		      error_at (OMP_CLAUSE_LOCATION (c),
				"zero length array section in %qs clause" ,
				omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
		      return error_mark_node;
		    }
		  maybe_zero_len = true;
		}
	      /* [max:] has implicit length 1.  */
	      else if (length == NULL_TREE
		       && first_non_one == types.length ()
		       && tree_int_cst_equal
			    (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
			     low_bound))
		first_non_one++;
	    }
	  else if (length == NULL_TREE)
	    {
	      /* Non-constant low bound with no length: the implicit
		 length size - lb may be zero.  */
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_AFFINITY
		  && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
		  && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
		  && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION
		  && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TASK_REDUCTION)
		maybe_zero_len = true;
	      if (first_non_one == types.length ())
		first_non_one++;
	    }
	  if (length && TREE_CODE (length) == INTEGER_CST)
	    {
	      if (tree_int_cst_lt (t1: size, t2: length))
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "length %qE above array section size "
			    "in %qs clause" , length,
			    omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
		  return error_mark_node;
		}
	      if (TREE_CODE (low_bound) == INTEGER_CST)
		{
		  /* Check low_bound + length against the array size.  */
		  tree lbpluslen
		    = size_binop (PLUS_EXPR,
				  fold_convert (sizetype, low_bound),
				  fold_convert (sizetype, length));
		  if (TREE_CODE (lbpluslen) == INTEGER_CST
		      && tree_int_cst_lt (t1: size, t2: lbpluslen))
		    {
		      error_at (OMP_CLAUSE_LOCATION (c),
				"high bound %qE above array section size "
				"in %qs clause" , lbpluslen,
				omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
		      return error_mark_node;
		    }
		}
	    }
	}
      else if (length == NULL_TREE)
	{
	  /* Unknown-extent array with no explicit length.  */
	  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_AFFINITY
	      && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
	      && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
	      && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION
	      && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TASK_REDUCTION)
	    maybe_zero_len = true;
	  if (first_non_one == types.length ())
	    first_non_one++;
	}

      /* For [lb:] we will need to evaluate lb more than once.  */
      if (length == NULL_TREE && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
	{
	  tree lb = save_expr (low_bound);
	  if (lb != low_bound)
	    {
	      TREE_OPERAND (t, 1) = lb;
	      low_bound = lb;
	    }
	}
    }
  else if (TREE_CODE (type) == POINTER_TYPE)
    {
      /* Pointer-typed base: a length is mandatory since there is no
	 array domain to fall back to.  */
      if (length == NULL_TREE)
	{
	  if (TREE_CODE (ret) == PARM_DECL && C_ARRAY_PARAMETER (ret))
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "for array function parameter length expression "
		      "must be specified" );
	  else
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "for pointer type length expression must be specified" );
	  return error_mark_node;
	}
      if (length != NULL_TREE
	  && TREE_CODE (length) == INTEGER_CST
	  && tree_int_cst_sgn (length) == -1)
	{
	  error_at (OMP_CLAUSE_LOCATION (c),
		    "negative length in array section in %qs clause" ,
		    omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
	  return error_mark_node;
	}
      /* If there is a pointer type anywhere but in the very first
	 array-section-subscript, the array section could be non-contiguous.  */
      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
	  && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_AFFINITY
	  && TREE_CODE (TREE_OPERAND (t, 0)) == OMP_ARRAY_SECTION)
	{
	  /* If any prior dimension has a non-one length, then deem this
	     array section as non-contiguous.  */
	  for (tree d = TREE_OPERAND (t, 0);
	       TREE_CODE (d) == OMP_ARRAY_SECTION;
	       d = TREE_OPERAND (d, 0))
	    {
	      tree d_length = TREE_OPERAND (d, 2);
	      if (d_length == NULL_TREE || !integer_onep (d_length))
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "array section is not contiguous in %qs clause" ,
			    omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
		  return error_mark_node;
		}
	    }
	}
    }
  else
    {
      error_at (OMP_CLAUSE_LOCATION (c),
		"%qE does not have pointer or array type" , ret);
      return error_mark_node;
    }
  /* Record this dimension's type for the caller's contiguity/size
     computation (depend clauses do not need it).  */
  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
    types.safe_push (TREE_TYPE (ret));
  /* We will need to evaluate lb more than once.  */
  tree lb = save_expr (low_bound);
  if (lb != low_bound)
    {
      TREE_OPERAND (t, 1) = lb;
      low_bound = lb;
    }
  /* Fold this dimension into the array reference built so far.  */
  ret = build_array_ref (OMP_CLAUSE_LOCATION (c), array: ret, index: low_bound);
  return ret;
}
14209 | |
14210 | /* Handle array sections for clause C. */ |
14211 | |
14212 | static bool |
14213 | handle_omp_array_sections (tree &c, enum c_omp_region_type ort) |
14214 | { |
14215 | bool maybe_zero_len = false; |
14216 | unsigned int first_non_one = 0; |
14217 | auto_vec<tree, 10> types; |
14218 | tree *tp = &OMP_CLAUSE_DECL (c); |
14219 | if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND |
14220 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY) |
14221 | && TREE_CODE (*tp) == TREE_LIST |
14222 | && TREE_PURPOSE (*tp) |
14223 | && TREE_CODE (TREE_PURPOSE (*tp)) == TREE_VEC) |
14224 | tp = &TREE_VALUE (*tp); |
14225 | tree first = handle_omp_array_sections_1 (c, t: *tp, types, |
14226 | maybe_zero_len, first_non_one, |
14227 | ort); |
14228 | if (first == error_mark_node) |
14229 | return true; |
14230 | if (first == NULL_TREE) |
14231 | return false; |
14232 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND |
14233 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY) |
14234 | { |
14235 | tree t = *tp; |
14236 | tree tem = NULL_TREE; |
14237 | /* Need to evaluate side effects in the length expressions |
14238 | if any. */ |
14239 | while (TREE_CODE (t) == TREE_LIST) |
14240 | { |
14241 | if (TREE_VALUE (t) && TREE_SIDE_EFFECTS (TREE_VALUE (t))) |
14242 | { |
14243 | if (tem == NULL_TREE) |
14244 | tem = TREE_VALUE (t); |
14245 | else |
14246 | tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), |
14247 | TREE_VALUE (t), tem); |
14248 | } |
14249 | t = TREE_CHAIN (t); |
14250 | } |
14251 | if (tem) |
14252 | first = build2 (COMPOUND_EXPR, TREE_TYPE (first), tem, first); |
14253 | first = c_fully_fold (first, false, NULL, true); |
14254 | *tp = first; |
14255 | } |
14256 | else |
14257 | { |
14258 | unsigned int num = types.length (), i; |
14259 | tree t, side_effects = NULL_TREE, size = NULL_TREE; |
14260 | tree condition = NULL_TREE; |
14261 | |
14262 | if (int_size_in_bytes (TREE_TYPE (first)) <= 0) |
14263 | maybe_zero_len = true; |
14264 | |
14265 | for (i = num, t = OMP_CLAUSE_DECL (c); i > 0; |
14266 | t = TREE_OPERAND (t, 0)) |
14267 | { |
14268 | tree low_bound = TREE_OPERAND (t, 1); |
14269 | tree length = TREE_OPERAND (t, 2); |
14270 | |
14271 | i--; |
14272 | if (low_bound |
14273 | && TREE_CODE (low_bound) == INTEGER_CST |
14274 | && TYPE_PRECISION (TREE_TYPE (low_bound)) |
14275 | > TYPE_PRECISION (sizetype)) |
14276 | low_bound = fold_convert (sizetype, low_bound); |
14277 | if (length |
14278 | && TREE_CODE (length) == INTEGER_CST |
14279 | && TYPE_PRECISION (TREE_TYPE (length)) |
14280 | > TYPE_PRECISION (sizetype)) |
14281 | length = fold_convert (sizetype, length); |
14282 | if (low_bound == NULL_TREE) |
14283 | low_bound = integer_zero_node; |
14284 | if (!maybe_zero_len && i > first_non_one) |
14285 | { |
14286 | if (integer_nonzerop (low_bound)) |
14287 | goto do_warn_noncontiguous; |
14288 | if (length != NULL_TREE |
14289 | && TREE_CODE (length) == INTEGER_CST |
14290 | && TYPE_DOMAIN (types[i]) |
14291 | && TYPE_MAX_VALUE (TYPE_DOMAIN (types[i])) |
14292 | && TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (types[i]))) |
14293 | == INTEGER_CST) |
14294 | { |
14295 | tree size; |
14296 | size = size_binop (PLUS_EXPR, |
14297 | TYPE_MAX_VALUE (TYPE_DOMAIN (types[i])), |
14298 | size_one_node); |
14299 | if (!tree_int_cst_equal (length, size)) |
14300 | { |
14301 | do_warn_noncontiguous: |
14302 | error_at (OMP_CLAUSE_LOCATION (c), |
14303 | "array section is not contiguous in %qs " |
14304 | "clause" , |
14305 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
14306 | return true; |
14307 | } |
14308 | } |
14309 | if (length != NULL_TREE |
14310 | && TREE_SIDE_EFFECTS (length)) |
14311 | { |
14312 | if (side_effects == NULL_TREE) |
14313 | side_effects = length; |
14314 | else |
14315 | side_effects = build2 (COMPOUND_EXPR, |
14316 | TREE_TYPE (side_effects), |
14317 | length, side_effects); |
14318 | } |
14319 | } |
14320 | else |
14321 | { |
14322 | tree l; |
14323 | |
14324 | if (i > first_non_one |
14325 | && ((length && integer_nonzerop (length)) |
14326 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION |
14327 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION |
14328 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)) |
14329 | continue; |
14330 | if (length) |
14331 | l = fold_convert (sizetype, length); |
14332 | else |
14333 | { |
14334 | l = size_binop (PLUS_EXPR, |
14335 | TYPE_MAX_VALUE (TYPE_DOMAIN (types[i])), |
14336 | size_one_node); |
14337 | l = size_binop (MINUS_EXPR, l, |
14338 | fold_convert (sizetype, low_bound)); |
14339 | } |
14340 | if (i > first_non_one) |
14341 | { |
14342 | l = fold_build2 (NE_EXPR, boolean_type_node, l, |
14343 | size_zero_node); |
14344 | if (condition == NULL_TREE) |
14345 | condition = l; |
14346 | else |
14347 | condition = fold_build2 (BIT_AND_EXPR, boolean_type_node, |
14348 | l, condition); |
14349 | } |
14350 | else if (size == NULL_TREE) |
14351 | { |
14352 | size = size_in_bytes (TREE_TYPE (types[i])); |
14353 | tree eltype = TREE_TYPE (types[num - 1]); |
14354 | while (TREE_CODE (eltype) == ARRAY_TYPE) |
14355 | eltype = TREE_TYPE (eltype); |
14356 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION |
14357 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION |
14358 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION) |
14359 | { |
14360 | if (integer_zerop (size) |
14361 | || integer_zerop (size_in_bytes (t: eltype))) |
14362 | { |
14363 | error_at (OMP_CLAUSE_LOCATION (c), |
14364 | "zero length array section in %qs clause" , |
14365 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
14366 | return error_mark_node; |
14367 | } |
14368 | size = size_binop (EXACT_DIV_EXPR, size, |
14369 | size_in_bytes (eltype)); |
14370 | } |
14371 | size = size_binop (MULT_EXPR, size, l); |
14372 | if (condition) |
14373 | size = fold_build3 (COND_EXPR, sizetype, condition, |
14374 | size, size_zero_node); |
14375 | } |
14376 | else |
14377 | size = size_binop (MULT_EXPR, size, l); |
14378 | } |
14379 | } |
14380 | if (side_effects) |
14381 | size = build2 (COMPOUND_EXPR, sizetype, side_effects, size); |
14382 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION |
14383 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION |
14384 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION) |
14385 | { |
14386 | size = size_binop (MINUS_EXPR, size, size_one_node); |
14387 | size = c_fully_fold (size, false, NULL); |
14388 | size = save_expr (size); |
14389 | tree index_type = build_index_type (size); |
14390 | tree eltype = TREE_TYPE (first); |
14391 | while (TREE_CODE (eltype) == ARRAY_TYPE) |
14392 | eltype = TREE_TYPE (eltype); |
14393 | tree type = build_array_type (eltype, index_type); |
14394 | tree ptype = build_pointer_type (eltype); |
14395 | if (TREE_CODE (TREE_TYPE (t)) == ARRAY_TYPE) |
14396 | t = build_fold_addr_expr (t); |
14397 | tree t2 = build_fold_addr_expr (first); |
14398 | t2 = fold_convert_loc (OMP_CLAUSE_LOCATION (c), |
14399 | ptrdiff_type_node, t2); |
14400 | t2 = fold_build2_loc (OMP_CLAUSE_LOCATION (c), MINUS_EXPR, |
14401 | ptrdiff_type_node, t2, |
14402 | fold_convert_loc (OMP_CLAUSE_LOCATION (c), |
14403 | ptrdiff_type_node, t)); |
14404 | t2 = c_fully_fold (t2, false, NULL); |
14405 | if (tree_fits_shwi_p (t2)) |
14406 | t = build2 (MEM_REF, type, t, |
14407 | build_int_cst (ptype, tree_to_shwi (t2))); |
14408 | else |
14409 | { |
14410 | t2 = fold_convert_loc (OMP_CLAUSE_LOCATION (c), sizetype, t2); |
14411 | t = build2_loc (OMP_CLAUSE_LOCATION (c), code: POINTER_PLUS_EXPR, |
14412 | TREE_TYPE (t), arg0: t, arg1: t2); |
14413 | t = build2 (MEM_REF, type, t, build_int_cst (ptype, 0)); |
14414 | } |
14415 | OMP_CLAUSE_DECL (c) = t; |
14416 | return false; |
14417 | } |
14418 | first = c_fully_fold (first, false, NULL); |
14419 | OMP_CLAUSE_DECL (c) = first; |
14420 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR) |
14421 | return false; |
14422 | /* Don't set OMP_CLAUSE_SIZE for bare attach/detach clauses. */ |
14423 | if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP |
14424 | || (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH |
14425 | && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH |
14426 | && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DETACH)) |
14427 | { |
14428 | if (size) |
14429 | size = c_fully_fold (size, false, NULL); |
14430 | OMP_CLAUSE_SIZE (c) = size; |
14431 | } |
14432 | |
14433 | if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP) |
14434 | return false; |
14435 | |
14436 | auto_vec<omp_addr_token *, 10> addr_tokens; |
14437 | |
14438 | if (!omp_parse_expr (addr_tokens, first)) |
14439 | return true; |
14440 | |
14441 | c_omp_address_inspector ai (OMP_CLAUSE_LOCATION (c), t); |
14442 | |
14443 | tree nc = ai.expand_map_clause (c, first, addr_tokens, ort); |
14444 | if (nc != error_mark_node) |
14445 | { |
14446 | using namespace omp_addr_tokenizer; |
14447 | |
14448 | if (ai.maybe_zero_length_array_section (c)) |
14449 | OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c) = 1; |
14450 | |
14451 | /* !!! If we're accessing a base decl via chained access |
14452 | methods (e.g. multiple indirections), duplicate clause |
14453 | detection won't work properly. Skip it in that case. */ |
14454 | if ((addr_tokens[0]->type == STRUCTURE_BASE |
14455 | || addr_tokens[0]->type == ARRAY_BASE) |
14456 | && addr_tokens[0]->u.structure_base_kind == BASE_DECL |
14457 | && addr_tokens[1]->type == ACCESS_METHOD |
14458 | && omp_access_chain_p (addr_tokens, 1)) |
14459 | c = nc; |
14460 | |
14461 | return false; |
14462 | } |
14463 | } |
14464 | return false; |
14465 | } |
14466 | |
/* Helper function of c_finish_omp_clauses.  Clone STMT as if we were
   making an inline call, but remap the OMP_DECL1 VAR_DECL (omp_out
   resp. omp_orig) to PLACEHOLDER and the OMP_DECL2 VAR_DECL (omp_in
   resp. omp_priv) to DECL.  */
14471 | |
static tree
c_clone_omp_udr (tree stmt, tree omp_decl1, tree omp_decl2,
		 tree decl, tree placeholder)
{
  copy_body_data id;
  hash_map<tree, tree> decl_map;

  /* Substitute the user-defined reduction's decls during the copy:
     OMP_DECL1 (omp_out resp. omp_orig) is replaced by PLACEHOLDER and
     OMP_DECL2 (omp_in resp. omp_priv) by DECL.  */
  decl_map.put (k: omp_decl1, v: placeholder);
  decl_map.put (k: omp_decl2, v: decl);
  memset (s: &id, c: 0, n: sizeof (id));
  /* Set up the inliner context as if inlining from the function in
     which OMP_DECL1 was declared into the current function.  */
  id.src_fn = DECL_CONTEXT (omp_decl1);
  id.dst_fn = current_function_decl;
  id.src_cfun = DECL_STRUCT_FUNCTION (id.src_fn);
  id.decl_map = &decl_map;

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  id.transform_new_cfg = true;
  id.transform_return_to_modify = false;
  id.eh_lp_nr = 0;
  /* Deep-copy STMT, applying the decl remapping along the way.  */
  walk_tree (&stmt, copy_tree_body_r, &id, NULL);
  return stmt;
}
14495 | |
14496 | /* Helper function of c_finish_omp_clauses, called via walk_tree. |
14497 | Find OMP_CLAUSE_PLACEHOLDER (passed in DATA) in *TP. */ |
14498 | |
14499 | static tree |
14500 | c_find_omp_placeholder_r (tree *tp, int *, void *data) |
14501 | { |
14502 | if (*tp == (tree) data) |
14503 | return *tp; |
14504 | return NULL_TREE; |
14505 | } |
14506 | |
14507 | /* Similarly, but also walk aggregate fields. */ |
14508 | |
/* Payload for c_find_omp_var_r: VAR is the decl being searched for,
   PSET the visited-node set shared across the nested walk_tree calls.  */
struct c_find_omp_var_s { tree var; hash_set<tree> *pset; };
14510 | |
static tree
c_find_omp_var_r (tree *tp, int *, void *data)
{
  /* Direct hit: *TP is the variable we are searching for.  */
  if (*tp == ((struct c_find_omp_var_s *) data)->var)
    return *tp;
  if (RECORD_OR_UNION_TYPE_P (*tp))
    {
      tree field;
      hash_set<tree> *pset = ((struct c_find_omp_var_s *) data)->pset;

      /* walk_tree does not itself descend into the FIELD_DECLs of a
	 record or union type, so examine each field's offset, sizes
	 and type by hand; any of them may refer to the variable
	 (e.g. variably modified member types).  */
      for (field = TYPE_FIELDS (*tp); field;
	   field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    tree ret = walk_tree (&DECL_FIELD_OFFSET (field),
				  c_find_omp_var_r, data, pset);
	    if (ret)
	      return ret;
	    ret = walk_tree (&DECL_SIZE (field), c_find_omp_var_r, data, pset);
	    if (ret)
	      return ret;
	    ret = walk_tree (&DECL_SIZE_UNIT (field), c_find_omp_var_r, data,
			     pset);
	    if (ret)
	      return ret;
	    ret = walk_tree (&TREE_TYPE (field), c_find_omp_var_r, data, pset);
	    if (ret)
	      return ret;
	  }
    }
  else if (INTEGRAL_TYPE_P (*tp))
    /* For integral types the upper bound may be an expression that
       refers to the variable (e.g. a VLA index type).  */
    return walk_tree (&TYPE_MAX_VALUE (*tp), c_find_omp_var_r, data,
		      ((struct c_find_omp_var_s *) data)->pset);
  return NULL_TREE;
}
14546 | |
/* Finish OpenMP iterators ITER.  Return true if they are erroneous
   and clauses containing them should be removed.  */
14549 | |
static bool
c_omp_finish_iterators (tree iter)
{
  bool ret = false;
  /* ITER is a TREE_CHAIN of 5-element TREE_VECs:
     { var, begin, end, step, orig-step }.  Validate and fold each
     iterator in turn; diagnosed iterators set RET but processing of
     the remaining ones continues.  */
  for (tree it = iter; it; it = TREE_CHAIN (it))
    {
      tree var = TREE_VEC_ELT (it, 0);
      tree begin = TREE_VEC_ELT (it, 1);
      tree end = TREE_VEC_ELT (it, 2);
      tree step = TREE_VEC_ELT (it, 3);
      tree orig_step;
      tree type = TREE_TYPE (var);
      location_t loc = DECL_SOURCE_LOCATION (var);
      if (type == error_mark_node)
	{
	  ret = true;
	  continue;
	}
      /* The iterator variable must be of integral or pointer type and
	 must be neither _Atomic nor const qualified.  */
      if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
	{
	  error_at (loc, "iterator %qD has neither integral nor pointer type" ,
		    var);
	  ret = true;
	  continue;
	}
      else if (TYPE_ATOMIC (type))
	{
	  error_at (loc, "iterator %qD has %<_Atomic%> qualified type" , var);
	  ret = true;
	  continue;
	}
      else if (TYPE_READONLY (type))
	{
	  error_at (loc, "iterator %qD has const qualified type" , var);
	  ret = true;
	  continue;
	}
      else if (step == error_mark_node
	       || TREE_TYPE (step) == error_mark_node)
	{
	  ret = true;
	  continue;
	}
      else if (!INTEGRAL_TYPE_P (TREE_TYPE (step)))
	{
	  error_at (EXPR_LOC_OR_LOC (step, loc),
		    "iterator step with non-integral type" );
	  ret = true;
	  continue;
	}
      /* Fold begin/end to the iterator's type; keep the original step
	 around separately, and fold the working step to sizetype for
	 pointer iterators so pointer arithmetic below is well typed.  */
      begin = c_fully_fold (build_c_cast (loc, type, expr: begin), false, NULL);
      end = c_fully_fold (build_c_cast (loc, type, expr: end), false, NULL);
      orig_step = save_expr (c_fully_fold (step, false, NULL));
      tree stype = POINTER_TYPE_P (type) ? sizetype : type;
      step = c_fully_fold (build_c_cast (loc, type: stype, expr: orig_step), false, NULL);
      if (POINTER_TYPE_P (type))
	{
	  /* For pointer iterators compute the step in bytes:
	     (begin + step) - begin, as a signed size.  */
	  begin = save_expr (begin);
	  step = pointer_int_sum (loc, PLUS_EXPR, begin, step);
	  step = fold_build2_loc (loc, MINUS_EXPR, sizetype,
				  fold_convert (sizetype, step),
				  fold_convert (sizetype, begin));
	  step = fold_convert (ssizetype, step);
	}
      if (integer_zerop (step))
	{
	  error_at (loc, "iterator %qD has zero step" , var);
	  ret = true;
	  continue;
	}

      if (begin == error_mark_node
	  || end == error_mark_node
	  || step == error_mark_node
	  || orig_step == error_mark_node)
	{
	  ret = true;
	  continue;
	}
      /* An inner iterator must not refer to any outer iterator in its
	 type, begin, end or step expressions; scan all later iterators
	 for references to VAR.  */
      hash_set<tree> pset;
      tree it2;
      for (it2 = TREE_CHAIN (it); it2; it2 = TREE_CHAIN (it2))
	{
	  tree var2 = TREE_VEC_ELT (it2, 0);
	  tree begin2 = TREE_VEC_ELT (it2, 1);
	  tree end2 = TREE_VEC_ELT (it2, 2);
	  tree step2 = TREE_VEC_ELT (it2, 3);
	  tree type2 = TREE_TYPE (var2);
	  location_t loc2 = DECL_SOURCE_LOCATION (var2);
	  struct c_find_omp_var_s data = { .var: var, .pset: &pset };
	  if (walk_tree (&type2, c_find_omp_var_r, &data, &pset))
	    {
	      error_at (loc2,
			"type of iterator %qD refers to outer iterator %qD" ,
			var2, var);
	      break;
	    }
	  else if (walk_tree (&begin2, c_find_omp_var_r, &data, &pset))
	    {
	      error_at (EXPR_LOC_OR_LOC (begin2, loc2),
			"begin expression refers to outer iterator %qD" , var);
	      break;
	    }
	  else if (walk_tree (&end2, c_find_omp_var_r, &data, &pset))
	    {
	      error_at (EXPR_LOC_OR_LOC (end2, loc2),
			"end expression refers to outer iterator %qD" , var);
	      break;
	    }
	  else if (walk_tree (&step2, c_find_omp_var_r, &data, &pset))
	    {
	      error_at (EXPR_LOC_OR_LOC (step2, loc2),
			"step expression refers to outer iterator %qD" , var);
	      break;
	    }
	}
      /* A non-NULL IT2 means the loop above broke out after a
	 diagnostic.  */
      if (it2)
	{
	  ret = true;
	  continue;
	}
      /* Store the folded expressions back; slot 4 holds the unfolded
	 original step.  */
      TREE_VEC_ELT (it, 1) = begin;
      TREE_VEC_ELT (it, 2) = end;
      TREE_VEC_ELT (it, 3) = step;
      TREE_VEC_ELT (it, 4) = orig_step;
    }
  return ret;
}
14678 | |
14679 | /* Ensure that pointers are used in OpenACC attach and detach clauses. |
14680 | Return true if an error has been detected. */ |
14681 | |
14682 | static bool |
14683 | c_oacc_check_attachments (tree c) |
14684 | { |
14685 | if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP) |
14686 | return false; |
14687 | |
14688 | /* OpenACC attach / detach clauses must be pointers. */ |
14689 | if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH |
14690 | || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH) |
14691 | { |
14692 | tree t = OMP_CLAUSE_DECL (c); |
14693 | |
14694 | while (TREE_CODE (t) == OMP_ARRAY_SECTION) |
14695 | t = TREE_OPERAND (t, 0); |
14696 | |
14697 | if (TREE_CODE (TREE_TYPE (t)) != POINTER_TYPE) |
14698 | { |
14699 | error_at (OMP_CLAUSE_LOCATION (c), "expected pointer in %qs clause" , |
14700 | user_omp_clause_code_name (c, true)); |
14701 | return true; |
14702 | } |
14703 | } |
14704 | |
14705 | return false; |
14706 | } |
14707 | |
14708 | /* For all elements of CLAUSES, validate them against their constraints. |
14709 | Remove any elements from the list that are invalid. */ |
14710 | |
14711 | tree |
14712 | c_finish_omp_clauses (tree clauses, enum c_omp_region_type ort) |
14713 | { |
14714 | bitmap_head generic_head, firstprivate_head, lastprivate_head; |
14715 | bitmap_head aligned_head, map_head, map_field_head, map_firstprivate_head; |
14716 | bitmap_head oacc_reduction_head, is_on_device_head; |
14717 | tree c, t, type, *pc; |
14718 | tree simdlen = NULL_TREE, safelen = NULL_TREE; |
14719 | bool branch_seen = false; |
14720 | bool copyprivate_seen = false; |
14721 | bool mergeable_seen = false; |
14722 | tree *detach_seen = NULL; |
14723 | bool linear_variable_step_check = false; |
14724 | tree *nowait_clause = NULL; |
14725 | tree ordered_clause = NULL_TREE; |
14726 | tree schedule_clause = NULL_TREE; |
14727 | bool oacc_async = false; |
14728 | tree last_iterators = NULL_TREE; |
14729 | bool last_iterators_remove = false; |
14730 | tree *nogroup_seen = NULL; |
14731 | tree *order_clause = NULL; |
14732 | /* 1 if normal/task reduction has been seen, -1 if inscan reduction |
14733 | has been seen, -2 if mixed inscan/normal reduction diagnosed. */ |
14734 | int reduction_seen = 0; |
14735 | bool allocate_seen = false; |
14736 | bool implicit_moved = false; |
14737 | bool target_in_reduction_seen = false; |
14738 | bool openacc = (ort & C_ORT_ACC) != 0; |
14739 | |
14740 | bitmap_obstack_initialize (NULL); |
14741 | bitmap_initialize (head: &generic_head, obstack: &bitmap_default_obstack); |
14742 | bitmap_initialize (head: &firstprivate_head, obstack: &bitmap_default_obstack); |
14743 | bitmap_initialize (head: &lastprivate_head, obstack: &bitmap_default_obstack); |
14744 | bitmap_initialize (head: &aligned_head, obstack: &bitmap_default_obstack); |
14745 | /* If ort == C_ORT_OMP_DECLARE_SIMD used as uniform_head instead. */ |
14746 | bitmap_initialize (head: &map_head, obstack: &bitmap_default_obstack); |
14747 | bitmap_initialize (head: &map_field_head, obstack: &bitmap_default_obstack); |
14748 | bitmap_initialize (head: &map_firstprivate_head, obstack: &bitmap_default_obstack); |
14749 | /* If ort == C_ORT_OMP used as nontemporal_head or use_device_xxx_head |
14750 | instead and for ort == C_ORT_OMP_TARGET used as in_reduction_head. */ |
14751 | bitmap_initialize (head: &oacc_reduction_head, obstack: &bitmap_default_obstack); |
14752 | bitmap_initialize (head: &is_on_device_head, obstack: &bitmap_default_obstack); |
14753 | |
14754 | if (openacc) |
14755 | for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c)) |
14756 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ASYNC) |
14757 | { |
14758 | oacc_async = true; |
14759 | break; |
14760 | } |
14761 | |
14762 | tree *grp_start_p = NULL, grp_sentinel = NULL_TREE; |
14763 | |
14764 | for (pc = &clauses, c = clauses; c ; c = *pc) |
14765 | { |
14766 | bool remove = false; |
14767 | bool need_complete = false; |
14768 | bool need_implicitly_determined = false; |
14769 | |
14770 | /* We've reached the end of a list of expanded nodes. Reset the group |
14771 | start pointer. */ |
14772 | if (c == grp_sentinel) |
14773 | grp_start_p = NULL; |
14774 | |
14775 | switch (OMP_CLAUSE_CODE (c)) |
14776 | { |
14777 | case OMP_CLAUSE_SHARED: |
14778 | need_implicitly_determined = true; |
14779 | goto check_dup_generic; |
14780 | |
14781 | case OMP_CLAUSE_PRIVATE: |
14782 | need_complete = true; |
14783 | need_implicitly_determined = true; |
14784 | goto check_dup_generic; |
14785 | |
14786 | case OMP_CLAUSE_REDUCTION: |
14787 | if (reduction_seen == 0) |
14788 | reduction_seen = OMP_CLAUSE_REDUCTION_INSCAN (c) ? -1 : 1; |
14789 | else if (reduction_seen != -2 |
14790 | && reduction_seen != (OMP_CLAUSE_REDUCTION_INSCAN (c) |
14791 | ? -1 : 1)) |
14792 | { |
14793 | error_at (OMP_CLAUSE_LOCATION (c), |
14794 | "%<inscan%> and non-%<inscan%> %<reduction%> clauses " |
14795 | "on the same construct" ); |
14796 | reduction_seen = -2; |
14797 | } |
14798 | /* FALLTHRU */ |
14799 | case OMP_CLAUSE_IN_REDUCTION: |
14800 | case OMP_CLAUSE_TASK_REDUCTION: |
14801 | need_implicitly_determined = true; |
14802 | t = OMP_CLAUSE_DECL (c); |
14803 | if (TREE_CODE (t) == OMP_ARRAY_SECTION) |
14804 | { |
14805 | if (handle_omp_array_sections (c, ort)) |
14806 | { |
14807 | remove = true; |
14808 | break; |
14809 | } |
14810 | |
14811 | t = OMP_CLAUSE_DECL (c); |
14812 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION |
14813 | && OMP_CLAUSE_REDUCTION_INSCAN (c)) |
14814 | { |
14815 | error_at (OMP_CLAUSE_LOCATION (c), |
14816 | "%<inscan%> %<reduction%> clause with array " |
14817 | "section" ); |
14818 | remove = true; |
14819 | break; |
14820 | } |
14821 | } |
14822 | t = require_complete_type (OMP_CLAUSE_LOCATION (c), value: t); |
14823 | if (t == error_mark_node) |
14824 | { |
14825 | remove = true; |
14826 | break; |
14827 | } |
14828 | if (oacc_async) |
14829 | c_mark_addressable (exp: t); |
14830 | type = TREE_TYPE (t); |
14831 | if (TREE_CODE (t) == MEM_REF) |
14832 | type = TREE_TYPE (type); |
14833 | if (TREE_CODE (type) == ARRAY_TYPE) |
14834 | { |
14835 | tree oatype = type; |
14836 | gcc_assert (TREE_CODE (t) != MEM_REF); |
14837 | while (TREE_CODE (type) == ARRAY_TYPE) |
14838 | type = TREE_TYPE (type); |
14839 | if (integer_zerop (TYPE_SIZE_UNIT (type))) |
14840 | { |
14841 | error_at (OMP_CLAUSE_LOCATION (c), |
14842 | "%qD in %<reduction%> clause is a zero size array" , |
14843 | t); |
14844 | remove = true; |
14845 | break; |
14846 | } |
14847 | tree size = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (oatype), |
14848 | TYPE_SIZE_UNIT (type)); |
14849 | if (integer_zerop (size)) |
14850 | { |
14851 | error_at (OMP_CLAUSE_LOCATION (c), |
14852 | "%qD in %<reduction%> clause is a zero size array" , |
14853 | t); |
14854 | remove = true; |
14855 | break; |
14856 | } |
14857 | size = size_binop (MINUS_EXPR, size, size_one_node); |
14858 | size = save_expr (size); |
14859 | tree index_type = build_index_type (size); |
14860 | tree atype = build_array_type (TYPE_MAIN_VARIANT (type), |
14861 | index_type); |
14862 | atype = c_build_qualified_type (atype, TYPE_QUALS (type)); |
14863 | tree ptype = build_pointer_type (type); |
14864 | if (TREE_CODE (TREE_TYPE (t)) == ARRAY_TYPE) |
14865 | t = build_fold_addr_expr (t); |
14866 | t = build2 (MEM_REF, atype, t, build_int_cst (ptype, 0)); |
14867 | OMP_CLAUSE_DECL (c) = t; |
14868 | } |
14869 | if (TYPE_ATOMIC (type)) |
14870 | { |
14871 | error_at (OMP_CLAUSE_LOCATION (c), |
14872 | "%<_Atomic%> %qE in %<reduction%> clause" , t); |
14873 | remove = true; |
14874 | break; |
14875 | } |
14876 | if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION |
14877 | || OMP_CLAUSE_REDUCTION_TASK (c)) |
14878 | { |
14879 | /* Disallow zero sized or potentially zero sized task |
14880 | reductions. */ |
14881 | if (integer_zerop (TYPE_SIZE_UNIT (type))) |
14882 | { |
14883 | error_at (OMP_CLAUSE_LOCATION (c), |
14884 | "zero sized type %qT in %qs clause" , type, |
14885 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
14886 | remove = true; |
14887 | break; |
14888 | } |
14889 | else if (TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST) |
14890 | { |
14891 | error_at (OMP_CLAUSE_LOCATION (c), |
14892 | "variable sized type %qT in %qs clause" , type, |
14893 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
14894 | remove = true; |
14895 | break; |
14896 | } |
14897 | } |
14898 | if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE |
14899 | && (FLOAT_TYPE_P (type) |
14900 | || TREE_CODE (type) == COMPLEX_TYPE)) |
14901 | { |
14902 | enum tree_code r_code = OMP_CLAUSE_REDUCTION_CODE (c); |
14903 | const char *r_name = NULL; |
14904 | |
14905 | switch (r_code) |
14906 | { |
14907 | case PLUS_EXPR: |
14908 | case MULT_EXPR: |
14909 | case MINUS_EXPR: |
14910 | case TRUTH_ANDIF_EXPR: |
14911 | case TRUTH_ORIF_EXPR: |
14912 | break; |
14913 | case MIN_EXPR: |
14914 | if (TREE_CODE (type) == COMPLEX_TYPE) |
14915 | r_name = "min" ; |
14916 | break; |
14917 | case MAX_EXPR: |
14918 | if (TREE_CODE (type) == COMPLEX_TYPE) |
14919 | r_name = "max" ; |
14920 | break; |
14921 | case BIT_AND_EXPR: |
14922 | r_name = "&" ; |
14923 | break; |
14924 | case BIT_XOR_EXPR: |
14925 | r_name = "^" ; |
14926 | break; |
14927 | case BIT_IOR_EXPR: |
14928 | r_name = "|" ; |
14929 | break; |
14930 | default: |
14931 | gcc_unreachable (); |
14932 | } |
14933 | if (r_name) |
14934 | { |
14935 | error_at (OMP_CLAUSE_LOCATION (c), |
14936 | "%qE has invalid type for %<reduction(%s)%>" , |
14937 | t, r_name); |
14938 | remove = true; |
14939 | break; |
14940 | } |
14941 | } |
14942 | else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == error_mark_node) |
14943 | { |
14944 | error_at (OMP_CLAUSE_LOCATION (c), |
14945 | "user defined reduction not found for %qE" , t); |
14946 | remove = true; |
14947 | break; |
14948 | } |
14949 | else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)) |
14950 | { |
14951 | tree list = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c); |
14952 | type = TYPE_MAIN_VARIANT (type); |
14953 | tree placeholder = build_decl (OMP_CLAUSE_LOCATION (c), |
14954 | VAR_DECL, NULL_TREE, type); |
14955 | tree decl_placeholder = NULL_TREE; |
14956 | OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = placeholder; |
14957 | DECL_ARTIFICIAL (placeholder) = 1; |
14958 | DECL_IGNORED_P (placeholder) = 1; |
14959 | if (TREE_CODE (t) == MEM_REF) |
14960 | { |
14961 | decl_placeholder = build_decl (OMP_CLAUSE_LOCATION (c), |
14962 | VAR_DECL, NULL_TREE, type); |
14963 | OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = decl_placeholder; |
14964 | DECL_ARTIFICIAL (decl_placeholder) = 1; |
14965 | DECL_IGNORED_P (decl_placeholder) = 1; |
14966 | } |
14967 | if (TREE_ADDRESSABLE (TREE_VEC_ELT (list, 0))) |
14968 | c_mark_addressable (exp: placeholder); |
14969 | if (TREE_ADDRESSABLE (TREE_VEC_ELT (list, 1))) |
14970 | c_mark_addressable (exp: decl_placeholder ? decl_placeholder |
14971 | : OMP_CLAUSE_DECL (c)); |
14972 | OMP_CLAUSE_REDUCTION_MERGE (c) |
14973 | = c_clone_omp_udr (TREE_VEC_ELT (list, 2), |
14974 | TREE_VEC_ELT (list, 0), |
14975 | TREE_VEC_ELT (list, 1), |
14976 | decl: decl_placeholder ? decl_placeholder |
14977 | : OMP_CLAUSE_DECL (c), placeholder); |
14978 | OMP_CLAUSE_REDUCTION_MERGE (c) |
14979 | = build3_loc (OMP_CLAUSE_LOCATION (c), code: BIND_EXPR, |
14980 | void_type_node, NULL_TREE, |
14981 | OMP_CLAUSE_REDUCTION_MERGE (c), NULL_TREE); |
14982 | TREE_SIDE_EFFECTS (OMP_CLAUSE_REDUCTION_MERGE (c)) = 1; |
14983 | if (TREE_VEC_LENGTH (list) == 6) |
14984 | { |
14985 | if (TREE_ADDRESSABLE (TREE_VEC_ELT (list, 3))) |
14986 | c_mark_addressable (exp: decl_placeholder ? decl_placeholder |
14987 | : OMP_CLAUSE_DECL (c)); |
14988 | if (TREE_ADDRESSABLE (TREE_VEC_ELT (list, 4))) |
14989 | c_mark_addressable (exp: placeholder); |
14990 | tree init = TREE_VEC_ELT (list, 5); |
14991 | if (init == error_mark_node) |
14992 | init = DECL_INITIAL (TREE_VEC_ELT (list, 3)); |
14993 | OMP_CLAUSE_REDUCTION_INIT (c) |
14994 | = c_clone_omp_udr (stmt: init, TREE_VEC_ELT (list, 4), |
14995 | TREE_VEC_ELT (list, 3), |
14996 | decl: decl_placeholder ? decl_placeholder |
14997 | : OMP_CLAUSE_DECL (c), placeholder); |
14998 | if (TREE_VEC_ELT (list, 5) == error_mark_node) |
14999 | { |
15000 | tree v = decl_placeholder ? decl_placeholder : t; |
15001 | OMP_CLAUSE_REDUCTION_INIT (c) |
15002 | = build2 (INIT_EXPR, TREE_TYPE (v), v, |
15003 | OMP_CLAUSE_REDUCTION_INIT (c)); |
15004 | } |
15005 | if (walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c), |
15006 | c_find_omp_placeholder_r, |
15007 | placeholder, NULL)) |
15008 | OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c) = 1; |
15009 | } |
15010 | else |
15011 | { |
15012 | tree init; |
15013 | tree v = decl_placeholder ? decl_placeholder : t; |
15014 | if (AGGREGATE_TYPE_P (TREE_TYPE (v))) |
15015 | init = build_constructor (TREE_TYPE (v), NULL); |
15016 | else |
15017 | init = fold_convert (TREE_TYPE (v), integer_zero_node); |
15018 | OMP_CLAUSE_REDUCTION_INIT (c) |
15019 | = build2 (INIT_EXPR, TREE_TYPE (v), v, init); |
15020 | } |
15021 | OMP_CLAUSE_REDUCTION_INIT (c) |
15022 | = build3_loc (OMP_CLAUSE_LOCATION (c), code: BIND_EXPR, |
15023 | void_type_node, NULL_TREE, |
15024 | OMP_CLAUSE_REDUCTION_INIT (c), NULL_TREE); |
15025 | TREE_SIDE_EFFECTS (OMP_CLAUSE_REDUCTION_INIT (c)) = 1; |
15026 | } |
15027 | if (TREE_CODE (t) == MEM_REF) |
15028 | { |
15029 | if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (t))) == NULL_TREE |
15030 | || TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (t)))) |
15031 | != INTEGER_CST) |
15032 | { |
15033 | sorry ("variable length element type in array " |
15034 | "%<reduction%> clause" ); |
15035 | remove = true; |
15036 | break; |
15037 | } |
15038 | t = TREE_OPERAND (t, 0); |
15039 | if (TREE_CODE (t) == POINTER_PLUS_EXPR) |
15040 | t = TREE_OPERAND (t, 0); |
15041 | if (TREE_CODE (t) == ADDR_EXPR) |
15042 | t = TREE_OPERAND (t, 0); |
15043 | } |
15044 | goto check_dup_generic_t; |
15045 | |
15046 | case OMP_CLAUSE_COPYPRIVATE: |
15047 | copyprivate_seen = true; |
15048 | if (nowait_clause) |
15049 | { |
15050 | error_at (OMP_CLAUSE_LOCATION (*nowait_clause), |
15051 | "%<nowait%> clause must not be used together " |
15052 | "with %<copyprivate%>" ); |
15053 | *nowait_clause = OMP_CLAUSE_CHAIN (*nowait_clause); |
15054 | nowait_clause = NULL; |
15055 | } |
15056 | goto check_dup_generic; |
15057 | |
15058 | case OMP_CLAUSE_COPYIN: |
15059 | t = OMP_CLAUSE_DECL (c); |
15060 | if (!VAR_P (t) || !DECL_THREAD_LOCAL_P (t)) |
15061 | { |
15062 | error_at (OMP_CLAUSE_LOCATION (c), |
15063 | "%qE must be %<threadprivate%> for %<copyin%>" , t); |
15064 | remove = true; |
15065 | break; |
15066 | } |
15067 | goto check_dup_generic; |
15068 | |
15069 | case OMP_CLAUSE_LINEAR: |
15070 | if (ort != C_ORT_OMP_DECLARE_SIMD) |
15071 | need_implicitly_determined = true; |
15072 | t = OMP_CLAUSE_DECL (c); |
15073 | if (ort != C_ORT_OMP_DECLARE_SIMD |
15074 | && OMP_CLAUSE_LINEAR_KIND (c) != OMP_CLAUSE_LINEAR_DEFAULT |
15075 | && OMP_CLAUSE_LINEAR_OLD_LINEAR_MODIFIER (c)) |
15076 | { |
15077 | error_at (OMP_CLAUSE_LOCATION (c), |
15078 | "modifier should not be specified in %<linear%> " |
15079 | "clause on %<simd%> or %<for%> constructs when not " |
15080 | "using OpenMP 5.2 modifiers" ); |
15081 | OMP_CLAUSE_LINEAR_KIND (c) = OMP_CLAUSE_LINEAR_DEFAULT; |
15082 | } |
15083 | if (!INTEGRAL_TYPE_P (TREE_TYPE (t)) |
15084 | && TREE_CODE (TREE_TYPE (t)) != POINTER_TYPE) |
15085 | { |
15086 | error_at (OMP_CLAUSE_LOCATION (c), |
15087 | "linear clause applied to non-integral non-pointer " |
15088 | "variable with type %qT" , TREE_TYPE (t)); |
15089 | remove = true; |
15090 | break; |
15091 | } |
15092 | if (TYPE_ATOMIC (TREE_TYPE (t))) |
15093 | { |
15094 | error_at (OMP_CLAUSE_LOCATION (c), |
15095 | "%<_Atomic%> %qD in %<linear%> clause" , t); |
15096 | remove = true; |
15097 | break; |
15098 | } |
15099 | if (ort == C_ORT_OMP_DECLARE_SIMD) |
15100 | { |
15101 | tree s = OMP_CLAUSE_LINEAR_STEP (c); |
15102 | if (TREE_CODE (s) == PARM_DECL) |
15103 | { |
15104 | OMP_CLAUSE_LINEAR_VARIABLE_STRIDE (c) = 1; |
15105 | /* map_head bitmap is used as uniform_head if |
15106 | declare_simd. */ |
15107 | if (!bitmap_bit_p (&map_head, DECL_UID (s))) |
15108 | linear_variable_step_check = true; |
15109 | goto check_dup_generic; |
15110 | } |
15111 | if (TREE_CODE (s) != INTEGER_CST) |
15112 | { |
15113 | error_at (OMP_CLAUSE_LOCATION (c), |
15114 | "%<linear%> clause step %qE is neither constant " |
15115 | "nor a parameter" , s); |
15116 | remove = true; |
15117 | break; |
15118 | } |
15119 | } |
15120 | if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c))) == POINTER_TYPE) |
15121 | { |
15122 | tree s = OMP_CLAUSE_LINEAR_STEP (c); |
15123 | s = pointer_int_sum (OMP_CLAUSE_LOCATION (c), PLUS_EXPR, |
15124 | OMP_CLAUSE_DECL (c), s); |
15125 | s = fold_build2_loc (OMP_CLAUSE_LOCATION (c), MINUS_EXPR, |
15126 | sizetype, fold_convert (sizetype, s), |
15127 | fold_convert |
15128 | (sizetype, OMP_CLAUSE_DECL (c))); |
15129 | if (s == error_mark_node) |
15130 | s = size_one_node; |
15131 | OMP_CLAUSE_LINEAR_STEP (c) = s; |
15132 | } |
15133 | else |
15134 | OMP_CLAUSE_LINEAR_STEP (c) |
15135 | = fold_convert (TREE_TYPE (t), OMP_CLAUSE_LINEAR_STEP (c)); |
15136 | goto check_dup_generic; |
15137 | |
15138 | check_dup_generic: |
15139 | t = OMP_CLAUSE_DECL (c); |
15140 | check_dup_generic_t: |
15141 | if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL) |
15142 | { |
15143 | error_at (OMP_CLAUSE_LOCATION (c), |
15144 | "%qE is not a variable in clause %qs" , t, |
15145 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
15146 | remove = true; |
15147 | } |
15148 | else if ((openacc && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION) |
15149 | || (ort == C_ORT_OMP |
15150 | && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR |
15151 | || (OMP_CLAUSE_CODE (c) |
15152 | == OMP_CLAUSE_USE_DEVICE_ADDR))) |
15153 | || (ort == C_ORT_OMP_TARGET |
15154 | && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)) |
15155 | { |
15156 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION |
15157 | && (bitmap_bit_p (&generic_head, DECL_UID (t)) |
15158 | || bitmap_bit_p (&firstprivate_head, DECL_UID (t)))) |
15159 | { |
15160 | error_at (OMP_CLAUSE_LOCATION (c), |
15161 | "%qD appears more than once in data-sharing " |
15162 | "clauses" , t); |
15163 | remove = true; |
15164 | break; |
15165 | } |
15166 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION) |
15167 | target_in_reduction_seen = true; |
15168 | if (bitmap_bit_p (&oacc_reduction_head, DECL_UID (t))) |
15169 | { |
15170 | error_at (OMP_CLAUSE_LOCATION (c), |
15171 | openacc |
15172 | ? "%qD appears more than once in reduction clauses" |
15173 | : "%qD appears more than once in data clauses" , |
15174 | t); |
15175 | remove = true; |
15176 | } |
15177 | else |
15178 | bitmap_set_bit (&oacc_reduction_head, DECL_UID (t)); |
15179 | } |
15180 | else if (bitmap_bit_p (&generic_head, DECL_UID (t)) |
15181 | || bitmap_bit_p (&firstprivate_head, DECL_UID (t)) |
15182 | || bitmap_bit_p (&lastprivate_head, DECL_UID (t)) |
15183 | || bitmap_bit_p (&map_firstprivate_head, DECL_UID (t))) |
15184 | { |
15185 | error_at (OMP_CLAUSE_LOCATION (c), |
15186 | "%qE appears more than once in data clauses" , t); |
15187 | remove = true; |
15188 | } |
15189 | else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE |
15190 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR |
15191 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR) |
15192 | && bitmap_bit_p (&map_head, DECL_UID (t))) |
15193 | { |
15194 | if (openacc) |
15195 | error_at (OMP_CLAUSE_LOCATION (c), |
15196 | "%qD appears more than once in data clauses" , t); |
15197 | else |
15198 | error_at (OMP_CLAUSE_LOCATION (c), |
15199 | "%qD appears both in data and map clauses" , t); |
15200 | remove = true; |
15201 | } |
15202 | else |
15203 | bitmap_set_bit (&generic_head, DECL_UID (t)); |
15204 | break; |
15205 | |
15206 | case OMP_CLAUSE_FIRSTPRIVATE: |
15207 | if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c) && !implicit_moved) |
15208 | { |
15209 | move_implicit: |
15210 | implicit_moved = true; |
15211 | /* Move firstprivate and map clauses with |
15212 | OMP_CLAUSE_{FIRSTPRIVATE,MAP}_IMPLICIT set to the end of |
15213 | clauses chain. */ |
15214 | tree cl1 = NULL_TREE, cl2 = NULL_TREE; |
15215 | tree *pc1 = pc, *pc2 = &cl1, *pc3 = &cl2; |
15216 | while (*pc1) |
15217 | if (OMP_CLAUSE_CODE (*pc1) == OMP_CLAUSE_FIRSTPRIVATE |
15218 | && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (*pc1)) |
15219 | { |
15220 | *pc3 = *pc1; |
15221 | pc3 = &OMP_CLAUSE_CHAIN (*pc3); |
15222 | *pc1 = OMP_CLAUSE_CHAIN (*pc1); |
15223 | } |
15224 | else if (OMP_CLAUSE_CODE (*pc1) == OMP_CLAUSE_MAP |
15225 | && OMP_CLAUSE_MAP_IMPLICIT (*pc1)) |
15226 | { |
15227 | *pc2 = *pc1; |
15228 | pc2 = &OMP_CLAUSE_CHAIN (*pc2); |
15229 | *pc1 = OMP_CLAUSE_CHAIN (*pc1); |
15230 | } |
15231 | else |
15232 | pc1 = &OMP_CLAUSE_CHAIN (*pc1); |
15233 | *pc3 = NULL; |
15234 | *pc2 = cl2; |
15235 | *pc1 = cl1; |
15236 | continue; |
15237 | } |
15238 | t = OMP_CLAUSE_DECL (c); |
15239 | need_complete = true; |
15240 | need_implicitly_determined = true; |
15241 | if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL) |
15242 | { |
15243 | error_at (OMP_CLAUSE_LOCATION (c), |
15244 | "%qE is not a variable in clause %<firstprivate%>" , t); |
15245 | remove = true; |
15246 | } |
15247 | else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c) |
15248 | && !OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT_TARGET (c) |
15249 | && bitmap_bit_p (&map_firstprivate_head, DECL_UID (t))) |
15250 | remove = true; |
15251 | else if (bitmap_bit_p (&generic_head, DECL_UID (t)) |
15252 | || bitmap_bit_p (&firstprivate_head, DECL_UID (t)) |
15253 | || bitmap_bit_p (&map_firstprivate_head, DECL_UID (t))) |
15254 | { |
15255 | error_at (OMP_CLAUSE_LOCATION (c), |
15256 | "%qE appears more than once in data clauses" , t); |
15257 | remove = true; |
15258 | } |
15259 | else if (bitmap_bit_p (&map_head, DECL_UID (t)) |
15260 | || bitmap_bit_p (&map_field_head, DECL_UID (t))) |
15261 | { |
15262 | if (openacc) |
15263 | error_at (OMP_CLAUSE_LOCATION (c), |
15264 | "%qD appears more than once in data clauses" , t); |
15265 | else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c) |
15266 | && !OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT_TARGET (c)) |
15267 | /* Silently drop the clause. */; |
15268 | else |
15269 | error_at (OMP_CLAUSE_LOCATION (c), |
15270 | "%qD appears both in data and map clauses" , t); |
15271 | remove = true; |
15272 | } |
15273 | else |
15274 | bitmap_set_bit (&firstprivate_head, DECL_UID (t)); |
15275 | break; |
15276 | |
15277 | case OMP_CLAUSE_LASTPRIVATE: |
15278 | t = OMP_CLAUSE_DECL (c); |
15279 | need_complete = true; |
15280 | need_implicitly_determined = true; |
15281 | if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL) |
15282 | { |
15283 | error_at (OMP_CLAUSE_LOCATION (c), |
15284 | "%qE is not a variable in clause %<lastprivate%>" , t); |
15285 | remove = true; |
15286 | } |
15287 | else if (bitmap_bit_p (&generic_head, DECL_UID (t)) |
15288 | || bitmap_bit_p (&lastprivate_head, DECL_UID (t))) |
15289 | { |
15290 | error_at (OMP_CLAUSE_LOCATION (c), |
15291 | "%qE appears more than once in data clauses" , t); |
15292 | remove = true; |
15293 | } |
15294 | else |
15295 | bitmap_set_bit (&lastprivate_head, DECL_UID (t)); |
15296 | break; |
15297 | |
15298 | case OMP_CLAUSE_ALIGNED: |
15299 | t = OMP_CLAUSE_DECL (c); |
15300 | if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL) |
15301 | { |
15302 | error_at (OMP_CLAUSE_LOCATION (c), |
15303 | "%qE is not a variable in %<aligned%> clause" , t); |
15304 | remove = true; |
15305 | } |
15306 | else if (!POINTER_TYPE_P (TREE_TYPE (t)) |
15307 | && TREE_CODE (TREE_TYPE (t)) != ARRAY_TYPE) |
15308 | { |
15309 | error_at (OMP_CLAUSE_LOCATION (c), |
15310 | "%qE in %<aligned%> clause is neither a pointer nor " |
15311 | "an array" , t); |
15312 | remove = true; |
15313 | } |
15314 | else if (TYPE_ATOMIC (TREE_TYPE (t))) |
15315 | { |
15316 | error_at (OMP_CLAUSE_LOCATION (c), |
15317 | "%<_Atomic%> %qD in %<aligned%> clause" , t); |
15318 | remove = true; |
15319 | break; |
15320 | } |
15321 | else if (bitmap_bit_p (&aligned_head, DECL_UID (t))) |
15322 | { |
15323 | error_at (OMP_CLAUSE_LOCATION (c), |
15324 | "%qE appears more than once in %<aligned%> clauses" , |
15325 | t); |
15326 | remove = true; |
15327 | } |
15328 | else |
15329 | bitmap_set_bit (&aligned_head, DECL_UID (t)); |
15330 | break; |
15331 | |
15332 | case OMP_CLAUSE_NONTEMPORAL: |
15333 | t = OMP_CLAUSE_DECL (c); |
15334 | if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL) |
15335 | { |
15336 | error_at (OMP_CLAUSE_LOCATION (c), |
15337 | "%qE is not a variable in %<nontemporal%> clause" , t); |
15338 | remove = true; |
15339 | } |
15340 | else if (bitmap_bit_p (&oacc_reduction_head, DECL_UID (t))) |
15341 | { |
15342 | error_at (OMP_CLAUSE_LOCATION (c), |
15343 | "%qE appears more than once in %<nontemporal%> " |
15344 | "clauses" , t); |
15345 | remove = true; |
15346 | } |
15347 | else |
15348 | bitmap_set_bit (&oacc_reduction_head, DECL_UID (t)); |
15349 | break; |
15350 | |
15351 | case OMP_CLAUSE_ALLOCATE: |
15352 | t = OMP_CLAUSE_DECL (c); |
15353 | if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL) |
15354 | { |
15355 | error_at (OMP_CLAUSE_LOCATION (c), |
15356 | "%qE is not a variable in %<allocate%> clause" , t); |
15357 | remove = true; |
15358 | } |
15359 | else if (bitmap_bit_p (&aligned_head, DECL_UID (t))) |
15360 | { |
15361 | warning_at (OMP_CLAUSE_LOCATION (c), 0, |
15362 | "%qE appears more than once in %<allocate%> clauses" , |
15363 | t); |
15364 | remove = true; |
15365 | } |
15366 | else |
15367 | { |
15368 | bitmap_set_bit (&aligned_head, DECL_UID (t)); |
15369 | if (!OMP_CLAUSE_ALLOCATE_COMBINED (c)) |
15370 | allocate_seen = true; |
15371 | } |
15372 | break; |
15373 | |
15374 | case OMP_CLAUSE_DOACROSS: |
15375 | t = OMP_CLAUSE_DECL (c); |
15376 | if (t == NULL_TREE) |
15377 | break; |
15378 | if (OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK) |
15379 | { |
15380 | gcc_assert (TREE_CODE (t) == TREE_LIST); |
15381 | for (; t; t = TREE_CHAIN (t)) |
15382 | { |
15383 | tree decl = TREE_VALUE (t); |
15384 | if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE) |
15385 | { |
15386 | tree offset = TREE_PURPOSE (t); |
15387 | bool neg = wi::neg_p (x: wi::to_wide (t: offset)); |
15388 | offset = fold_unary (ABS_EXPR, TREE_TYPE (offset), offset); |
15389 | tree t2 = pointer_int_sum (OMP_CLAUSE_LOCATION (c), |
15390 | neg ? MINUS_EXPR : PLUS_EXPR, |
15391 | decl, offset); |
15392 | t2 = fold_build2_loc (OMP_CLAUSE_LOCATION (c), MINUS_EXPR, |
15393 | sizetype, |
15394 | fold_convert (sizetype, t2), |
15395 | fold_convert (sizetype, decl)); |
15396 | if (t2 == error_mark_node) |
15397 | { |
15398 | remove = true; |
15399 | break; |
15400 | } |
15401 | TREE_PURPOSE (t) = t2; |
15402 | } |
15403 | } |
15404 | break; |
15405 | } |
15406 | gcc_unreachable (); |
15407 | case OMP_CLAUSE_DEPEND: |
15408 | case OMP_CLAUSE_AFFINITY: |
15409 | t = OMP_CLAUSE_DECL (c); |
15410 | if (TREE_CODE (t) == TREE_LIST |
15411 | && TREE_PURPOSE (t) |
15412 | && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC) |
15413 | { |
15414 | if (TREE_PURPOSE (t) != last_iterators) |
15415 | last_iterators_remove |
15416 | = c_omp_finish_iterators (TREE_PURPOSE (t)); |
15417 | last_iterators = TREE_PURPOSE (t); |
15418 | t = TREE_VALUE (t); |
15419 | if (last_iterators_remove) |
15420 | t = error_mark_node; |
15421 | } |
15422 | else |
15423 | last_iterators = NULL_TREE; |
15424 | if (TREE_CODE (t) == OMP_ARRAY_SECTION) |
15425 | { |
15426 | if (handle_omp_array_sections (c, ort)) |
15427 | remove = true; |
15428 | else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND |
15429 | && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_DEPOBJ) |
15430 | { |
15431 | error_at (OMP_CLAUSE_LOCATION (c), |
15432 | "%<depend%> clause with %<depobj%> dependence " |
15433 | "type on array section" ); |
15434 | remove = true; |
15435 | } |
15436 | break; |
15437 | } |
15438 | if (t == error_mark_node) |
15439 | remove = true; |
15440 | else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND |
15441 | && t == ridpointers[RID_OMP_ALL_MEMORY]) |
15442 | { |
15443 | if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_OUT |
15444 | && OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_INOUT) |
15445 | { |
15446 | error_at (OMP_CLAUSE_LOCATION (c), |
15447 | "%<omp_all_memory%> used with %<depend%> kind " |
15448 | "other than %<out%> or %<inout%>" ); |
15449 | remove = true; |
15450 | } |
15451 | } |
15452 | else if (!lvalue_p (ref: t)) |
15453 | { |
15454 | error_at (OMP_CLAUSE_LOCATION (c), |
15455 | "%qE is not lvalue expression nor array section in " |
15456 | "%qs clause" , t, |
15457 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
15458 | remove = true; |
15459 | } |
15460 | else if (TREE_CODE (t) == COMPONENT_REF |
15461 | && DECL_C_BIT_FIELD (TREE_OPERAND (t, 1))) |
15462 | { |
15463 | gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND |
15464 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY); |
15465 | error_at (OMP_CLAUSE_LOCATION (c), |
15466 | "bit-field %qE in %qs clause" , t, |
15467 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
15468 | remove = true; |
15469 | } |
15470 | else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND |
15471 | && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_DEPOBJ) |
15472 | { |
15473 | if (!c_omp_depend_t_p (TREE_TYPE (t))) |
15474 | { |
15475 | error_at (OMP_CLAUSE_LOCATION (c), |
15476 | "%qE does not have %<omp_depend_t%> type in " |
15477 | "%<depend%> clause with %<depobj%> dependence " |
15478 | "type" , t); |
15479 | remove = true; |
15480 | } |
15481 | } |
15482 | else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND |
15483 | && c_omp_depend_t_p (TREE_TYPE (t))) |
15484 | { |
15485 | error_at (OMP_CLAUSE_LOCATION (c), |
15486 | "%qE should not have %<omp_depend_t%> type in " |
15487 | "%<depend%> clause with dependence type other than " |
15488 | "%<depobj%>" , t); |
15489 | remove = true; |
15490 | } |
15491 | if (!remove) |
15492 | { |
15493 | if (t == ridpointers[RID_OMP_ALL_MEMORY]) |
15494 | t = null_pointer_node; |
15495 | else |
15496 | { |
15497 | tree addr = build_unary_op (OMP_CLAUSE_LOCATION (c), |
15498 | code: ADDR_EXPR, xarg: t, noconvert: false); |
15499 | if (addr == error_mark_node) |
15500 | { |
15501 | remove = true; |
15502 | break; |
15503 | } |
15504 | t = build_indirect_ref (OMP_CLAUSE_LOCATION (c), ptr: addr, |
15505 | errstring: RO_UNARY_STAR); |
15506 | if (t == error_mark_node) |
15507 | { |
15508 | remove = true; |
15509 | break; |
15510 | } |
15511 | } |
15512 | if (TREE_CODE (OMP_CLAUSE_DECL (c)) == TREE_LIST |
15513 | && TREE_PURPOSE (OMP_CLAUSE_DECL (c)) |
15514 | && (TREE_CODE (TREE_PURPOSE (OMP_CLAUSE_DECL (c))) |
15515 | == TREE_VEC)) |
15516 | TREE_VALUE (OMP_CLAUSE_DECL (c)) = t; |
15517 | else |
15518 | OMP_CLAUSE_DECL (c) = t; |
15519 | } |
15520 | break; |
15521 | |
15522 | case OMP_CLAUSE_MAP: |
15523 | if (OMP_CLAUSE_MAP_IMPLICIT (c) && !implicit_moved) |
15524 | goto move_implicit; |
15525 | /* FALLTHRU */ |
15526 | case OMP_CLAUSE_TO: |
15527 | case OMP_CLAUSE_FROM: |
15528 | case OMP_CLAUSE__CACHE_: |
15529 | { |
15530 | using namespace omp_addr_tokenizer; |
15531 | auto_vec<omp_addr_token *, 10> addr_tokens; |
15532 | |
15533 | t = OMP_CLAUSE_DECL (c); |
15534 | if (TREE_CODE (t) == OMP_ARRAY_SECTION) |
15535 | { |
15536 | grp_start_p = pc; |
15537 | grp_sentinel = OMP_CLAUSE_CHAIN (c); |
15538 | |
15539 | if (handle_omp_array_sections (c, ort)) |
15540 | remove = true; |
15541 | else |
15542 | { |
15543 | t = OMP_CLAUSE_DECL (c); |
15544 | if (!omp_mappable_type (TREE_TYPE (t))) |
15545 | { |
15546 | error_at (OMP_CLAUSE_LOCATION (c), |
15547 | "array section does not have mappable type " |
15548 | "in %qs clause" , |
15549 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
15550 | remove = true; |
15551 | } |
15552 | else if (TYPE_ATOMIC (TREE_TYPE (t))) |
15553 | { |
15554 | error_at (OMP_CLAUSE_LOCATION (c), |
15555 | "%<_Atomic%> %qE in %qs clause" , t, |
15556 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
15557 | remove = true; |
15558 | } |
15559 | while (TREE_CODE (t) == ARRAY_REF) |
15560 | t = TREE_OPERAND (t, 0); |
15561 | |
15562 | c_omp_address_inspector ai (OMP_CLAUSE_LOCATION (c), t); |
15563 | |
15564 | if (!omp_parse_expr (addr_tokens, t)) |
15565 | { |
15566 | sorry_at (OMP_CLAUSE_LOCATION (c), |
15567 | "unsupported map expression %qE" , |
15568 | OMP_CLAUSE_DECL (c)); |
15569 | remove = true; |
15570 | break; |
15571 | } |
15572 | |
15573 | /* This check is to determine if this will be the only map |
15574 | node created for this clause. Otherwise, we'll check |
15575 | the following FIRSTPRIVATE_POINTER or ATTACH_DETACH |
15576 | node on the next iteration(s) of the loop. */ |
15577 | if (addr_tokens.length () >= 4 |
15578 | && addr_tokens[0]->type == STRUCTURE_BASE |
15579 | && addr_tokens[0]->u.structure_base_kind == BASE_DECL |
15580 | && addr_tokens[1]->type == ACCESS_METHOD |
15581 | && addr_tokens[2]->type == COMPONENT_SELECTOR |
15582 | && addr_tokens[3]->type == ACCESS_METHOD |
15583 | && (addr_tokens[3]->u.access_kind == ACCESS_DIRECT |
15584 | || (addr_tokens[3]->u.access_kind |
15585 | == ACCESS_INDEXED_ARRAY))) |
15586 | { |
15587 | tree rt = addr_tokens[1]->expr; |
15588 | |
15589 | gcc_assert (DECL_P (rt)); |
15590 | |
15591 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
15592 | && OMP_CLAUSE_MAP_IMPLICIT (c) |
15593 | && (bitmap_bit_p (&map_head, DECL_UID (rt)) |
15594 | || bitmap_bit_p (&map_field_head, DECL_UID (rt)) |
15595 | || bitmap_bit_p (&map_firstprivate_head, |
15596 | DECL_UID (rt)))) |
15597 | { |
15598 | remove = true; |
15599 | break; |
15600 | } |
15601 | if (bitmap_bit_p (&map_field_head, DECL_UID (rt))) |
15602 | break; |
15603 | if (bitmap_bit_p (&map_head, DECL_UID (rt))) |
15604 | { |
15605 | if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP) |
15606 | error_at (OMP_CLAUSE_LOCATION (c), |
15607 | "%qD appears more than once in motion " |
15608 | "clauses" , rt); |
15609 | else if (openacc) |
15610 | error_at (OMP_CLAUSE_LOCATION (c), |
15611 | "%qD appears more than once in data " |
15612 | "clauses" , rt); |
15613 | else |
15614 | error_at (OMP_CLAUSE_LOCATION (c), |
15615 | "%qD appears more than once in map " |
15616 | "clauses" , rt); |
15617 | remove = true; |
15618 | } |
15619 | else |
15620 | { |
15621 | bitmap_set_bit (&map_head, DECL_UID (rt)); |
15622 | bitmap_set_bit (&map_field_head, DECL_UID (rt)); |
15623 | } |
15624 | } |
15625 | } |
15626 | if (c_oacc_check_attachments (c)) |
15627 | remove = true; |
15628 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
15629 | && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH |
15630 | || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH) |
15631 | && !OMP_CLAUSE_SIZE (c)) |
15632 | /* In this case, we have a single array element which is a |
15633 | pointer, and we already set OMP_CLAUSE_SIZE in |
15634 | handle_omp_array_sections above. For attach/detach |
15635 | clauses, reset the OMP_CLAUSE_SIZE (representing a bias) |
15636 | to zero here. */ |
15637 | OMP_CLAUSE_SIZE (c) = size_zero_node; |
15638 | break; |
15639 | } |
15640 | else if (!omp_parse_expr (addr_tokens, t)) |
15641 | { |
15642 | sorry_at (OMP_CLAUSE_LOCATION (c), |
15643 | "unsupported map expression %qE" , |
15644 | OMP_CLAUSE_DECL (c)); |
15645 | remove = true; |
15646 | break; |
15647 | } |
15648 | if (t == error_mark_node) |
15649 | { |
15650 | remove = true; |
15651 | break; |
15652 | } |
15653 | /* OpenACC attach / detach clauses must be pointers. */ |
15654 | if (c_oacc_check_attachments (c)) |
15655 | { |
15656 | remove = true; |
15657 | break; |
15658 | } |
15659 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
15660 | && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH |
15661 | || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH) |
15662 | && !OMP_CLAUSE_SIZE (c)) |
15663 | /* For attach/detach clauses, set OMP_CLAUSE_SIZE (representing a |
15664 | bias) to zero here, so it is not set erroneously to the pointer |
15665 | size later on in gimplify.cc. */ |
15666 | OMP_CLAUSE_SIZE (c) = size_zero_node; |
15667 | |
15668 | c_omp_address_inspector ai (OMP_CLAUSE_LOCATION (c), t); |
15669 | |
15670 | if (!ai.check_clause (c)) |
15671 | { |
15672 | remove = true; |
15673 | break; |
15674 | } |
15675 | |
15676 | if (!ai.map_supported_p ()) |
15677 | { |
15678 | sorry_at (OMP_CLAUSE_LOCATION (c), |
15679 | "unsupported map expression %qE" , |
15680 | OMP_CLAUSE_DECL (c)); |
15681 | remove = true; |
15682 | break; |
15683 | } |
15684 | |
15685 | gcc_assert ((addr_tokens[0]->type == ARRAY_BASE |
15686 | || addr_tokens[0]->type == STRUCTURE_BASE) |
15687 | && addr_tokens[1]->type == ACCESS_METHOD); |
15688 | |
15689 | t = addr_tokens[1]->expr; |
15690 | |
15691 | if (addr_tokens[0]->u.structure_base_kind != BASE_DECL) |
15692 | goto skip_decl_checks; |
15693 | |
15694 | /* For OpenMP, we can access a struct "t" and "t.d" on the same |
15695 | mapping. OpenACC allows multiple fields of the same structure |
15696 | to be written. */ |
15697 | if (addr_tokens[0]->type == STRUCTURE_BASE |
15698 | && (bitmap_bit_p (&map_field_head, DECL_UID (t)) |
15699 | || (!openacc && bitmap_bit_p (&map_head, DECL_UID (t))))) |
15700 | goto skip_decl_checks; |
15701 | |
15702 | if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL) |
15703 | { |
15704 | if (ort != C_ORT_ACC && EXPR_P (t)) |
15705 | break; |
15706 | |
15707 | error_at (OMP_CLAUSE_LOCATION (c), |
15708 | "%qE is not a variable in %qs clause" , t, |
15709 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
15710 | remove = true; |
15711 | } |
15712 | else if (VAR_P (t) && DECL_THREAD_LOCAL_P (t)) |
15713 | { |
15714 | error_at (OMP_CLAUSE_LOCATION (c), |
15715 | "%qD is threadprivate variable in %qs clause" , t, |
15716 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
15717 | remove = true; |
15718 | } |
15719 | else if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP |
15720 | || (OMP_CLAUSE_MAP_KIND (c) |
15721 | != GOMP_MAP_FIRSTPRIVATE_POINTER)) |
15722 | && !c_mark_addressable (exp: t)) |
15723 | remove = true; |
15724 | else if (!(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
15725 | && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER |
15726 | || (OMP_CLAUSE_MAP_KIND (c) |
15727 | == GOMP_MAP_FIRSTPRIVATE_POINTER) |
15728 | || (OMP_CLAUSE_MAP_KIND (c) |
15729 | == GOMP_MAP_FORCE_DEVICEPTR))) |
15730 | && t == OMP_CLAUSE_DECL (c) |
15731 | && !omp_mappable_type (TREE_TYPE (t))) |
15732 | { |
15733 | error_at (OMP_CLAUSE_LOCATION (c), |
15734 | "%qD does not have a mappable type in %qs clause" , t, |
15735 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
15736 | remove = true; |
15737 | } |
15738 | else if (TREE_TYPE (t) == error_mark_node) |
15739 | remove = true; |
15740 | else if (TYPE_ATOMIC (strip_array_types (TREE_TYPE (t)))) |
15741 | { |
15742 | error_at (OMP_CLAUSE_LOCATION (c), |
15743 | "%<_Atomic%> %qE in %qs clause" , t, |
15744 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
15745 | remove = true; |
15746 | } |
15747 | else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
15748 | && OMP_CLAUSE_MAP_IMPLICIT (c) |
15749 | && (bitmap_bit_p (&map_head, DECL_UID (t)) |
15750 | || bitmap_bit_p (&map_field_head, DECL_UID (t)) |
15751 | || bitmap_bit_p (&map_firstprivate_head, |
15752 | DECL_UID (t)))) |
15753 | remove = true; |
15754 | else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
15755 | && (OMP_CLAUSE_MAP_KIND (c) |
15756 | == GOMP_MAP_FIRSTPRIVATE_POINTER)) |
15757 | { |
15758 | if (bitmap_bit_p (&generic_head, DECL_UID (t)) |
15759 | || bitmap_bit_p (&firstprivate_head, DECL_UID (t)) |
15760 | || bitmap_bit_p (&map_firstprivate_head, DECL_UID (t))) |
15761 | { |
15762 | error_at (OMP_CLAUSE_LOCATION (c), |
15763 | "%qD appears more than once in data clauses" , t); |
15764 | remove = true; |
15765 | } |
15766 | else if (bitmap_bit_p (&map_head, DECL_UID (t)) |
15767 | && !bitmap_bit_p (&map_field_head, DECL_UID (t)) |
15768 | && openacc) |
15769 | { |
15770 | error_at (OMP_CLAUSE_LOCATION (c), |
15771 | "%qD appears more than once in data clauses" , t); |
15772 | remove = true; |
15773 | } |
15774 | else |
15775 | bitmap_set_bit (&map_firstprivate_head, DECL_UID (t)); |
15776 | } |
15777 | else if (bitmap_bit_p (&map_head, DECL_UID (t)) |
15778 | && !bitmap_bit_p (&map_field_head, DECL_UID (t)) |
15779 | && ort != C_ORT_OMP |
15780 | && ort != C_ORT_OMP_EXIT_DATA) |
15781 | { |
15782 | if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP) |
15783 | error_at (OMP_CLAUSE_LOCATION (c), |
15784 | "%qD appears more than once in motion clauses" , t); |
15785 | else if (openacc) |
15786 | error_at (OMP_CLAUSE_LOCATION (c), |
15787 | "%qD appears more than once in data clauses" , t); |
15788 | else |
15789 | error_at (OMP_CLAUSE_LOCATION (c), |
15790 | "%qD appears more than once in map clauses" , t); |
15791 | remove = true; |
15792 | } |
15793 | else if (openacc && bitmap_bit_p (&generic_head, DECL_UID (t))) |
15794 | { |
15795 | error_at (OMP_CLAUSE_LOCATION (c), |
15796 | "%qD appears more than once in data clauses" , t); |
15797 | remove = true; |
15798 | } |
15799 | else if (bitmap_bit_p (&firstprivate_head, DECL_UID (t)) |
15800 | || bitmap_bit_p (&is_on_device_head, DECL_UID (t))) |
15801 | { |
15802 | if (openacc) |
15803 | error_at (OMP_CLAUSE_LOCATION (c), |
15804 | "%qD appears more than once in data clauses" , t); |
15805 | else |
15806 | error_at (OMP_CLAUSE_LOCATION (c), |
15807 | "%qD appears both in data and map clauses" , t); |
15808 | remove = true; |
15809 | } |
15810 | else if (!omp_access_chain_p (addr_tokens, 1)) |
15811 | { |
15812 | bitmap_set_bit (&map_head, DECL_UID (t)); |
15813 | if (t != OMP_CLAUSE_DECL (c) |
15814 | && TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF) |
15815 | bitmap_set_bit (&map_field_head, DECL_UID (t)); |
15816 | } |
15817 | |
15818 | skip_decl_checks: |
15819 | /* If we call omp_expand_map_clause in handle_omp_array_sections, |
15820 | the containing loop (here) iterates through the new nodes |
15821 | created by that expansion. Avoid expanding those again (just |
15822 | by checking the node type). */ |
15823 | if (!remove |
15824 | && ort != C_ORT_DECLARE_SIMD |
15825 | && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP |
15826 | || (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER |
15827 | && (OMP_CLAUSE_MAP_KIND (c) |
15828 | != GOMP_MAP_FIRSTPRIVATE_REFERENCE) |
15829 | && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER |
15830 | && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH_DETACH |
15831 | && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH |
15832 | && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH))) |
15833 | { |
15834 | grp_start_p = pc; |
15835 | grp_sentinel = OMP_CLAUSE_CHAIN (c); |
15836 | tree nc = ai.expand_map_clause (c, OMP_CLAUSE_DECL (c), |
15837 | addr_tokens, ort); |
15838 | if (nc != error_mark_node) |
15839 | c = nc; |
15840 | } |
15841 | } |
15842 | break; |
15843 | |
15844 | case OMP_CLAUSE_ENTER: |
15845 | case OMP_CLAUSE_LINK: |
15846 | t = OMP_CLAUSE_DECL (c); |
15847 | const char *cname; |
15848 | cname = omp_clause_code_name[OMP_CLAUSE_CODE (c)]; |
15849 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ENTER |
15850 | && OMP_CLAUSE_ENTER_TO (c)) |
15851 | cname = "to" ; |
15852 | if (TREE_CODE (t) == FUNCTION_DECL |
15853 | && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ENTER) |
15854 | ; |
15855 | else if (!VAR_P (t)) |
15856 | { |
15857 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ENTER) |
15858 | error_at (OMP_CLAUSE_LOCATION (c), |
15859 | "%qE is neither a variable nor a function name in " |
15860 | "clause %qs" , t, cname); |
15861 | else |
15862 | error_at (OMP_CLAUSE_LOCATION (c), |
15863 | "%qE is not a variable in clause %qs" , t, cname); |
15864 | remove = true; |
15865 | } |
15866 | else if (DECL_THREAD_LOCAL_P (t)) |
15867 | { |
15868 | error_at (OMP_CLAUSE_LOCATION (c), |
15869 | "%qD is threadprivate variable in %qs clause" , t, |
15870 | cname); |
15871 | remove = true; |
15872 | } |
15873 | else if (!omp_mappable_type (TREE_TYPE (t))) |
15874 | { |
15875 | error_at (OMP_CLAUSE_LOCATION (c), |
15876 | "%qD does not have a mappable type in %qs clause" , t, |
15877 | cname); |
15878 | remove = true; |
15879 | } |
15880 | if (remove) |
15881 | break; |
15882 | if (bitmap_bit_p (&generic_head, DECL_UID (t))) |
15883 | { |
15884 | error_at (OMP_CLAUSE_LOCATION (c), |
15885 | "%qE appears more than once on the same " |
15886 | "%<declare target%> directive" , t); |
15887 | remove = true; |
15888 | } |
15889 | else |
15890 | bitmap_set_bit (&generic_head, DECL_UID (t)); |
15891 | break; |
15892 | |
15893 | case OMP_CLAUSE_UNIFORM: |
15894 | t = OMP_CLAUSE_DECL (c); |
15895 | if (TREE_CODE (t) != PARM_DECL) |
15896 | { |
15897 | if (DECL_P (t)) |
15898 | error_at (OMP_CLAUSE_LOCATION (c), |
15899 | "%qD is not an argument in %<uniform%> clause" , t); |
15900 | else |
15901 | error_at (OMP_CLAUSE_LOCATION (c), |
15902 | "%qE is not an argument in %<uniform%> clause" , t); |
15903 | remove = true; |
15904 | break; |
15905 | } |
15906 | /* map_head bitmap is used as uniform_head if declare_simd. */ |
15907 | bitmap_set_bit (&map_head, DECL_UID (t)); |
15908 | goto check_dup_generic; |
15909 | |
15910 | case OMP_CLAUSE_IS_DEVICE_PTR: |
15911 | case OMP_CLAUSE_USE_DEVICE_PTR: |
15912 | t = OMP_CLAUSE_DECL (c); |
15913 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR) |
15914 | bitmap_set_bit (&is_on_device_head, DECL_UID (t)); |
15915 | if (TREE_CODE (TREE_TYPE (t)) != POINTER_TYPE) |
15916 | { |
15917 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR |
15918 | && !openacc) |
15919 | { |
15920 | error_at (OMP_CLAUSE_LOCATION (c), |
15921 | "%qs variable is not a pointer" , |
15922 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
15923 | remove = true; |
15924 | } |
15925 | else if (TREE_CODE (TREE_TYPE (t)) != ARRAY_TYPE) |
15926 | { |
15927 | error_at (OMP_CLAUSE_LOCATION (c), |
15928 | "%qs variable is neither a pointer nor an array" , |
15929 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
15930 | remove = true; |
15931 | } |
15932 | } |
15933 | goto check_dup_generic; |
15934 | |
15935 | case OMP_CLAUSE_HAS_DEVICE_ADDR: |
15936 | t = OMP_CLAUSE_DECL (c); |
15937 | if (TREE_CODE (t) == OMP_ARRAY_SECTION) |
15938 | { |
15939 | if (handle_omp_array_sections (c, ort)) |
15940 | remove = true; |
15941 | else |
15942 | { |
15943 | t = OMP_CLAUSE_DECL (c); |
15944 | while (TREE_CODE (t) == ARRAY_REF) |
15945 | t = TREE_OPERAND (t, 0); |
15946 | } |
15947 | } |
15948 | bitmap_set_bit (&is_on_device_head, DECL_UID (t)); |
15949 | if (VAR_P (t) || TREE_CODE (t) == PARM_DECL) |
15950 | c_mark_addressable (exp: t); |
15951 | goto check_dup_generic_t; |
15952 | |
15953 | case OMP_CLAUSE_USE_DEVICE_ADDR: |
15954 | t = OMP_CLAUSE_DECL (c); |
15955 | if (VAR_P (t) || TREE_CODE (t) == PARM_DECL) |
15956 | c_mark_addressable (exp: t); |
15957 | goto check_dup_generic; |
15958 | |
15959 | case OMP_CLAUSE_NOWAIT: |
15960 | if (copyprivate_seen) |
15961 | { |
15962 | error_at (OMP_CLAUSE_LOCATION (c), |
15963 | "%<nowait%> clause must not be used together " |
15964 | "with %<copyprivate%>" ); |
15965 | remove = true; |
15966 | break; |
15967 | } |
15968 | nowait_clause = pc; |
15969 | pc = &OMP_CLAUSE_CHAIN (c); |
15970 | continue; |
15971 | |
15972 | case OMP_CLAUSE_ORDER: |
15973 | if (ordered_clause) |
15974 | { |
15975 | error_at (OMP_CLAUSE_LOCATION (c), |
15976 | "%<order%> clause must not be used together " |
15977 | "with %<ordered%>" ); |
15978 | remove = true; |
15979 | break; |
15980 | } |
15981 | else if (order_clause) |
15982 | { |
15983 | /* Silently remove duplicates. */ |
15984 | remove = true; |
15985 | break; |
15986 | } |
15987 | order_clause = pc; |
15988 | pc = &OMP_CLAUSE_CHAIN (c); |
15989 | continue; |
15990 | |
15991 | case OMP_CLAUSE_DETACH: |
15992 | t = OMP_CLAUSE_DECL (c); |
15993 | if (detach_seen) |
15994 | { |
15995 | error_at (OMP_CLAUSE_LOCATION (c), |
15996 | "too many %qs clauses on a task construct" , |
15997 | "detach" ); |
15998 | remove = true; |
15999 | break; |
16000 | } |
16001 | detach_seen = pc; |
16002 | pc = &OMP_CLAUSE_CHAIN (c); |
16003 | c_mark_addressable (exp: t); |
16004 | continue; |
16005 | |
16006 | case OMP_CLAUSE_IF: |
16007 | case OMP_CLAUSE_SELF: |
16008 | case OMP_CLAUSE_NUM_THREADS: |
16009 | case OMP_CLAUSE_NUM_TEAMS: |
16010 | case OMP_CLAUSE_THREAD_LIMIT: |
16011 | case OMP_CLAUSE_DEFAULT: |
16012 | case OMP_CLAUSE_UNTIED: |
16013 | case OMP_CLAUSE_COLLAPSE: |
16014 | case OMP_CLAUSE_FINAL: |
16015 | case OMP_CLAUSE_DEVICE: |
16016 | case OMP_CLAUSE_DIST_SCHEDULE: |
16017 | case OMP_CLAUSE_PARALLEL: |
16018 | case OMP_CLAUSE_FOR: |
16019 | case OMP_CLAUSE_SECTIONS: |
16020 | case OMP_CLAUSE_TASKGROUP: |
16021 | case OMP_CLAUSE_PROC_BIND: |
16022 | case OMP_CLAUSE_DEVICE_TYPE: |
16023 | case OMP_CLAUSE_PRIORITY: |
16024 | case OMP_CLAUSE_GRAINSIZE: |
16025 | case OMP_CLAUSE_NUM_TASKS: |
16026 | case OMP_CLAUSE_THREADS: |
16027 | case OMP_CLAUSE_SIMD: |
16028 | case OMP_CLAUSE_HINT: |
16029 | case OMP_CLAUSE_FILTER: |
16030 | case OMP_CLAUSE_DEFAULTMAP: |
16031 | case OMP_CLAUSE_BIND: |
16032 | case OMP_CLAUSE_NUM_GANGS: |
16033 | case OMP_CLAUSE_NUM_WORKERS: |
16034 | case OMP_CLAUSE_VECTOR_LENGTH: |
16035 | case OMP_CLAUSE_ASYNC: |
16036 | case OMP_CLAUSE_WAIT: |
16037 | case OMP_CLAUSE_AUTO: |
16038 | case OMP_CLAUSE_INDEPENDENT: |
16039 | case OMP_CLAUSE_SEQ: |
16040 | case OMP_CLAUSE_GANG: |
16041 | case OMP_CLAUSE_WORKER: |
16042 | case OMP_CLAUSE_VECTOR: |
16043 | case OMP_CLAUSE_TILE: |
16044 | case OMP_CLAUSE_IF_PRESENT: |
16045 | case OMP_CLAUSE_FINALIZE: |
16046 | case OMP_CLAUSE_NOHOST: |
16047 | case OMP_CLAUSE_INDIRECT: |
16048 | pc = &OMP_CLAUSE_CHAIN (c); |
16049 | continue; |
16050 | |
16051 | case OMP_CLAUSE_MERGEABLE: |
16052 | mergeable_seen = true; |
16053 | pc = &OMP_CLAUSE_CHAIN (c); |
16054 | continue; |
16055 | |
16056 | case OMP_CLAUSE_NOGROUP: |
16057 | nogroup_seen = pc; |
16058 | pc = &OMP_CLAUSE_CHAIN (c); |
16059 | continue; |
16060 | |
16061 | case OMP_CLAUSE_SCHEDULE: |
16062 | schedule_clause = c; |
16063 | pc = &OMP_CLAUSE_CHAIN (c); |
16064 | continue; |
16065 | |
16066 | case OMP_CLAUSE_ORDERED: |
16067 | ordered_clause = c; |
16068 | if (order_clause) |
16069 | { |
16070 | error_at (OMP_CLAUSE_LOCATION (*order_clause), |
16071 | "%<order%> clause must not be used together " |
16072 | "with %<ordered%>" ); |
16073 | *order_clause = OMP_CLAUSE_CHAIN (*order_clause); |
16074 | order_clause = NULL; |
16075 | } |
16076 | pc = &OMP_CLAUSE_CHAIN (c); |
16077 | continue; |
16078 | |
16079 | case OMP_CLAUSE_SAFELEN: |
16080 | safelen = c; |
16081 | pc = &OMP_CLAUSE_CHAIN (c); |
16082 | continue; |
16083 | case OMP_CLAUSE_SIMDLEN: |
16084 | simdlen = c; |
16085 | pc = &OMP_CLAUSE_CHAIN (c); |
16086 | continue; |
16087 | |
16088 | case OMP_CLAUSE_INBRANCH: |
16089 | case OMP_CLAUSE_NOTINBRANCH: |
16090 | if (branch_seen) |
16091 | { |
16092 | error_at (OMP_CLAUSE_LOCATION (c), |
16093 | "%<inbranch%> clause is incompatible with " |
16094 | "%<notinbranch%>" ); |
16095 | remove = true; |
16096 | break; |
16097 | } |
16098 | branch_seen = true; |
16099 | pc = &OMP_CLAUSE_CHAIN (c); |
16100 | continue; |
16101 | |
16102 | case OMP_CLAUSE_INCLUSIVE: |
16103 | case OMP_CLAUSE_EXCLUSIVE: |
16104 | need_complete = true; |
16105 | need_implicitly_determined = true; |
16106 | t = OMP_CLAUSE_DECL (c); |
16107 | if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL) |
16108 | { |
16109 | error_at (OMP_CLAUSE_LOCATION (c), |
16110 | "%qE is not a variable in clause %qs" , t, |
16111 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
16112 | remove = true; |
16113 | } |
16114 | break; |
16115 | |
16116 | default: |
16117 | gcc_unreachable (); |
16118 | } |
16119 | |
16120 | if (!remove) |
16121 | { |
16122 | t = OMP_CLAUSE_DECL (c); |
16123 | |
16124 | if (need_complete) |
16125 | { |
16126 | t = require_complete_type (OMP_CLAUSE_LOCATION (c), value: t); |
16127 | if (t == error_mark_node) |
16128 | remove = true; |
16129 | } |
16130 | |
16131 | if (need_implicitly_determined) |
16132 | { |
16133 | const char *share_name = NULL; |
16134 | |
16135 | if (VAR_P (t) && DECL_THREAD_LOCAL_P (t)) |
16136 | share_name = "threadprivate" ; |
16137 | else switch (c_omp_predetermined_sharing (t)) |
16138 | { |
16139 | case OMP_CLAUSE_DEFAULT_UNSPECIFIED: |
16140 | break; |
16141 | case OMP_CLAUSE_DEFAULT_SHARED: |
16142 | if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED |
16143 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE) |
16144 | && c_omp_predefined_variable (t)) |
16145 | /* The __func__ variable and similar function-local |
16146 | predefined variables may be listed in a shared or |
16147 | firstprivate clause. */ |
16148 | break; |
16149 | share_name = "shared" ; |
16150 | break; |
16151 | case OMP_CLAUSE_DEFAULT_PRIVATE: |
16152 | share_name = "private" ; |
16153 | break; |
16154 | default: |
16155 | gcc_unreachable (); |
16156 | } |
16157 | if (share_name) |
16158 | { |
16159 | error_at (OMP_CLAUSE_LOCATION (c), |
16160 | "%qE is predetermined %qs for %qs" , |
16161 | t, share_name, |
16162 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
16163 | remove = true; |
16164 | } |
16165 | else if (TREE_READONLY (t) |
16166 | && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_SHARED |
16167 | && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE) |
16168 | { |
16169 | error_at (OMP_CLAUSE_LOCATION (c), |
16170 | "%<const%> qualified %qE may appear only in " |
16171 | "%<shared%> or %<firstprivate%> clauses" , t); |
16172 | remove = true; |
16173 | } |
16174 | } |
16175 | } |
16176 | |
16177 | if (remove) |
16178 | { |
16179 | if (grp_start_p) |
16180 | { |
16181 | /* If we found a clause to remove, we want to remove the whole |
16182 | expanded group, otherwise gimplify |
16183 | (omp_resolve_clause_dependencies) can get confused. */ |
16184 | *grp_start_p = grp_sentinel; |
16185 | pc = grp_start_p; |
16186 | grp_start_p = NULL; |
16187 | } |
16188 | else |
16189 | *pc = OMP_CLAUSE_CHAIN (c); |
16190 | } |
16191 | else |
16192 | pc = &OMP_CLAUSE_CHAIN (c); |
16193 | } |
16194 | |
16195 | if (simdlen |
16196 | && safelen |
16197 | && tree_int_cst_lt (OMP_CLAUSE_SAFELEN_EXPR (safelen), |
16198 | OMP_CLAUSE_SIMDLEN_EXPR (simdlen))) |
16199 | { |
16200 | error_at (OMP_CLAUSE_LOCATION (simdlen), |
16201 | "%<simdlen%> clause value is bigger than " |
16202 | "%<safelen%> clause value" ); |
16203 | OMP_CLAUSE_SIMDLEN_EXPR (simdlen) |
16204 | = OMP_CLAUSE_SAFELEN_EXPR (safelen); |
16205 | } |
16206 | |
16207 | if (ordered_clause |
16208 | && schedule_clause |
16209 | && (OMP_CLAUSE_SCHEDULE_KIND (schedule_clause) |
16210 | & OMP_CLAUSE_SCHEDULE_NONMONOTONIC)) |
16211 | { |
16212 | error_at (OMP_CLAUSE_LOCATION (schedule_clause), |
16213 | "%<nonmonotonic%> schedule modifier specified together " |
16214 | "with %<ordered%> clause" ); |
16215 | OMP_CLAUSE_SCHEDULE_KIND (schedule_clause) |
16216 | = (enum omp_clause_schedule_kind) |
16217 | (OMP_CLAUSE_SCHEDULE_KIND (schedule_clause) |
16218 | & ~OMP_CLAUSE_SCHEDULE_NONMONOTONIC); |
16219 | } |
16220 | |
16221 | if (reduction_seen < 0 && ordered_clause) |
16222 | { |
16223 | error_at (OMP_CLAUSE_LOCATION (ordered_clause), |
16224 | "%qs clause specified together with %<inscan%> " |
16225 | "%<reduction%> clause" , "ordered" ); |
16226 | reduction_seen = -2; |
16227 | } |
16228 | |
16229 | if (reduction_seen < 0 && schedule_clause) |
16230 | { |
16231 | error_at (OMP_CLAUSE_LOCATION (schedule_clause), |
16232 | "%qs clause specified together with %<inscan%> " |
16233 | "%<reduction%> clause" , "schedule" ); |
16234 | reduction_seen = -2; |
16235 | } |
16236 | |
16237 | if (linear_variable_step_check |
16238 | || reduction_seen == -2 |
16239 | || allocate_seen |
16240 | || target_in_reduction_seen) |
16241 | for (pc = &clauses, c = clauses; c ; c = *pc) |
16242 | { |
16243 | bool remove = false; |
16244 | if (allocate_seen) |
16245 | switch (OMP_CLAUSE_CODE (c)) |
16246 | { |
16247 | case OMP_CLAUSE_REDUCTION: |
16248 | case OMP_CLAUSE_IN_REDUCTION: |
16249 | case OMP_CLAUSE_TASK_REDUCTION: |
16250 | if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF) |
16251 | { |
16252 | t = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0); |
16253 | if (TREE_CODE (t) == POINTER_PLUS_EXPR) |
16254 | t = TREE_OPERAND (t, 0); |
16255 | if (TREE_CODE (t) == ADDR_EXPR |
16256 | || INDIRECT_REF_P (t)) |
16257 | t = TREE_OPERAND (t, 0); |
16258 | if (DECL_P (t)) |
16259 | bitmap_clear_bit (&aligned_head, DECL_UID (t)); |
16260 | break; |
16261 | } |
16262 | /* FALLTHRU */ |
16263 | case OMP_CLAUSE_PRIVATE: |
16264 | case OMP_CLAUSE_FIRSTPRIVATE: |
16265 | case OMP_CLAUSE_LASTPRIVATE: |
16266 | case OMP_CLAUSE_LINEAR: |
16267 | if (DECL_P (OMP_CLAUSE_DECL (c))) |
16268 | bitmap_clear_bit (&aligned_head, |
16269 | DECL_UID (OMP_CLAUSE_DECL (c))); |
16270 | break; |
16271 | default: |
16272 | break; |
16273 | } |
16274 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR |
16275 | && OMP_CLAUSE_LINEAR_VARIABLE_STRIDE (c) |
16276 | && !bitmap_bit_p (&map_head, |
16277 | DECL_UID (OMP_CLAUSE_LINEAR_STEP (c)))) |
16278 | { |
16279 | error_at (OMP_CLAUSE_LOCATION (c), |
16280 | "%<linear%> clause step is a parameter %qD not " |
16281 | "specified in %<uniform%> clause" , |
16282 | OMP_CLAUSE_LINEAR_STEP (c)); |
16283 | remove = true; |
16284 | } |
16285 | else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION |
16286 | && reduction_seen == -2) |
16287 | OMP_CLAUSE_REDUCTION_INSCAN (c) = 0; |
16288 | if (target_in_reduction_seen |
16289 | && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP) |
16290 | { |
16291 | tree t = OMP_CLAUSE_DECL (c); |
16292 | while (handled_component_p (t) |
16293 | || INDIRECT_REF_P (t) |
16294 | || TREE_CODE (t) == ADDR_EXPR |
16295 | || TREE_CODE (t) == MEM_REF |
16296 | || TREE_CODE (t) == NON_LVALUE_EXPR) |
16297 | t = TREE_OPERAND (t, 0); |
16298 | if (DECL_P (t) |
16299 | && bitmap_bit_p (&oacc_reduction_head, DECL_UID (t))) |
16300 | OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1; |
16301 | } |
16302 | |
16303 | if (remove) |
16304 | *pc = OMP_CLAUSE_CHAIN (c); |
16305 | else |
16306 | pc = &OMP_CLAUSE_CHAIN (c); |
16307 | } |
16308 | |
16309 | if (allocate_seen) |
16310 | for (pc = &clauses, c = clauses; c ; c = *pc) |
16311 | { |
16312 | bool remove = false; |
16313 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE |
16314 | && !OMP_CLAUSE_ALLOCATE_COMBINED (c) |
16315 | && bitmap_bit_p (&aligned_head, DECL_UID (OMP_CLAUSE_DECL (c)))) |
16316 | { |
16317 | error_at (OMP_CLAUSE_LOCATION (c), |
16318 | "%qD specified in %<allocate%> clause but not in " |
16319 | "an explicit privatization clause" , OMP_CLAUSE_DECL (c)); |
16320 | remove = true; |
16321 | } |
16322 | if (remove) |
16323 | *pc = OMP_CLAUSE_CHAIN (c); |
16324 | else |
16325 | pc = &OMP_CLAUSE_CHAIN (c); |
16326 | } |
16327 | |
16328 | if (nogroup_seen && reduction_seen) |
16329 | { |
16330 | error_at (OMP_CLAUSE_LOCATION (*nogroup_seen), |
16331 | "%<nogroup%> clause must not be used together with " |
16332 | "%<reduction%> clause" ); |
16333 | *nogroup_seen = OMP_CLAUSE_CHAIN (*nogroup_seen); |
16334 | } |
16335 | |
16336 | if (detach_seen) |
16337 | { |
16338 | if (mergeable_seen) |
16339 | { |
16340 | error_at (OMP_CLAUSE_LOCATION (*detach_seen), |
16341 | "%<detach%> clause must not be used together with " |
16342 | "%<mergeable%> clause" ); |
16343 | *detach_seen = OMP_CLAUSE_CHAIN (*detach_seen); |
16344 | } |
16345 | else |
16346 | { |
16347 | tree detach_decl = OMP_CLAUSE_DECL (*detach_seen); |
16348 | |
16349 | for (pc = &clauses, c = clauses; c ; c = *pc) |
16350 | { |
16351 | bool remove = false; |
16352 | if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED |
16353 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE |
16354 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE |
16355 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE) |
16356 | && OMP_CLAUSE_DECL (c) == detach_decl) |
16357 | { |
16358 | error_at (OMP_CLAUSE_LOCATION (c), |
16359 | "the event handle of a %<detach%> clause " |
16360 | "should not be in a data-sharing clause" ); |
16361 | remove = true; |
16362 | } |
16363 | if (remove) |
16364 | *pc = OMP_CLAUSE_CHAIN (c); |
16365 | else |
16366 | pc = &OMP_CLAUSE_CHAIN (c); |
16367 | } |
16368 | } |
16369 | } |
16370 | |
16371 | bitmap_obstack_release (NULL); |
16372 | return clauses; |
16373 | } |
16374 | |
16375 | /* Return code to initialize DST with a copy constructor from SRC. |
16376 | C doesn't have copy constructors nor assignment operators, only for |
16377 | _Atomic vars we need to perform __atomic_load from src into a temporary |
16378 | followed by __atomic_store of the temporary to dst. */ |
16379 | |
16380 | tree |
16381 | c_omp_clause_copy_ctor (tree clause, tree dst, tree src) |
16382 | { |
16383 | if (!really_atomic_lvalue (expr: dst) && !really_atomic_lvalue (expr: src)) |
16384 | return build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src); |
16385 | |
16386 | location_t loc = OMP_CLAUSE_LOCATION (clause); |
16387 | tree type = TREE_TYPE (dst); |
16388 | tree nonatomic_type = build_qualified_type (type, TYPE_UNQUALIFIED); |
16389 | tree tmp = create_tmp_var (nonatomic_type); |
16390 | tree tmp_addr = build_fold_addr_expr (tmp); |
16391 | TREE_ADDRESSABLE (tmp) = 1; |
16392 | suppress_warning (tmp); |
16393 | tree src_addr = build_fold_addr_expr (src); |
16394 | tree dst_addr = build_fold_addr_expr (dst); |
16395 | tree seq_cst = build_int_cst (integer_type_node, MEMMODEL_SEQ_CST); |
16396 | vec<tree, va_gc> *params; |
16397 | /* Expansion of a generic atomic load may require an addition |
16398 | element, so allocate enough to prevent a resize. */ |
16399 | vec_alloc (v&: params, nelems: 4); |
16400 | |
16401 | /* Build __atomic_load (&src, &tmp, SEQ_CST); */ |
16402 | tree fndecl = builtin_decl_explicit (fncode: BUILT_IN_ATOMIC_LOAD); |
16403 | params->quick_push (obj: src_addr); |
16404 | params->quick_push (obj: tmp_addr); |
16405 | params->quick_push (obj: seq_cst); |
16406 | tree load = c_build_function_call_vec (loc, arg_loc: vNULL, function: fndecl, params, NULL); |
16407 | |
16408 | vec_alloc (v&: params, nelems: 4); |
16409 | |
16410 | /* Build __atomic_store (&dst, &tmp, SEQ_CST); */ |
16411 | fndecl = builtin_decl_explicit (fncode: BUILT_IN_ATOMIC_STORE); |
16412 | params->quick_push (obj: dst_addr); |
16413 | params->quick_push (obj: tmp_addr); |
16414 | params->quick_push (obj: seq_cst); |
16415 | tree store = c_build_function_call_vec (loc, arg_loc: vNULL, function: fndecl, params, NULL); |
16416 | return build2 (COMPOUND_EXPR, void_type_node, load, store); |
16417 | } |
16418 | |
16419 | /* Create a transaction node. */ |
16420 | |
16421 | tree |
16422 | c_finish_transaction (location_t loc, tree block, int flags) |
16423 | { |
16424 | tree stmt = build_stmt (loc, TRANSACTION_EXPR, block); |
16425 | if (flags & TM_STMT_ATTR_OUTER) |
16426 | TRANSACTION_EXPR_OUTER (stmt) = 1; |
16427 | if (flags & TM_STMT_ATTR_RELAXED) |
16428 | TRANSACTION_EXPR_RELAXED (stmt) = 1; |
16429 | return add_stmt (stmt); |
16430 | } |
16431 | |
16432 | /* Make a variant type in the proper way for C/C++, propagating qualifiers |
16433 | down to the element type of an array. If ORIG_QUAL_TYPE is not |
16434 | NULL, then it should be used as the qualified type |
16435 | ORIG_QUAL_INDIRECT levels down in array type derivation (to |
16436 | preserve information about the typedef name from which an array |
16437 | type was derived). */ |
16438 | |
16439 | tree |
16440 | c_build_qualified_type (tree type, int type_quals, tree orig_qual_type, |
16441 | size_t orig_qual_indirect) |
16442 | { |
16443 | if (type == error_mark_node) |
16444 | return type; |
16445 | |
16446 | if (TREE_CODE (type) == ARRAY_TYPE) |
16447 | { |
16448 | tree t; |
16449 | tree element_type = c_build_qualified_type (TREE_TYPE (type), |
16450 | type_quals, orig_qual_type, |
16451 | orig_qual_indirect: orig_qual_indirect - 1); |
16452 | |
16453 | /* See if we already have an identically qualified type. */ |
16454 | if (orig_qual_type && orig_qual_indirect == 0) |
16455 | t = orig_qual_type; |
16456 | else |
16457 | for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t)) |
16458 | { |
16459 | if (TYPE_QUALS (strip_array_types (t)) == type_quals |
16460 | && TYPE_NAME (t) == TYPE_NAME (type) |
16461 | && TYPE_CONTEXT (t) == TYPE_CONTEXT (type) |
16462 | && attribute_list_equal (TYPE_ATTRIBUTES (t), |
16463 | TYPE_ATTRIBUTES (type))) |
16464 | break; |
16465 | } |
16466 | if (!t) |
16467 | { |
16468 | tree domain = TYPE_DOMAIN (type); |
16469 | |
16470 | t = build_variant_type_copy (type); |
16471 | TREE_TYPE (t) = element_type; |
16472 | TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (element_type); |
16473 | |
16474 | if (TYPE_STRUCTURAL_EQUALITY_P (element_type) |
16475 | || (domain && TYPE_STRUCTURAL_EQUALITY_P (domain))) |
16476 | SET_TYPE_STRUCTURAL_EQUALITY (t); |
16477 | else if (TYPE_CANONICAL (element_type) != element_type |
16478 | || (domain && TYPE_CANONICAL (domain) != domain)) |
16479 | { |
16480 | tree unqualified_canon |
16481 | = build_array_type (TYPE_CANONICAL (element_type), |
16482 | domain? TYPE_CANONICAL (domain) |
16483 | : NULL_TREE); |
16484 | if (TYPE_REVERSE_STORAGE_ORDER (type)) |
16485 | { |
16486 | unqualified_canon |
16487 | = build_distinct_type_copy (unqualified_canon); |
16488 | TYPE_REVERSE_STORAGE_ORDER (unqualified_canon) = 1; |
16489 | } |
16490 | TYPE_CANONICAL (t) |
16491 | = c_build_qualified_type (type: unqualified_canon, type_quals); |
16492 | } |
16493 | else |
16494 | TYPE_CANONICAL (t) = t; |
16495 | } |
16496 | return t; |
16497 | } |
16498 | |
16499 | /* A restrict-qualified pointer type must be a pointer to object or |
16500 | incomplete type. Note that the use of POINTER_TYPE_P also allows |
16501 | REFERENCE_TYPEs, which is appropriate for C++. */ |
16502 | if ((type_quals & TYPE_QUAL_RESTRICT) |
16503 | && (!POINTER_TYPE_P (type) |
16504 | || !C_TYPE_OBJECT_OR_INCOMPLETE_P (TREE_TYPE (type)))) |
16505 | { |
16506 | error ("invalid use of %<restrict%>" ); |
16507 | type_quals &= ~TYPE_QUAL_RESTRICT; |
16508 | } |
16509 | |
16510 | tree var_type = (orig_qual_type && orig_qual_indirect == 0 |
16511 | ? orig_qual_type |
16512 | : build_qualified_type (type, type_quals)); |
16513 | /* A variant type does not inherit the list of incomplete vars from the |
16514 | type main variant. */ |
16515 | if ((RECORD_OR_UNION_TYPE_P (var_type) |
16516 | || TREE_CODE (var_type) == ENUMERAL_TYPE) |
16517 | && TYPE_MAIN_VARIANT (var_type) != var_type) |
16518 | C_TYPE_INCOMPLETE_VARS (var_type) = 0; |
16519 | return var_type; |
16520 | } |
16521 | |
16522 | /* Build a VA_ARG_EXPR for the C parser. */ |
16523 | |
16524 | tree |
16525 | c_build_va_arg (location_t loc1, tree expr, location_t loc2, tree type) |
16526 | { |
16527 | if (error_operand_p (t: type)) |
16528 | return error_mark_node; |
16529 | /* VA_ARG_EXPR cannot be used for a scalar va_list with reverse storage |
16530 | order because it takes the address of the expression. */ |
16531 | else if (handled_component_p (t: expr) |
16532 | && reverse_storage_order_for_component_p (t: expr)) |
16533 | { |
16534 | error_at (loc1, "cannot use %<va_arg%> with reverse storage order" ); |
16535 | return error_mark_node; |
16536 | } |
16537 | else if (!COMPLETE_TYPE_P (type)) |
16538 | { |
16539 | error_at (loc2, "second argument to %<va_arg%> is of incomplete " |
16540 | "type %qT" , type); |
16541 | return error_mark_node; |
16542 | } |
16543 | else if (TREE_CODE (type) == FUNCTION_TYPE) |
16544 | { |
16545 | error_at (loc2, "second argument to %<va_arg%> is a function type %qT" , |
16546 | type); |
16547 | return error_mark_node; |
16548 | } |
16549 | else if (warn_cxx_compat && TREE_CODE (type) == ENUMERAL_TYPE) |
16550 | warning_at (loc2, OPT_Wc___compat, |
16551 | "C++ requires promoted type, not enum type, in %<va_arg%>" ); |
16552 | return build_va_arg (loc2, expr, type); |
16553 | } |
16554 | |
16555 | /* Return truthvalue of whether T1 is the same tree structure as T2. |
16556 | Return 1 if they are the same. Return false if they are different. */ |
16557 | |
16558 | bool |
16559 | c_tree_equal (tree t1, tree t2) |
16560 | { |
16561 | enum tree_code code1, code2; |
16562 | |
16563 | if (t1 == t2) |
16564 | return true; |
16565 | if (!t1 || !t2) |
16566 | return false; |
16567 | |
16568 | for (code1 = TREE_CODE (t1); code1 == NON_LVALUE_EXPR; |
16569 | code1 = TREE_CODE (t1)) |
16570 | t1 = TREE_OPERAND (t1, 0); |
16571 | for (code2 = TREE_CODE (t2); code2 == NON_LVALUE_EXPR; |
16572 | code2 = TREE_CODE (t2)) |
16573 | t2 = TREE_OPERAND (t2, 0); |
16574 | |
16575 | /* They might have become equal now. */ |
16576 | if (t1 == t2) |
16577 | return true; |
16578 | |
16579 | if (code1 != code2) |
16580 | return false; |
16581 | |
16582 | if (CONSTANT_CLASS_P (t1) && !comptypes (TREE_TYPE (t1), TREE_TYPE (t2))) |
16583 | return false; |
16584 | |
16585 | switch (code1) |
16586 | { |
16587 | case INTEGER_CST: |
16588 | return wi::to_wide (t: t1) == wi::to_wide (t: t2); |
16589 | |
16590 | case REAL_CST: |
16591 | return real_equal (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2)); |
16592 | |
16593 | case STRING_CST: |
16594 | return TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2) |
16595 | && !memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2), |
16596 | TREE_STRING_LENGTH (t1)); |
16597 | |
16598 | case FIXED_CST: |
16599 | return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), |
16600 | TREE_FIXED_CST (t2)); |
16601 | |
16602 | case COMPLEX_CST: |
16603 | return c_tree_equal (TREE_REALPART (t1), TREE_REALPART (t2)) |
16604 | && c_tree_equal (TREE_IMAGPART (t1), TREE_IMAGPART (t2)); |
16605 | |
16606 | case VECTOR_CST: |
16607 | return operand_equal_p (t1, t2, flags: OEP_ONLY_CONST); |
16608 | |
16609 | case CONSTRUCTOR: |
16610 | /* We need to do this when determining whether or not two |
16611 | non-type pointer to member function template arguments |
16612 | are the same. */ |
16613 | if (!comptypes (TREE_TYPE (t1), TREE_TYPE (t2)) |
16614 | || CONSTRUCTOR_NELTS (t1) != CONSTRUCTOR_NELTS (t2)) |
16615 | return false; |
16616 | { |
16617 | tree field, value; |
16618 | unsigned int i; |
16619 | FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t1), i, field, value) |
16620 | { |
16621 | constructor_elt *elt2 = CONSTRUCTOR_ELT (t2, i); |
16622 | if (!c_tree_equal (t1: field, t2: elt2->index) |
16623 | || !c_tree_equal (t1: value, t2: elt2->value)) |
16624 | return false; |
16625 | } |
16626 | } |
16627 | return true; |
16628 | |
16629 | case TREE_LIST: |
16630 | if (!c_tree_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))) |
16631 | return false; |
16632 | if (!c_tree_equal (TREE_VALUE (t1), TREE_VALUE (t2))) |
16633 | return false; |
16634 | return c_tree_equal (TREE_CHAIN (t1), TREE_CHAIN (t2)); |
16635 | |
16636 | case SAVE_EXPR: |
16637 | return c_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0)); |
16638 | |
16639 | case CALL_EXPR: |
16640 | { |
16641 | tree arg1, arg2; |
16642 | call_expr_arg_iterator iter1, iter2; |
16643 | if (!c_tree_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2))) |
16644 | return false; |
16645 | for (arg1 = first_call_expr_arg (exp: t1, iter: &iter1), |
16646 | arg2 = first_call_expr_arg (exp: t2, iter: &iter2); |
16647 | arg1 && arg2; |
16648 | arg1 = next_call_expr_arg (iter: &iter1), |
16649 | arg2 = next_call_expr_arg (iter: &iter2)) |
16650 | if (!c_tree_equal (t1: arg1, t2: arg2)) |
16651 | return false; |
16652 | if (arg1 || arg2) |
16653 | return false; |
16654 | return true; |
16655 | } |
16656 | |
16657 | case TARGET_EXPR: |
16658 | { |
16659 | tree o1 = TREE_OPERAND (t1, 0); |
16660 | tree o2 = TREE_OPERAND (t2, 0); |
16661 | |
16662 | /* Special case: if either target is an unallocated VAR_DECL, |
16663 | it means that it's going to be unified with whatever the |
16664 | TARGET_EXPR is really supposed to initialize, so treat it |
16665 | as being equivalent to anything. */ |
16666 | if (VAR_P (o1) && DECL_NAME (o1) == NULL_TREE |
16667 | && !DECL_RTL_SET_P (o1)) |
16668 | /*Nop*/; |
16669 | else if (VAR_P (o2) && DECL_NAME (o2) == NULL_TREE |
16670 | && !DECL_RTL_SET_P (o2)) |
16671 | /*Nop*/; |
16672 | else if (!c_tree_equal (t1: o1, t2: o2)) |
16673 | return false; |
16674 | |
16675 | return c_tree_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1)); |
16676 | } |
16677 | |
16678 | case COMPONENT_REF: |
16679 | if (TREE_OPERAND (t1, 1) != TREE_OPERAND (t2, 1)) |
16680 | return false; |
16681 | return c_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0)); |
16682 | |
16683 | case PARM_DECL: |
16684 | case VAR_DECL: |
16685 | case CONST_DECL: |
16686 | case FIELD_DECL: |
16687 | case FUNCTION_DECL: |
16688 | case IDENTIFIER_NODE: |
16689 | case SSA_NAME: |
16690 | return false; |
16691 | |
16692 | case TREE_VEC: |
16693 | { |
16694 | unsigned ix; |
16695 | if (TREE_VEC_LENGTH (t1) != TREE_VEC_LENGTH (t2)) |
16696 | return false; |
16697 | for (ix = TREE_VEC_LENGTH (t1); ix--;) |
16698 | if (!c_tree_equal (TREE_VEC_ELT (t1, ix), |
16699 | TREE_VEC_ELT (t2, ix))) |
16700 | return false; |
16701 | return true; |
16702 | } |
16703 | |
16704 | CASE_CONVERT: |
16705 | if (!comptypes (TREE_TYPE (t1), TREE_TYPE (t2))) |
16706 | return false; |
16707 | break; |
16708 | |
16709 | default: |
16710 | break; |
16711 | } |
16712 | |
16713 | switch (TREE_CODE_CLASS (code1)) |
16714 | { |
16715 | case tcc_unary: |
16716 | case tcc_binary: |
16717 | case tcc_comparison: |
16718 | case tcc_expression: |
16719 | case tcc_vl_exp: |
16720 | case tcc_reference: |
16721 | case tcc_statement: |
16722 | { |
16723 | int i, n = TREE_OPERAND_LENGTH (t1); |
16724 | |
16725 | switch (code1) |
16726 | { |
16727 | case PREINCREMENT_EXPR: |
16728 | case PREDECREMENT_EXPR: |
16729 | case POSTINCREMENT_EXPR: |
16730 | case POSTDECREMENT_EXPR: |
16731 | n = 1; |
16732 | break; |
16733 | case ARRAY_REF: |
16734 | n = 2; |
16735 | break; |
16736 | default: |
16737 | break; |
16738 | } |
16739 | |
16740 | if (TREE_CODE_CLASS (code1) == tcc_vl_exp |
16741 | && n != TREE_OPERAND_LENGTH (t2)) |
16742 | return false; |
16743 | |
16744 | for (i = 0; i < n; ++i) |
16745 | if (!c_tree_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i))) |
16746 | return false; |
16747 | |
16748 | return true; |
16749 | } |
16750 | |
16751 | case tcc_type: |
16752 | return comptypes (type1: t1, type2: t2); |
16753 | default: |
16754 | gcc_unreachable (); |
16755 | } |
16756 | } |
16757 | |
16758 | /* Returns true when the function declaration FNDECL is implicit, |
16759 | introduced as a result of a call to an otherwise undeclared |
16760 | function, and false otherwise. */ |
16761 | |
16762 | bool |
16763 | c_decl_implicit (const_tree fndecl) |
16764 | { |
16765 | return C_DECL_IMPLICIT (fndecl); |
16766 | } |
16767 | |