1 | /* Language-dependent node constructors for parse phase of GNU compiler. |
2 | Copyright (C) 1987-2017 Free Software Foundation, Inc. |
3 | Hacked by Michael Tiemann (tiemann@cygnus.com) |
4 | |
5 | This file is part of GCC. |
6 | |
7 | GCC is free software; you can redistribute it and/or modify |
8 | it under the terms of the GNU General Public License as published by |
9 | the Free Software Foundation; either version 3, or (at your option) |
10 | any later version. |
11 | |
12 | GCC is distributed in the hope that it will be useful, |
13 | but WITHOUT ANY WARRANTY; without even the implied warranty of |
14 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
15 | GNU General Public License for more details. |
16 | |
17 | You should have received a copy of the GNU General Public License |
18 | along with GCC; see the file COPYING3. If not see |
19 | <http://www.gnu.org/licenses/>. */ |
20 | |
21 | #include "config.h" |
22 | #include "system.h" |
23 | #include "coretypes.h" |
24 | #include "tree.h" |
25 | #include "cp-tree.h" |
26 | #include "gimple-expr.h" |
27 | #include "cgraph.h" |
28 | #include "stor-layout.h" |
29 | #include "print-tree.h" |
30 | #include "tree-iterator.h" |
31 | #include "tree-inline.h" |
32 | #include "debug.h" |
33 | #include "convert.h" |
34 | #include "gimplify.h" |
35 | #include "stringpool.h" |
36 | #include "attribs.h" |
37 | #include "flags.h" |
38 | |
39 | static tree bot_manip (tree *, int *, void *); |
40 | static tree bot_replace (tree *, int *, void *); |
41 | static hashval_t list_hash_pieces (tree, tree, tree); |
42 | static tree build_target_expr (tree, tree, tsubst_flags_t); |
43 | static tree count_trees_r (tree *, int *, void *); |
44 | static tree verify_stmt_tree_r (tree *, int *, void *); |
45 | static tree build_local_temp (tree); |
46 | |
47 | static tree handle_init_priority_attribute (tree *, tree, tree, int, bool *); |
48 | static tree handle_abi_tag_attribute (tree *, tree, tree, int, bool *); |
49 | |
50 | /* If REF is an lvalue, returns the kind of lvalue that REF is. |
51 | Otherwise, returns clk_none. */ |
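
/* An illustrative sketch of the classification (the clk_* values are
   defined in cp-tree.h); assuming "int i;" and a class S with a
   bit-field member b:

     i                      -> clk_ordinary   (named lvalue)
     static_cast<int&&>(i)  -> clk_rvalueref  (xvalue)
     S ()                   -> clk_class      (class prvalue)
     s.b                    -> clk_bitfield
     i + 1                  -> clk_none       (non-class prvalue)  */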
52 | |
53 | cp_lvalue_kind |
54 | lvalue_kind (const_tree ref) |
55 | { |
56 | cp_lvalue_kind op1_lvalue_kind = clk_none; |
57 | cp_lvalue_kind op2_lvalue_kind = clk_none; |
58 | |
59 | /* Expressions of reference type are sometimes wrapped in |
     INDIRECT_REFs.  INDIRECT_REFs are just an internal compiler
     representation, not part of the language, so we have to look
     through them.  */
63 | if (REFERENCE_REF_P (ref)) |
64 | return lvalue_kind (TREE_OPERAND (ref, 0)); |
65 | |
66 | if (TREE_TYPE (ref) |
67 | && TREE_CODE (TREE_TYPE (ref)) == REFERENCE_TYPE) |
68 | { |
      /* Unnamed rvalue references are rvalues.  */
70 | if (TYPE_REF_IS_RVALUE (TREE_TYPE (ref)) |
71 | && TREE_CODE (ref) != PARM_DECL |
72 | && !VAR_P (ref) |
73 | && TREE_CODE (ref) != COMPONENT_REF |
74 | /* Functions are always lvalues. */ |
75 | && TREE_CODE (TREE_TYPE (TREE_TYPE (ref))) != FUNCTION_TYPE) |
76 | return clk_rvalueref; |
77 | |
78 | /* lvalue references and named rvalue references are lvalues. */ |
79 | return clk_ordinary; |
80 | } |
81 | |
82 | if (ref == current_class_ptr) |
83 | return clk_none; |
84 | |
85 | switch (TREE_CODE (ref)) |
86 | { |
87 | case SAVE_EXPR: |
88 | return clk_none; |
      /* Preincrements and predecrements are valid lvalues, provided
	 what they refer to are valid lvalues.  */
91 | case PREINCREMENT_EXPR: |
92 | case PREDECREMENT_EXPR: |
93 | case TRY_CATCH_EXPR: |
94 | case REALPART_EXPR: |
95 | case IMAGPART_EXPR: |
96 | return lvalue_kind (TREE_OPERAND (ref, 0)); |
97 | |
98 | case MEMBER_REF: |
99 | case DOTSTAR_EXPR: |
100 | if (TREE_CODE (ref) == MEMBER_REF) |
101 | op1_lvalue_kind = clk_ordinary; |
102 | else |
103 | op1_lvalue_kind = lvalue_kind (TREE_OPERAND (ref, 0)); |
104 | if (TYPE_PTRMEMFUNC_P (TREE_TYPE (TREE_OPERAND (ref, 1)))) |
105 | op1_lvalue_kind = clk_none; |
106 | return op1_lvalue_kind; |
107 | |
108 | case COMPONENT_REF: |
109 | if (BASELINK_P (TREE_OPERAND (ref, 1))) |
110 | { |
111 | tree fn = BASELINK_FUNCTIONS (TREE_OPERAND (ref, 1)); |
112 | |
	  /* For a static member function, recurse on the BASELINK; we can
	     get here e.g. from reference_binding.  If BASELINK_FUNCTIONS
	     is an OVERLOAD, the overload is resolved first if possible
	     through resolve_address_of_overloaded_function.  */
117 | if (TREE_CODE (fn) == FUNCTION_DECL && DECL_STATIC_FUNCTION_P (fn)) |
118 | return lvalue_kind (TREE_OPERAND (ref, 1)); |
119 | } |
120 | op1_lvalue_kind = lvalue_kind (TREE_OPERAND (ref, 0)); |
121 | /* Look at the member designator. */ |
122 | if (!op1_lvalue_kind) |
123 | ; |
124 | else if (is_overloaded_fn (TREE_OPERAND (ref, 1))) |
125 | /* The "field" can be a FUNCTION_DECL or an OVERLOAD in some |
126 | situations. If we're seeing a COMPONENT_REF, it's a non-static |
127 | member, so it isn't an lvalue. */ |
128 | op1_lvalue_kind = clk_none; |
129 | else if (TREE_CODE (TREE_OPERAND (ref, 1)) != FIELD_DECL) |
130 | /* This can be IDENTIFIER_NODE in a template. */; |
131 | else if (DECL_C_BIT_FIELD (TREE_OPERAND (ref, 1))) |
132 | { |
133 | /* Clear the ordinary bit. If this object was a class |
134 | rvalue we want to preserve that information. */ |
135 | op1_lvalue_kind &= ~clk_ordinary; |
136 | /* The lvalue is for a bitfield. */ |
137 | op1_lvalue_kind |= clk_bitfield; |
138 | } |
139 | else if (DECL_PACKED (TREE_OPERAND (ref, 1))) |
140 | op1_lvalue_kind |= clk_packed; |
141 | |
142 | return op1_lvalue_kind; |
143 | |
144 | case STRING_CST: |
145 | case COMPOUND_LITERAL_EXPR: |
146 | return clk_ordinary; |
147 | |
148 | case CONST_DECL: |
      /* CONST_DECLs without TREE_STATIC are enumeration values and
150 | thus not lvalues. With TREE_STATIC they are used by ObjC++ |
151 | in objc_build_string_object and need to be considered as |
152 | lvalues. */ |
153 | if (! TREE_STATIC (ref)) |
154 | return clk_none; |
155 | /* FALLTHRU */ |
156 | case VAR_DECL: |
157 | if (VAR_P (ref) && DECL_HAS_VALUE_EXPR_P (ref)) |
158 | return lvalue_kind (DECL_VALUE_EXPR (CONST_CAST_TREE (ref))); |
159 | |
160 | if (TREE_READONLY (ref) && ! TREE_STATIC (ref) |
161 | && DECL_LANG_SPECIFIC (ref) |
162 | && DECL_IN_AGGR_P (ref)) |
163 | return clk_none; |
164 | /* FALLTHRU */ |
165 | case INDIRECT_REF: |
166 | case ARROW_EXPR: |
167 | case ARRAY_REF: |
168 | case PARM_DECL: |
169 | case RESULT_DECL: |
170 | case PLACEHOLDER_EXPR: |
171 | return clk_ordinary; |
172 | |
173 | /* A scope ref in a template, left as SCOPE_REF to support later |
174 | access checking. */ |
175 | case SCOPE_REF: |
176 | gcc_assert (!type_dependent_expression_p (CONST_CAST_TREE (ref))); |
177 | { |
178 | tree op = TREE_OPERAND (ref, 1); |
179 | if (TREE_CODE (op) == FIELD_DECL) |
180 | return (DECL_C_BIT_FIELD (op) ? clk_bitfield : clk_ordinary); |
181 | else |
182 | return lvalue_kind (op); |
183 | } |
184 | |
185 | case MAX_EXPR: |
186 | case MIN_EXPR: |
      /* Disallow <? and >? as lvalues if either argument has side
	 effects.  */
188 | if (TREE_SIDE_EFFECTS (TREE_OPERAND (ref, 0)) |
189 | || TREE_SIDE_EFFECTS (TREE_OPERAND (ref, 1))) |
190 | return clk_none; |
191 | op1_lvalue_kind = lvalue_kind (TREE_OPERAND (ref, 0)); |
192 | op2_lvalue_kind = lvalue_kind (TREE_OPERAND (ref, 1)); |
193 | break; |
194 | |
195 | case COND_EXPR: |
196 | op1_lvalue_kind = lvalue_kind (TREE_OPERAND (ref, 1) |
197 | ? TREE_OPERAND (ref, 1) |
198 | : TREE_OPERAND (ref, 0)); |
199 | op2_lvalue_kind = lvalue_kind (TREE_OPERAND (ref, 2)); |
200 | break; |
201 | |
202 | case MODOP_EXPR: |
203 | /* We expect to see unlowered MODOP_EXPRs only during |
204 | template processing. */ |
205 | gcc_assert (processing_template_decl); |
206 | return clk_ordinary; |
207 | |
208 | case MODIFY_EXPR: |
209 | case TYPEID_EXPR: |
210 | return clk_ordinary; |
211 | |
212 | case COMPOUND_EXPR: |
213 | return lvalue_kind (TREE_OPERAND (ref, 1)); |
214 | |
215 | case TARGET_EXPR: |
216 | return clk_class; |
217 | |
218 | case VA_ARG_EXPR: |
219 | return (CLASS_TYPE_P (TREE_TYPE (ref)) ? clk_class : clk_none); |
220 | |
221 | case CALL_EXPR: |
222 | /* We can see calls outside of TARGET_EXPR in templates. */ |
223 | if (CLASS_TYPE_P (TREE_TYPE (ref))) |
224 | return clk_class; |
225 | return clk_none; |
226 | |
227 | case FUNCTION_DECL: |
      /* All functions (except non-static member functions) are
	 lvalues.  */
230 | return (DECL_NONSTATIC_MEMBER_FUNCTION_P (ref) |
231 | ? clk_none : clk_ordinary); |
232 | |
233 | case BASELINK: |
234 | /* We now represent a reference to a single static member function |
235 | with a BASELINK. */ |
236 | /* This CONST_CAST is okay because BASELINK_FUNCTIONS returns |
237 | its argument unmodified and we assign it to a const_tree. */ |
238 | return lvalue_kind (BASELINK_FUNCTIONS (CONST_CAST_TREE (ref))); |
239 | |
240 | case NON_DEPENDENT_EXPR: |
241 | return lvalue_kind (TREE_OPERAND (ref, 0)); |
242 | |
243 | default: |
244 | if (!TREE_TYPE (ref)) |
245 | return clk_none; |
246 | if (CLASS_TYPE_P (TREE_TYPE (ref)) |
247 | || TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE) |
248 | return clk_class; |
249 | break; |
250 | } |
251 | |
252 | /* If one operand is not an lvalue at all, then this expression is |
253 | not an lvalue. */ |
254 | if (!op1_lvalue_kind || !op2_lvalue_kind) |
255 | return clk_none; |
256 | |
257 | /* Otherwise, it's an lvalue, and it has all the odd properties |
258 | contributed by either operand. */ |
259 | op1_lvalue_kind = op1_lvalue_kind | op2_lvalue_kind; |
260 | /* It's not an ordinary lvalue if it involves any other kind. */ |
261 | if ((op1_lvalue_kind & ~clk_ordinary) != clk_none) |
262 | op1_lvalue_kind &= ~clk_ordinary; |
263 | /* It can't be both a pseudo-lvalue and a non-addressable lvalue. |
264 | A COND_EXPR of those should be wrapped in a TARGET_EXPR. */ |
265 | if ((op1_lvalue_kind & (clk_rvalueref|clk_class)) |
266 | && (op1_lvalue_kind & (clk_bitfield|clk_packed))) |
267 | op1_lvalue_kind = clk_none; |
268 | return op1_lvalue_kind; |
269 | } |
270 | |
271 | /* Returns the kind of lvalue that REF is, in the sense of [basic.lval]. */ |
272 | |
273 | cp_lvalue_kind |
274 | real_lvalue_p (const_tree ref) |
275 | { |
276 | cp_lvalue_kind kind = lvalue_kind (ref); |
277 | if (kind & (clk_rvalueref|clk_class)) |
278 | return clk_none; |
279 | else |
280 | return kind; |
281 | } |
282 | |
283 | /* c-common wants us to return bool. */ |
284 | |
285 | bool |
286 | lvalue_p (const_tree t) |
287 | { |
288 | return real_lvalue_p (t); |
289 | } |
290 | |
291 | /* This differs from lvalue_p in that xvalues are included. */ |
292 | |
293 | bool |
294 | glvalue_p (const_tree ref) |
295 | { |
296 | cp_lvalue_kind kind = lvalue_kind (ref); |
297 | if (kind & clk_class) |
298 | return false; |
299 | else |
300 | return (kind != clk_none); |
301 | } |
302 | |
303 | /* This differs from glvalue_p in that class prvalues are included. */ |
304 | |
305 | bool |
306 | obvalue_p (const_tree ref) |
307 | { |
308 | return (lvalue_kind (ref) != clk_none); |
309 | } |
310 | |
311 | /* Returns true if REF is an xvalue (the result of dereferencing an rvalue |
312 | reference), false otherwise. */ |
313 | |
314 | bool |
315 | xvalue_p (const_tree ref) |
316 | { |
317 | return (lvalue_kind (ref) == clk_rvalueref); |
318 | } |
319 | |
320 | /* True if REF is a bit-field. */ |
321 | |
322 | bool |
323 | bitfield_p (const_tree ref) |
324 | { |
325 | return (lvalue_kind (ref) & clk_bitfield); |
326 | } |
327 | |
328 | /* C++-specific version of stabilize_reference. */ |
329 | |
330 | tree |
331 | cp_stabilize_reference (tree ref) |
332 | { |
333 | switch (TREE_CODE (ref)) |
334 | { |
335 | case NON_DEPENDENT_EXPR: |
336 | /* We aren't actually evaluating this. */ |
337 | return ref; |
338 | |
339 | /* We need to treat specially anything stabilize_reference doesn't |
340 | handle specifically. */ |
341 | case VAR_DECL: |
342 | case PARM_DECL: |
343 | case RESULT_DECL: |
344 | CASE_CONVERT: |
345 | case FLOAT_EXPR: |
346 | case FIX_TRUNC_EXPR: |
347 | case INDIRECT_REF: |
348 | case COMPONENT_REF: |
349 | case BIT_FIELD_REF: |
350 | case ARRAY_REF: |
351 | case ARRAY_RANGE_REF: |
352 | case ERROR_MARK: |
353 | break; |
354 | default: |
355 | cp_lvalue_kind kind = lvalue_kind (ref); |
356 | if ((kind & ~clk_class) != clk_none) |
357 | { |
358 | tree type = unlowered_expr_type (ref); |
359 | bool rval = !!(kind & clk_rvalueref); |
360 | type = cp_build_reference_type (type, rval); |
361 | /* This inhibits warnings in, eg, cxx_mark_addressable |
362 | (c++/60955). */ |
363 | warning_sentinel s (extra_warnings); |
364 | ref = build_static_cast (type, ref, tf_error); |
365 | } |
366 | } |
367 | |
368 | return stabilize_reference (ref); |
369 | } |
370 | |
371 | /* Test whether DECL is a builtin that may appear in a |
372 | constant-expression. */ |
373 | |
374 | bool |
375 | builtin_valid_in_constant_expr_p (const_tree decl) |
376 | { |
377 | if (!(TREE_CODE (decl) == FUNCTION_DECL |
378 | && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)) |
379 | /* Not a built-in. */ |
380 | return false; |
381 | switch (DECL_FUNCTION_CODE (decl)) |
382 | { |
    /* These always have constant results like the corresponding
       macros/symbols.  */
385 | case BUILT_IN_FILE: |
386 | case BUILT_IN_FUNCTION: |
387 | case BUILT_IN_LINE: |
388 | |
389 | /* The following built-ins are valid in constant expressions |
390 | when their arguments are. */ |
391 | case BUILT_IN_ADD_OVERFLOW_P: |
392 | case BUILT_IN_SUB_OVERFLOW_P: |
393 | case BUILT_IN_MUL_OVERFLOW_P: |
394 | |
395 | /* These have constant results even if their operands are |
396 | non-constant. */ |
397 | case BUILT_IN_CONSTANT_P: |
398 | case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE: |
399 | return true; |
400 | default: |
401 | return false; |
402 | } |
403 | } |
404 | |
405 | /* Build a TARGET_EXPR, initializing the DECL with the VALUE. */ |
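
/* The tree built here has the shape (a sketch)

     TARGET_EXPR <DECL, VALUE, CLEANUP>

   where operand 0 (TARGET_EXPR_SLOT) is the temporary DECL, operand 1
   (TARGET_EXPR_INITIAL) computes its initial value, and operand 2
   (TARGET_EXPR_CLEANUP) destroys it, or is NULL_TREE when no cleanup
   is needed.  */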
406 | |
407 | static tree |
408 | build_target_expr (tree decl, tree value, tsubst_flags_t complain) |
409 | { |
410 | tree t; |
411 | tree type = TREE_TYPE (decl); |
412 | |
413 | value = mark_rvalue_use (value); |
414 | |
415 | gcc_checking_assert (VOID_TYPE_P (TREE_TYPE (value)) |
416 | || TREE_TYPE (decl) == TREE_TYPE (value) |
417 | /* On ARM ctors return 'this'. */ |
418 | || (TYPE_PTR_P (TREE_TYPE (value)) |
419 | && TREE_CODE (value) == CALL_EXPR) |
420 | || useless_type_conversion_p (TREE_TYPE (decl), |
421 | TREE_TYPE (value))); |
422 | |
423 | if (complain & tf_no_cleanup) |
424 | /* The caller is building a new-expr and does not need a cleanup. */ |
425 | t = NULL_TREE; |
426 | else |
427 | { |
428 | t = cxx_maybe_build_cleanup (decl, complain); |
429 | if (t == error_mark_node) |
430 | return error_mark_node; |
431 | } |
432 | t = build4 (TARGET_EXPR, type, decl, value, t, NULL_TREE); |
433 | if (EXPR_HAS_LOCATION (value)) |
434 | SET_EXPR_LOCATION (t, EXPR_LOCATION (value)); |
435 | /* We always set TREE_SIDE_EFFECTS so that expand_expr does not |
436 | ignore the TARGET_EXPR. If there really turn out to be no |
437 | side-effects, then the optimizer should be able to get rid of |
438 | whatever code is generated anyhow. */ |
439 | TREE_SIDE_EFFECTS (t) = 1; |
440 | |
441 | return t; |
442 | } |
443 | |
444 | /* Return an undeclared local temporary of type TYPE for use in building a |
445 | TARGET_EXPR. */ |
446 | |
447 | static tree |
448 | build_local_temp (tree type) |
449 | { |
450 | tree slot = build_decl (input_location, |
451 | VAR_DECL, NULL_TREE, type); |
452 | DECL_ARTIFICIAL (slot) = 1; |
453 | DECL_IGNORED_P (slot) = 1; |
454 | DECL_CONTEXT (slot) = current_function_decl; |
455 | layout_decl (slot, 0); |
456 | return slot; |
457 | } |
458 | |
459 | /* Set various status flags when building an AGGR_INIT_EXPR object T. */ |
460 | |
461 | static void |
462 | process_aggr_init_operands (tree t) |
463 | { |
464 | bool side_effects; |
465 | |
466 | side_effects = TREE_SIDE_EFFECTS (t); |
467 | if (!side_effects) |
468 | { |
469 | int i, n; |
470 | n = TREE_OPERAND_LENGTH (t); |
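      /* Operand 0 of a tcc_vl_exp node holds the operand count itself,
	 so start looking for side effects at operand 1.  */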
471 | for (i = 1; i < n; i++) |
472 | { |
473 | tree op = TREE_OPERAND (t, i); |
474 | if (op && TREE_SIDE_EFFECTS (op)) |
475 | { |
476 | side_effects = 1; |
477 | break; |
478 | } |
479 | } |
480 | } |
481 | TREE_SIDE_EFFECTS (t) = side_effects; |
482 | } |
483 | |
484 | /* Build an AGGR_INIT_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE, |
485 | FN, and SLOT. NARGS is the number of call arguments which are specified |
486 | as a tree array ARGS. */ |
487 | |
488 | static tree |
489 | build_aggr_init_array (tree return_type, tree fn, tree slot, int nargs, |
490 | tree *args) |
491 | { |
492 | tree t; |
493 | int i; |
494 | |
495 | t = build_vl_exp (AGGR_INIT_EXPR, nargs + 3); |
496 | TREE_TYPE (t) = return_type; |
497 | AGGR_INIT_EXPR_FN (t) = fn; |
498 | AGGR_INIT_EXPR_SLOT (t) = slot; |
499 | for (i = 0; i < nargs; i++) |
500 | AGGR_INIT_EXPR_ARG (t, i) = args[i]; |
501 | process_aggr_init_operands (t); |
502 | return t; |
503 | } |
504 | |
505 | /* INIT is a CALL_EXPR or AGGR_INIT_EXPR which needs info about its |
506 | target. TYPE is the type to be initialized. |
507 | |
508 | Build an AGGR_INIT_EXPR to represent the initialization. This function |
509 | differs from build_cplus_new in that an AGGR_INIT_EXPR can only be used |
510 | to initialize another object, whereas a TARGET_EXPR can either |
511 | initialize another object or create its own temporary object, and as a |
512 | result building up a TARGET_EXPR requires that the type's destructor be |
513 | callable. */ |
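
/* Illustratively, for "S s = f (a);" with f returning S by value, the
   CALL_EXPR "f (a)" becomes (a sketch)

     AGGR_INIT_EXPR <f, SLOT, a>

   where the function is operand 1 (AGGR_INIT_EXPR_FN) and the slot
   naming the object being initialized is operand 2
   (AGGR_INIT_EXPR_SLOT).  */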
514 | |
515 | tree |
516 | build_aggr_init_expr (tree type, tree init) |
517 | { |
518 | tree fn; |
519 | tree slot; |
520 | tree rval; |
521 | int is_ctor; |
522 | |
523 | /* Don't build AGGR_INIT_EXPR in a template. */ |
524 | if (processing_template_decl) |
525 | return init; |
526 | |
527 | fn = cp_get_callee (init); |
528 | if (fn == NULL_TREE) |
529 | return convert (type, init); |
530 | |
531 | is_ctor = (TREE_CODE (fn) == ADDR_EXPR |
532 | && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL |
533 | && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0))); |
534 | |
535 | /* We split the CALL_EXPR into its function and its arguments here. |
536 | Then, in expand_expr, we put them back together. The reason for |
537 | this is that this expression might be a default argument |
538 | expression. In that case, we need a new temporary every time the |
539 | expression is used. That's what break_out_target_exprs does; it |
540 | replaces every AGGR_INIT_EXPR with a copy that uses a fresh |
541 | temporary slot. Then, expand_expr builds up a call-expression |
542 | using the new slot. */ |
543 | |
544 | /* If we don't need to use a constructor to create an object of this |
545 | type, don't mess with AGGR_INIT_EXPR. */ |
546 | if (is_ctor || TREE_ADDRESSABLE (type)) |
547 | { |
548 | slot = build_local_temp (type); |
549 | |
550 | if (TREE_CODE (init) == CALL_EXPR) |
551 | { |
552 | rval = build_aggr_init_array (void_type_node, fn, slot, |
553 | call_expr_nargs (init), |
554 | CALL_EXPR_ARGP (init)); |
555 | AGGR_INIT_FROM_THUNK_P (rval) |
556 | = CALL_FROM_THUNK_P (init); |
557 | } |
558 | else |
559 | { |
560 | rval = build_aggr_init_array (void_type_node, fn, slot, |
561 | aggr_init_expr_nargs (init), |
562 | AGGR_INIT_EXPR_ARGP (init)); |
563 | AGGR_INIT_FROM_THUNK_P (rval) |
564 | = AGGR_INIT_FROM_THUNK_P (init); |
565 | } |
566 | TREE_SIDE_EFFECTS (rval) = 1; |
567 | AGGR_INIT_VIA_CTOR_P (rval) = is_ctor; |
568 | TREE_NOTHROW (rval) = TREE_NOTHROW (init); |
569 | CALL_EXPR_OPERATOR_SYNTAX (rval) = CALL_EXPR_OPERATOR_SYNTAX (init); |
570 | CALL_EXPR_ORDERED_ARGS (rval) = CALL_EXPR_ORDERED_ARGS (init); |
571 | CALL_EXPR_REVERSE_ARGS (rval) = CALL_EXPR_REVERSE_ARGS (init); |
572 | } |
573 | else |
574 | rval = init; |
575 | |
576 | return rval; |
577 | } |
578 | |
579 | /* INIT is a CALL_EXPR or AGGR_INIT_EXPR which needs info about its |
580 | target. TYPE is the type that this initialization should appear to |
581 | have. |
582 | |
583 | Build an encapsulation of the initialization to perform |
584 | and return it so that it can be processed by language-independent |
585 | and language-specific expression expanders. */ |
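
/* For instance, a constructor call INIT for a temporary of class type
   S yields (a sketch)

     TARGET_EXPR <D.1234, AGGR_INIT_EXPR <ctor, D.1234, ...>>

   so the AGGR_INIT_EXPR constructs directly into the TARGET_EXPR's
   slot D.1234.  */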
586 | |
587 | tree |
588 | build_cplus_new (tree type, tree init, tsubst_flags_t complain) |
589 | { |
590 | tree rval = build_aggr_init_expr (type, init); |
591 | tree slot; |
592 | |
593 | if (!complete_type_or_maybe_complain (type, init, complain)) |
594 | return error_mark_node; |
595 | |
596 | /* Make sure that we're not trying to create an instance of an |
597 | abstract class. */ |
598 | if (abstract_virtuals_error_sfinae (NULL_TREE, type, complain)) |
599 | return error_mark_node; |
600 | |
601 | if (TREE_CODE (rval) == AGGR_INIT_EXPR) |
602 | slot = AGGR_INIT_EXPR_SLOT (rval); |
603 | else if (TREE_CODE (rval) == CALL_EXPR |
604 | || TREE_CODE (rval) == CONSTRUCTOR) |
605 | slot = build_local_temp (type); |
606 | else |
607 | return rval; |
608 | |
609 | rval = build_target_expr (slot, rval, complain); |
610 | |
611 | if (rval != error_mark_node) |
612 | TARGET_EXPR_IMPLICIT_P (rval) = 1; |
613 | |
614 | return rval; |
615 | } |
616 | |
617 | /* Subroutine of build_vec_init_expr: Build up a single element |
   initialization as a proxy for the full array initialization to get things
619 | marked as used and any appropriate diagnostics. |
620 | |
621 | Since we're deferring building the actual constructor calls until |
622 | gimplification time, we need to build one now and throw it away so |
623 | that the relevant constructor gets mark_used before cgraph decides |
   what functions are needed.  Here we assume that INIT is either
   NULL_TREE, void_type_node (indicating value-initialization), or
   another array to copy.  */
627 | |
628 | static tree |
629 | build_vec_init_elt (tree type, tree init, tsubst_flags_t complain) |
630 | { |
631 | tree inner_type = strip_array_types (type); |
632 | vec<tree, va_gc> *argvec; |
633 | |
634 | if (integer_zerop (array_type_nelts_total (type)) |
635 | || !CLASS_TYPE_P (inner_type)) |
636 | /* No interesting initialization to do. */ |
637 | return integer_zero_node; |
638 | else if (init == void_type_node) |
639 | return build_value_init (inner_type, complain); |
640 | |
641 | gcc_assert (init == NULL_TREE |
642 | || (same_type_ignoring_top_level_qualifiers_p |
643 | (type, TREE_TYPE (init)))); |
644 | |
645 | argvec = make_tree_vector (); |
646 | if (init) |
647 | { |
648 | tree init_type = strip_array_types (TREE_TYPE (init)); |
649 | tree dummy = build_dummy_object (init_type); |
650 | if (!lvalue_p (init)) |
651 | dummy = move (dummy); |
652 | argvec->quick_push (dummy); |
653 | } |
654 | init = build_special_member_call (NULL_TREE, complete_ctor_identifier, |
655 | &argvec, inner_type, LOOKUP_NORMAL, |
656 | complain); |
657 | release_tree_vector (argvec); |
658 | |
659 | /* For a trivial constructor, build_over_call creates a TARGET_EXPR. But |
660 | we don't want one here because we aren't creating a temporary. */ |
661 | if (TREE_CODE (init) == TARGET_EXPR) |
662 | init = TARGET_EXPR_INITIAL (init); |
663 | |
664 | return init; |
665 | } |
666 | |
/* Return a VEC_INIT_EXPR which expresses the initialization of an array
   to be named later, either default-initialization, value-initialization,
   or copy-initialization from another array of the same type.  */
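
/* The result has the shape (a sketch)

     VEC_INIT_EXPR <SLOT, INIT>

   where SLOT is an undeclared temporary standing in for the array to
   be initialized; the per-element constructor calls are deferred until
   gimplification (see build_vec_init_elt above).  */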
670 | |
671 | tree |
672 | build_vec_init_expr (tree type, tree init, tsubst_flags_t complain) |
673 | { |
674 | tree slot; |
675 | bool value_init = false; |
676 | tree elt_init = build_vec_init_elt (type, init, complain); |
677 | |
678 | if (init == void_type_node) |
679 | { |
680 | value_init = true; |
681 | init = NULL_TREE; |
682 | } |
683 | |
684 | slot = build_local_temp (type); |
685 | init = build2 (VEC_INIT_EXPR, type, slot, init); |
686 | TREE_SIDE_EFFECTS (init) = true; |
687 | SET_EXPR_LOCATION (init, input_location); |
688 | |
689 | if (cxx_dialect >= cxx11 |
690 | && potential_constant_expression (elt_init)) |
691 | VEC_INIT_EXPR_IS_CONSTEXPR (init) = true; |
692 | VEC_INIT_EXPR_VALUE_INIT (init) = value_init; |
693 | |
694 | return init; |
695 | } |
696 | |
697 | /* Give a helpful diagnostic for a non-constexpr VEC_INIT_EXPR in a context |
698 | that requires a constant expression. */ |
699 | |
700 | void |
701 | diagnose_non_constexpr_vec_init (tree expr) |
702 | { |
703 | tree type = TREE_TYPE (VEC_INIT_EXPR_SLOT (expr)); |
704 | tree init, elt_init; |
705 | if (VEC_INIT_EXPR_VALUE_INIT (expr)) |
706 | init = void_type_node; |
707 | else |
708 | init = VEC_INIT_EXPR_INIT (expr); |
709 | |
710 | elt_init = build_vec_init_elt (type, init, tf_warning_or_error); |
711 | require_potential_constant_expression (elt_init); |
712 | } |
713 | |
714 | tree |
715 | build_array_copy (tree init) |
716 | { |
717 | return build_vec_init_expr (TREE_TYPE (init), init, tf_warning_or_error); |
718 | } |
719 | |
720 | /* Build a TARGET_EXPR using INIT to initialize a new temporary of the |
721 | indicated TYPE. */ |
722 | |
723 | tree |
724 | build_target_expr_with_type (tree init, tree type, tsubst_flags_t complain) |
725 | { |
726 | gcc_assert (!VOID_TYPE_P (type)); |
727 | |
728 | if (TREE_CODE (init) == TARGET_EXPR |
729 | || init == error_mark_node) |
730 | return init; |
731 | else if (CLASS_TYPE_P (type) && type_has_nontrivial_copy_init (type) |
732 | && !VOID_TYPE_P (TREE_TYPE (init)) |
733 | && TREE_CODE (init) != COND_EXPR |
734 | && TREE_CODE (init) != CONSTRUCTOR |
735 | && TREE_CODE (init) != VA_ARG_EXPR) |
736 | /* We need to build up a copy constructor call. A void initializer |
737 | means we're being called from bot_manip. COND_EXPR is a special |
738 | case because we already have copies on the arms and we don't want |
739 | another one here. A CONSTRUCTOR is aggregate initialization, which |
740 | is handled separately. A VA_ARG_EXPR is magic creation of an |
741 | aggregate; there's no additional work to be done. */ |
742 | return force_rvalue (init, complain); |
743 | |
744 | return force_target_expr (type, init, complain); |
745 | } |
746 | |
/* Like the above function, but without the checking.  This function should
   only be used by code which is deliberately trying to subvert the type
   system, such as call_builtin_trap, or by build_over_call to avoid
   infinite recursion.  */
751 | |
752 | tree |
753 | force_target_expr (tree type, tree init, tsubst_flags_t complain) |
754 | { |
755 | tree slot; |
756 | |
757 | gcc_assert (!VOID_TYPE_P (type)); |
758 | |
759 | slot = build_local_temp (type); |
760 | return build_target_expr (slot, init, complain); |
761 | } |
762 | |
763 | /* Like build_target_expr_with_type, but use the type of INIT. */ |
764 | |
765 | tree |
766 | get_target_expr_sfinae (tree init, tsubst_flags_t complain) |
767 | { |
768 | if (TREE_CODE (init) == AGGR_INIT_EXPR) |
769 | return build_target_expr (AGGR_INIT_EXPR_SLOT (init), init, complain); |
770 | else if (TREE_CODE (init) == VEC_INIT_EXPR) |
771 | return build_target_expr (VEC_INIT_EXPR_SLOT (init), init, complain); |
772 | else |
773 | { |
774 | init = convert_bitfield_to_declared_type (init); |
775 | return build_target_expr_with_type (init, TREE_TYPE (init), complain); |
776 | } |
777 | } |
778 | |
779 | tree |
780 | get_target_expr (tree init) |
781 | { |
782 | return get_target_expr_sfinae (init, tf_warning_or_error); |
783 | } |
784 | |
785 | /* If EXPR is a bitfield reference, convert it to the declared type of |
786 | the bitfield, and return the resulting expression. Otherwise, |
787 | return EXPR itself. */ |
788 | |
789 | tree |
790 | convert_bitfield_to_declared_type (tree expr) |
791 | { |
792 | tree bitfield_type; |
793 | |
794 | bitfield_type = is_bitfield_expr_with_lowered_type (expr); |
795 | if (bitfield_type) |
796 | expr = convert_to_integer_nofold (TYPE_MAIN_VARIANT (bitfield_type), |
797 | expr); |
798 | return expr; |
799 | } |
800 | |
801 | /* EXPR is being used in an rvalue context. Return a version of EXPR |
802 | that is marked as an rvalue. */ |
803 | |
804 | tree |
805 | rvalue (tree expr) |
806 | { |
807 | tree type; |
808 | |
809 | if (error_operand_p (expr)) |
810 | return expr; |
811 | |
812 | expr = mark_rvalue_use (expr); |
813 | |
814 | /* [basic.lval] |
815 | |
816 | Non-class rvalues always have cv-unqualified types. */ |
817 | type = TREE_TYPE (expr); |
818 | if (!CLASS_TYPE_P (type) && cv_qualified_p (type)) |
819 | type = cv_unqualified (type); |
820 | |
821 | /* We need to do this for rvalue refs as well to get the right answer |
822 | from decltype; see c++/36628. */ |
823 | if (!processing_template_decl && glvalue_p (expr)) |
824 | expr = build1 (NON_LVALUE_EXPR, type, expr); |
825 | else if (type != TREE_TYPE (expr)) |
826 | expr = build_nop (type, expr); |
827 | |
828 | return expr; |
829 | } |
830 | |
831 | |
832 | struct cplus_array_info |
833 | { |
834 | tree type; |
835 | tree domain; |
836 | }; |
837 | |
838 | struct cplus_array_hasher : ggc_ptr_hash<tree_node> |
839 | { |
840 | typedef cplus_array_info *compare_type; |
841 | |
842 | static hashval_t hash (tree t); |
843 | static bool equal (tree, cplus_array_info *); |
844 | }; |
845 | |
/* Hash the ARRAY_TYPE T.  */
847 | |
848 | hashval_t |
849 | cplus_array_hasher::hash (tree t) |
850 | { |
851 | hashval_t hash; |
852 | |
853 | hash = TYPE_UID (TREE_TYPE (t)); |
854 | if (TYPE_DOMAIN (t)) |
855 | hash ^= TYPE_UID (TYPE_DOMAIN (t)); |
856 | return hash; |
857 | } |
858 | |
/* Compare the ARRAY_TYPE T1 with the element type and domain recorded
   in the cplus_array_info T2.  */
861 | |
862 | bool |
863 | cplus_array_hasher::equal (tree t1, cplus_array_info *t2) |
864 | { |
865 | return (TREE_TYPE (t1) == t2->type && TYPE_DOMAIN (t1) == t2->domain); |
866 | } |
867 | |
868 | /* Hash table containing dependent array types, which are unsuitable for |
869 | the language-independent type hash table. */ |
870 | static GTY (()) hash_table<cplus_array_hasher> *cplus_array_htab; |
871 | |
872 | /* Build an ARRAY_TYPE without laying it out. */ |
873 | |
874 | static tree |
875 | build_min_array_type (tree elt_type, tree index_type) |
876 | { |
877 | tree t = cxx_make_type (ARRAY_TYPE); |
878 | TREE_TYPE (t) = elt_type; |
879 | TYPE_DOMAIN (t) = index_type; |
880 | return t; |
881 | } |
882 | |
883 | /* Set TYPE_CANONICAL like build_array_type_1, but using |
884 | build_cplus_array_type. */ |
885 | |
886 | static void |
887 | set_array_type_canon (tree t, tree elt_type, tree index_type) |
888 | { |
889 | /* Set the canonical type for this new node. */ |
890 | if (TYPE_STRUCTURAL_EQUALITY_P (elt_type) |
891 | || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))) |
892 | SET_TYPE_STRUCTURAL_EQUALITY (t); |
893 | else if (TYPE_CANONICAL (elt_type) != elt_type |
894 | || (index_type && TYPE_CANONICAL (index_type) != index_type)) |
895 | TYPE_CANONICAL (t) |
896 | = build_cplus_array_type (TYPE_CANONICAL (elt_type), |
897 | index_type |
898 | ? TYPE_CANONICAL (index_type) : index_type); |
899 | else |
900 | TYPE_CANONICAL (t) = t; |
901 | } |
902 | |
903 | /* Like build_array_type, but handle special C++ semantics: an array of a |
904 | variant element type is a variant of the array of the main variant of |
905 | the element type. */ |
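
/* For example, "const int[5]" is built as a variant of the array type
   "int[5]" (an array of the main variant "int"), rather than as an
   independent array of "const int".  */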
906 | |
907 | tree |
908 | build_cplus_array_type (tree elt_type, tree index_type) |
909 | { |
910 | tree t; |
911 | |
912 | if (elt_type == error_mark_node || index_type == error_mark_node) |
913 | return error_mark_node; |
914 | |
915 | bool dependent = (uses_template_parms (elt_type) |
916 | || (index_type && uses_template_parms (index_type))); |
917 | |
918 | if (elt_type != TYPE_MAIN_VARIANT (elt_type)) |
919 | /* Start with an array of the TYPE_MAIN_VARIANT. */ |
920 | t = build_cplus_array_type (TYPE_MAIN_VARIANT (elt_type), |
921 | index_type); |
922 | else if (dependent) |
923 | { |
924 | /* Since type_hash_canon calls layout_type, we need to use our own |
925 | hash table. */ |
926 | cplus_array_info cai; |
927 | hashval_t hash; |
928 | |
929 | if (cplus_array_htab == NULL) |
930 | cplus_array_htab = hash_table<cplus_array_hasher>::create_ggc (61); |
931 | |
932 | hash = TYPE_UID (elt_type); |
933 | if (index_type) |
934 | hash ^= TYPE_UID (index_type); |
935 | cai.type = elt_type; |
936 | cai.domain = index_type; |
937 | |
938 | tree *e = cplus_array_htab->find_slot_with_hash (&cai, hash, INSERT); |
939 | if (*e) |
940 | /* We have found the type: we're done. */ |
941 | return (tree) *e; |
942 | else |
943 | { |
944 | /* Build a new array type. */ |
945 | t = build_min_array_type (elt_type, index_type); |
946 | |
947 | /* Store it in the hash table. */ |
948 | *e = t; |
949 | |
950 | /* Set the canonical type for this new node. */ |
951 | set_array_type_canon (t, elt_type, index_type); |
952 | } |
953 | } |
954 | else |
955 | { |
956 | bool typeless_storage |
957 | = (elt_type == unsigned_char_type_node |
958 | || elt_type == signed_char_type_node |
959 | || elt_type == char_type_node |
960 | || (TREE_CODE (elt_type) == ENUMERAL_TYPE |
961 | && TYPE_CONTEXT (elt_type) == std_node |
	       && !strcmp ("byte", TYPE_NAME_STRING (elt_type))));
963 | t = build_array_type (elt_type, index_type, typeless_storage); |
964 | } |
965 | |
966 | /* Now check whether we already have this array variant. */ |
967 | if (elt_type != TYPE_MAIN_VARIANT (elt_type)) |
968 | { |
969 | tree m = t; |
970 | for (t = m; t; t = TYPE_NEXT_VARIANT (t)) |
971 | if (TREE_TYPE (t) == elt_type |
972 | && TYPE_NAME (t) == NULL_TREE |
973 | && TYPE_ATTRIBUTES (t) == NULL_TREE) |
974 | break; |
975 | if (!t) |
976 | { |
977 | t = build_min_array_type (elt_type, index_type); |
978 | set_array_type_canon (t, elt_type, index_type); |
979 | if (!dependent) |
980 | { |
981 | layout_type (t); |
982 | /* Make sure sizes are shared with the main variant. |
983 | layout_type can't be called after setting TYPE_NEXT_VARIANT, |
984 | as it will overwrite alignment etc. of all variants. */ |
985 | TYPE_SIZE (t) = TYPE_SIZE (m); |
986 | TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (m); |
987 | TYPE_TYPELESS_STORAGE (t) = TYPE_TYPELESS_STORAGE (m); |
988 | } |
989 | |
990 | TYPE_MAIN_VARIANT (t) = m; |
991 | TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m); |
992 | TYPE_NEXT_VARIANT (m) = t; |
993 | } |
994 | } |
995 | |
996 | /* Avoid spurious warnings with VLAs (c++/54583). */ |
997 | if (TYPE_SIZE (t) && EXPR_P (TYPE_SIZE (t))) |
998 | TREE_NO_WARNING (TYPE_SIZE (t)) = 1; |
999 | |
1000 | /* Push these needs up to the ARRAY_TYPE so that initialization takes |
1001 | place more easily. */ |
1002 | bool needs_ctor = (TYPE_NEEDS_CONSTRUCTING (t) |
1003 | = TYPE_NEEDS_CONSTRUCTING (elt_type)); |
1004 | bool needs_dtor = (TYPE_HAS_NONTRIVIAL_DESTRUCTOR (t) |
1005 | = TYPE_HAS_NONTRIVIAL_DESTRUCTOR (elt_type)); |
1006 | |
1007 | if (!dependent && t == TYPE_MAIN_VARIANT (t) |
1008 | && !COMPLETE_TYPE_P (t) && COMPLETE_TYPE_P (elt_type)) |
1009 | { |
1010 | /* The element type has been completed since the last time we saw |
1011 | this array type; update the layout and 'tor flags for any variants |
1012 | that need it. */ |
1013 | layout_type (t); |
1014 | for (tree v = TYPE_NEXT_VARIANT (t); v; v = TYPE_NEXT_VARIANT (v)) |
1015 | { |
1016 | TYPE_NEEDS_CONSTRUCTING (v) = needs_ctor; |
1017 | TYPE_HAS_NONTRIVIAL_DESTRUCTOR (v) = needs_dtor; |
1018 | } |
1019 | } |
1020 | |
1021 | return t; |
1022 | } |
1023 | |
1024 | /* Return an ARRAY_TYPE with element type ELT and length N. */ |
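
/* E.g. build_array_of_n_type (char_type_node, 3) returns "char[3]",
   i.e. an ARRAY_TYPE whose domain is the index range [0, 2].  */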
1025 | |
1026 | tree |
1027 | build_array_of_n_type (tree elt, int n) |
1028 | { |
1029 | return build_cplus_array_type (elt, build_index_type (size_int (n - 1))); |
1030 | } |
1031 | |
1032 | /* True iff T is an N3639 array of runtime bound (VLA). These were |
1033 | approved for C++14 but then removed. */ |
1034 | |
1035 | bool |
1036 | array_of_runtime_bound_p (tree t) |
1037 | { |
1038 | if (!t || TREE_CODE (t) != ARRAY_TYPE) |
1039 | return false; |
1040 | tree dom = TYPE_DOMAIN (t); |
1041 | if (!dom) |
1042 | return false; |
1043 | tree max = TYPE_MAX_VALUE (dom); |
1044 | return (!potential_rvalue_constant_expression (max) |
1045 | || (!value_dependent_expression_p (max) && !TREE_CONSTANT (max))); |
1046 | } |
1047 | |
1048 | /* Return a reference type node referring to TO_TYPE. If RVAL is |
1049 | true, return an rvalue reference type, otherwise return an lvalue |
1050 | reference type. If a type node exists, reuse it, otherwise create |
1051 | a new one. */ |
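
/* Reference collapsing falls out of the logic below: when TO_TYPE is
   itself a reference, the result is an rvalue reference only if both
   references are rvalue references (a sketch):

     cp_build_reference_type (int&,  true)  -> int&
     cp_build_reference_type (int&&, true)  -> int&&  */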
1052 | tree |
1053 | cp_build_reference_type (tree to_type, bool rval) |
1054 | { |
1055 | tree lvalue_ref, t; |
1056 | |
1057 | if (TREE_CODE (to_type) == REFERENCE_TYPE) |
1058 | { |
1059 | rval = rval && TYPE_REF_IS_RVALUE (to_type); |
1060 | to_type = TREE_TYPE (to_type); |
1061 | } |
1062 | |
1063 | lvalue_ref = build_reference_type (to_type); |
1064 | if (!rval) |
1065 | return lvalue_ref; |
1066 | |
1067 | /* This code to create rvalue reference types is based on and tied |
1068 | to the code creating lvalue reference types in the middle-end |
1069 | functions build_reference_type_for_mode and build_reference_type. |
1070 | |
1071 | It works by putting the rvalue reference type nodes after the |
1072 | lvalue reference nodes in the TYPE_NEXT_REF_TO linked list, so |
1073 | they will effectively be ignored by the middle end. */ |
1074 | |
1075 | for (t = lvalue_ref; (t = TYPE_NEXT_REF_TO (t)); ) |
1076 | if (TYPE_REF_IS_RVALUE (t)) |
1077 | return t; |
1078 | |
1079 | t = build_distinct_type_copy (lvalue_ref); |
1080 | |
1081 | TYPE_REF_IS_RVALUE (t) = true; |
1082 | TYPE_NEXT_REF_TO (t) = TYPE_NEXT_REF_TO (lvalue_ref); |
1083 | TYPE_NEXT_REF_TO (lvalue_ref) = t; |
1084 | |
1085 | if (TYPE_STRUCTURAL_EQUALITY_P (to_type)) |
1086 | SET_TYPE_STRUCTURAL_EQUALITY (t); |
1087 | else if (TYPE_CANONICAL (to_type) != to_type) |
1088 | TYPE_CANONICAL (t) |
1089 | = cp_build_reference_type (TYPE_CANONICAL (to_type), rval); |
1090 | else |
1091 | TYPE_CANONICAL (t) = t; |
1092 | |
1093 | layout_type (t); |
1094 | |
  return t;
}
1098 | |
1099 | /* Returns EXPR cast to rvalue reference type, like std::move. */ |
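
/* E.g. given "int i;", move applied to a use of i builds the same tree
   as the C++ expression static_cast<int&&>(i).  */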
1100 | |
1101 | tree |
1102 | move (tree expr) |
1103 | { |
1104 | tree type = TREE_TYPE (expr); |
1105 | gcc_assert (TREE_CODE (type) != REFERENCE_TYPE); |
1106 | type = cp_build_reference_type (type, /*rval*/true); |
1107 | return build_static_cast (type, expr, tf_warning_or_error); |
1108 | } |
1109 | |
1110 | /* Used by the C++ front end to build qualified array types. However, |
1111 | the C version of this function does not properly maintain canonical |
1112 | types (which are not used in C). */ |
1113 | tree |
1114 | c_build_qualified_type (tree type, int type_quals, tree /* orig_qual_type */, |
1115 | size_t /* orig_qual_indirect */) |
1116 | { |
1117 | return cp_build_qualified_type (type, type_quals); |
1118 | } |
1119 | |
1120 | |
1121 | /* Make a variant of TYPE, qualified with the TYPE_QUALS. Handles |
1122 | arrays correctly. In particular, if TYPE is an array of T's, and |
1123 | TYPE_QUALS is non-empty, returns an array of qualified T's. |
1124 | |
1125 | FLAGS determines how to deal with ill-formed qualifications. If |
1126 | tf_ignore_bad_quals is set, then bad qualifications are dropped |
1127 | (this is permitted if TYPE was introduced via a typedef or template |
1128 | type parameter). If bad qualifications are dropped and tf_warning |
1129 | is set, then a warning is issued for non-const qualifications. If |
1130 | tf_ignore_bad_quals is not set and tf_error is not set, we |
1131 | return error_mark_node. Otherwise, we issue an error, and ignore |
1132 | the qualifications. |
1133 | |
1134 | Qualification of a reference type is valid when the reference came |
1135 | via a typedef or template type argument. [dcl.ref] No such |
1136 | dispensation is provided for qualifying a function type. [dcl.fct] |
1137 | DR 295 queries this and the proposed resolution brings it into line |
1138 | with qualifying a reference. We implement the DR. We also behave |
1139 | in a similar manner for restricting non-pointer types. */ |
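
/* For instance, qualifying the array type "int[5]" with const yields
   "const int[5]", an array of const elements, per the ARRAY_TYPE case
   below.  */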
1140 | |
1141 | tree |
1142 | cp_build_qualified_type_real (tree type, |
1143 | int type_quals, |
1144 | tsubst_flags_t complain) |
1145 | { |
1146 | tree result; |
1147 | int bad_quals = TYPE_UNQUALIFIED; |
1148 | |
1149 | if (type == error_mark_node) |
1150 | return type; |
1151 | |
1152 | if (type_quals == cp_type_quals (type)) |
1153 | return type; |
1154 | |
1155 | if (TREE_CODE (type) == ARRAY_TYPE) |
1156 | { |
1157 | /* In C++, the qualification really applies to the array element |
1158 | type. Obtain the appropriately qualified element type. */ |
1159 | tree t; |
1160 | tree element_type |
1161 | = cp_build_qualified_type_real (TREE_TYPE (type), |
1162 | type_quals, |
1163 | complain); |
1164 | |
1165 | if (element_type == error_mark_node) |
1166 | return error_mark_node; |
1167 | |
1168 | /* See if we already have an identically qualified type. Tests |
1169 | should be equivalent to those in check_qualified_type. */ |
1170 | for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t)) |
1171 | if (TREE_TYPE (t) == element_type |
1172 | && TYPE_NAME (t) == TYPE_NAME (type) |
1173 | && TYPE_CONTEXT (t) == TYPE_CONTEXT (type) |
1174 | && attribute_list_equal (TYPE_ATTRIBUTES (t), |
1175 | TYPE_ATTRIBUTES (type))) |
1176 | break; |
1177 | |
1178 | if (!t) |
1179 | { |
1180 | t = build_cplus_array_type (element_type, TYPE_DOMAIN (type)); |
1181 | |
1182 | /* Keep the typedef name. */ |
1183 | if (TYPE_NAME (t) != TYPE_NAME (type)) |
1184 | { |
1185 | t = build_variant_type_copy (t); |
1186 | TYPE_NAME (t) = TYPE_NAME (type); |
1187 | SET_TYPE_ALIGN (t, TYPE_ALIGN (type)); |
1188 | TYPE_USER_ALIGN (t) = TYPE_USER_ALIGN (type); |
1189 | } |
1190 | } |
1191 | |
1192 | /* Even if we already had this variant, we update |
1193 | TYPE_NEEDS_CONSTRUCTING and TYPE_HAS_NONTRIVIAL_DESTRUCTOR in case |
1194 | they changed since the variant was originally created. |
1195 | |
1196 | This seems hokey; if there is some way to use a previous |
1197 | variant *without* coming through here, |
1198 | TYPE_NEEDS_CONSTRUCTING will never be updated. */ |
1199 | TYPE_NEEDS_CONSTRUCTING (t) |
1200 | = TYPE_NEEDS_CONSTRUCTING (TYPE_MAIN_VARIANT (element_type)); |
1201 | TYPE_HAS_NONTRIVIAL_DESTRUCTOR (t) |
1202 | = TYPE_HAS_NONTRIVIAL_DESTRUCTOR (TYPE_MAIN_VARIANT (element_type)); |
1203 | return t; |
1204 | } |
1205 | else if (TREE_CODE (type) == TYPE_PACK_EXPANSION) |
1206 | { |
1207 | tree t = PACK_EXPANSION_PATTERN (type); |
1208 | |
1209 | t = cp_build_qualified_type_real (t, type_quals, complain); |
1210 | return make_pack_expansion (t, complain); |
1211 | } |
1212 | |
1213 | /* A reference or method type shall not be cv-qualified. |
1214 | [dcl.ref], [dcl.fct]. This used to be an error, but as of DR 295 |
1215 | (in CD1) we always ignore extra cv-quals on functions. */ |
1216 | if (type_quals & (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE) |
1217 | && (TREE_CODE (type) == REFERENCE_TYPE |
1218 | || TREE_CODE (type) == FUNCTION_TYPE |
1219 | || TREE_CODE (type) == METHOD_TYPE)) |
1220 | { |
1221 | if (TREE_CODE (type) == REFERENCE_TYPE) |
1222 | bad_quals |= type_quals & (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE); |
1223 | type_quals &= ~(TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE); |
1224 | } |
1225 | |
1226 | /* But preserve any function-cv-quals on a FUNCTION_TYPE. */ |
1227 | if (TREE_CODE (type) == FUNCTION_TYPE) |
1228 | type_quals |= type_memfn_quals (type); |
1229 | |
1230 | /* A restrict-qualified type must be a pointer (or reference) |
1231 | to object or incomplete type. */ |
1232 | if ((type_quals & TYPE_QUAL_RESTRICT) |
1233 | && TREE_CODE (type) != TEMPLATE_TYPE_PARM |
1234 | && TREE_CODE (type) != TYPENAME_TYPE |
1235 | && !POINTER_TYPE_P (type)) |
1236 | { |
1237 | bad_quals |= TYPE_QUAL_RESTRICT; |
1238 | type_quals &= ~TYPE_QUAL_RESTRICT; |
1239 | } |
1240 | |
1241 | if (bad_quals == TYPE_UNQUALIFIED |
1242 | || (complain & tf_ignore_bad_quals)) |
1243 | /*OK*/; |
1244 | else if (!(complain & tf_error)) |
1245 | return error_mark_node; |
1246 | else |
1247 | { |
1248 | tree bad_type = build_qualified_type (ptr_type_node, bad_quals); |
1249 | error ("%qV qualifiers cannot be applied to %qT" , |
1250 | bad_type, type); |
1251 | } |
1252 | |
1253 | /* Retrieve (or create) the appropriately qualified variant. */ |
1254 | result = build_qualified_type (type, type_quals); |
1255 | |
1256 | /* Preserve exception specs and ref-qualifier since build_qualified_type |
1257 | doesn't know about them. */ |
1258 | if (TREE_CODE (result) == FUNCTION_TYPE |
1259 | || TREE_CODE (result) == METHOD_TYPE) |
1260 | { |
1261 | result = build_exception_variant (result, TYPE_RAISES_EXCEPTIONS (type)); |
1262 | result = build_ref_qualified_type (result, type_memfn_rqual (type)); |
1263 | } |
1264 | |
1265 | return result; |
1266 | } |
1267 | |
1268 | /* Return TYPE with const and volatile removed. */ |
1269 | |
1270 | tree |
1271 | cv_unqualified (tree type) |
1272 | { |
1273 | int quals; |
1274 | |
1275 | if (type == error_mark_node) |
1276 | return type; |
1277 | |
1278 | quals = cp_type_quals (type); |
1279 | quals &= ~(TYPE_QUAL_CONST|TYPE_QUAL_VOLATILE); |
1280 | return cp_build_qualified_type (type, quals); |
1281 | } |
1282 | |
1283 | /* Subroutine of strip_typedefs. We want to apply to RESULT the attributes |
1284 | from ATTRIBS that affect type identity, and no others. If any are not |
1285 | applied, set *remove_attributes to true. */ |
1286 | |
1287 | static tree |
1288 | apply_identity_attributes (tree result, tree attribs, bool *remove_attributes) |
1289 | { |
1290 | tree first_ident = NULL_TREE; |
1291 | tree new_attribs = NULL_TREE; |
1292 | tree *p = &new_attribs; |
1293 | |
1294 | if (OVERLOAD_TYPE_P (result)) |
1295 | { |
1296 | /* On classes and enums all attributes are ingrained. */ |
1297 | gcc_assert (attribs == TYPE_ATTRIBUTES (result)); |
1298 | return result; |
1299 | } |
1300 | |
1301 | for (tree a = attribs; a; a = TREE_CHAIN (a)) |
1302 | { |
1303 | const attribute_spec *as |
1304 | = lookup_attribute_spec (get_attribute_name (a)); |
1305 | if (as && as->affects_type_identity) |
1306 | { |
1307 | if (!first_ident) |
1308 | first_ident = a; |
1309 | else if (first_ident == error_mark_node) |
1310 | { |
1311 | *p = tree_cons (TREE_PURPOSE (a), TREE_VALUE (a), NULL_TREE); |
1312 | p = &TREE_CHAIN (*p); |
1313 | } |
1314 | } |
1315 | else if (first_ident) |
1316 | { |
1317 | for (tree a2 = first_ident; a2; a2 = TREE_CHAIN (a2)) |
1318 | { |
1319 | *p = tree_cons (TREE_PURPOSE (a2), TREE_VALUE (a2), NULL_TREE); |
1320 | p = &TREE_CHAIN (*p); |
1321 | } |
1322 | first_ident = error_mark_node; |
1323 | } |
1324 | } |
1325 | if (first_ident != error_mark_node) |
1326 | new_attribs = first_ident; |
1327 | |
1328 | if (first_ident == attribs) |
1329 | /* All attributes affected type identity. */; |
1330 | else |
1331 | *remove_attributes = true; |
1332 | |
1333 | return cp_build_type_attribute_variant (result, new_attribs); |
1334 | } |
1335 | |
1336 | /* Builds a qualified variant of T that is not a typedef variant. |
1337 | E.g. consider the following declarations: |
1338 | typedef const int ConstInt; |
1339 | typedef ConstInt* PtrConstInt; |
1340 | If T is PtrConstInt, this function returns a type representing |
1341 | const int*. |
1342 | In other words, if T is a typedef, the function returns the underlying type. |
1343 | The cv-qualification and attributes of the type returned match the |
1344 | input type. |
1345 | They will always be compatible types. |
1346 | The returned type is built so that all of its subtypes |
1347 | recursively have their typedefs stripped as well. |
1348 | |
   This is different from just returning TYPE_CANONICAL (T)
   for several reasons:
     * If T is a type that needs structural equality,
       its TYPE_CANONICAL (T) will be NULL.
     * TYPE_CANONICAL (T) doesn't carry type attributes
       and loses template parameter names.
1355 | |
1356 | If REMOVE_ATTRIBUTES is non-null, also strip attributes that don't |
1357 | affect type identity, and set the referent to true if any were |
1358 | stripped. */ |
1359 | |
1360 | tree |
1361 | strip_typedefs (tree t, bool *remove_attributes) |
1362 | { |
1363 | tree result = NULL, type = NULL, t0 = NULL; |
1364 | |
1365 | if (!t || t == error_mark_node) |
1366 | return t; |
1367 | |
1368 | if (TREE_CODE (t) == TREE_LIST) |
1369 | { |
1370 | bool changed = false; |
1371 | vec<tree,va_gc> *vec = make_tree_vector (); |
1372 | tree r = t; |
1373 | for (; t; t = TREE_CHAIN (t)) |
1374 | { |
1375 | gcc_assert (!TREE_PURPOSE (t)); |
1376 | tree elt = strip_typedefs (TREE_VALUE (t), remove_attributes); |
1377 | if (elt != TREE_VALUE (t)) |
1378 | changed = true; |
1379 | vec_safe_push (vec, elt); |
1380 | } |
1381 | if (changed) |
1382 | r = build_tree_list_vec (vec); |
1383 | release_tree_vector (vec); |
1384 | return r; |
1385 | } |
1386 | |
1387 | gcc_assert (TYPE_P (t)); |
1388 | |
1389 | if (t == TYPE_CANONICAL (t)) |
1390 | return t; |
1391 | |
1392 | if (dependent_alias_template_spec_p (t)) |
1393 | /* DR 1558: However, if the template-id is dependent, subsequent |
1394 | template argument substitution still applies to the template-id. */ |
1395 | return t; |
1396 | |
1397 | switch (TREE_CODE (t)) |
1398 | { |
1399 | case POINTER_TYPE: |
1400 | type = strip_typedefs (TREE_TYPE (t), remove_attributes); |
1401 | result = build_pointer_type (type); |
1402 | break; |
1403 | case REFERENCE_TYPE: |
1404 | type = strip_typedefs (TREE_TYPE (t), remove_attributes); |
1405 | result = cp_build_reference_type (type, TYPE_REF_IS_RVALUE (t)); |
1406 | break; |
1407 | case OFFSET_TYPE: |
1408 | t0 = strip_typedefs (TYPE_OFFSET_BASETYPE (t), remove_attributes); |
1409 | type = strip_typedefs (TREE_TYPE (t), remove_attributes); |
1410 | result = build_offset_type (t0, type); |
1411 | break; |
1412 | case RECORD_TYPE: |
1413 | if (TYPE_PTRMEMFUNC_P (t)) |
1414 | { |
1415 | t0 = strip_typedefs (TYPE_PTRMEMFUNC_FN_TYPE (t), remove_attributes); |
1416 | result = build_ptrmemfunc_type (t0); |
1417 | } |
1418 | break; |
1419 | case ARRAY_TYPE: |
1420 | type = strip_typedefs (TREE_TYPE (t), remove_attributes); |
1421 | t0 = strip_typedefs (TYPE_DOMAIN (t), remove_attributes); |
1422 | result = build_cplus_array_type (type, t0); |
1423 | break; |
1424 | case FUNCTION_TYPE: |
1425 | case METHOD_TYPE: |
1426 | { |
1427 | tree arg_types = NULL, arg_node, arg_node2, arg_type; |
1428 | bool changed; |
1429 | |
1430 | /* Because we stomp on TREE_PURPOSE of TYPE_ARG_TYPES in many places |
1431 | around the compiler (e.g. cp_parser_late_parsing_default_args), we |
1432 | can't expect that re-hashing a function type will find a previous |
1433 | equivalent type, so try to reuse the input type if nothing has |
1434 | changed. If the type is itself a variant, that will change. */ |
1435 | bool is_variant = typedef_variant_p (t); |
1436 | if (remove_attributes |
1437 | && (TYPE_ATTRIBUTES (t) || TYPE_USER_ALIGN (t))) |
1438 | is_variant = true; |
1439 | |
1440 | type = strip_typedefs (TREE_TYPE (t), remove_attributes); |
1441 | tree canon_spec = (flag_noexcept_type |
1442 | ? canonical_eh_spec (TYPE_RAISES_EXCEPTIONS (t)) |
1443 | : NULL_TREE); |
1444 | changed = (type != TREE_TYPE (t) || is_variant |
1445 | || TYPE_RAISES_EXCEPTIONS (t) != canon_spec); |
1446 | |
1447 | for (arg_node = TYPE_ARG_TYPES (t); |
1448 | arg_node; |
1449 | arg_node = TREE_CHAIN (arg_node)) |
1450 | { |
1451 | if (arg_node == void_list_node) |
1452 | break; |
1453 | arg_type = strip_typedefs (TREE_VALUE (arg_node), |
1454 | remove_attributes); |
1455 | gcc_assert (arg_type); |
1456 | if (arg_type == TREE_VALUE (arg_node) && !changed) |
1457 | continue; |
1458 | |
1459 | if (!changed) |
1460 | { |
1461 | changed = true; |
1462 | for (arg_node2 = TYPE_ARG_TYPES (t); |
1463 | arg_node2 != arg_node; |
1464 | arg_node2 = TREE_CHAIN (arg_node2)) |
1465 | arg_types |
1466 | = tree_cons (TREE_PURPOSE (arg_node2), |
1467 | TREE_VALUE (arg_node2), arg_types); |
1468 | } |
1469 | |
1470 | arg_types |
1471 | = tree_cons (TREE_PURPOSE (arg_node), arg_type, arg_types); |
1472 | } |
1473 | |
1474 | if (!changed) |
1475 | return t; |
1476 | |
1477 | if (arg_types) |
1478 | arg_types = nreverse (arg_types); |
1479 | |
1480 | /* A list of parameters not ending with an ellipsis |
1481 | must end with void_list_node. */ |
1482 | if (arg_node) |
1483 | arg_types = chainon (arg_types, void_list_node); |
1484 | |
1485 | if (TREE_CODE (t) == METHOD_TYPE) |
1486 | { |
1487 | tree class_type = TREE_TYPE (TREE_VALUE (arg_types)); |
1488 | gcc_assert (class_type); |
1489 | result = |
1490 | build_method_type_directly (class_type, type, |
1491 | TREE_CHAIN (arg_types)); |
1492 | result |
1493 | = build_ref_qualified_type (result, type_memfn_rqual (t)); |
1494 | } |
1495 | else |
1496 | { |
1497 | result = build_function_type (type, |
1498 | arg_types); |
1499 | result = apply_memfn_quals (result, |
1500 | type_memfn_quals (t), |
1501 | type_memfn_rqual (t)); |
1502 | } |
1503 | |
1504 | if (canon_spec) |
1505 | result = build_exception_variant (result, canon_spec); |
1506 | if (TYPE_HAS_LATE_RETURN_TYPE (t)) |
1507 | TYPE_HAS_LATE_RETURN_TYPE (result) = 1; |
1508 | } |
1509 | break; |
1510 | case TYPENAME_TYPE: |
1511 | { |
1512 | bool changed = false; |
1513 | tree fullname = TYPENAME_TYPE_FULLNAME (t); |
1514 | if (TREE_CODE (fullname) == TEMPLATE_ID_EXPR |
1515 | && TREE_OPERAND (fullname, 1)) |
1516 | { |
1517 | tree args = TREE_OPERAND (fullname, 1); |
1518 | tree new_args = copy_node (args); |
1519 | for (int i = 0; i < TREE_VEC_LENGTH (args); ++i) |
1520 | { |
1521 | tree arg = TREE_VEC_ELT (args, i); |
1522 | tree strip_arg; |
1523 | if (TYPE_P (arg)) |
1524 | strip_arg = strip_typedefs (arg, remove_attributes); |
1525 | else |
1526 | strip_arg = strip_typedefs_expr (arg, remove_attributes); |
1527 | TREE_VEC_ELT (new_args, i) = strip_arg; |
1528 | if (strip_arg != arg) |
1529 | changed = true; |
1530 | } |
1531 | if (changed) |
1532 | { |
1533 | NON_DEFAULT_TEMPLATE_ARGS_COUNT (new_args) |
1534 | = NON_DEFAULT_TEMPLATE_ARGS_COUNT (args); |
1535 | fullname |
1536 | = lookup_template_function (TREE_OPERAND (fullname, 0), |
1537 | new_args); |
1538 | } |
1539 | else |
1540 | ggc_free (new_args); |
1541 | } |
1542 | tree ctx = strip_typedefs (TYPE_CONTEXT (t), remove_attributes); |
1543 | if (!changed && ctx == TYPE_CONTEXT (t) && !typedef_variant_p (t)) |
1544 | return t; |
1545 | tree name = fullname; |
1546 | if (TREE_CODE (fullname) == TEMPLATE_ID_EXPR) |
1547 | name = TREE_OPERAND (fullname, 0); |
1548 | /* Use build_typename_type rather than make_typename_type because we |
1549 | don't want to resolve it here, just strip typedefs. */ |
1550 | result = build_typename_type (ctx, name, fullname, typename_type); |
1551 | } |
1552 | break; |
1553 | case DECLTYPE_TYPE: |
1554 | result = strip_typedefs_expr (DECLTYPE_TYPE_EXPR (t), |
1555 | remove_attributes); |
1556 | if (result == DECLTYPE_TYPE_EXPR (t)) |
1557 | result = NULL_TREE; |
1558 | else |
1559 | result = (finish_decltype_type |
1560 | (result, |
1561 | DECLTYPE_TYPE_ID_EXPR_OR_MEMBER_ACCESS_P (t), |
1562 | tf_none)); |
1563 | break; |
1564 | case UNDERLYING_TYPE: |
1565 | type = strip_typedefs (UNDERLYING_TYPE_TYPE (t), remove_attributes); |
1566 | result = finish_underlying_type (type); |
1567 | break; |
1568 | default: |
1569 | break; |
1570 | } |
1571 | |
1572 | if (!result) |
1573 | { |
1574 | if (typedef_variant_p (t)) |
1575 | { |
1576 | /* Explicitly get the underlying type, as TYPE_MAIN_VARIANT doesn't |
1577 | strip typedefs with attributes. */ |
1578 | result = TYPE_MAIN_VARIANT (DECL_ORIGINAL_TYPE (TYPE_NAME (t))); |
1579 | result = strip_typedefs (result); |
1580 | } |
1581 | else |
1582 | result = TYPE_MAIN_VARIANT (t); |
1583 | } |
1584 | gcc_assert (!typedef_variant_p (result)); |
1585 | |
1586 | if (COMPLETE_TYPE_P (result) && !COMPLETE_TYPE_P (t)) |
1587 | /* If RESULT is complete and T isn't, it's likely the case that T |
1588 | is a variant of RESULT which hasn't been updated yet. Skip the |
1589 | attribute handling. */; |
1590 | else |
1591 | { |
1592 | if (TYPE_USER_ALIGN (t) != TYPE_USER_ALIGN (result) |
1593 | || TYPE_ALIGN (t) != TYPE_ALIGN (result)) |
1594 | { |
1595 | gcc_assert (TYPE_USER_ALIGN (t)); |
1596 | if (remove_attributes) |
1597 | *remove_attributes = true; |
1598 | else |
1599 | { |
1600 | if (TYPE_ALIGN (t) == TYPE_ALIGN (result)) |
1601 | result = build_variant_type_copy (result); |
1602 | else |
1603 | result = build_aligned_type (result, TYPE_ALIGN (t)); |
1604 | TYPE_USER_ALIGN (result) = true; |
1605 | } |
1606 | } |
1607 | |
1608 | if (TYPE_ATTRIBUTES (t)) |
1609 | { |
1610 | if (remove_attributes) |
1611 | result = apply_identity_attributes (result, TYPE_ATTRIBUTES (t), |
1612 | remove_attributes); |
1613 | else |
1614 | result = cp_build_type_attribute_variant (result, |
1615 | TYPE_ATTRIBUTES (t)); |
1616 | } |
1617 | } |
1618 | |
1619 | return cp_build_qualified_type (result, cp_type_quals (t)); |
1620 | } |
1621 | |
1622 | /* Like strip_typedefs above, but works on expressions, so that in |
1623 | |
1624 | template<class T> struct A |
1625 | { |
1626 | typedef T TT; |
1627 | B<sizeof(TT)> b; |
1628 | }; |
1629 | |
1630 | sizeof(TT) is replaced by sizeof(T). */ |
1631 | |
1632 | tree |
1633 | strip_typedefs_expr (tree t, bool *remove_attributes) |
1634 | { |
  unsigned i, n;
1636 | tree r, type, *ops; |
1637 | enum tree_code code; |
1638 | |
1639 | if (t == NULL_TREE || t == error_mark_node) |
1640 | return t; |
1641 | |
1642 | if (DECL_P (t) || CONSTANT_CLASS_P (t)) |
1643 | return t; |
1644 | |
1645 | /* Some expressions have type operands, so let's handle types here rather |
1646 | than check TYPE_P in multiple places below. */ |
1647 | if (TYPE_P (t)) |
1648 | return strip_typedefs (t, remove_attributes); |
1649 | |
1650 | code = TREE_CODE (t); |
1651 | switch (code) |
1652 | { |
1653 | case IDENTIFIER_NODE: |
1654 | case TEMPLATE_PARM_INDEX: |
1655 | case OVERLOAD: |
1656 | case BASELINK: |
1657 | case ARGUMENT_PACK_SELECT: |
1658 | return t; |
1659 | |
1660 | case TRAIT_EXPR: |
1661 | { |
1662 | tree type1 = strip_typedefs (TRAIT_EXPR_TYPE1 (t), remove_attributes); |
1663 | tree type2 = strip_typedefs (TRAIT_EXPR_TYPE2 (t), remove_attributes); |
1664 | if (type1 == TRAIT_EXPR_TYPE1 (t) |
1665 | && type2 == TRAIT_EXPR_TYPE2 (t)) |
1666 | return t; |
1667 | r = copy_node (t); |
1668 | TRAIT_EXPR_TYPE1 (r) = type1; |
1669 | TRAIT_EXPR_TYPE2 (r) = type2; |
1670 | return r; |
1671 | } |
1672 | |
1673 | case TREE_LIST: |
1674 | { |
1675 | vec<tree, va_gc> *vec = make_tree_vector (); |
1676 | bool changed = false; |
1677 | tree it; |
1678 | for (it = t; it; it = TREE_CHAIN (it)) |
1679 | { |
	    tree val = strip_typedefs_expr (TREE_VALUE (it),
					    remove_attributes);
	    vec_safe_push (vec, val);
	    if (val != TREE_VALUE (it))
1683 | changed = true; |
1684 | gcc_assert (TREE_PURPOSE (it) == NULL_TREE); |
1685 | } |
1686 | if (changed) |
1687 | { |
1688 | r = NULL_TREE; |
1689 | FOR_EACH_VEC_ELT_REVERSE (*vec, i, it) |
1690 | r = tree_cons (NULL_TREE, it, r); |
1691 | } |
1692 | else |
1693 | r = t; |
1694 | release_tree_vector (vec); |
1695 | return r; |
1696 | } |
1697 | |
1698 | case TREE_VEC: |
1699 | { |
1700 | bool changed = false; |
1701 | vec<tree, va_gc> *vec = make_tree_vector (); |
1702 | n = TREE_VEC_LENGTH (t); |
1703 | vec_safe_reserve (vec, n); |
1704 | for (i = 0; i < n; ++i) |
1705 | { |
1706 | tree op = strip_typedefs_expr (TREE_VEC_ELT (t, i), |
1707 | remove_attributes); |
1708 | vec->quick_push (op); |
1709 | if (op != TREE_VEC_ELT (t, i)) |
1710 | changed = true; |
1711 | } |
1712 | if (changed) |
1713 | { |
1714 | r = copy_node (t); |
1715 | for (i = 0; i < n; ++i) |
1716 | TREE_VEC_ELT (r, i) = (*vec)[i]; |
1717 | NON_DEFAULT_TEMPLATE_ARGS_COUNT (r) |
1718 | = NON_DEFAULT_TEMPLATE_ARGS_COUNT (t); |
1719 | } |
1720 | else |
1721 | r = t; |
1722 | release_tree_vector (vec); |
1723 | return r; |
1724 | } |
1725 | |
1726 | case CONSTRUCTOR: |
1727 | { |
1728 | bool changed = false; |
1729 | vec<constructor_elt, va_gc> *vec |
1730 | = vec_safe_copy (CONSTRUCTOR_ELTS (t)); |
1731 | n = CONSTRUCTOR_NELTS (t); |
1732 | type = strip_typedefs (TREE_TYPE (t), remove_attributes); |
1733 | for (i = 0; i < n; ++i) |
1734 | { |
1735 | constructor_elt *e = &(*vec)[i]; |
1736 | tree op = strip_typedefs_expr (e->value, remove_attributes); |
1737 | if (op != e->value) |
1738 | { |
1739 | changed = true; |
1740 | e->value = op; |
1741 | } |
1742 | gcc_checking_assert |
1743 | (e->index == strip_typedefs_expr (e->index, remove_attributes)); |
1744 | } |
1745 | |
1746 | if (!changed && type == TREE_TYPE (t)) |
1747 | { |
1748 | vec_free (vec); |
1749 | return t; |
1750 | } |
1751 | else |
1752 | { |
1753 | r = copy_node (t); |
1754 | TREE_TYPE (r) = type; |
1755 | CONSTRUCTOR_ELTS (r) = vec; |
1756 | return r; |
1757 | } |
1758 | } |
1759 | |
1760 | case LAMBDA_EXPR: |
      error ("lambda-expression in a constant expression");
1762 | return error_mark_node; |
1763 | |
1764 | default: |
1765 | break; |
1766 | } |
1767 | |
1768 | gcc_assert (EXPR_P (t)); |
1769 | |
1770 | n = TREE_OPERAND_LENGTH (t); |
1771 | ops = XALLOCAVEC (tree, n); |
1772 | type = TREE_TYPE (t); |
1773 | |
1774 | switch (code) |
1775 | { |
1776 | CASE_CONVERT: |
1777 | case IMPLICIT_CONV_EXPR: |
1778 | case DYNAMIC_CAST_EXPR: |
1779 | case STATIC_CAST_EXPR: |
1780 | case CONST_CAST_EXPR: |
1781 | case REINTERPRET_CAST_EXPR: |
1782 | case CAST_EXPR: |
1783 | case NEW_EXPR: |
1784 | type = strip_typedefs (type, remove_attributes); |
1785 | /* fallthrough */ |
1786 | |
1787 | default: |
1788 | for (i = 0; i < n; ++i) |
1789 | ops[i] = strip_typedefs_expr (TREE_OPERAND (t, i), remove_attributes); |
1790 | break; |
1791 | } |
1792 | |
1793 | /* If nothing changed, return t. */ |
1794 | for (i = 0; i < n; ++i) |
1795 | if (ops[i] != TREE_OPERAND (t, i)) |
1796 | break; |
1797 | if (i == n && type == TREE_TYPE (t)) |
1798 | return t; |
1799 | |
1800 | r = copy_node (t); |
1801 | TREE_TYPE (r) = type; |
1802 | for (i = 0; i < n; ++i) |
1803 | TREE_OPERAND (r, i) = ops[i]; |
1804 | return r; |
1805 | } |
1806 | |
1807 | /* Makes a copy of BINFO and TYPE, which is to be inherited into a |
1808 | graph dominated by T. If BINFO is NULL, TYPE is a dependent base, |
1809 | and we do a shallow copy. If BINFO is non-NULL, we do a deep copy. |
1810 | VIRT indicates whether TYPE is inherited virtually or not. |
1811 | IGO_PREV points at the previous binfo of the inheritance graph |
1812 | order chain. The newly copied binfo's TREE_CHAIN forms this |
1813 | ordering. |
1814 | |
   The CLASSTYPE_VBASECLASSES vector of T is constructed in the
   correct order, i.e. the order in which the bases themselves
   should be constructed.
1818 | |
1819 | The BINFO_INHERITANCE of a virtual base class points to the binfo |
1820 | of the most derived type. ??? We could probably change this so that |
1821 | BINFO_INHERITANCE becomes synonymous with BINFO_PRIMARY, and hence |
   remove a field.  They currently can only differ for primary
   virtual bases.  */
1824 | |
1825 | tree |
1826 | copy_binfo (tree binfo, tree type, tree t, tree *igo_prev, int virt) |
1827 | { |
1828 | tree new_binfo; |
1829 | |
1830 | if (virt) |
1831 | { |
1832 | /* See if we've already made this virtual base. */ |
1833 | new_binfo = binfo_for_vbase (type, t); |
1834 | if (new_binfo) |
1835 | return new_binfo; |
1836 | } |
1837 | |
1838 | new_binfo = make_tree_binfo (binfo ? BINFO_N_BASE_BINFOS (binfo) : 0); |
1839 | BINFO_TYPE (new_binfo) = type; |
1840 | |
1841 | /* Chain it into the inheritance graph. */ |
1842 | TREE_CHAIN (*igo_prev) = new_binfo; |
1843 | *igo_prev = new_binfo; |
1844 | |
1845 | if (binfo && !BINFO_DEPENDENT_BASE_P (binfo)) |
1846 | { |
1847 | int ix; |
1848 | tree base_binfo; |
1849 | |
1850 | gcc_assert (SAME_BINFO_TYPE_P (BINFO_TYPE (binfo), type)); |
1851 | |
1852 | BINFO_OFFSET (new_binfo) = BINFO_OFFSET (binfo); |
1853 | BINFO_VIRTUALS (new_binfo) = BINFO_VIRTUALS (binfo); |
1854 | |
1855 | /* We do not need to copy the accesses, as they are read only. */ |
1856 | BINFO_BASE_ACCESSES (new_binfo) = BINFO_BASE_ACCESSES (binfo); |
1857 | |
1858 | /* Recursively copy base binfos of BINFO. */ |
1859 | for (ix = 0; BINFO_BASE_ITERATE (binfo, ix, base_binfo); ix++) |
1860 | { |
1861 | tree new_base_binfo; |
1862 | new_base_binfo = copy_binfo (base_binfo, BINFO_TYPE (base_binfo), |
1863 | t, igo_prev, |
1864 | BINFO_VIRTUAL_P (base_binfo)); |
1865 | |
1866 | if (!BINFO_INHERITANCE_CHAIN (new_base_binfo)) |
1867 | BINFO_INHERITANCE_CHAIN (new_base_binfo) = new_binfo; |
1868 | BINFO_BASE_APPEND (new_binfo, new_base_binfo); |
1869 | } |
1870 | } |
1871 | else |
1872 | BINFO_DEPENDENT_BASE_P (new_binfo) = 1; |
1873 | |
1874 | if (virt) |
1875 | { |
      /* Push it onto the list after any virtual bases it contains
	 have been pushed.  */
1878 | CLASSTYPE_VBASECLASSES (t)->quick_push (new_binfo); |
1879 | BINFO_VIRTUAL_P (new_binfo) = 1; |
1880 | BINFO_INHERITANCE_CHAIN (new_binfo) = TYPE_BINFO (t); |
1881 | } |
1882 | |
1883 | return new_binfo; |
1884 | } |
1885 | |
1886 | /* Hashing of lists so that we don't make duplicates. |
1887 | The entry point is `list_hash_canon'. */ |
1888 | |
1889 | struct list_proxy |
1890 | { |
1891 | tree purpose; |
1892 | tree value; |
1893 | tree chain; |
1894 | }; |
1895 | |
1896 | struct list_hasher : ggc_ptr_hash<tree_node> |
1897 | { |
1898 | typedef list_proxy *compare_type; |
1899 | |
1900 | static hashval_t hash (tree); |
1901 | static bool equal (tree, list_proxy *); |
1902 | }; |
1903 | |
1904 | /* Now here is the hash table. When recording a list, it is added |
1905 | to the slot whose index is the hash code mod the table size. |
1906 | Note that the hash table is used for several kinds of lists. |
1907 | While all these live in the same table, they are completely independent, |
1908 | and the hash code is computed differently for each of these. */ |
1909 | |
1910 | static GTY (()) hash_table<list_hasher> *list_hash_table; |
1911 | |
1912 | /* Compare ENTRY (an entry in the hash table) with DATA (a list_proxy |
1913 | for a node we are thinking about adding). */ |
1914 | |
1915 | bool |
1916 | list_hasher::equal (tree t, list_proxy *proxy) |
1917 | { |
1918 | return (TREE_VALUE (t) == proxy->value |
1919 | && TREE_PURPOSE (t) == proxy->purpose |
1920 | && TREE_CHAIN (t) == proxy->chain); |
1921 | } |
1922 | |
1923 | /* Compute a hash code for a list (chain of TREE_LIST nodes |
1924 | with goodies in the TREE_PURPOSE, TREE_VALUE, and bits of the |
1925 | TREE_COMMON slots), by adding the hash codes of the individual entries. */ |
1926 | |
1927 | static hashval_t |
1928 | list_hash_pieces (tree purpose, tree value, tree chain) |
1929 | { |
1930 | hashval_t hashcode = 0; |
1931 | |
1932 | if (chain) |
1933 | hashcode += TREE_HASH (chain); |
1934 | |
1935 | if (value) |
1936 | hashcode += TREE_HASH (value); |
1937 | else |
1938 | hashcode += 1007; |
1939 | if (purpose) |
1940 | hashcode += TREE_HASH (purpose); |
1941 | else |
1942 | hashcode += 1009; |
1943 | return hashcode; |
1944 | } |
1945 | |
1946 | /* Hash an already existing TREE_LIST. */ |
1947 | |
1948 | hashval_t |
1949 | list_hasher::hash (tree t) |
1950 | { |
1951 | return list_hash_pieces (TREE_PURPOSE (t), |
1952 | TREE_VALUE (t), |
1953 | TREE_CHAIN (t)); |
1954 | } |
1955 | |
/* Given list components PURPOSE, VALUE, and CHAIN, return the canonical
1957 | object for an identical list if one already exists. Otherwise, build a |
1958 | new one, and record it as the canonical object. */ |
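/* For illustration (hypothetical usage, not taken from this file): two
   structurally identical lists built through this function share one
   node, so they compare equal by pointer:

     tree l1 = hash_tree_cons (NULL_TREE, value, chain);
     tree l2 = hash_tree_cons (NULL_TREE, value, chain);
     gcc_assert (l1 == l2);  */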
1959 | |
1960 | tree |
1961 | hash_tree_cons (tree purpose, tree value, tree chain) |
1962 | { |
  hashval_t hashcode;
1964 | tree *slot; |
1965 | struct list_proxy proxy; |
1966 | |
1967 | /* Hash the list node. */ |
1968 | hashcode = list_hash_pieces (purpose, value, chain); |
1969 | /* Create a proxy for the TREE_LIST we would like to create. We |
1970 | don't actually create it so as to avoid creating garbage. */ |
1971 | proxy.purpose = purpose; |
1972 | proxy.value = value; |
1973 | proxy.chain = chain; |
1974 | /* See if it is already in the table. */ |
1975 | slot = list_hash_table->find_slot_with_hash (&proxy, hashcode, INSERT); |
1976 | /* If not, create a new node. */ |
1977 | if (!*slot) |
1978 | *slot = tree_cons (purpose, value, chain); |
1979 | return (tree) *slot; |
1980 | } |
1981 | |
1982 | /* Constructor for hashed lists. */ |
1983 | |
1984 | tree |
1985 | hash_tree_chain (tree value, tree chain) |
1986 | { |
1987 | return hash_tree_cons (NULL_TREE, value, chain); |
1988 | } |
1989 | |
1990 | void |
1991 | debug_binfo (tree elem) |
1992 | { |
1993 | HOST_WIDE_INT n; |
1994 | tree virtuals; |
1995 | |
  fprintf (stderr, "type \"%s\", offset = " HOST_WIDE_INT_PRINT_DEC
	   "\nvtable type:\n",
1998 | TYPE_NAME_STRING (BINFO_TYPE (elem)), |
1999 | TREE_INT_CST_LOW (BINFO_OFFSET (elem))); |
2000 | debug_tree (BINFO_TYPE (elem)); |
2001 | if (BINFO_VTABLE (elem)) |
    fprintf (stderr, "vtable decl \"%s\"\n",
2003 | IDENTIFIER_POINTER (DECL_NAME (get_vtbl_decl_for_binfo (elem)))); |
2004 | else |
    fprintf (stderr, "no vtable decl yet\n");
  fprintf (stderr, "virtuals:\n");
2007 | virtuals = BINFO_VIRTUALS (elem); |
2008 | n = 0; |
2009 | |
2010 | while (virtuals) |
2011 | { |
2012 | tree fndecl = TREE_VALUE (virtuals); |
      fprintf (stderr, "%s [%ld =? %ld]\n",
2014 | IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fndecl)), |
2015 | (long) n, (long) TREE_INT_CST_LOW (DECL_VINDEX (fndecl))); |
2016 | ++n; |
2017 | virtuals = TREE_CHAIN (virtuals); |
2018 | } |
2019 | } |
2020 | |
2021 | /* Build a representation for the qualified name SCOPE::NAME. TYPE is |
2022 | the type of the result expression, if known, or NULL_TREE if the |
2023 | resulting expression is type-dependent. If TEMPLATE_P is true, |
2024 | NAME is known to be a template because the user explicitly used the |
2025 | "template" keyword after the "::". |
2026 | |
2027 | All SCOPE_REFs should be built by use of this function. */ |
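/* For example (illustrative): for the id-expression

     T::template get<0>

   SCOPE is T, NAME is the TEMPLATE_ID_EXPR for get<0>, TEMPLATE_P is
   true because of the explicit "template" keyword, and TYPE is
   NULL_TREE since the expression is type-dependent.  */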
2028 | |
2029 | tree |
2030 | build_qualified_name (tree type, tree scope, tree name, bool template_p) |
2031 | { |
2032 | tree t; |
2033 | if (type == error_mark_node |
2034 | || scope == error_mark_node |
2035 | || name == error_mark_node) |
2036 | return error_mark_node; |
2037 | gcc_assert (TREE_CODE (name) != SCOPE_REF); |
2038 | t = build2 (SCOPE_REF, type, scope, name); |
2039 | QUALIFIED_NAME_IS_TEMPLATE (t) = template_p; |
2040 | PTRMEM_OK_P (t) = true; |
2041 | if (type) |
2042 | t = convert_from_reference (t); |
2043 | return t; |
2044 | } |
2045 | |
2046 | /* Like check_qualified_type, but also check ref-qualifier and exception |
2047 | specification. */ |
2048 | |
2049 | static bool |
2050 | cp_check_qualified_type (const_tree cand, const_tree base, int type_quals, |
2051 | cp_ref_qualifier rqual, tree raises) |
2052 | { |
2053 | return (TYPE_QUALS (cand) == type_quals |
2054 | && check_base_type (cand, base) |
2055 | && comp_except_specs (raises, TYPE_RAISES_EXCEPTIONS (cand), |
2056 | ce_exact) |
2057 | && type_memfn_rqual (cand) == rqual); |
2058 | } |
2059 | |
2060 | /* Build the FUNCTION_TYPE or METHOD_TYPE with the ref-qualifier RQUAL. */ |
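/* Illustrative C++ source for the three RQUAL values:

     struct X {
       void f () &;    // REF_QUAL_LVALUE
       void g () &&;   // REF_QUAL_RVALUE
       void h ();      // REF_QUAL_NONE
     };  */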
2061 | |
2062 | tree |
2063 | build_ref_qualified_type (tree type, cp_ref_qualifier rqual) |
2064 | { |
2065 | tree t; |
2066 | |
2067 | if (rqual == type_memfn_rqual (type)) |
2068 | return type; |
2069 | |
2070 | int type_quals = TYPE_QUALS (type); |
2071 | tree raises = TYPE_RAISES_EXCEPTIONS (type); |
2072 | for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t)) |
2073 | if (cp_check_qualified_type (t, type, type_quals, rqual, raises)) |
2074 | return t; |
2075 | |
2076 | t = build_variant_type_copy (type); |
2077 | switch (rqual) |
2078 | { |
2079 | case REF_QUAL_RVALUE: |
2080 | FUNCTION_RVALUE_QUALIFIED (t) = 1; |
2081 | FUNCTION_REF_QUALIFIED (t) = 1; |
2082 | break; |
2083 | case REF_QUAL_LVALUE: |
2084 | FUNCTION_RVALUE_QUALIFIED (t) = 0; |
2085 | FUNCTION_REF_QUALIFIED (t) = 1; |
2086 | break; |
2087 | default: |
2088 | FUNCTION_REF_QUALIFIED (t) = 0; |
2089 | break; |
2090 | } |
2091 | |
2092 | if (TYPE_STRUCTURAL_EQUALITY_P (type)) |
2093 | /* Propagate structural equality. */ |
2094 | SET_TYPE_STRUCTURAL_EQUALITY (t); |
2095 | else if (TYPE_CANONICAL (type) != type) |
2096 | /* Build the underlying canonical type, since it is different |
2097 | from TYPE. */ |
2098 | TYPE_CANONICAL (t) = build_ref_qualified_type (TYPE_CANONICAL (type), |
2099 | rqual); |
2100 | else |
2101 | /* T is its own canonical type. */ |
2102 | TYPE_CANONICAL (t) = t; |
2103 | |
2104 | return t; |
2105 | } |
2106 | |
2107 | /* Cache of free ovl nodes. Uses OVL_FUNCTION for chaining. */ |
2108 | static GTY((deletable)) tree ovl_cache; |
2109 | |
/* Make a raw overload node containing FN, chained onto NEXT.  */
2111 | |
2112 | tree |
2113 | ovl_make (tree fn, tree next) |
2114 | { |
2115 | tree result = ovl_cache; |
2116 | |
2117 | if (result) |
2118 | { |
2119 | ovl_cache = OVL_FUNCTION (result); |
2120 | /* Zap the flags. */ |
2121 | memset (result, 0, sizeof (tree_base)); |
2122 | TREE_SET_CODE (result, OVERLOAD); |
2123 | } |
2124 | else |
2125 | result = make_node (OVERLOAD); |
2126 | |
2127 | if (TREE_CODE (fn) == OVERLOAD) |
2128 | OVL_NESTED_P (result) = true; |
2129 | |
2130 | TREE_TYPE (result) = (next || TREE_CODE (fn) == TEMPLATE_DECL |
2131 | ? unknown_type_node : TREE_TYPE (fn)); |
2132 | OVL_FUNCTION (result) = fn; |
2133 | OVL_CHAIN (result) = next; |
2134 | return result; |
2135 | } |
2136 | |
2137 | static tree |
2138 | ovl_copy (tree ovl) |
2139 | { |
2140 | tree result = ovl_cache; |
2141 | |
2142 | if (result) |
2143 | { |
2144 | ovl_cache = OVL_FUNCTION (result); |
2145 | /* Zap the flags. */ |
2146 | memset (result, 0, sizeof (tree_base)); |
2147 | TREE_SET_CODE (result, OVERLOAD); |
2148 | } |
2149 | else |
2150 | result = make_node (OVERLOAD); |
2151 | |
2152 | gcc_checking_assert (!OVL_NESTED_P (ovl) && OVL_USED_P (ovl)); |
2153 | TREE_TYPE (result) = TREE_TYPE (ovl); |
2154 | OVL_FUNCTION (result) = OVL_FUNCTION (ovl); |
2155 | OVL_CHAIN (result) = OVL_CHAIN (ovl); |
2156 | OVL_HIDDEN_P (result) = OVL_HIDDEN_P (ovl); |
2157 | OVL_USING_P (result) = OVL_USING_P (ovl); |
2158 | OVL_LOOKUP_P (result) = OVL_LOOKUP_P (ovl); |
2159 | |
2160 | return result; |
2161 | } |
2162 | |
/* Add FN to the (potentially NULL) overload set OVL.  USING_P is
   true if FN is introduced via a using-declaration.  We also pay
   attention to DECL_HIDDEN_P.  Overloads are ordered as hidden,
   using, regular.  */
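/* For example (illustrative): inserting a hidden friend, a
   using-introduced function and a regular declaration, in any order,
   yields an overload chain ordered hidden, using, regular.  */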
2166 | |
2167 | tree |
2168 | ovl_insert (tree fn, tree maybe_ovl, bool using_p) |
2169 | { |
2170 | bool copying = false; /* Checking use only. */ |
2171 | bool hidden_p = DECL_HIDDEN_P (fn); |
2172 | int weight = (hidden_p << 1) | (using_p << 0); |
2173 | |
2174 | tree result = NULL_TREE; |
2175 | tree insert_after = NULL_TREE; |
2176 | |
2177 | /* Find insertion point. */ |
2178 | while (maybe_ovl && TREE_CODE (maybe_ovl) == OVERLOAD |
2179 | && (weight < ((OVL_HIDDEN_P (maybe_ovl) << 1) |
2180 | | (OVL_USING_P (maybe_ovl) << 0)))) |
2181 | { |
2182 | gcc_checking_assert (!OVL_LOOKUP_P (maybe_ovl) |
2183 | && (!copying || OVL_USED_P (maybe_ovl))); |
2184 | if (OVL_USED_P (maybe_ovl)) |
2185 | { |
2186 | copying = true; |
2187 | maybe_ovl = ovl_copy (maybe_ovl); |
2188 | if (insert_after) |
2189 | OVL_CHAIN (insert_after) = maybe_ovl; |
2190 | } |
2191 | if (!result) |
2192 | result = maybe_ovl; |
2193 | insert_after = maybe_ovl; |
2194 | maybe_ovl = OVL_CHAIN (maybe_ovl); |
2195 | } |
2196 | |
2197 | tree trail = fn; |
2198 | if (maybe_ovl || using_p || hidden_p || TREE_CODE (fn) == TEMPLATE_DECL) |
2199 | { |
2200 | trail = ovl_make (fn, maybe_ovl); |
2201 | if (hidden_p) |
2202 | OVL_HIDDEN_P (trail) = true; |
2203 | if (using_p) |
2204 | OVL_USING_P (trail) = true; |
2205 | } |
2206 | |
2207 | if (insert_after) |
2208 | { |
2209 | OVL_CHAIN (insert_after) = trail; |
2210 | TREE_TYPE (insert_after) = unknown_type_node; |
2211 | } |
2212 | else |
2213 | result = trail; |
2214 | |
2215 | return result; |
2216 | } |
2217 | |
2218 | /* Skip any hidden names at the beginning of OVL. */ |
2219 | |
2220 | tree |
2221 | ovl_skip_hidden (tree ovl) |
2222 | { |
2223 | for (; |
2224 | ovl && TREE_CODE (ovl) == OVERLOAD && OVL_HIDDEN_P (ovl); |
2225 | ovl = OVL_CHAIN (ovl)) |
2226 | gcc_checking_assert (DECL_HIDDEN_P (OVL_FUNCTION (ovl))); |
2227 | |
2228 | if (ovl && TREE_CODE (ovl) != OVERLOAD && DECL_HIDDEN_P (ovl)) |
2229 | { |
2230 | /* Any hidden functions should have been wrapped in an |
2231 | overload, but injected friend classes will not. */ |
2232 | gcc_checking_assert (!DECL_DECLARES_FUNCTION_P (ovl)); |
2233 | ovl = NULL_TREE; |
2234 | } |
2235 | |
2236 | return ovl; |
2237 | } |
2238 | |
2239 | /* NODE is an OVL_HIDDEN_P node which is now revealed. */ |
2240 | |
2241 | tree |
2242 | ovl_iterator::reveal_node (tree overload, tree node) |
2243 | { |
2244 | /* We cannot have returned NODE as part of a lookup overload, so it |
2245 | cannot be USED. */ |
2246 | gcc_checking_assert (!OVL_USED_P (node)); |
2247 | |
2248 | OVL_HIDDEN_P (node) = false; |
2249 | if (tree chain = OVL_CHAIN (node)) |
2250 | if (TREE_CODE (chain) == OVERLOAD |
2251 | && (OVL_USING_P (chain) || OVL_HIDDEN_P (chain))) |
2252 | { |
2253 | /* The node needs moving, and the simplest way is to remove it |
2254 | and reinsert. */ |
2255 | overload = remove_node (overload, node); |
2256 | overload = ovl_insert (OVL_FUNCTION (node), overload); |
2257 | } |
2258 | return overload; |
2259 | } |
2260 | |
2261 | /* NODE is on the overloads of OVL. Remove it. If a predecessor is |
2262 | OVL_USED_P we must copy OVL nodes, because those are immutable. |
2263 | The removed node is unaltered and may continue to be iterated |
2264 | from (i.e. it is safe to remove a node from an overload one is |
2265 | currently iterating over). */ |
2266 | |
2267 | tree |
2268 | ovl_iterator::remove_node (tree overload, tree node) |
2269 | { |
2270 | bool copying = false; /* Checking use only. */ |
2271 | |
2272 | tree *slot = &overload; |
2273 | while (*slot != node) |
2274 | { |
2275 | tree probe = *slot; |
2276 | gcc_checking_assert (!OVL_LOOKUP_P (probe) |
2277 | && (!copying || OVL_USED_P (probe))); |
2278 | if (OVL_USED_P (probe)) |
2279 | { |
2280 | copying = true; |
2281 | probe = ovl_copy (probe); |
2282 | *slot = probe; |
2283 | } |
2284 | |
2285 | slot = &OVL_CHAIN (probe); |
2286 | } |
2287 | |
2288 | /* Stitch out NODE. We don't have to worry about now making a |
2289 | singleton overload (and consequently maybe setting its type), |
2290 | because all uses of this function will be followed by inserting a |
2291 | new node that must follow the place we've cut this out from. */ |
2292 | if (TREE_CODE (node) != OVERLOAD) |
2293 | /* Cloned inherited ctors don't mark themselves as via_using. */ |
2294 | *slot = NULL_TREE; |
2295 | else |
2296 | *slot = OVL_CHAIN (node); |
2297 | |
2298 | return overload; |
2299 | } |
2300 | |
2301 | /* Mark or unmark a lookup set. */ |
2302 | |
2303 | void |
2304 | lookup_mark (tree ovl, bool val) |
2305 | { |
2306 | for (lkp_iterator iter (ovl); iter; ++iter) |
2307 | { |
2308 | gcc_checking_assert (LOOKUP_SEEN_P (*iter) != val); |
2309 | LOOKUP_SEEN_P (*iter) = val; |
2310 | } |
2311 | } |
2312 | |
2313 | /* Add a set of new FNS into a lookup. */ |
2314 | |
2315 | tree |
2316 | lookup_add (tree fns, tree lookup) |
2317 | { |
2318 | if (lookup || TREE_CODE (fns) == TEMPLATE_DECL) |
2319 | { |
2320 | lookup = ovl_make (fns, lookup); |
2321 | OVL_LOOKUP_P (lookup) = true; |
2322 | } |
2323 | else |
2324 | lookup = fns; |
2325 | |
2326 | return lookup; |
2327 | } |
2328 | |
2329 | /* FNS is a new overload set, add them to LOOKUP, if they are not |
2330 | already present there. */ |
2331 | |
2332 | tree |
2333 | lookup_maybe_add (tree fns, tree lookup, bool deduping) |
2334 | { |
2335 | if (deduping) |
2336 | for (tree next, probe = fns; probe; probe = next) |
2337 | { |
2338 | tree fn = probe; |
2339 | next = NULL_TREE; |
2340 | |
2341 | if (TREE_CODE (probe) == OVERLOAD) |
2342 | { |
2343 | fn = OVL_FUNCTION (probe); |
2344 | next = OVL_CHAIN (probe); |
2345 | } |
2346 | |
2347 | if (!LOOKUP_SEEN_P (fn)) |
2348 | LOOKUP_SEEN_P (fn) = true; |
2349 | else |
2350 | { |
2351 | /* This function was already seen. Insert all the |
2352 | predecessors onto the lookup. */ |
2353 | for (; fns != probe; fns = OVL_CHAIN (fns)) |
2354 | { |
2355 | lookup = lookup_add (OVL_FUNCTION (fns), lookup); |
2356 | /* Propagate OVL_USING, but OVL_HIDDEN doesn't matter. */ |
2357 | if (OVL_USING_P (fns)) |
2358 | OVL_USING_P (lookup) = true; |
2359 | } |
2360 | |
2361 | /* And now skip this function. */ |
2362 | fns = next; |
2363 | } |
2364 | } |
2365 | |
2366 | if (fns) |
2367 | /* We ended in a set of new functions. Add them all in one go. */ |
2368 | lookup = lookup_add (fns, lookup); |
2369 | |
2370 | return lookup; |
2371 | } |
2372 | |
2373 | /* Regular overload OVL is part of a kept lookup. Mark the nodes on |
2374 | it as immutable. */ |
2375 | |
2376 | static void |
2377 | ovl_used (tree ovl) |
2378 | { |
2379 | for (; |
2380 | ovl && TREE_CODE (ovl) == OVERLOAD |
2381 | && !OVL_USED_P (ovl); |
2382 | ovl = OVL_CHAIN (ovl)) |
2383 | { |
2384 | gcc_checking_assert (!OVL_LOOKUP_P (ovl)); |
2385 | OVL_USED_P (ovl) = true; |
2386 | } |
2387 | } |
2388 | |
2389 | /* If KEEP is true, preserve the contents of a lookup so that it is |
2390 | available for a later instantiation. Otherwise release the LOOKUP |
2391 | nodes for reuse. */ |
2392 | |
2393 | void |
2394 | lookup_keep (tree lookup, bool keep) |
2395 | { |
2396 | for (; |
2397 | lookup && TREE_CODE (lookup) == OVERLOAD |
2398 | && OVL_LOOKUP_P (lookup) && !OVL_USED_P (lookup); |
2399 | lookup = OVL_CHAIN (lookup)) |
2400 | if (keep) |
2401 | { |
2402 | OVL_USED_P (lookup) = true; |
2403 | ovl_used (OVL_FUNCTION (lookup)); |
2404 | } |
2405 | else |
2406 | { |
2407 | OVL_FUNCTION (lookup) = ovl_cache; |
2408 | ovl_cache = lookup; |
2409 | } |
2410 | |
2411 | if (keep) |
2412 | ovl_used (lookup); |
2413 | } |
2414 | |
2415 | /* Returns nonzero if X is an expression for a (possibly overloaded) |
2416 | function. If "f" is a function or function template, "f", "c->f", |
2417 | "c.f", "C::f", and "f<int>" will all be considered possibly |
2418 | overloaded functions. Returns 2 if the function is actually |
2419 | overloaded, i.e., if it is impossible to know the type of the |
2420 | function without performing overload resolution. */ |
2421 | |
2422 | int |
2423 | is_overloaded_fn (tree x) |
2424 | { |
2425 | /* A baselink is also considered an overloaded function. */ |
2426 | if (TREE_CODE (x) == OFFSET_REF |
2427 | || TREE_CODE (x) == COMPONENT_REF) |
2428 | x = TREE_OPERAND (x, 1); |
2429 | x = MAYBE_BASELINK_FUNCTIONS (x); |
2430 | if (TREE_CODE (x) == TEMPLATE_ID_EXPR) |
2431 | x = TREE_OPERAND (x, 0); |
2432 | |
2433 | if (DECL_FUNCTION_TEMPLATE_P (OVL_FIRST (x)) |
2434 | || (TREE_CODE (x) == OVERLOAD && !OVL_SINGLE_P (x))) |
2435 | return 2; |
2436 | |
2437 | return (TREE_CODE (x) == FUNCTION_DECL |
2438 | || TREE_CODE (x) == OVERLOAD); |
2439 | } |
2440 | |
2441 | /* X is the CALL_EXPR_FN of a CALL_EXPR. If X represents a dependent name |
2442 | (14.6.2), return the IDENTIFIER_NODE for that name. Otherwise, return |
2443 | NULL_TREE. */ |
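/* For example (illustrative): in a template, the call "frob (t)" with
   no declaration of "frob" in scope has the IDENTIFIER_NODE "frob" as
   its CALL_EXPR_FN, and that identifier is returned for lookup at
   instantiation time.  */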
2444 | |
2445 | tree |
2446 | dependent_name (tree x) |
2447 | { |
2448 | if (identifier_p (x)) |
2449 | return x; |
2450 | if (TREE_CODE (x) == TEMPLATE_ID_EXPR) |
2451 | x = TREE_OPERAND (x, 0); |
2452 | if (TREE_CODE (x) == OVERLOAD || TREE_CODE (x) == FUNCTION_DECL) |
2453 | return OVL_NAME (x); |
2454 | return NULL_TREE; |
2455 | } |
2456 | |
2457 | /* Returns true iff X is an expression for an overloaded function |
2458 | whose type cannot be known without performing overload |
2459 | resolution. */ |
2460 | |
2461 | bool |
2462 | really_overloaded_fn (tree x) |
2463 | { |
2464 | return is_overloaded_fn (x) == 2; |
2465 | } |
2466 | |
2467 | /* Get the overload set FROM refers to. */ |
2468 | |
2469 | tree |
2470 | get_fns (tree from) |
2471 | { |
2472 | /* A baselink is also considered an overloaded function. */ |
2473 | if (TREE_CODE (from) == OFFSET_REF |
2474 | || TREE_CODE (from) == COMPONENT_REF) |
2475 | from = TREE_OPERAND (from, 1); |
2476 | if (BASELINK_P (from)) |
2477 | from = BASELINK_FUNCTIONS (from); |
2478 | if (TREE_CODE (from) == TEMPLATE_ID_EXPR) |
2479 | from = TREE_OPERAND (from, 0); |
2480 | gcc_assert (TREE_CODE (from) == OVERLOAD |
2481 | || TREE_CODE (from) == FUNCTION_DECL); |
2482 | return from; |
2483 | } |
2484 | |
2485 | /* Return the first function of the overload set FROM refers to. */ |
2486 | |
2487 | tree |
2488 | get_first_fn (tree from) |
2489 | { |
2490 | return OVL_FIRST (get_fns (from)); |
2491 | } |
2492 | |
2493 | /* Return the scope where the overloaded functions OVL were found. */ |
2494 | |
2495 | tree |
2496 | ovl_scope (tree ovl) |
2497 | { |
2498 | if (TREE_CODE (ovl) == OFFSET_REF |
2499 | || TREE_CODE (ovl) == COMPONENT_REF) |
2500 | ovl = TREE_OPERAND (ovl, 1); |
2501 | if (TREE_CODE (ovl) == BASELINK) |
2502 | return BINFO_TYPE (BASELINK_BINFO (ovl)); |
2503 | if (TREE_CODE (ovl) == TEMPLATE_ID_EXPR) |
2504 | ovl = TREE_OPERAND (ovl, 0); |
2505 | /* Skip using-declarations. */ |
2506 | lkp_iterator iter (ovl); |
2507 | do |
2508 | ovl = *iter; |
2509 | while (iter.using_p () && ++iter); |
2510 | |
2511 | return CP_DECL_CONTEXT (ovl); |
2512 | } |
2513 | |
2514 | #define PRINT_RING_SIZE 4 |
2515 | |
2516 | static const char * |
2517 | cxx_printable_name_internal (tree decl, int v, bool translate) |
2518 | { |
2519 | static unsigned int uid_ring[PRINT_RING_SIZE]; |
2520 | static char *print_ring[PRINT_RING_SIZE]; |
2521 | static bool trans_ring[PRINT_RING_SIZE]; |
2522 | static int ring_counter; |
2523 | int i; |
2524 | |
2525 | /* Only cache functions. */ |
2526 | if (v < 2 |
2527 | || TREE_CODE (decl) != FUNCTION_DECL |
2528 | || DECL_LANG_SPECIFIC (decl) == 0) |
2529 | return lang_decl_name (decl, v, translate); |
2530 | |
2531 | /* See if this print name is lying around. */ |
2532 | for (i = 0; i < PRINT_RING_SIZE; i++) |
2533 | if (uid_ring[i] == DECL_UID (decl) && translate == trans_ring[i]) |
      /* Yes, so return it.  */
2535 | return print_ring[i]; |
2536 | |
2537 | if (++ring_counter == PRINT_RING_SIZE) |
2538 | ring_counter = 0; |
2539 | |
2540 | if (current_function_decl != NULL_TREE) |
2541 | { |
2542 | /* There may be both translated and untranslated versions of the |
2543 | name cached. */ |
2544 | for (i = 0; i < 2; i++) |
2545 | { |
2546 | if (uid_ring[ring_counter] == DECL_UID (current_function_decl)) |
2547 | ring_counter += 1; |
2548 | if (ring_counter == PRINT_RING_SIZE) |
2549 | ring_counter = 0; |
2550 | } |
2551 | gcc_assert (uid_ring[ring_counter] != DECL_UID (current_function_decl)); |
2552 | } |
2553 | |
2554 | free (print_ring[ring_counter]); |
2555 | |
2556 | print_ring[ring_counter] = xstrdup (lang_decl_name (decl, v, translate)); |
2557 | uid_ring[ring_counter] = DECL_UID (decl); |
2558 | trans_ring[ring_counter] = translate; |
2559 | return print_ring[ring_counter]; |
2560 | } |
2561 | |
2562 | const char * |
2563 | cxx_printable_name (tree decl, int v) |
2564 | { |
2565 | return cxx_printable_name_internal (decl, v, false); |
2566 | } |
2567 | |
2568 | const char * |
2569 | cxx_printable_name_translate (tree decl, int v) |
2570 | { |
2571 | return cxx_printable_name_internal (decl, v, true); |
2572 | } |
2573 | |
2574 | /* Return the canonical version of exception-specification RAISES for a C++17 |
2575 | function type, for use in type comparison and building TYPE_CANONICAL. */ |
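/* Illustrative mapping for C++17 type identity:

     void f () throw ();          // -> noexcept_true_spec
     void g () noexcept;          // -> noexcept_true_spec
     void h () noexcept (false);  // -> NULL_TREE (no spec)
     void i () throw (int);       // -> NULL_TREE (no spec)  */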
2576 | |
2577 | tree |
2578 | canonical_eh_spec (tree raises) |
2579 | { |
2580 | if (raises == NULL_TREE) |
2581 | return raises; |
2582 | else if (DEFERRED_NOEXCEPT_SPEC_P (raises) |
2583 | || uses_template_parms (raises) |
2584 | || uses_template_parms (TREE_PURPOSE (raises))) |
2585 | /* Keep a dependent or deferred exception specification. */ |
2586 | return raises; |
2587 | else if (nothrow_spec_p (raises)) |
2588 | /* throw() -> noexcept. */ |
2589 | return noexcept_true_spec; |
2590 | else |
2591 | /* For C++17 type matching, anything else -> nothing. */ |
2592 | return NULL_TREE; |
2593 | } |
2594 | |
2595 | /* Build the FUNCTION_TYPE or METHOD_TYPE which may throw exceptions |
2596 | listed in RAISES. */ |
2597 | |
2598 | tree |
2599 | build_exception_variant (tree type, tree raises) |
2600 | { |
2601 | tree v; |
2602 | int type_quals; |
2603 | |
2604 | if (comp_except_specs (raises, TYPE_RAISES_EXCEPTIONS (type), ce_exact)) |
2605 | return type; |
2606 | |
2607 | type_quals = TYPE_QUALS (type); |
2608 | cp_ref_qualifier rqual = type_memfn_rqual (type); |
2609 | for (v = TYPE_MAIN_VARIANT (type); v; v = TYPE_NEXT_VARIANT (v)) |
2610 | if (cp_check_qualified_type (v, type, type_quals, rqual, raises)) |
2611 | return v; |
2612 | |
2613 | /* Need to build a new variant. */ |
2614 | v = build_variant_type_copy (type); |
2615 | TYPE_RAISES_EXCEPTIONS (v) = raises; |
2616 | |
2617 | if (!flag_noexcept_type) |
2618 | /* The exception-specification is not part of the canonical type. */ |
2619 | return v; |
2620 | |
2621 | /* Canonicalize the exception specification. */ |
2622 | tree cr = canonical_eh_spec (raises); |
2623 | |
2624 | if (TYPE_STRUCTURAL_EQUALITY_P (type)) |
2625 | /* Propagate structural equality. */ |
2626 | SET_TYPE_STRUCTURAL_EQUALITY (v); |
2627 | else if (TYPE_CANONICAL (type) != type || cr != raises) |
2628 | /* Build the underlying canonical type, since it is different |
2629 | from TYPE. */ |
2630 | TYPE_CANONICAL (v) = build_exception_variant (TYPE_CANONICAL (type), cr); |
2631 | else |
2632 | /* T is its own canonical type. */ |
2633 | TYPE_CANONICAL (v) = v; |
2634 | |
2635 | return v; |
2636 | } |
2637 | |
2638 | /* Given a TEMPLATE_TEMPLATE_PARM node T, create a new |
2639 | BOUND_TEMPLATE_TEMPLATE_PARM bound with NEWARGS as its template |
2640 | arguments. */ |
2641 | |
2642 | tree |
2643 | bind_template_template_parm (tree t, tree newargs) |
2644 | { |
2645 | tree decl = TYPE_NAME (t); |
2646 | tree t2; |
2647 | |
2648 | t2 = cxx_make_type (BOUND_TEMPLATE_TEMPLATE_PARM); |
2649 | decl = build_decl (input_location, |
2650 | TYPE_DECL, DECL_NAME (decl), NULL_TREE); |
2651 | |
2652 | /* These nodes have to be created to reflect new TYPE_DECL and template |
2653 | arguments. */ |
2654 | TEMPLATE_TYPE_PARM_INDEX (t2) = copy_node (TEMPLATE_TYPE_PARM_INDEX (t)); |
2655 | TEMPLATE_PARM_DECL (TEMPLATE_TYPE_PARM_INDEX (t2)) = decl; |
2656 | TEMPLATE_TEMPLATE_PARM_TEMPLATE_INFO (t2) |
2657 | = build_template_info (TEMPLATE_TEMPLATE_PARM_TEMPLATE_DECL (t), newargs); |
2658 | |
2659 | TREE_TYPE (decl) = t2; |
2660 | TYPE_NAME (t2) = decl; |
2661 | TYPE_STUB_DECL (t2) = decl; |
2662 | TYPE_SIZE (t2) = 0; |
2663 | SET_TYPE_STRUCTURAL_EQUALITY (t2); |
2664 | |
2665 | return t2; |
2666 | } |
2667 | |
2668 | /* Called from count_trees via walk_tree. */ |
2669 | |
2670 | static tree |
2671 | count_trees_r (tree *tp, int *walk_subtrees, void *data) |
2672 | { |
2673 | ++*((int *) data); |
2674 | |
2675 | if (TYPE_P (*tp)) |
2676 | *walk_subtrees = 0; |
2677 | |
2678 | return NULL_TREE; |
2679 | } |
2680 | |
2681 | /* Debugging function for measuring the rough complexity of a tree |
2682 | representation. */ |
2683 | |
2684 | int |
2685 | count_trees (tree t) |
2686 | { |
2687 | int n_trees = 0; |
2688 | cp_walk_tree_without_duplicates (&t, count_trees_r, &n_trees); |
2689 | return n_trees; |
2690 | } |
2691 | |
2692 | /* Called from verify_stmt_tree via walk_tree. */ |
2693 | |
2694 | static tree |
2695 | verify_stmt_tree_r (tree* tp, int * /*walk_subtrees*/, void* data) |
2696 | { |
2697 | tree t = *tp; |
2698 | hash_table<nofree_ptr_hash <tree_node> > *statements |
2699 | = static_cast <hash_table<nofree_ptr_hash <tree_node> > *> (data); |
2700 | tree_node **slot; |
2701 | |
2702 | if (!STATEMENT_CODE_P (TREE_CODE (t))) |
2703 | return NULL_TREE; |
2704 | |
2705 | /* If this statement is already present in the hash table, then |
2706 | there is a circularity in the statement tree. */ |
2707 | gcc_assert (!statements->find (t)); |
2708 | |
2709 | slot = statements->find_slot (t, INSERT); |
2710 | *slot = t; |
2711 | |
2712 | return NULL_TREE; |
2713 | } |
2714 | |
2715 | /* Debugging function to check that the statement T has not been |
2716 | corrupted. For now, this function simply checks that T contains no |
2717 | circularities. */ |
2718 | |
2719 | void |
2720 | verify_stmt_tree (tree t) |
2721 | { |
2722 | hash_table<nofree_ptr_hash <tree_node> > statements (37); |
2723 | cp_walk_tree (&t, verify_stmt_tree_r, &statements, NULL); |
2724 | } |
2725 | |
2726 | /* Check if the type T depends on a type with no linkage and if so, return |
2727 | it. If RELAXED_P then do not consider a class type declared within |
2728 | a vague-linkage function to have no linkage. */ |
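/* For example (illustrative): given the local class

     void f () { struct L { }; }

   no_linkage_check on L returns L, since a class declared inside a
   function has no linkage; with RELAXED_P set, an L declared inside an
   inline (vague-linkage) function is not reported.  */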
2729 | |
2730 | tree |
2731 | no_linkage_check (tree t, bool relaxed_p) |
2732 | { |
2733 | tree r; |
2734 | |
2735 | /* There's no point in checking linkage on template functions; we |
2736 | can't know their complete types. */ |
2737 | if (processing_template_decl) |
2738 | return NULL_TREE; |
2739 | |
2740 | switch (TREE_CODE (t)) |
2741 | { |
2742 | case RECORD_TYPE: |
2743 | if (TYPE_PTRMEMFUNC_P (t)) |
2744 | goto ptrmem; |
2745 | /* Lambda types that don't have mangling scope have no linkage. We |
2746 | check CLASSTYPE_LAMBDA_EXPR for error_mark_node because |
2747 | when we get here from pushtag none of the lambda information is |
2748 | set up yet, so we want to assume that the lambda has linkage and |
2749 | fix it up later if not. */ |
2750 | if (CLASSTYPE_LAMBDA_EXPR (t) |
2751 | && CLASSTYPE_LAMBDA_EXPR (t) != error_mark_node |
2752 | && LAMBDA_TYPE_EXTRA_SCOPE (t) == NULL_TREE) |
2753 | return t; |
2754 | /* Fall through. */ |
2755 | case UNION_TYPE: |
2756 | if (!CLASS_TYPE_P (t)) |
2757 | return NULL_TREE; |
2758 | /* Fall through. */ |
2759 | case ENUMERAL_TYPE: |
2760 | /* Only treat unnamed types as having no linkage if they're at |
2761 | namespace scope. This is core issue 966. */ |
2762 | if (TYPE_UNNAMED_P (t) && TYPE_NAMESPACE_SCOPE_P (t)) |
2763 | return t; |
2764 | |
2765 | for (r = CP_TYPE_CONTEXT (t); ; ) |
2766 | { |
2767 | /* If we're a nested type of a !TREE_PUBLIC class, we might not |
2768 | have linkage, or we might just be in an anonymous namespace. |
2769 | If we're in a TREE_PUBLIC class, we have linkage. */ |
2770 | if (TYPE_P (r) && !TREE_PUBLIC (TYPE_NAME (r))) |
2771 | return no_linkage_check (TYPE_CONTEXT (t), relaxed_p); |
2772 | else if (TREE_CODE (r) == FUNCTION_DECL) |
2773 | { |
2774 | if (!relaxed_p || !vague_linkage_p (r)) |
2775 | return t; |
2776 | else |
2777 | r = CP_DECL_CONTEXT (r); |
2778 | } |
2779 | else |
2780 | break; |
2781 | } |
2782 | |
2783 | return NULL_TREE; |
2784 | |
2785 | case ARRAY_TYPE: |
2786 | case POINTER_TYPE: |
2787 | case REFERENCE_TYPE: |
2788 | case VECTOR_TYPE: |
2789 | return no_linkage_check (TREE_TYPE (t), relaxed_p); |
2790 | |
2791 | case OFFSET_TYPE: |
2792 | ptrmem: |
2793 | r = no_linkage_check (TYPE_PTRMEM_POINTED_TO_TYPE (t), |
2794 | relaxed_p); |
2795 | if (r) |
2796 | return r; |
2797 | return no_linkage_check (TYPE_PTRMEM_CLASS_TYPE (t), relaxed_p); |
2798 | |
2799 | case METHOD_TYPE: |
2800 | case FUNCTION_TYPE: |
2801 | { |
2802 | tree parm = TYPE_ARG_TYPES (t); |
2803 | if (TREE_CODE (t) == METHOD_TYPE) |
2804 | /* The 'this' pointer isn't interesting; a method has the same |
2805 | linkage (or lack thereof) as its enclosing class. */ |
2806 | parm = TREE_CHAIN (parm); |
2807 | for (; |
2808 | parm && parm != void_list_node; |
2809 | parm = TREE_CHAIN (parm)) |
2810 | { |
2811 | r = no_linkage_check (TREE_VALUE (parm), relaxed_p); |
2812 | if (r) |
2813 | return r; |
2814 | } |
2815 | return no_linkage_check (TREE_TYPE (t), relaxed_p); |
2816 | } |
2817 | |
2818 | default: |
2819 | return NULL_TREE; |
2820 | } |
2821 | } |
2822 | |
2823 | extern int depth_reached; |
2824 | |
2825 | void |
2826 | cxx_print_statistics (void) |
2827 | { |
2828 | print_class_statistics (); |
2829 | print_template_statistics (); |
2830 | if (GATHER_STATISTICS) |
    fprintf (stderr, "maximum template instantiation depth reached: %d\n",
2832 | depth_reached); |
2833 | } |
2834 | |
2835 | /* Return, as an INTEGER_CST node, the number of elements for TYPE |
2836 | (which is an ARRAY_TYPE). This counts only elements of the top |
2837 | array. */ |
2838 | |
2839 | tree |
2840 | array_type_nelts_top (tree type) |
2841 | { |
2842 | return fold_build2_loc (input_location, |
2843 | PLUS_EXPR, sizetype, |
2844 | array_type_nelts (type), |
2845 | size_one_node); |
2846 | } |
2847 | |
2848 | /* Return, as an INTEGER_CST node, the number of elements for TYPE |
2849 | (which is an ARRAY_TYPE). This one is a recursive count of all |
2850 | ARRAY_TYPEs that are clumped together. */ |
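/* E.g. for "int a[5][3]" (illustrative), array_type_nelts_top yields 5
   and array_type_nelts_total yields 15.  */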
2851 | |
2852 | tree |
2853 | array_type_nelts_total (tree type) |
2854 | { |
2855 | tree sz = array_type_nelts_top (type); |
2856 | type = TREE_TYPE (type); |
2857 | while (TREE_CODE (type) == ARRAY_TYPE) |
2858 | { |
2859 | tree n = array_type_nelts_top (type); |
2860 | sz = fold_build2_loc (input_location, |
2861 | MULT_EXPR, sizetype, sz, n); |
2862 | type = TREE_TYPE (type); |
2863 | } |
2864 | return sz; |
2865 | } |
2866 | |
2867 | /* Called from break_out_target_exprs via mapcar. */ |
2868 | |
2869 | static tree |
2870 | bot_manip (tree* tp, int* walk_subtrees, void* data) |
2871 | { |
2872 | splay_tree target_remap = ((splay_tree) data); |
2873 | tree t = *tp; |
2874 | |
2875 | if (!TYPE_P (t) && TREE_CONSTANT (t) && !TREE_SIDE_EFFECTS (t)) |
2876 | { |
2877 | /* There can't be any TARGET_EXPRs or their slot variables below this |
2878 | point. But we must make a copy, in case subsequent processing |
2879 | alters any part of it. For example, during gimplification a cast |
2880 | of the form (T) &X::f (where "f" is a member function) will lead |
2881 | to replacing the PTRMEM_CST for &X::f with a VAR_DECL. */ |
2882 | *walk_subtrees = 0; |
2883 | *tp = unshare_expr (t); |
2884 | return NULL_TREE; |
2885 | } |
2886 | if (TREE_CODE (t) == TARGET_EXPR) |
2887 | { |
2888 | tree u; |
2889 | |
2890 | if (TREE_CODE (TREE_OPERAND (t, 1)) == AGGR_INIT_EXPR) |
2891 | { |
2892 | u = build_cplus_new (TREE_TYPE (t), TREE_OPERAND (t, 1), |
2893 | tf_warning_or_error); |
2894 | if (AGGR_INIT_ZERO_FIRST (TREE_OPERAND (t, 1))) |
2895 | AGGR_INIT_ZERO_FIRST (TREE_OPERAND (u, 1)) = true; |
2896 | } |
2897 | else |
2898 | u = build_target_expr_with_type (TREE_OPERAND (t, 1), TREE_TYPE (t), |
2899 | tf_warning_or_error); |
2900 | |
2901 | TARGET_EXPR_IMPLICIT_P (u) = TARGET_EXPR_IMPLICIT_P (t); |
2902 | TARGET_EXPR_LIST_INIT_P (u) = TARGET_EXPR_LIST_INIT_P (t); |
2903 | TARGET_EXPR_DIRECT_INIT_P (u) = TARGET_EXPR_DIRECT_INIT_P (t); |
2904 | |
2905 | /* Map the old variable to the new one. */ |
2906 | splay_tree_insert (target_remap, |
2907 | (splay_tree_key) TREE_OPERAND (t, 0), |
2908 | (splay_tree_value) TREE_OPERAND (u, 0)); |
2909 | |
2910 | TREE_OPERAND (u, 1) = break_out_target_exprs (TREE_OPERAND (u, 1)); |
2911 | |
2912 | /* Replace the old expression with the new version. */ |
2913 | *tp = u; |
2914 | /* We don't have to go below this point; the recursive call to |
2915 | break_out_target_exprs will have handled anything below this |
2916 | point. */ |
2917 | *walk_subtrees = 0; |
2918 | return NULL_TREE; |
2919 | } |
2920 | if (TREE_CODE (*tp) == SAVE_EXPR) |
2921 | { |
2922 | t = *tp; |
2923 | splay_tree_node n = splay_tree_lookup (target_remap, |
2924 | (splay_tree_key) t); |
2925 | if (n) |
2926 | { |
2927 | *tp = (tree)n->value; |
2928 | *walk_subtrees = 0; |
2929 | } |
2930 | else |
2931 | { |
2932 | copy_tree_r (tp, walk_subtrees, NULL); |
2933 | splay_tree_insert (target_remap, |
2934 | (splay_tree_key)t, |
2935 | (splay_tree_value)*tp); |
2936 | /* Make sure we don't remap an already-remapped SAVE_EXPR. */ |
2937 | splay_tree_insert (target_remap, |
2938 | (splay_tree_key)*tp, |
2939 | (splay_tree_value)*tp); |
2940 | } |
2941 | return NULL_TREE; |
2942 | } |
2943 | |
2944 | /* Make a copy of this node. */ |
2945 | t = copy_tree_r (tp, walk_subtrees, NULL); |
2946 | if (TREE_CODE (*tp) == CALL_EXPR) |
2947 | { |
2948 | set_flags_from_callee (*tp); |
2949 | |
2950 | /* builtin_LINE and builtin_FILE get the location where the default |
2951 | argument is expanded, not where the call was written. */ |
2952 | tree callee = get_callee_fndecl (*tp); |
2953 | if (callee && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL) |
2954 | switch (DECL_FUNCTION_CODE (callee)) |
2955 | { |
	  case BUILT_IN_FILE:
	  case BUILT_IN_LINE:
	    SET_EXPR_LOCATION (*tp, input_location);
	    break;
	  default:
	    break;
	  }
2962 | } |
2963 | return t; |
2964 | } |
2965 | |
2966 | /* Replace all remapped VAR_DECLs in T with their new equivalents. |
2967 | DATA is really a splay-tree mapping old variables to new |
2968 | variables. */ |
2969 | |
2970 | static tree |
2971 | bot_replace (tree* t, int* /*walk_subtrees*/, void* data) |
2972 | { |
2973 | splay_tree target_remap = ((splay_tree) data); |
2974 | |
2975 | if (VAR_P (*t)) |
2976 | { |
2977 | splay_tree_node n = splay_tree_lookup (target_remap, |
2978 | (splay_tree_key) *t); |
2979 | if (n) |
2980 | *t = (tree) n->value; |
2981 | } |
2982 | else if (TREE_CODE (*t) == PARM_DECL |
2983 | && DECL_NAME (*t) == this_identifier |
2984 | && !DECL_CONTEXT (*t)) |
2985 | { |
2986 | /* In an NSDMI we need to replace the 'this' parameter we used for |
2987 | parsing with the real one for this function. */ |
2988 | *t = current_class_ptr; |
2989 | } |
2990 | else if (TREE_CODE (*t) == CONVERT_EXPR |
2991 | && CONVERT_EXPR_VBASE_PATH (*t)) |
2992 | { |
2993 | /* In an NSDMI build_base_path defers building conversions to virtual |
2994 | bases, and we handle it here. */ |
2995 | tree basetype = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (*t))); |
2996 | vec<tree, va_gc> *vbases = CLASSTYPE_VBASECLASSES (current_class_type); |
2997 | int i; tree binfo; |
2998 | FOR_EACH_VEC_SAFE_ELT (vbases, i, binfo) |
2999 | if (BINFO_TYPE (binfo) == basetype) |
3000 | break; |
3001 | *t = build_base_path (PLUS_EXPR, TREE_OPERAND (*t, 0), binfo, true, |
3002 | tf_warning_or_error); |
3003 | } |
3004 | |
3005 | return NULL_TREE; |
3006 | } |
3007 | |
3008 | /* When we parse a default argument expression, we may create |
3009 | temporary variables via TARGET_EXPRs. When we actually use the |
3010 | default-argument expression, we make a copy of the expression |
3011 | and replace the temporaries with appropriate local versions. */ |
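/* For example (illustrative, using std::string only for concreteness):

     void f (const std::string &s = "hello");

   each use of "f ()" must get a fresh temporary for "hello" rather than
   reusing the TARGET_EXPR slot created when the default argument was
   parsed; break_out_target_exprs makes that copy.  */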
3012 | |
3013 | tree |
3014 | break_out_target_exprs (tree t) |
3015 | { |
3016 | static int target_remap_count; |
3017 | static splay_tree target_remap; |
3018 | |
3019 | if (!target_remap_count++) |
3020 | target_remap = splay_tree_new (splay_tree_compare_pointers, |
3021 | /*splay_tree_delete_key_fn=*/NULL, |
3022 | /*splay_tree_delete_value_fn=*/NULL); |
3023 | cp_walk_tree (&t, bot_manip, target_remap, NULL); |
3024 | cp_walk_tree (&t, bot_replace, target_remap, NULL); |
3025 | |
3026 | if (!--target_remap_count) |
3027 | { |
3028 | splay_tree_delete (target_remap); |
3029 | target_remap = NULL; |
3030 | } |
3031 | |
3032 | return t; |
3033 | } |
3034 | |
3035 | /* Build an expression for the subobject of OBJ at CONSTRUCTOR index INDEX, |
3036 | which we expect to have type TYPE. */ |
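/* For example (illustrative): with OBJ "a" and INDEX the FIELD_DECL for
   member "x", this builds "a.x"; with INDEX the INTEGER_CST 2, it
   builds "a[2]".  */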
3037 | |
3038 | tree |
3039 | build_ctor_subob_ref (tree index, tree type, tree obj) |
3040 | { |
3041 | if (index == NULL_TREE) |
3042 | /* Can't refer to a particular member of a vector. */ |
3043 | obj = NULL_TREE; |
3044 | else if (TREE_CODE (index) == INTEGER_CST) |
3045 | obj = cp_build_array_ref (input_location, obj, index, tf_none); |
3046 | else |
3047 | obj = build_class_member_access_expr (obj, index, NULL_TREE, |
3048 | /*reference*/false, tf_none); |
3049 | if (obj) |
3050 | { |
3051 | tree objtype = TREE_TYPE (obj); |
3052 | if (TREE_CODE (objtype) == ARRAY_TYPE && !TYPE_DOMAIN (objtype)) |
3053 | { |
3054 | /* When the destination object refers to a flexible array member |
3055 | verify that it matches the type of the source object except |
3056 | for its domain and qualifiers. */ |
3057 | gcc_assert (comptypes (TYPE_MAIN_VARIANT (type), |
3058 | TYPE_MAIN_VARIANT (objtype), |
3059 | COMPARE_REDECLARATION)); |
3060 | } |
3061 | else |
3062 | gcc_assert (same_type_ignoring_top_level_qualifiers_p (type, objtype)); |
3063 | } |
3064 | |
3065 | return obj; |
3066 | } |
3067 | |
3068 | struct replace_placeholders_t |
3069 | { |
3070 | tree obj; /* The object to be substituted for a PLACEHOLDER_EXPR. */ |
3071 | bool seen; /* Whether we've encountered a PLACEHOLDER_EXPR. */ |
3072 | hash_set<tree> *pset; /* To avoid walking same trees multiple times. */ |
3073 | }; |
3074 | |
3075 | /* Like substitute_placeholder_in_expr, but handle C++ tree codes and |
3076 | build up subexpressions as we go deeper. */ |
3077 | |
3078 | static tree |
3079 | replace_placeholders_r (tree* t, int* walk_subtrees, void* data_) |
3080 | { |
3081 | replace_placeholders_t *d = static_cast<replace_placeholders_t*>(data_); |
3082 | tree obj = d->obj; |
3083 | |
3084 | if (TREE_CONSTANT (*t)) |
3085 | { |
3086 | *walk_subtrees = false; |
3087 | return NULL_TREE; |
3088 | } |
3089 | |
3090 | switch (TREE_CODE (*t)) |
3091 | { |
3092 | case PLACEHOLDER_EXPR: |
3093 | { |
3094 | tree x = obj; |
3095 | for (; !same_type_ignoring_top_level_qualifiers_p (TREE_TYPE (*t), |
3096 | TREE_TYPE (x)); |
3097 | x = TREE_OPERAND (x, 0)) |
3098 | gcc_assert (TREE_CODE (x) == COMPONENT_REF); |
3099 | *t = x; |
3100 | *walk_subtrees = false; |
3101 | d->seen = true; |
3102 | } |
3103 | break; |
3104 | |
3105 | case CONSTRUCTOR: |
3106 | { |
3107 | constructor_elt *ce; |
3108 | vec<constructor_elt,va_gc> *v = CONSTRUCTOR_ELTS (*t); |
3109 | for (unsigned i = 0; vec_safe_iterate (v, i, &ce); ++i) |
3110 | { |
3111 | tree *valp = &ce->value; |
3112 | tree type = TREE_TYPE (*valp); |
3113 | tree subob = obj; |
3114 | |
3115 | if (TREE_CODE (*valp) == CONSTRUCTOR |
3116 | && AGGREGATE_TYPE_P (type)) |
3117 | { |
3118 | /* If we're looking at the initializer for OBJ, then build |
3119 | a sub-object reference. If we're looking at an |
3120 | initializer for another object, just pass OBJ down. */ |
3121 | if (same_type_ignoring_top_level_qualifiers_p |
3122 | (TREE_TYPE (*t), TREE_TYPE (obj))) |
3123 | subob = build_ctor_subob_ref (ce->index, type, obj); |
3124 | if (TREE_CODE (*valp) == TARGET_EXPR) |
3125 | valp = &TARGET_EXPR_INITIAL (*valp); |
3126 | } |
3127 | d->obj = subob; |
3128 | cp_walk_tree (valp, replace_placeholders_r, data_, d->pset); |
3129 | d->obj = obj; |
3130 | } |
3131 | *walk_subtrees = false; |
3132 | break; |
3133 | } |
3134 | |
3135 | default: |
3136 | break; |
3137 | } |
3138 | |
3139 | return NULL_TREE; |
3140 | } |
3141 | |
3142 | /* Replace PLACEHOLDER_EXPRs in EXP with object OBJ. SEEN_P is set if |
3143 | a PLACEHOLDER_EXPR has been encountered. */ |
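/* For example (illustrative), in the C++14 aggregate

     struct A { int x; int y = x + 1; };
     A a = { 42 };

   the NSDMI for "y" refers to the object under construction via a
   PLACEHOLDER_EXPR, which is replaced here by a reference to "a".  */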
3144 | |
3145 | tree |
3146 | replace_placeholders (tree exp, tree obj, bool *seen_p) |
3147 | { |
3148 | /* This is only relevant for C++14. */ |
3149 | if (cxx_dialect < cxx14) |
3150 | return exp; |
3151 | |
3152 | /* If the object isn't a (member of a) class, do nothing. */ |
3153 | tree op0 = obj; |
3154 | while (TREE_CODE (op0) == COMPONENT_REF) |
3155 | op0 = TREE_OPERAND (op0, 0); |
3156 | if (!CLASS_TYPE_P (strip_array_types (TREE_TYPE (op0)))) |
3157 | return exp; |
3158 | |
3159 | tree *tp = &exp; |
3160 | hash_set<tree> pset; |
3161 | replace_placeholders_t data = { obj, false, &pset }; |
3162 | if (TREE_CODE (exp) == TARGET_EXPR) |
3163 | tp = &TARGET_EXPR_INITIAL (exp); |
3164 | cp_walk_tree (tp, replace_placeholders_r, &data, &pset); |
3165 | if (seen_p) |
3166 | *seen_p = data.seen; |
3167 | return exp; |
3168 | } |
3169 | |
/* Similar to `build_nt', but for template definitions of dependent
   expressions.  */
3172 | |
3173 | tree |
3174 | build_min_nt_loc (location_t loc, enum tree_code code, ...) |
3175 | { |
3176 | tree t; |
3177 | int length; |
3178 | int i; |
3179 | va_list p; |
3180 | |
3181 | gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp); |
3182 | |
3183 | va_start (p, code); |
3184 | |
3185 | t = make_node (code); |
3186 | SET_EXPR_LOCATION (t, loc); |
3187 | length = TREE_CODE_LENGTH (code); |
3188 | |
3189 | for (i = 0; i < length; i++) |
3190 | { |
3191 | tree x = va_arg (p, tree); |
3192 | TREE_OPERAND (t, i) = x; |
3193 | if (x && TREE_CODE (x) == OVERLOAD) |
3194 | lookup_keep (x, true); |
3195 | } |
3196 | |
3197 | va_end (p); |
3198 | return t; |
3199 | } |
3200 | |
3201 | /* Similar to `build', but for template definitions. */ |
3202 | |
3203 | tree |
3204 | build_min (enum tree_code code, tree tt, ...) |
3205 | { |
3206 | tree t; |
3207 | int length; |
3208 | int i; |
3209 | va_list p; |
3210 | |
3211 | gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp); |
3212 | |
3213 | va_start (p, tt); |
3214 | |
3215 | t = make_node (code); |
3216 | length = TREE_CODE_LENGTH (code); |
3217 | TREE_TYPE (t) = tt; |
3218 | |
3219 | for (i = 0; i < length; i++) |
3220 | { |
3221 | tree x = va_arg (p, tree); |
3222 | TREE_OPERAND (t, i) = x; |
3223 | if (x) |
3224 | { |
3225 | if (!TYPE_P (x) && TREE_SIDE_EFFECTS (x)) |
3226 | TREE_SIDE_EFFECTS (t) = 1; |
3227 | if (TREE_CODE (x) == OVERLOAD) |
3228 | lookup_keep (x, true); |
3229 | } |
3230 | } |
3231 | |
3232 | va_end (p); |
3233 | |
3234 | if (code == CAST_EXPR) |
3235 | /* The single operand is a TREE_LIST, which we have to check. */ |
3236 | for (tree v = TREE_OPERAND (t, 0); v; v = TREE_CHAIN (v)) |
3237 | if (TREE_CODE (TREE_VALUE (v)) == OVERLOAD) |
3238 | lookup_keep (TREE_VALUE (v), true); |
3239 | |
3240 | return t; |
3241 | } |
3242 | |
3243 | /* Similar to `build', but for template definitions of non-dependent |
3244 | expressions. NON_DEP is the non-dependent expression that has been |
3245 | built. */ |
3246 | |
3247 | tree |
3248 | build_min_non_dep (enum tree_code code, tree non_dep, ...) |
3249 | { |
3250 | tree t; |
3251 | int length; |
3252 | int i; |
3253 | va_list p; |
3254 | |
3255 | gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp); |
3256 | |
3257 | va_start (p, non_dep); |
3258 | |
3259 | if (REFERENCE_REF_P (non_dep)) |
3260 | non_dep = TREE_OPERAND (non_dep, 0); |
3261 | |
3262 | t = make_node (code); |
3263 | length = TREE_CODE_LENGTH (code); |
3264 | TREE_TYPE (t) = unlowered_expr_type (non_dep); |
3265 | TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (non_dep); |
3266 | |
3267 | for (i = 0; i < length; i++) |
3268 | { |
3269 | tree x = va_arg (p, tree); |
3270 | TREE_OPERAND (t, i) = x; |
3271 | if (x && TREE_CODE (x) == OVERLOAD) |
3272 | lookup_keep (x, true); |
3273 | } |
3274 | |
3275 | if (code == COMPOUND_EXPR && TREE_CODE (non_dep) != COMPOUND_EXPR) |
3276 | /* This should not be considered a COMPOUND_EXPR, because it |
3277 | resolves to an overload. */ |
3278 | COMPOUND_EXPR_OVERLOADED (t) = 1; |
3279 | |
3280 | va_end (p); |
3281 | return convert_from_reference (t); |
3282 | } |
3283 | |
/* Similar to `build_min_nt_loc', but for call expressions.  */
3285 | |
3286 | tree |
3287 | build_min_nt_call_vec (tree fn, vec<tree, va_gc> *args) |
3288 | { |
3289 | tree ret, t; |
3290 | unsigned int ix; |
3291 | |
3292 | ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3); |
3293 | CALL_EXPR_FN (ret) = fn; |
3294 | CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE; |
3295 | FOR_EACH_VEC_SAFE_ELT (args, ix, t) |
3296 | { |
3297 | CALL_EXPR_ARG (ret, ix) = t; |
3298 | if (TREE_CODE (t) == OVERLOAD) |
3299 | lookup_keep (t, true); |
3300 | } |
3301 | return ret; |
3302 | } |
3303 | |
3304 | /* Similar to `build_min_nt_call_vec', but for template definitions of |
3305 | non-dependent expressions. NON_DEP is the non-dependent expression |
3306 | that has been built. */ |
3307 | |
3308 | tree |
3309 | build_min_non_dep_call_vec (tree non_dep, tree fn, vec<tree, va_gc> *argvec) |
3310 | { |
3311 | tree t = build_min_nt_call_vec (fn, argvec); |
3312 | if (REFERENCE_REF_P (non_dep)) |
3313 | non_dep = TREE_OPERAND (non_dep, 0); |
3314 | TREE_TYPE (t) = TREE_TYPE (non_dep); |
3315 | TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (non_dep); |
3316 | return convert_from_reference (t); |
3317 | } |
3318 | |
3319 | /* Similar to build_min_non_dep, but for expressions that have been resolved to |
3320 | a call to an operator overload. OP is the operator that has been |
3321 | overloaded. NON_DEP is the non-dependent expression that's been built, |
3322 | which should be a CALL_EXPR or an INDIRECT_REF to a CALL_EXPR. OVERLOAD is |
3323 | the overload that NON_DEP is calling. */ |
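/* For example (illustrative): when "a + b" in a template has been
   resolved to a non-member "operator+", OP is PLUS_EXPR, NON_DEP is the
   CALL_EXPR already built for the call, and OVERLOAD is that operator+
   declaration.  */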
3324 | |
3325 | tree |
3326 | build_min_non_dep_op_overload (enum tree_code op, |
3327 | tree non_dep, |
3328 | tree overload, ...) |
3329 | { |
3330 | va_list p; |
3331 | int nargs, expected_nargs; |
3332 | tree fn, call; |
3333 | vec<tree, va_gc> *args; |
3334 | |
3335 | non_dep = extract_call_expr (non_dep); |
3336 | |
3337 | nargs = call_expr_nargs (non_dep); |
3338 | |
3339 | expected_nargs = cp_tree_code_length (op); |
3340 | if ((op == POSTINCREMENT_EXPR |
3341 | || op == POSTDECREMENT_EXPR) |
3342 | /* With -fpermissive non_dep could be operator++(). */ |
3343 | && (!flag_permissive || nargs != expected_nargs)) |
3344 | expected_nargs += 1; |
3345 | gcc_assert (nargs == expected_nargs); |
3346 | |
3347 | args = make_tree_vector (); |
3348 | va_start (p, overload); |
3349 | |
3350 | if (TREE_CODE (TREE_TYPE (overload)) == FUNCTION_TYPE) |
3351 | { |
3352 | fn = overload; |
3353 | for (int i = 0; i < nargs; i++) |
3354 | { |
3355 | tree arg = va_arg (p, tree); |
3356 | vec_safe_push (args, arg); |
3357 | } |
3358 | } |
3359 | else if (TREE_CODE (TREE_TYPE (overload)) == METHOD_TYPE) |
3360 | { |
3361 | tree object = va_arg (p, tree); |
3362 | tree binfo = TYPE_BINFO (TREE_TYPE (object)); |
3363 | tree method = build_baselink (binfo, binfo, overload, NULL_TREE); |
3364 | fn = build_min (COMPONENT_REF, TREE_TYPE (overload), |
3365 | object, method, NULL_TREE); |
3366 | for (int i = 1; i < nargs; i++) |
3367 | { |
3368 | tree arg = va_arg (p, tree); |
3369 | vec_safe_push (args, arg); |
3370 | } |
3371 | } |
3372 | else |
3373 | gcc_unreachable (); |
3374 | |
3375 | va_end (p); |
3376 | call = build_min_non_dep_call_vec (non_dep, fn, args); |
3377 | release_tree_vector (args); |
3378 | |
3379 | tree call_expr = extract_call_expr (call); |
3380 | KOENIG_LOOKUP_P (call_expr) = KOENIG_LOOKUP_P (non_dep); |
3381 | CALL_EXPR_OPERATOR_SYNTAX (call_expr) = true; |
3382 | CALL_EXPR_ORDERED_ARGS (call_expr) = CALL_EXPR_ORDERED_ARGS (non_dep); |
3383 | CALL_EXPR_REVERSE_ARGS (call_expr) = CALL_EXPR_REVERSE_ARGS (non_dep); |
3384 | |
3385 | return call; |
3386 | } |
3387 | |
/* Return a new tree vec copied from OLD_VEC, with ELT inserted at
   index IDX.  */
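
/* For example, with OLD_VEC = {A, B, C}, ELT = X and IDX = 1 the
   result is {A, X, B, C}; with IDX = 3 (the length) X is appended,
   giving {A, B, C, X}.  */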
3389 | |
3390 | vec<tree, va_gc> * |
3391 | vec_copy_and_insert (vec<tree, va_gc> *old_vec, tree elt, unsigned idx) |
3392 | { |
3393 | unsigned len = vec_safe_length (old_vec); |
3394 | gcc_assert (idx <= len); |
3395 | |
3396 | vec<tree, va_gc> *new_vec = NULL; |
3397 | vec_alloc (new_vec, len + 1); |
3398 | |
3399 | unsigned i; |
3400 | for (i = 0; i < len; ++i) |
3401 | { |
3402 | if (i == idx) |
3403 | new_vec->quick_push (elt); |
3404 | new_vec->quick_push ((*old_vec)[i]); |
3405 | } |
3406 | if (i == idx) |
3407 | new_vec->quick_push (elt); |
3408 | |
3409 | return new_vec; |
3410 | } |
3411 | |
/* Return the TYPE_DECL for T: T itself if it is already a TYPE_DECL,
   its TYPE_STUB_DECL if T is a type; otherwise T must be
   error_mark_node, which is returned unchanged.  */

tree
3413 | get_type_decl (tree t) |
3414 | { |
3415 | if (TREE_CODE (t) == TYPE_DECL) |
3416 | return t; |
3417 | if (TYPE_P (t)) |
3418 | return TYPE_STUB_DECL (t); |
3419 | gcc_assert (t == error_mark_node); |
3420 | return t; |
3421 | } |
3422 | |
3423 | /* Returns the namespace that contains DECL, whether directly or |
3424 | indirectly. */ |
3425 | |
3426 | tree |
3427 | decl_namespace_context (tree decl) |
3428 | { |
3429 | while (1) |
3430 | { |
3431 | if (TREE_CODE (decl) == NAMESPACE_DECL) |
3432 | return decl; |
3433 | else if (TYPE_P (decl)) |
3434 | decl = CP_DECL_CONTEXT (TYPE_MAIN_DECL (decl)); |
3435 | else |
3436 | decl = CP_DECL_CONTEXT (decl); |
3437 | } |
3438 | } |
3439 | |
/* Returns true if DECL is within an anonymous namespace, however
   deeply nested, or false otherwise.  */
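
/* For example (illustrative):

     namespace { struct A { static int i; }; void f (); }

   decl_anon_ns_mem_p is true for A, A::i and f, since each is nested
   inside the anonymous namespace.  */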
3442 | |
3443 | bool |
3444 | decl_anon_ns_mem_p (const_tree decl) |
3445 | { |
3446 | while (TREE_CODE (decl) != NAMESPACE_DECL) |
3447 | { |
3448 | /* Classes inside anonymous namespaces have TREE_PUBLIC == 0. */ |
3449 | if (TYPE_P (decl)) |
3450 | return !TREE_PUBLIC (TYPE_MAIN_DECL (decl)); |
3451 | |
3452 | decl = CP_DECL_CONTEXT (decl); |
3453 | } |
3454 | return !TREE_PUBLIC (decl); |
3455 | } |
3456 | |
3457 | /* Subroutine of cp_tree_equal: t1 and t2 are the CALL_EXPR_FNs of two |
3458 | CALL_EXPRS. Return whether they are equivalent. */ |
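
/* For example (illustrative): two calls written `f (t)' in matching
   template definitions compare equal even if different overload sets
   for `f' are visible at each point, because `f' is a dependent name;
   but `f<int> (t)' and `f<long> (t)' do not, since the explicit
   template arguments are compared.  */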
3459 | |
3460 | static bool |
3461 | called_fns_equal (tree t1, tree t2) |
3462 | { |
3463 | /* Core 1321: dependent names are equivalent even if the overload sets |
3464 | are different. But do compare explicit template arguments. */ |
3465 | tree name1 = dependent_name (t1); |
3466 | tree name2 = dependent_name (t2); |
3467 | if (name1 || name2) |
3468 | { |
3469 | tree targs1 = NULL_TREE, targs2 = NULL_TREE; |
3470 | |
3471 | if (name1 != name2) |
3472 | return false; |
3473 | |
3474 | if (TREE_CODE (t1) == TEMPLATE_ID_EXPR) |
3475 | targs1 = TREE_OPERAND (t1, 1); |
3476 | if (TREE_CODE (t2) == TEMPLATE_ID_EXPR) |
3477 | targs2 = TREE_OPERAND (t2, 1); |
3478 | return cp_tree_equal (targs1, targs2); |
3479 | } |
3480 | else |
3481 | return cp_tree_equal (t1, t2); |
3482 | } |
3483 | |
/* Return true if T1 and T2 have the same tree structure; return false
   if they differ.  */
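
/* This is used, for example, when deciding whether two sets of
   template arguments name the same specialization: constants compare
   by value, so two distinct INTEGER_CST nodes for `3' of the same
   type are equal here even though the pointers differ.  */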
3486 | |
3487 | bool |
3488 | cp_tree_equal (tree t1, tree t2) |
3489 | { |
3490 | enum tree_code code1, code2; |
3491 | |
3492 | if (t1 == t2) |
3493 | return true; |
3494 | if (!t1 || !t2) |
3495 | return false; |
3496 | |
3497 | code1 = TREE_CODE (t1); |
3498 | code2 = TREE_CODE (t2); |
3499 | |
3500 | if (code1 != code2) |
3501 | return false; |
3502 | |
3503 | if (CONSTANT_CLASS_P (t1) |
3504 | && !same_type_p (TREE_TYPE (t1), TREE_TYPE (t2))) |
3505 | return false; |
3506 | |
3507 | switch (code1) |
3508 | { |
3509 | case VOID_CST: |
3510 | /* There's only a single VOID_CST node, so we should never reach |
3511 | here. */ |
3512 | gcc_unreachable (); |
3513 | |
3514 | case INTEGER_CST: |
3515 | return tree_int_cst_equal (t1, t2); |
3516 | |
3517 | case REAL_CST: |
3518 | return real_equal (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2)); |
3519 | |
3520 | case STRING_CST: |
3521 | return TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2) |
3522 | && !memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2), |
3523 | TREE_STRING_LENGTH (t1)); |
3524 | |
3525 | case FIXED_CST: |
3526 | return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), |
3527 | TREE_FIXED_CST (t2)); |
3528 | |
3529 | case COMPLEX_CST: |
3530 | return cp_tree_equal (TREE_REALPART (t1), TREE_REALPART (t2)) |
3531 | && cp_tree_equal (TREE_IMAGPART (t1), TREE_IMAGPART (t2)); |
3532 | |
3533 | case VECTOR_CST: |
3534 | return operand_equal_p (t1, t2, OEP_ONLY_CONST); |
3535 | |
3536 | case CONSTRUCTOR: |
3537 | /* We need to do this when determining whether or not two |
3538 | non-type pointer to member function template arguments |
3539 | are the same. */ |
3540 | if (!same_type_p (TREE_TYPE (t1), TREE_TYPE (t2)) |
3541 | || CONSTRUCTOR_NELTS (t1) != CONSTRUCTOR_NELTS (t2)) |
3542 | return false; |
3543 | { |
3544 | tree field, value; |
3545 | unsigned int i; |
3546 | FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t1), i, field, value) |
3547 | { |
3548 | constructor_elt *elt2 = CONSTRUCTOR_ELT (t2, i); |
3549 | if (!cp_tree_equal (field, elt2->index) |
3550 | || !cp_tree_equal (value, elt2->value)) |
3551 | return false; |
3552 | } |
3553 | } |
3554 | return true; |
3555 | |
3556 | case TREE_LIST: |
3557 | if (!cp_tree_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))) |
3558 | return false; |
3559 | if (!cp_tree_equal (TREE_VALUE (t1), TREE_VALUE (t2))) |
3560 | return false; |
3561 | return cp_tree_equal (TREE_CHAIN (t1), TREE_CHAIN (t2)); |
3562 | |
3563 | case SAVE_EXPR: |
3564 | return cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0)); |
3565 | |
3566 | case CALL_EXPR: |
3567 | { |
3568 | tree arg1, arg2; |
3569 | call_expr_arg_iterator iter1, iter2; |
3570 | if (!called_fns_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2))) |
3571 | return false; |
3572 | for (arg1 = first_call_expr_arg (t1, &iter1), |
3573 | arg2 = first_call_expr_arg (t2, &iter2); |
3574 | arg1 && arg2; |
3575 | arg1 = next_call_expr_arg (&iter1), |
3576 | arg2 = next_call_expr_arg (&iter2)) |
3577 | if (!cp_tree_equal (arg1, arg2)) |
3578 | return false; |
3579 | if (arg1 || arg2) |
3580 | return false; |
3581 | return true; |
3582 | } |
3583 | |
3584 | case TARGET_EXPR: |
3585 | { |
3586 | tree o1 = TREE_OPERAND (t1, 0); |
3587 | tree o2 = TREE_OPERAND (t2, 0); |
3588 | |
3589 | /* Special case: if either target is an unallocated VAR_DECL, |
3590 | it means that it's going to be unified with whatever the |
3591 | TARGET_EXPR is really supposed to initialize, so treat it |
3592 | as being equivalent to anything. */ |
3593 | if (VAR_P (o1) && DECL_NAME (o1) == NULL_TREE |
3594 | && !DECL_RTL_SET_P (o1)) |
3595 | /*Nop*/; |
3596 | else if (VAR_P (o2) && DECL_NAME (o2) == NULL_TREE |
3597 | && !DECL_RTL_SET_P (o2)) |
3598 | /*Nop*/; |
3599 | else if (!cp_tree_equal (o1, o2)) |
3600 | return false; |
3601 | |
3602 | return cp_tree_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1)); |
3603 | } |
3604 | |
3605 | case PARM_DECL: |
      /* This case arises when comparing uses of parameters in
         late-specified return types with an out-of-class definition
         of the function; it can also come up for expressions that
         involve 'this' in a member function template.  */
3610 | |
3611 | if (comparing_specializations && !CONSTRAINT_VAR_P (t1)) |
3612 | /* When comparing hash table entries, only an exact match is |
3613 | good enough; we don't want to replace 'this' with the |
3614 | version from another function. But be more flexible |
3615 | with local parameters in a requires-expression. */ |
3616 | return false; |
3617 | |
3618 | if (same_type_p (TREE_TYPE (t1), TREE_TYPE (t2))) |
3619 | { |
3620 | if (DECL_ARTIFICIAL (t1) ^ DECL_ARTIFICIAL (t2)) |
3621 | return false; |
3622 | if (CONSTRAINT_VAR_P (t1) ^ CONSTRAINT_VAR_P (t2)) |
3623 | return false; |
3624 | if (DECL_ARTIFICIAL (t1) |
3625 | || (DECL_PARM_LEVEL (t1) == DECL_PARM_LEVEL (t2) |
3626 | && DECL_PARM_INDEX (t1) == DECL_PARM_INDEX (t2))) |
3627 | return true; |
3628 | } |
3629 | return false; |
3630 | |
3631 | case VAR_DECL: |
3632 | case CONST_DECL: |
3633 | case FIELD_DECL: |
3634 | case FUNCTION_DECL: |
3635 | case TEMPLATE_DECL: |
3636 | case IDENTIFIER_NODE: |
3637 | case SSA_NAME: |
3638 | return false; |
3639 | |
3640 | case BASELINK: |
3641 | return (BASELINK_BINFO (t1) == BASELINK_BINFO (t2) |
3642 | && BASELINK_ACCESS_BINFO (t1) == BASELINK_ACCESS_BINFO (t2) |
3643 | && BASELINK_QUALIFIED_P (t1) == BASELINK_QUALIFIED_P (t2) |
3644 | && cp_tree_equal (BASELINK_FUNCTIONS (t1), |
3645 | BASELINK_FUNCTIONS (t2))); |
3646 | |
3647 | case TEMPLATE_PARM_INDEX: |
3648 | return (TEMPLATE_PARM_IDX (t1) == TEMPLATE_PARM_IDX (t2) |
3649 | && TEMPLATE_PARM_LEVEL (t1) == TEMPLATE_PARM_LEVEL (t2) |
3650 | && (TEMPLATE_PARM_PARAMETER_PACK (t1) |
3651 | == TEMPLATE_PARM_PARAMETER_PACK (t2)) |
3652 | && same_type_p (TREE_TYPE (TEMPLATE_PARM_DECL (t1)), |
3653 | TREE_TYPE (TEMPLATE_PARM_DECL (t2)))); |
3654 | |
3655 | case TEMPLATE_ID_EXPR: |
3656 | return (cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0)) |
3657 | && cp_tree_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1))); |
3658 | |
3659 | case CONSTRAINT_INFO: |
3660 | return cp_tree_equal (CI_ASSOCIATED_CONSTRAINTS (t1), |
3661 | CI_ASSOCIATED_CONSTRAINTS (t2)); |
3662 | |
3663 | case CHECK_CONSTR: |
3664 | return (CHECK_CONSTR_CONCEPT (t1) == CHECK_CONSTR_CONCEPT (t2) |
3665 | && comp_template_args (CHECK_CONSTR_ARGS (t1), |
3666 | CHECK_CONSTR_ARGS (t2))); |
3667 | |
3668 | case TREE_VEC: |
3669 | { |
3670 | unsigned ix; |
3671 | if (TREE_VEC_LENGTH (t1) != TREE_VEC_LENGTH (t2)) |
3672 | return false; |
3673 | for (ix = TREE_VEC_LENGTH (t1); ix--;) |
3674 | if (!cp_tree_equal (TREE_VEC_ELT (t1, ix), |
3675 | TREE_VEC_ELT (t2, ix))) |
3676 | return false; |
3677 | return true; |
3678 | } |
3679 | |
3680 | case SIZEOF_EXPR: |
3681 | case ALIGNOF_EXPR: |
3682 | { |
3683 | tree o1 = TREE_OPERAND (t1, 0); |
3684 | tree o2 = TREE_OPERAND (t2, 0); |
3685 | |
3686 | if (code1 == SIZEOF_EXPR) |
3687 | { |
3688 | if (SIZEOF_EXPR_TYPE_P (t1)) |
3689 | o1 = TREE_TYPE (o1); |
3690 | if (SIZEOF_EXPR_TYPE_P (t2)) |
3691 | o2 = TREE_TYPE (o2); |
3692 | } |
3693 | if (TREE_CODE (o1) != TREE_CODE (o2)) |
3694 | return false; |
3695 | if (TYPE_P (o1)) |
3696 | return same_type_p (o1, o2); |
3697 | else |
3698 | return cp_tree_equal (o1, o2); |
3699 | } |
3700 | |
3701 | case MODOP_EXPR: |
3702 | { |
3703 | tree t1_op1, t2_op1; |
3704 | |
3705 | if (!cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0))) |
3706 | return false; |
3707 | |
3708 | t1_op1 = TREE_OPERAND (t1, 1); |
3709 | t2_op1 = TREE_OPERAND (t2, 1); |
3710 | if (TREE_CODE (t1_op1) != TREE_CODE (t2_op1)) |
3711 | return false; |
3712 | |
3713 | return cp_tree_equal (TREE_OPERAND (t1, 2), TREE_OPERAND (t2, 2)); |
3714 | } |
3715 | |
3716 | case PTRMEM_CST: |
3717 | /* Two pointer-to-members are the same if they point to the same |
3718 | field or function in the same class. */ |
3719 | if (PTRMEM_CST_MEMBER (t1) != PTRMEM_CST_MEMBER (t2)) |
3720 | return false; |
3721 | |
3722 | return same_type_p (PTRMEM_CST_CLASS (t1), PTRMEM_CST_CLASS (t2)); |
3723 | |
3724 | case OVERLOAD: |
3725 | { |
3726 | /* Two overloads. Must be exactly the same set of decls. */ |
3727 | lkp_iterator first (t1); |
3728 | lkp_iterator second (t2); |
3729 | |
3730 | for (; first && second; ++first, ++second) |
3731 | if (*first != *second) |
3732 | return false; |
3733 | return !(first || second); |
3734 | } |
3735 | |
3736 | case TRAIT_EXPR: |
3737 | if (TRAIT_EXPR_KIND (t1) != TRAIT_EXPR_KIND (t2)) |
3738 | return false; |
3739 | return same_type_p (TRAIT_EXPR_TYPE1 (t1), TRAIT_EXPR_TYPE1 (t2)) |
3740 | && cp_tree_equal (TRAIT_EXPR_TYPE2 (t1), TRAIT_EXPR_TYPE2 (t2)); |
3741 | |
3742 | case CAST_EXPR: |
3743 | case STATIC_CAST_EXPR: |
3744 | case REINTERPRET_CAST_EXPR: |
3745 | case CONST_CAST_EXPR: |
3746 | case DYNAMIC_CAST_EXPR: |
3747 | case IMPLICIT_CONV_EXPR: |
3748 | case NEW_EXPR: |
3749 | CASE_CONVERT: |
3750 | case NON_LVALUE_EXPR: |
3751 | case VIEW_CONVERT_EXPR: |
3752 | if (!same_type_p (TREE_TYPE (t1), TREE_TYPE (t2))) |
3753 | return false; |
3754 | /* Now compare operands as usual. */ |
3755 | break; |
3756 | |
3757 | case DEFERRED_NOEXCEPT: |
3758 | return (cp_tree_equal (DEFERRED_NOEXCEPT_PATTERN (t1), |
3759 | DEFERRED_NOEXCEPT_PATTERN (t2)) |
3760 | && comp_template_args (DEFERRED_NOEXCEPT_ARGS (t1), |
3761 | DEFERRED_NOEXCEPT_ARGS (t2))); |
3763 | |
3764 | default: |
3765 | break; |
3766 | } |
3767 | |
3768 | switch (TREE_CODE_CLASS (code1)) |
3769 | { |
3770 | case tcc_unary: |
3771 | case tcc_binary: |
3772 | case tcc_comparison: |
3773 | case tcc_expression: |
3774 | case tcc_vl_exp: |
3775 | case tcc_reference: |
3776 | case tcc_statement: |
3777 | { |
3778 | int i, n; |
3779 | |
3780 | n = cp_tree_operand_length (t1); |
3781 | if (TREE_CODE_CLASS (code1) == tcc_vl_exp |
3782 | && n != TREE_OPERAND_LENGTH (t2)) |
3783 | return false; |
3784 | |
3785 | for (i = 0; i < n; ++i) |
3786 | if (!cp_tree_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i))) |
3787 | return false; |
3788 | |
3789 | return true; |
3790 | } |
3791 | |
3792 | case tcc_type: |
3793 | return same_type_p (t1, t2); |
3794 | default: |
3795 | gcc_unreachable (); |
3796 | } |
3797 | /* We can get here with --disable-checking. */ |
3798 | return false; |
3799 | } |
3800 | |
3801 | /* The type of ARG when used as an lvalue. */ |
3802 | |
3803 | tree |
3804 | lvalue_type (tree arg) |
3805 | { |
  return TREE_TYPE (arg);
3808 | } |
3809 | |
3810 | /* The type of ARG for printing error messages; denote lvalues with |
3811 | reference types. */ |
3812 | |
3813 | tree |
3814 | error_type (tree arg) |
3815 | { |
3816 | tree type = TREE_TYPE (arg); |
3817 | |
3818 | if (TREE_CODE (type) == ARRAY_TYPE) |
3819 | ; |
3820 | else if (TREE_CODE (type) == ERROR_MARK) |
3821 | ; |
3822 | else if (lvalue_p (arg)) |
3823 | type = build_reference_type (lvalue_type (arg)); |
3824 | else if (MAYBE_CLASS_TYPE_P (type)) |
3825 | type = lvalue_type (arg); |
3826 | |
3827 | return type; |
3828 | } |
3829 | |
3830 | /* Does FUNCTION use a variable-length argument list? */ |
3831 | |
3832 | int |
3833 | varargs_function_p (const_tree function) |
3834 | { |
3835 | return stdarg_p (TREE_TYPE (function)); |
3836 | } |
3837 | |
/* Returns 1 if DECL is a member of a class.  */
3839 | |
3840 | int |
3841 | member_p (const_tree decl) |
3842 | { |
3843 | const_tree const ctx = DECL_CONTEXT (decl); |
3844 | return (ctx && TYPE_P (ctx)); |
3845 | } |
3846 | |
3847 | /* Create a placeholder for member access where we don't actually have an |
3848 | object that the access is against. */ |
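
/* The result has the form `*(TYPE *) void_node', i.e. an INDIRECT_REF
   of a CONVERT_EXPR wrapping void_node; is_dummy_object below
   recognizes exactly this shape.  */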
3849 | |
3850 | tree |
3851 | build_dummy_object (tree type) |
3852 | { |
3853 | tree decl = build1 (CONVERT_EXPR, build_pointer_type (type), void_node); |
3854 | return cp_build_fold_indirect_ref (decl); |
3855 | } |
3856 | |
3857 | /* We've gotten a reference to a member of TYPE. Return *this if appropriate, |
3858 | or a dummy object otherwise. If BINFOP is non-0, it is filled with the |
3859 | binfo path from current_class_type to TYPE, or 0. */ |
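
/* For example (illustrative): while processing a member function of a
   class D derived from B, a reference to a member of B yields
   current_class_ref, i.e. *this; outside any suitable class scope a
   dummy object of TYPE is built instead, so the member access can
   still be represented.  */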
3860 | |
3861 | tree |
3862 | maybe_dummy_object (tree type, tree* binfop) |
3863 | { |
3864 | tree decl, context; |
3865 | tree binfo; |
3866 | tree current = current_nonlambda_class_type (); |
3867 | |
3868 | if (current |
3869 | && (binfo = lookup_base (current, type, ba_any, NULL, |
3870 | tf_warning_or_error))) |
3871 | context = current; |
3872 | else |
3873 | { |
3874 | /* Reference from a nested class member function. */ |
3875 | context = type; |
3876 | binfo = TYPE_BINFO (type); |
3877 | } |
3878 | |
3879 | if (binfop) |
3880 | *binfop = binfo; |
3881 | |
3882 | if (current_class_ref |
3883 | /* current_class_ref might not correspond to current_class_type if |
3884 | we're in tsubst_default_argument or a lambda-declarator; in either |
3885 | case, we want to use current_class_ref if it matches CONTEXT. */ |
3886 | && (same_type_ignoring_top_level_qualifiers_p |
3887 | (TREE_TYPE (current_class_ref), context))) |
3888 | decl = current_class_ref; |
3889 | else |
3890 | decl = build_dummy_object (context); |
3891 | |
3892 | return decl; |
3893 | } |
3894 | |
3895 | /* Returns 1 if OB is a placeholder object, or a pointer to one. */ |
3896 | |
3897 | int |
3898 | is_dummy_object (const_tree ob) |
3899 | { |
3900 | if (INDIRECT_REF_P (ob)) |
3901 | ob = TREE_OPERAND (ob, 0); |
3902 | return (TREE_CODE (ob) == CONVERT_EXPR |
3903 | && TREE_OPERAND (ob, 0) == void_node); |
3904 | } |
3905 | |
3906 | /* Returns 1 iff type T is something we want to treat as a scalar type for |
3907 | the purpose of deciding whether it is trivial/POD/standard-layout. */ |
3908 | |
3909 | bool |
3910 | scalarish_type_p (const_tree t) |
3911 | { |
3912 | if (t == error_mark_node) |
    return true;
3914 | |
3915 | return (SCALAR_TYPE_P (t) || VECTOR_TYPE_P (t)); |
3916 | } |
3917 | |
3918 | /* Returns true iff T requires non-trivial default initialization. */ |
3919 | |
3920 | bool |
3921 | type_has_nontrivial_default_init (const_tree t) |
3922 | { |
3923 | t = strip_array_types (CONST_CAST_TREE (t)); |
3924 | |
3925 | if (CLASS_TYPE_P (t)) |
3926 | return TYPE_HAS_COMPLEX_DFLT (t); |
  else
    return false;
3929 | } |
3930 | |
3931 | /* Track classes with only deleted copy/move constructors so that we can warn |
3932 | if they are used in call/return by value. */ |
3933 | |
3934 | static GTY(()) hash_set<tree>* deleted_copy_types; |
3935 | static void |
3936 | remember_deleted_copy (const_tree t) |
3937 | { |
3938 | if (!deleted_copy_types) |
    deleted_copy_types = hash_set<tree>::create_ggc (37);
3940 | deleted_copy_types->add (CONST_CAST_TREE (t)); |
3941 | } |

void
3943 | maybe_warn_parm_abi (tree t, location_t loc) |
3944 | { |
3945 | if (!deleted_copy_types |
3946 | || !deleted_copy_types->contains (t)) |
3947 | return; |
3948 | |
  warning_at (loc, OPT_Wabi, "the calling convention for %qT changes in "
              "-fabi-version=12 (GCC 8)", t);
3951 | static bool explained = false; |
3952 | if (!explained) |
3953 | { |
3954 | inform (loc, " because all of its copy and move constructors " |
3955 | "are deleted" ); |
3956 | explained = true; |
3957 | } |
3958 | } |
3959 | |
/* Returns true iff copying an object of type T (including via move
   constructor) is non-trivial.  That is, returns false iff T has no
   non-trivial copy constructors and no non-trivial move constructors,
   and not all of its copy/move constructors are deleted.  This
   function implements the ABI notion of non-trivial copy, which has
   diverged from the one in the standard.  */
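
/* For example (illustrative):

     struct S { S (const S &) = delete; };

   Before -fabi-version=12, an S argument was passed bitwise; from v12
   on this function returns true for S, so it is passed by invisible
   reference, and -Wabi warns where the two versions disagree.  */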
3965 | |
3966 | bool |
3967 | type_has_nontrivial_copy_init (const_tree type) |
3968 | { |
3969 | tree t = strip_array_types (CONST_CAST_TREE (type)); |
3970 | |
3971 | if (CLASS_TYPE_P (t)) |
3972 | { |
3973 | gcc_assert (COMPLETE_TYPE_P (t)); |
3974 | |
3975 | if (TYPE_HAS_COMPLEX_COPY_CTOR (t) |
3976 | || TYPE_HAS_COMPLEX_MOVE_CTOR (t)) |
3977 | /* Nontrivial. */ |
3978 | return true; |
3979 | |
3980 | if (cxx_dialect < cxx11) |
3981 | /* No deleted functions before C++11. */ |
3982 | return false; |
3983 | |
3984 | /* Before ABI v12 we did a bitwise copy of types with only deleted |
3985 | copy/move constructors. */ |
3986 | if (!abi_version_at_least (12) |
3987 | && !(warn_abi && abi_version_crosses (12))) |
3988 | return false; |
3989 | |
3990 | bool saw_copy = false; |
3991 | bool saw_non_deleted = false; |
3992 | |
3993 | if (CLASSTYPE_LAZY_MOVE_CTOR (t)) |
3994 | saw_copy = saw_non_deleted = true; |
3995 | else if (CLASSTYPE_LAZY_COPY_CTOR (t)) |
3996 | { |
3997 | saw_copy = true; |
3998 | if (classtype_has_move_assign_or_move_ctor_p (t, true)) |
3999 | /* [class.copy]/8 If the class definition declares a move |
4000 | constructor or move assignment operator, the implicitly declared |
4001 | copy constructor is defined as deleted.... */; |
4002 | else |
4003 | /* Any other reason the implicitly-declared function would be |
4004 | deleted would also cause TYPE_HAS_COMPLEX_COPY_CTOR to be |
4005 | set. */ |
4006 | saw_non_deleted = true; |
4007 | } |
4008 | |
4009 | if (!saw_non_deleted) |
4010 | for (ovl_iterator iter (CLASSTYPE_CONSTRUCTORS (t)); iter; ++iter) |
4011 | { |
4012 | tree fn = *iter; |
4013 | if (copy_fn_p (fn)) |
4014 | { |
4015 | saw_copy = true; |
4016 | if (!DECL_DELETED_FN (fn)) |
4017 | { |
4018 | /* Not deleted, therefore trivial. */ |
4019 | saw_non_deleted = true; |
4020 | break; |
4021 | } |
4022 | } |
4023 | } |
4024 | |
4025 | gcc_assert (saw_copy); |
4026 | |
4027 | if (saw_copy && !saw_non_deleted) |
4028 | { |
4029 | if (warn_abi && abi_version_crosses (12)) |
4030 | remember_deleted_copy (t); |
4031 | if (abi_version_at_least (12)) |
4032 | return true; |
4033 | } |
4034 | |
4035 | return false; |
4036 | } |
  else
    return false;
4039 | } |
4040 | |
4041 | /* Returns 1 iff type T is a trivially copyable type, as defined in |
4042 | [basic.types] and [class]. */ |
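
/* For example (illustrative):

     struct A { int i; };           // trivially copyable
     struct B { B (const B &); };   // user-provided copy ctor: no
     struct C { ~C (); };           // non-trivial destructor: no

   matching the TYPE_HAS_COMPLEX_* and TYPE_HAS_TRIVIAL_DESTRUCTOR
   checks below.  */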
4043 | |
4044 | bool |
4045 | trivially_copyable_p (const_tree t) |
4046 | { |
4047 | t = strip_array_types (CONST_CAST_TREE (t)); |
4048 | |
4049 | if (CLASS_TYPE_P (t)) |
4050 | return ((!TYPE_HAS_COPY_CTOR (t) |
4051 | || !TYPE_HAS_COMPLEX_COPY_CTOR (t)) |
4052 | && !TYPE_HAS_COMPLEX_MOVE_CTOR (t) |
4053 | && (!TYPE_HAS_COPY_ASSIGN (t) |
4054 | || !TYPE_HAS_COMPLEX_COPY_ASSIGN (t)) |
4055 | && !TYPE_HAS_COMPLEX_MOVE_ASSIGN (t) |
4056 | && TYPE_HAS_TRIVIAL_DESTRUCTOR (t)); |
4057 | else |
4058 | return !CP_TYPE_VOLATILE_P (t) && scalarish_type_p (t); |
4059 | } |
4060 | |
4061 | /* Returns 1 iff type T is a trivial type, as defined in [basic.types] and |
4062 | [class]. */ |
4063 | |
4064 | bool |
4065 | trivial_type_p (const_tree t) |
4066 | { |
4067 | t = strip_array_types (CONST_CAST_TREE (t)); |
4068 | |
4069 | if (CLASS_TYPE_P (t)) |
4070 | return (TYPE_HAS_TRIVIAL_DFLT (t) |
4071 | && trivially_copyable_p (t)); |
4072 | else |
4073 | return scalarish_type_p (t); |
4074 | } |
4075 | |
4076 | /* Returns 1 iff type T is a POD type, as defined in [basic.types]. */ |
4077 | |
4078 | bool |
4079 | pod_type_p (const_tree t) |
4080 | { |
4081 | /* This CONST_CAST is okay because strip_array_types returns its |
4082 | argument unmodified and we assign it to a const_tree. */ |
4083 | t = strip_array_types (CONST_CAST_TREE(t)); |
4084 | |
4085 | if (!CLASS_TYPE_P (t)) |
4086 | return scalarish_type_p (t); |
4087 | else if (cxx_dialect > cxx98) |
4088 | /* [class]/10: A POD struct is a class that is both a trivial class and a |
4089 | standard-layout class, and has no non-static data members of type |
4090 | non-POD struct, non-POD union (or array of such types). |
4091 | |
4092 | We don't need to check individual members because if a member is |
4093 | non-std-layout or non-trivial, the class will be too. */ |
4094 | return (std_layout_type_p (t) && trivial_type_p (t)); |
4095 | else |
4096 | /* The C++98 definition of POD is different. */ |
4097 | return !CLASSTYPE_NON_LAYOUT_POD_P (t); |
4098 | } |
4099 | |
4100 | /* Returns true iff T is POD for the purpose of layout, as defined in the |
4101 | C++ ABI. */ |
4102 | |
4103 | bool |
4104 | layout_pod_type_p (const_tree t) |
4105 | { |
4106 | t = strip_array_types (CONST_CAST_TREE (t)); |
4107 | |
4108 | if (CLASS_TYPE_P (t)) |
4109 | return !CLASSTYPE_NON_LAYOUT_POD_P (t); |
4110 | else |
4111 | return scalarish_type_p (t); |
4112 | } |
4113 | |
4114 | /* Returns true iff T is a standard-layout type, as defined in |
4115 | [basic.types]. */ |
4116 | |
4117 | bool |
4118 | std_layout_type_p (const_tree t) |
4119 | { |
4120 | t = strip_array_types (CONST_CAST_TREE (t)); |
4121 | |
4122 | if (CLASS_TYPE_P (t)) |
4123 | return !CLASSTYPE_NON_STD_LAYOUT (t); |
4124 | else |
4125 | return scalarish_type_p (t); |
4126 | } |
4127 | |
4128 | static bool record_has_unique_obj_representations (const_tree, const_tree); |
4129 | |
4130 | /* Returns true iff T satisfies std::has_unique_object_representations<T>, |
4131 | as defined in [meta.unary.prop]. */ |
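
/* For example (illustrative; padding is target-dependent):

     struct A { char c; int i; };   // likely padding after c: false
     struct B { int i; int j; };    // typically no padding: true
     long double ld;                // x86 XFmode padding: false

   as the REAL_TYPE and padding handling below shows.  */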
4132 | |
4133 | bool |
4134 | type_has_unique_obj_representations (const_tree t) |
4135 | { |
4136 | bool ret; |
4137 | |
4138 | t = strip_array_types (CONST_CAST_TREE (t)); |
4139 | |
4140 | if (!trivially_copyable_p (t)) |
4141 | return false; |
4142 | |
4143 | if (CLASS_TYPE_P (t) && CLASSTYPE_UNIQUE_OBJ_REPRESENTATIONS_SET (t)) |
4144 | return CLASSTYPE_UNIQUE_OBJ_REPRESENTATIONS (t); |
4145 | |
4146 | switch (TREE_CODE (t)) |
4147 | { |
4148 | case INTEGER_TYPE: |
4149 | case POINTER_TYPE: |
4150 | case REFERENCE_TYPE: |
      /* If some backend has any padding in these types, we should
         add a target hook for this and handle it there.  */
4153 | return true; |
4154 | |
4155 | case BOOLEAN_TYPE: |
      /* For bool, values other than 0 and 1 should appear only as a
         result of undefined behavior.  */
4158 | return true; |
4159 | |
4160 | case ENUMERAL_TYPE: |
4161 | return type_has_unique_obj_representations (ENUM_UNDERLYING_TYPE (t)); |
4162 | |
4163 | case REAL_TYPE: |
      /* XFmode certainly contains padding on x86, which the CPU
         doesn't store when storing long double values, so for that we
         have to return false.  Other kinds of floating point values
         are questionable due to +0.0/-0.0 and NaNs; let's play it
         safe for now.  */
4168 | return false; |
4169 | |
4170 | case FIXED_POINT_TYPE: |
4171 | return false; |
4172 | |
4173 | case OFFSET_TYPE: |
4174 | return true; |
4175 | |
4176 | case COMPLEX_TYPE: |
4177 | case VECTOR_TYPE: |
4178 | return type_has_unique_obj_representations ( |
---|