1/* Perform the semantic phase of lambda parsing, i.e., the process of
2 building tree structure, checking semantic consistency, and
3 building RTL. These routines are used both during actual parsing
4 and during the instantiation of template functions.
5
6 Copyright (C) 1998-2024 Free Software Foundation, Inc.
7
8 This file is part of GCC.
9
10 GCC is free software; you can redistribute it and/or modify it
11 under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 3, or (at your option)
13 any later version.
14
15 GCC is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
19
20You should have received a copy of the GNU General Public License
21along with GCC; see the file COPYING3. If not see
22<http://www.gnu.org/licenses/>. */
23
24#include "config.h"
25#include "system.h"
26#include "coretypes.h"
27#include "cp-tree.h"
28#include "stringpool.h"
29#include "cgraph.h"
30#include "tree-iterator.h"
31#include "toplev.h"
32#include "gimplify.h"
33#include "target.h"
34#include "decl.h"
35
36/* Constructor for a lambda expression. */
37
38tree
39build_lambda_expr (void)
40{
41 tree lambda = make_node (LAMBDA_EXPR);
42 LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) = CPLD_NONE;
43 LAMBDA_EXPR_CAPTURE_LIST (lambda) = NULL_TREE;
44 LAMBDA_EXPR_THIS_CAPTURE (lambda) = NULL_TREE;
45 LAMBDA_EXPR_REGEN_INFO (lambda) = NULL_TREE;
46 LAMBDA_EXPR_PENDING_PROXIES (lambda) = NULL;
47 return lambda;
48}
49
50/* Create the closure object for a LAMBDA_EXPR. */
51
tree
build_lambda_object (tree lambda_expr)
{
  /* Build aggregate constructor call.
     - cp_parser_braced_list
     - cp_parser_functional_cast */
  vec<constructor_elt, va_gc> *elts = NULL;
  tree node, expr, type;

  /* In a template, or on a prior error, there is nothing to build.  */
  if (processing_template_decl || lambda_expr == error_mark_node)
    return lambda_expr;

  /* Make sure any error messages refer to the lambda-introducer.  */
  location_t loc = LAMBDA_EXPR_LOCATION (lambda_expr);
  iloc_sentinel il (loc);

  /* Turn each capture on the list into a constructor element for the
     closure object.  */
  for (node = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr);
       node;
       node = TREE_CHAIN (node))
    {
      tree field = TREE_PURPOSE (node);
      tree val = TREE_VALUE (node);

      if (field == error_mark_node)
	{
	  expr = error_mark_node;
	  goto out;
	}

      if (TREE_CODE (val) == TREE_LIST)
	val = build_x_compound_expr_from_list (val, ELK_INIT,
					       tf_warning_or_error);

      if (DECL_P (val))
	mark_used (val);

      /* Mere mortals can't copy arrays with aggregate initialization, so
	 do some magic to make it work here.  */
      if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
	val = build_array_copy (val);
      else if (DECL_NORMAL_CAPTURE_P (field)
	       && !DECL_VLA_CAPTURE_P (field)
	       && !TYPE_REF_P (TREE_TYPE (field)))
	{
	  /* "the entities that are captured by copy are used to
	     direct-initialize each corresponding non-static data
	     member of the resulting closure object."

	     There's normally no way to express direct-initialization
	     from an element of a CONSTRUCTOR, so we build up a special
	     TARGET_EXPR to bypass the usual copy-initialization.  */
	  val = force_rvalue (val, tf_warning_or_error);
	  if (TREE_CODE (val) == TARGET_EXPR)
	    TARGET_EXPR_DIRECT_INIT_P (val) = true;
	}

      CONSTRUCTOR_APPEND_ELT (elts, DECL_NAME (field), val);
    }

  expr = build_constructor (init_list_type_node, elts);
  CONSTRUCTOR_IS_DIRECT_INIT (expr) = 1;

  /* N2927: "[The closure] class type is not an aggregate."
     But we briefly treat it as an aggregate to make this simpler.  */
  type = LAMBDA_EXPR_CLOSURE (lambda_expr);
  CLASSTYPE_NON_AGGREGATE (type) = 0;
  expr = finish_compound_literal (type, expr, tf_warning_or_error);
  protected_set_expr_location (expr, loc);
  CLASSTYPE_NON_AGGREGATE (type) = 1;

 out:
  return expr;
}
125
126/* Return an initialized RECORD_TYPE for LAMBDA.
127 LAMBDA must have its explicit captures already. */
128
tree
begin_lambda_type (tree lambda)
{
  /* Lambda names are nearly but not quite anonymous.  */
  tree name = make_anon_name ();
  IDENTIFIER_LAMBDA_P (name) = true;

  /* Create the new RECORD_TYPE for this lambda.  */
  tree type = xref_tag (/*tag_code=*/record_type, name);
  if (type == error_mark_node)
    return error_mark_node;

  /* Designate it as a struct so that we can use aggregate initialization.  */
  CLASSTYPE_DECLARED_CLASS (type) = false;

  /* Cross-reference the expression and the type.  */
  LAMBDA_EXPR_CLOSURE (lambda) = type;
  CLASSTYPE_LAMBDA_EXPR (type) = lambda;

  /* In C++17, assume the closure is literal; we'll clear the flag later if
     necessary.  */
  if (cxx_dialect >= cxx17)
    CLASSTYPE_LITERAL_P (type) = true;

  /* Clear base types.  */
  xref_basetypes (type, /*bases=*/NULL_TREE);

  /* Start the class.  Note: the caller is expected to add members and
     finish the class definition.  */
  type = begin_class_definition (type);

  return type;
}
161
162/* Given a LAMBDA_EXPR or closure type LAMBDA, return the op() of the
163 closure type. */
164
165tree
166lambda_function (tree lambda)
167{
168 tree type;
169 if (TREE_CODE (lambda) == LAMBDA_EXPR)
170 type = LAMBDA_EXPR_CLOSURE (lambda);
171 else
172 type = lambda;
173 gcc_assert (LAMBDA_TYPE_P (type));
174 /* Don't let debug_tree cause instantiation. */
175 if (CLASSTYPE_TEMPLATE_INSTANTIATION (type)
176 && !COMPLETE_OR_OPEN_TYPE_P (type))
177 return NULL_TREE;
178 lambda = get_class_binding_direct (type, call_op_identifier);
179 if (lambda)
180 lambda = STRIP_TEMPLATE (get_first_fn (lambda));
181 return lambda;
182}
183
184/* True if EXPR is an expression whose type can be used directly in lambda
185 capture. Not to be used for 'auto'. */
186
187static bool
188type_deducible_expression_p (tree expr)
189{
190 if (!type_dependent_expression_p (expr))
191 return true;
192 if (BRACE_ENCLOSED_INITIALIZER_P (expr)
193 || TREE_CODE (expr) == EXPR_PACK_EXPANSION)
194 return false;
195 tree t = non_reference (TREE_TYPE (expr));
196 return (t && TREE_CODE (t) != TYPE_PACK_EXPANSION
197 && !WILDCARD_TYPE_P (t) && !LAMBDA_TYPE_P (t)
198 && !array_of_unknown_bound_p (t)
199 && !type_uses_auto (t));
200}
201
202/* Returns the type to use for the FIELD_DECL corresponding to the
203 capture of EXPR. EXPLICIT_INIT_P indicates whether this is a
204 C++14 init capture, and BY_REFERENCE_P indicates whether we're
205 capturing by reference. */
206
tree
lambda_capture_field_type (tree expr, bool explicit_init_p,
			   bool by_reference_p)
{
  tree type;
  bool is_this = is_this_parameter (tree_strip_nop_conversions (expr));

  if (is_this)
    /* Capturing 'this': the field has the type of the pointer itself.  */
    type = TREE_TYPE (expr);
  else if (explicit_init_p)
    {
      /* C++14 init-capture: deduce the field type as if declared 'auto'
	 (or 'auto&' for by-reference captures).  */
      tree auto_node = make_auto ();

      type = auto_node;
      if (by_reference_p)
	/* Add the reference now, so deduction doesn't lose
	   outermost CV qualifiers of EXPR.  */
	type = build_reference_type (type);
      if (uses_parameter_packs (expr))
	/* Stick with 'auto' even if the type could be deduced.  */;
      else
	type = do_auto_deduction (type, expr, auto_node);
    }
  else if (!type_deducible_expression_p (expr))
    {
      /* The expression's type can't be used directly yet; defer with a
	 DECLTYPE_TYPE to be resolved at instantiation time.  */
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = expr;
      DECLTYPE_FOR_LAMBDA_CAPTURE (type) = true;
      DECLTYPE_FOR_REF_CAPTURE (type) = by_reference_p;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  else
    {
      STRIP_ANY_LOCATION_WRAPPER (expr);

      if (!by_reference_p && is_capture_proxy (expr))
	{
	  /* When capturing by-value another capture proxy from an enclosing
	     lambda, consider the type of the corresponding field instead,
	     as the proxy may be additionally const-qualifed if the enclosing
	     lambda is non-mutable (PR94376).  */
	  gcc_assert (TREE_CODE (DECL_VALUE_EXPR (expr)) == COMPONENT_REF);
	  expr = TREE_OPERAND (DECL_VALUE_EXPR (expr), 1);
	}

      type = non_reference (unlowered_expr_type (expr));

      /* By-reference captures — and functions, which can't be stored by
	 value — get a reference-typed field.  */
      if (by_reference_p || TREE_CODE (type) == FUNCTION_TYPE)
	type = build_reference_type (type);
    }

  return type;
}
260
261/* Returns true iff DECL is a lambda capture proxy variable created by
262 build_capture_proxy. */
263
264bool
265is_capture_proxy (tree decl)
266{
267 /* Location wrappers should be stripped or otherwise handled by the
268 caller before using this predicate. */
269 gcc_checking_assert (!location_wrapper_p (decl));
270
271 return (VAR_P (decl)
272 && DECL_HAS_VALUE_EXPR_P (decl)
273 && !DECL_ANON_UNION_VAR_P (decl)
274 && !DECL_DECOMPOSITION_P (decl)
275 && !DECL_FNAME_P (decl)
276 && !(DECL_ARTIFICIAL (decl)
277 && DECL_LANG_SPECIFIC (decl)
278 && DECL_OMP_PRIVATIZED_MEMBER (decl))
279 && LAMBDA_FUNCTION_P (DECL_CONTEXT (decl)));
280}
281
282/* Returns true iff DECL is a capture proxy for a normal capture
283 (i.e. without explicit initializer). */
284
285bool
286is_normal_capture_proxy (tree decl)
287{
288 if (!is_capture_proxy (decl))
289 /* It's not a capture proxy. */
290 return false;
291
292 return (DECL_LANG_SPECIFIC (decl)
293 && DECL_CAPTURED_VARIABLE (decl));
294}
295
296/* Returns true iff DECL is a capture proxy for a normal capture
297 of a constant variable. */
298
299bool
300is_constant_capture_proxy (tree decl)
301{
302 if (is_normal_capture_proxy (decl))
303 return decl_constant_var_p (DECL_CAPTURED_VARIABLE (decl));
304 return false;
305}
306
307/* VAR is a capture proxy created by build_capture_proxy; add it to the
308 current function, which is the operator() for the appropriate lambda. */
309
310void
311insert_capture_proxy (tree var)
312{
313 if (is_normal_capture_proxy (decl: var))
314 {
315 tree cap = DECL_CAPTURED_VARIABLE (var);
316 if (CHECKING_P)
317 {
318 gcc_assert (!is_normal_capture_proxy (cap));
319 tree old = retrieve_local_specialization (cap);
320 if (old)
321 gcc_assert (DECL_CONTEXT (old) != DECL_CONTEXT (var));
322 }
323 register_local_specialization (var, cap);
324 }
325
326 /* Put the capture proxy in the extra body block so that it won't clash
327 with a later local variable. */
328 pushdecl_outermost_localscope (var);
329
330 /* And put a DECL_EXPR in the STATEMENT_LIST for the same block. */
331 var = build_stmt (DECL_SOURCE_LOCATION (var), DECL_EXPR, var);
332 tree stmt_list = (*stmt_list_stack)[1];
333 gcc_assert (stmt_list);
334 append_to_statement_list_force (var, &stmt_list);
335}
336
337/* We've just finished processing a lambda; if the containing scope is also
338 a lambda, insert any capture proxies that were created while processing
339 the nested lambda. */
340
341void
342insert_pending_capture_proxies (void)
343{
344 tree lam;
345 vec<tree, va_gc> *proxies;
346 unsigned i;
347
348 if (!current_function_decl || !LAMBDA_FUNCTION_P (current_function_decl))
349 return;
350
351 lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl));
352 proxies = LAMBDA_EXPR_PENDING_PROXIES (lam);
353 for (i = 0; i < vec_safe_length (v: proxies); ++i)
354 {
355 tree var = (*proxies)[i];
356 insert_capture_proxy (var);
357 }
358 release_tree_vector (LAMBDA_EXPR_PENDING_PROXIES (lam));
359 LAMBDA_EXPR_PENDING_PROXIES (lam) = NULL;
360}
361
362/* Given REF, a COMPONENT_REF designating a field in the lambda closure,
363 return the type we want the proxy to have: the type of the field itself,
364 with added const-qualification if the lambda isn't mutable and the
365 capture is by value. */
366
tree
lambda_proxy_type (tree ref)
{
  tree type;
  if (ref == error_mark_node)
    return error_mark_node;
  if (REFERENCE_REF_P (ref))
    ref = TREE_OPERAND (ref, 0);
  gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
  type = TREE_TYPE (ref);
  if (!type || WILDCARD_TYPE_P (non_reference (type)))
    {
      /* The field's type is still dependent; represent the proxy type
	 as a DECLTYPE_TYPE to be resolved at instantiation time.  */
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = ref;
      DECLTYPE_FOR_LAMBDA_PROXY (type) = true;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  /* A pack capture's proxy is itself a pack expansion.  */
  if (DECL_PACK_P (TREE_OPERAND (ref, 1)))
    type = make_pack_expansion (type);
  return type;
}
388
389/* MEMBER is a capture field in a lambda closure class. Now that we're
390 inside the operator(), build a placeholder var for future lookups and
391 debugging. */
392
393static tree
394build_capture_proxy (tree member, tree init)
395{
396 tree var, object, fn, closure, name, lam, type;
397
398 if (PACK_EXPANSION_P (member))
399 member = PACK_EXPANSION_PATTERN (member);
400
401 closure = DECL_CONTEXT (member);
402 fn = lambda_function (lambda: closure);
403 lam = CLASSTYPE_LAMBDA_EXPR (closure);
404
405 object = DECL_ARGUMENTS (fn);
406 /* The proxy variable forwards to the capture field. */
407 if (INDIRECT_TYPE_P (TREE_TYPE (object)))
408 object = build_fold_indirect_ref (object);
409 object = finish_non_static_data_member (member, object, NULL_TREE);
410 if (REFERENCE_REF_P (object))
411 object = TREE_OPERAND (object, 0);
412
413 /* Remove the __ inserted by add_capture. */
414 if (IDENTIFIER_POINTER (DECL_NAME (member))[2] == '_'
415 && IDENTIFIER_POINTER (DECL_NAME (member))[3] == '.')
416 name = get_identifier ("_");
417 else
418 name = get_identifier (IDENTIFIER_POINTER (DECL_NAME (member)) + 2);
419
420 type = lambda_proxy_type (ref: object);
421
422 if (name == this_identifier && !INDIRECT_TYPE_P (type))
423 {
424 type = build_pointer_type (type);
425 type = cp_build_qualified_type (type, TYPE_QUAL_CONST);
426 object = build_fold_addr_expr_with_type (object, type);
427 }
428
429 if (DECL_VLA_CAPTURE_P (member))
430 {
431 /* Rebuild the VLA type from the pointer and maxindex. */
432 tree field = next_aggregate_field (TYPE_FIELDS (type));
433 tree ptr = build_simple_component_ref (object, field);
434 field = next_aggregate_field (DECL_CHAIN (field));
435 tree max = build_simple_component_ref (object, field);
436 type = build_cplus_array_type (TREE_TYPE (TREE_TYPE (ptr)),
437 build_index_type (max));
438 type = build_reference_type (type);
439 object = convert (type, ptr);
440 }
441
442 complete_type (type);
443
444 var = build_decl (input_location, VAR_DECL, name, type);
445 SET_DECL_VALUE_EXPR (var, object);
446 DECL_HAS_VALUE_EXPR_P (var) = 1;
447 DECL_ARTIFICIAL (var) = 1;
448 TREE_USED (var) = 1;
449 DECL_CONTEXT (var) = fn;
450
451 if (DECL_NORMAL_CAPTURE_P (member))
452 {
453 if (DECL_VLA_CAPTURE_P (member))
454 {
455 init = CONSTRUCTOR_ELT (init, 0)->value;
456 init = TREE_OPERAND (init, 0); // Strip ADDR_EXPR.
457 init = TREE_OPERAND (init, 0); // Strip ARRAY_REF.
458 }
459 else
460 {
461 if (PACK_EXPANSION_P (init))
462 init = PACK_EXPANSION_PATTERN (init);
463 }
464
465 if (INDIRECT_REF_P (init))
466 init = TREE_OPERAND (init, 0);
467 STRIP_NOPS (init);
468
469 gcc_assert (VAR_P (init) || TREE_CODE (init) == PARM_DECL);
470 while (is_normal_capture_proxy (decl: init))
471 init = DECL_CAPTURED_VARIABLE (init);
472 retrofit_lang_decl (var);
473 DECL_CAPTURED_VARIABLE (var) = init;
474 }
475
476 if (name == this_identifier)
477 {
478 gcc_assert (LAMBDA_EXPR_THIS_CAPTURE (lam) == member);
479 LAMBDA_EXPR_THIS_CAPTURE (lam) = var;
480 }
481
482 if (fn == current_function_decl)
483 insert_capture_proxy (var);
484 else
485 vec_safe_push (LAMBDA_EXPR_PENDING_PROXIES (lam), obj: var);
486
487 return var;
488}
489
/* Identifiers for the two fields of the VLA capture struct, created once
   on first use and kept alive across GC via GTY.  */
static GTY(()) tree ptr_id;
static GTY(()) tree max_id;

/* Return a struct containing a pointer and a length for lambda capture of
   an array of runtime length.  */

static tree
vla_capture_type (tree array_type)
{
  /* Build an anonymous record { ptr, max } to stand in for the VLA.  */
  tree type = xref_tag (record_type, make_anon_name ());
  xref_basetypes (type, NULL_TREE);
  type = begin_class_definition (type);
  if (!ptr_id)
    {
      ptr_id = get_identifier ("ptr");
      max_id = get_identifier ("max");
    }
  /* 'ptr' points at the array's element type; 'max' holds the maximum
     index as a sizetype value.  */
  tree ptrtype = build_pointer_type (TREE_TYPE (array_type));
  tree field = build_decl (input_location, FIELD_DECL, ptr_id, ptrtype);
  finish_member_declaration (field);
  field = build_decl (input_location, FIELD_DECL, max_id, sizetype);
  finish_member_declaration (field);
  return finish_struct (type, NULL_TREE);
}
514
515/* From an ID and INITIALIZER, create a capture (by reference if
516 BY_REFERENCE_P is true), add it to the capture-list for LAMBDA,
517 and return it. If ID is `this', BY_REFERENCE_P says whether
518 `*this' is captured by reference. */
519
520tree
521add_capture (tree lambda, tree id, tree orig_init, bool by_reference_p,
522 bool explicit_init_p, unsigned *name_independent_cnt)
523{
524 char *buf;
525 tree type, member, name;
526 bool vla = false;
527 bool variadic = false;
528 tree initializer = orig_init;
529
530 if (PACK_EXPANSION_P (initializer))
531 {
532 initializer = PACK_EXPANSION_PATTERN (initializer);
533 variadic = true;
534 }
535
536 if (TREE_CODE (initializer) == TREE_LIST
537 /* A pack expansion might end up with multiple elements. */
538 && !PACK_EXPANSION_P (TREE_VALUE (initializer)))
539 initializer = build_x_compound_expr_from_list (initializer, ELK_INIT,
540 tf_warning_or_error);
541 type = TREE_TYPE (initializer);
542 if (type == error_mark_node)
543 return error_mark_node;
544
545 if (!dependent_type_p (type) && array_of_runtime_bound_p (type))
546 {
547 vla = true;
548 if (!by_reference_p)
549 error ("array of runtime bound cannot be captured by copy, "
550 "only by reference");
551
552 /* For a VLA, we capture the address of the first element and the
553 maximum index, and then reconstruct the VLA for the proxy. */
554 tree elt = cp_build_array_ref (input_location, initializer,
555 integer_zero_node, tf_warning_or_error);
556 initializer = build_constructor_va (init_list_type_node, 2,
557 NULL_TREE, build_address (elt),
558 NULL_TREE, array_type_nelts (type));
559 type = vla_capture_type (array_type: type);
560 }
561 else if (!dependent_type_p (type)
562 && variably_modified_type_p (type, NULL_TREE))
563 {
564 sorry ("capture of variably-modified type %qT that is not an N3639 array "
565 "of runtime bound", type);
566 if (TREE_CODE (type) == ARRAY_TYPE
567 && variably_modified_type_p (TREE_TYPE (type), NULL_TREE))
568 inform (input_location, "because the array element type %qT has "
569 "variable size", TREE_TYPE (type));
570 return error_mark_node;
571 }
572 else
573 {
574 type = lambda_capture_field_type (expr: initializer, explicit_init_p,
575 by_reference_p);
576 if (type == error_mark_node)
577 return error_mark_node;
578
579 if (id == this_identifier && !by_reference_p)
580 {
581 gcc_assert (INDIRECT_TYPE_P (type));
582 type = TREE_TYPE (type);
583 initializer = cp_build_fold_indirect_ref (initializer);
584 }
585
586 if (dependent_type_p (type))
587 ;
588 else if (id != this_identifier && by_reference_p)
589 {
590 if (!lvalue_p (initializer))
591 {
592 error ("cannot capture %qE by reference", initializer);
593 return error_mark_node;
594 }
595 }
596 else
597 {
598 /* Capture by copy requires a complete type. */
599 type = complete_type (type);
600 if (!COMPLETE_TYPE_P (type))
601 {
602 error ("capture by copy of incomplete type %qT", type);
603 cxx_incomplete_type_inform (type);
604 return error_mark_node;
605 }
606 else if (!verify_type_context (input_location,
607 TCTX_CAPTURE_BY_COPY, type))
608 return error_mark_node;
609 }
610 }
611
612 /* Add __ to the beginning of the field name so that user code
613 won't find the field with name lookup. We can't just leave the name
614 unset because template instantiation uses the name to find
615 instantiated fields. */
616 if (id_equal (id, str: "_") && name_independent_cnt)
617 {
618 if (*name_independent_cnt == 0)
619 name = get_identifier ("___");
620 else
621 {
622 /* For 2nd and later name-independent capture use
623 unique names. */
624 char buf2[5 + (HOST_BITS_PER_INT + 2) / 3];
625 sprintf (s: buf2, format: "___.%u", *name_independent_cnt);
626 name = get_identifier (buf2);
627 }
628 name_independent_cnt[0]++;
629 }
630 else
631 {
632 buf = XALLOCAVEC (char, IDENTIFIER_LENGTH (id) + 3);
633 buf[1] = buf[0] = '_';
634 memcpy (dest: buf + 2, IDENTIFIER_POINTER (id),
635 IDENTIFIER_LENGTH (id) + 1);
636 name = get_identifier (buf);
637 }
638
639 if (variadic)
640 {
641 type = make_pack_expansion (type);
642 if (explicit_init_p)
643 /* With an explicit initializer 'type' is auto, which isn't really a
644 parameter pack in this context. We will want as many fields as we
645 have elements in the expansion of the initializer, so use its packs
646 instead. */
647 {
648 PACK_EXPANSION_PARAMETER_PACKS (type)
649 = uses_parameter_packs (initializer);
650 PACK_EXPANSION_AUTO_P (type) = true;
651 }
652 }
653
654 /* Make member variable. */
655 member = build_decl (input_location, FIELD_DECL, name, type);
656 DECL_VLA_CAPTURE_P (member) = vla;
657
658 if (!explicit_init_p)
659 /* Normal captures are invisible to name lookup but uses are replaced
660 with references to the capture field; we implement this by only
661 really making them invisible in unevaluated context; see
662 qualify_lookup. For now, let's make explicitly initialized captures
663 always visible. */
664 DECL_NORMAL_CAPTURE_P (member) = true;
665
666 if (id == this_identifier)
667 LAMBDA_EXPR_THIS_CAPTURE (lambda) = member;
668
669 /* Add it to the appropriate closure class if we've started it. */
670 if (current_class_type
671 && current_class_type == LAMBDA_EXPR_CLOSURE (lambda))
672 {
673 if (COMPLETE_TYPE_P (current_class_type))
674 internal_error ("trying to capture %qD in instantiation of "
675 "generic lambda", id);
676 finish_member_declaration (member);
677 }
678
679 tree listmem = member;
680 if (variadic)
681 {
682 listmem = make_pack_expansion (member);
683 initializer = orig_init;
684 }
685 LAMBDA_EXPR_CAPTURE_LIST (lambda)
686 = tree_cons (listmem, initializer, LAMBDA_EXPR_CAPTURE_LIST (lambda));
687
688 if (LAMBDA_EXPR_CLOSURE (lambda))
689 return build_capture_proxy (member, init: initializer);
690 /* For explicit captures we haven't started the function yet, so we wait
691 and build the proxy from cp_parser_lambda_body. */
692 LAMBDA_CAPTURE_EXPLICIT_P (LAMBDA_EXPR_CAPTURE_LIST (lambda)) = true;
693 return NULL_TREE;
694}
695
696/* Register all the capture members on the list CAPTURES, which is the
697 LAMBDA_EXPR_CAPTURE_LIST for the lambda after the introducer. */
698
699void
700register_capture_members (tree captures)
701{
702 if (captures == NULL_TREE)
703 return;
704
705 register_capture_members (TREE_CHAIN (captures));
706
707 tree field = TREE_PURPOSE (captures);
708 if (PACK_EXPANSION_P (field))
709 field = PACK_EXPANSION_PATTERN (field);
710
711 finish_member_declaration (field);
712}
713
714/* Similar to add_capture, except this works on a stack of nested lambdas.
715 BY_REFERENCE_P in this case is derived from the default capture mode.
716 Returns the capture for the lambda at the bottom of the stack. */
717
718tree
719add_default_capture (tree lambda_stack, tree id, tree initializer)
720{
721 bool this_capture_p = (id == this_identifier);
722 tree var = NULL_TREE;
723 tree saved_class_type = current_class_type;
724
725 for (tree node = lambda_stack;
726 node;
727 node = TREE_CHAIN (node))
728 {
729 tree lambda = TREE_VALUE (node);
730
731 current_class_type = LAMBDA_EXPR_CLOSURE (lambda);
732 if (DECL_PACK_P (initializer))
733 initializer = make_pack_expansion (initializer);
734 var = add_capture (lambda,
735 id,
736 orig_init: initializer,
737 /*by_reference_p=*/
738 (this_capture_p
739 || (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda)
740 == CPLD_REFERENCE)),
741 /*explicit_init_p=*/false, NULL);
742 initializer = convert_from_reference (var);
743
744 /* Warn about deprecated implicit capture of this via [=]. */
745 if (cxx_dialect >= cxx20
746 && this_capture_p
747 && LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) == CPLD_COPY)
748 {
749 if (warning_at (LAMBDA_EXPR_LOCATION (lambda), OPT_Wdeprecated,
750 "implicit capture of %qE via %<[=]%> is deprecated "
751 "in C++20", this_identifier))
752 inform (LAMBDA_EXPR_LOCATION (lambda), "add explicit %<this%> or "
753 "%<*this%> capture");
754 }
755 }
756
757 current_class_type = saved_class_type;
758
759 return var;
760}
761
762/* Return the capture pertaining to a use of 'this' in LAMBDA, in the
763 form of an INDIRECT_REF, possibly adding it through default
764 capturing, if ADD_CAPTURE_P is nonzero. If ADD_CAPTURE_P is negative,
765 try to capture but don't complain if we can't. */
766
767tree
768lambda_expr_this_capture (tree lambda, int add_capture_p)
769{
770 tree result;
771
772 tree this_capture = LAMBDA_EXPR_THIS_CAPTURE (lambda);
773
774 /* In unevaluated context this isn't an odr-use, so don't capture. */
775 if (cp_unevaluated_operand)
776 add_capture_p = false;
777
778 /* Try to default capture 'this' if we can. */
779 if (!this_capture)
780 {
781 tree lambda_stack = NULL_TREE;
782 tree init = NULL_TREE;
783 bool saw_complete = false;
784
785 /* If we are in a lambda function, we can move out until we hit:
786 1. a non-lambda function or NSDMI,
787 2. a lambda function capturing 'this', or
788 3. a non-default capturing lambda function. */
789 for (tree tlambda = lambda; ;)
790 {
791 if (add_capture_p
792 && LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (tlambda) == CPLD_NONE)
793 /* tlambda won't let us capture 'this'. */
794 break;
795
796 if (add_capture_p)
797 lambda_stack = tree_cons (NULL_TREE,
798 tlambda,
799 lambda_stack);
800
801 tree closure = LAMBDA_EXPR_CLOSURE (tlambda);
802 if (COMPLETE_TYPE_P (closure))
803 /* We're instantiating a generic lambda op(), the containing
804 scope may be gone. */
805 saw_complete = true;
806
807 tree containing_function
808 = decl_function_context (TYPE_NAME (closure));
809
810 tree ex = LAMBDA_EXPR_EXTRA_SCOPE (tlambda);
811 if (ex && TREE_CODE (ex) == FIELD_DECL)
812 {
813 /* Lambda in an NSDMI. We don't have a function to look up
814 'this' in, but we can find (or rebuild) the fake one from
815 inject_this_parameter. */
816 if (!containing_function && !saw_complete)
817 /* If we're parsing a lambda in a non-local class,
818 we can find the fake 'this' in scope_chain. */
819 init = scope_chain->x_current_class_ptr;
820 else
821 /* Otherwise it's either gone or buried in
822 function_context_stack, so make another. */
823 init = build_this_parm (NULL_TREE, DECL_CONTEXT (ex),
824 TYPE_UNQUALIFIED);
825 gcc_checking_assert
826 (init && (TREE_TYPE (TREE_TYPE (init))
827 == current_nonlambda_class_type ()));
828 break;
829 }
830
831 if (containing_function == NULL_TREE)
832 /* We ran out of scopes; there's no 'this' to capture. */
833 break;
834
835 if (!LAMBDA_FUNCTION_P (containing_function))
836 {
837 /* We found a non-lambda function.
838 There is no this pointer in xobj member functions. */
839 if (DECL_IOBJ_MEMBER_FUNCTION_P (containing_function))
840 /* First parameter is 'this'. */
841 init = DECL_ARGUMENTS (containing_function);
842 break;
843 }
844
845 tlambda
846 = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (containing_function));
847
848 if (LAMBDA_EXPR_THIS_CAPTURE (tlambda))
849 {
850 /* An outer lambda has already captured 'this'. */
851 init = LAMBDA_EXPR_THIS_CAPTURE (tlambda);
852 break;
853 }
854 }
855
856 if (init)
857 {
858 if (add_capture_p)
859 this_capture = add_default_capture (lambda_stack,
860 /*id=*/this_identifier,
861 initializer: init);
862 else
863 this_capture = init;
864 }
865 }
866
867 if (cp_unevaluated_operand)
868 result = this_capture;
869 else if (!this_capture)
870 {
871 if (add_capture_p == 1)
872 {
873 error ("%<this%> was not captured for this lambda function");
874 result = error_mark_node;
875 }
876 else
877 result = NULL_TREE;
878 }
879 else
880 {
881 /* To make sure that current_class_ref is for the lambda. */
882 gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (current_class_ref))
883 == LAMBDA_EXPR_CLOSURE (lambda));
884
885 result = this_capture;
886
887 /* If 'this' is captured, each use of 'this' is transformed into an
888 access to the corresponding unnamed data member of the closure
889 type cast (_expr.cast_ 5.4) to the type of 'this'. [ The cast
890 ensures that the transformed expression is an rvalue. ] */
891 result = rvalue (result);
892 }
893
894 return result;
895}
896
897/* Return the innermost LAMBDA_EXPR we're currently in, if any. */
898
899tree
900current_lambda_expr (void)
901{
902 tree type = current_class_type;
903 while (type && !LAMBDA_TYPE_P (type))
904 type = decl_type_context (TYPE_NAME (type));
905 if (type)
906 return CLASSTYPE_LAMBDA_EXPR (type);
907 else
908 return NULL_TREE;
909}
910
911/* Return the current LAMBDA_EXPR, if this is a resolvable dummy
912 object. NULL otherwise.. */
913
914static tree
915resolvable_dummy_lambda (tree object)
916{
917 if (!is_dummy_object (object))
918 return NULL_TREE;
919
920 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (object));
921 gcc_assert (!TYPE_PTR_P (type));
922
923 if (type != current_class_type
924 && current_class_type
925 && LAMBDA_TYPE_P (current_class_type)
926 && lambda_function (current_class_type)
927 && DERIVED_FROM_P (type, nonlambda_method_basetype()))
928 return CLASSTYPE_LAMBDA_EXPR (current_class_type);
929
930 return NULL_TREE;
931}
932
933/* We don't want to capture 'this' until we know we need it, i.e. after
934 overload resolution has chosen a non-static member function. At that
935 point we call this function to turn a dummy object into a use of the
936 'this' capture. */
937
938tree
939maybe_resolve_dummy (tree object, bool add_capture_p)
940{
941 if (tree lam = resolvable_dummy_lambda (object))
942 if (tree cap = lambda_expr_this_capture (lambda: lam, add_capture_p))
943 if (cap != error_mark_node)
944 object = build_fold_indirect_ref (cap);
945
946 return object;
947}
948
949/* When parsing a generic lambda containing an argument-dependent
950 member function call we defer overload resolution to instantiation
951 time. But we have to know now whether to capture this or not.
952 Do that if FNS contains any non-static fns.
953 The std doesn't anticipate this case, but I expect this to be the
954 outcome of discussion. */
955
956void
957maybe_generic_this_capture (tree object, tree fns)
958{
959 if (tree lam = resolvable_dummy_lambda (object))
960 if (!LAMBDA_EXPR_THIS_CAPTURE (lam))
961 {
962 /* We've not yet captured, so look at the function set of
963 interest. */
964 if (BASELINK_P (fns))
965 fns = BASELINK_FUNCTIONS (fns);
966 bool id_expr = TREE_CODE (fns) == TEMPLATE_ID_EXPR;
967 if (id_expr)
968 fns = TREE_OPERAND (fns, 0);
969
970 for (lkp_iterator iter (fns); iter; ++iter)
971 if (((!id_expr && TREE_CODE (*iter) != USING_DECL)
972 || TREE_CODE (*iter) == TEMPLATE_DECL)
973 && DECL_IOBJ_MEMBER_FUNCTION_P (*iter))
974 {
975 /* Found a non-static member. Capture this. */
976 lambda_expr_this_capture (lambda: lam, /*maybe*/add_capture_p: -1);
977 break;
978 }
979 }
980}
981
982/* Returns the innermost non-lambda function. */
983
984tree
985current_nonlambda_function (void)
986{
987 tree fn = current_function_decl;
988 while (fn && LAMBDA_FUNCTION_P (fn))
989 fn = decl_function_context (fn);
990 return fn;
991}
992
993/* Returns the method basetype of the innermost non-lambda function, including
994 a hypothetical constructor if inside an NSDMI, or NULL_TREE if none. */
995
tree
nonlambda_method_basetype (void)
{
  /* No 'this' in scope at all.  */
  if (!current_class_ref)
    return NULL_TREE;

  tree type = current_class_type;
  if (!type || !LAMBDA_TYPE_P (type))
    return type;

  /* We're inside a lambda; walk out through the enclosing closure types
     looking for the context that supplies a 'this'.  */
  while (true)
    {
      tree lam = CLASSTYPE_LAMBDA_EXPR (type);
      tree ex = LAMBDA_EXPR_EXTRA_SCOPE (lam);
      if (ex && TREE_CODE (ex) == FIELD_DECL)
	/* Lambda in an NSDMI.  */
	return DECL_CONTEXT (ex);

      tree fn = TYPE_CONTEXT (type);
      if (!fn || TREE_CODE (fn) != FUNCTION_DECL
	  || !DECL_IOBJ_MEMBER_FUNCTION_P (fn))
	/* No enclosing non-lambda method.  */
	return NULL_TREE;
      if (!LAMBDA_FUNCTION_P (fn))
	/* Found an enclosing non-lambda method.  */
	return TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
      type = DECL_CONTEXT (fn);
    }
}
1025
1026/* Like current_scope, but looking through lambdas. */
1027
1028tree
1029current_nonlambda_scope (void)
1030{
1031 tree scope = current_scope ();
1032 for (;;)
1033 {
1034 if (TREE_CODE (scope) == FUNCTION_DECL
1035 && LAMBDA_FUNCTION_P (scope))
1036 {
1037 scope = CP_TYPE_CONTEXT (DECL_CONTEXT (scope));
1038 continue;
1039 }
1040 else if (LAMBDA_TYPE_P (scope))
1041 {
1042 scope = CP_TYPE_CONTEXT (scope);
1043 continue;
1044 }
1045 break;
1046 }
1047 return scope;
1048}
1049
1050/* Helper function for maybe_add_lambda_conv_op; build a CALL_EXPR with
1051 indicated FN and NARGS, but do not initialize the return type or any of the
1052 argument slots. */
1053
1054static tree
1055prepare_op_call (tree fn, int nargs)
1056{
1057 tree t;
1058
1059 t = build_vl_exp (CALL_EXPR, nargs + 3);
1060 CALL_EXPR_FN (t) = fn;
1061 CALL_EXPR_STATIC_CHAIN (t) = NULL;
1062
1063 return t;
1064}
1065
1066/* Return true iff CALLOP is the op() for a generic lambda. */
1067
1068bool
1069generic_lambda_fn_p (tree callop)
1070{
1071 return (LAMBDA_FUNCTION_P (callop)
1072 && DECL_TEMPLATE_INFO (callop)
1073 && PRIMARY_TEMPLATE_P (DECL_TI_TEMPLATE (callop)));
1074}
1075
1076/* If the closure TYPE has a static op(), also add a conversion to function
1077 pointer. */
1078
1079void
1080maybe_add_lambda_conv_op (tree type)
1081{
1082 bool nested = (cfun != NULL);
1083 bool nested_def = decl_function_context (TYPE_MAIN_DECL (type));
1084 tree callop = lambda_function (lambda: type);
1085 tree lam = CLASSTYPE_LAMBDA_EXPR (type);
1086
1087 if (LAMBDA_EXPR_CAPTURE_LIST (lam) != NULL_TREE
1088 || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) != CPLD_NONE)
1089 return;
1090
1091 if (processing_template_decl)
1092 return;
1093
1094 bool const generic_lambda_p = generic_lambda_fn_p (callop);
1095
1096 if (!generic_lambda_p && undeduced_auto_decl (callop))
1097 {
1098 /* If the op() wasn't deduced due to errors, give up. */
1099 gcc_assert (errorcount || sorrycount);
1100 return;
1101 }
1102
1103 /* Non-generic non-capturing lambdas only have a conversion function to
1104 pointer to function when the trailing requires-clause's constraints are
1105 satisfied. */
1106 if (!generic_lambda_p && !constraints_satisfied_p (callop))
1107 return;
1108
1109 /* Non-template conversion operators are defined directly with build_call_a
1110 and using DIRECT_ARGVEC for arguments (including 'this'). Templates are
1111 deferred and the CALL is built in-place. In the case of a deduced return
1112 call op, the decltype expression, DECLTYPE_CALL, used as a substitute for
1113 the return type is also built in-place. The arguments of DECLTYPE_CALL in
1114 the return expression may differ in flags from those in the body CALL. In
1115 particular, parameter pack expansions are marked PACK_EXPANSION_LOCAL_P in
1116 the body CALL, but not in DECLTYPE_CALL. */
1117
1118 vec<tree, va_gc> *direct_argvec = 0;
1119 tree decltype_call = 0, call = 0;
1120 tree optype = TREE_TYPE (callop);
1121 tree fn_result = TREE_TYPE (optype);
1122
1123 tree thisarg = NULL_TREE;
1124 if (TREE_CODE (optype) == METHOD_TYPE)
1125 thisarg = build_int_cst (TREE_TYPE (DECL_ARGUMENTS (callop)), 0);
1126 if (generic_lambda_p)
1127 {
1128 ++processing_template_decl;
1129
1130 /* Prepare the dependent member call for the static member function
1131 '_FUN' and, potentially, prepare another call to be used in a decltype
1132 return expression for a deduced return call op to allow for simple
1133 implementation of the conversion operator. */
1134
1135 tree objfn;
1136 int nargs = list_length (DECL_ARGUMENTS (callop));
1137 if (thisarg)
1138 {
1139 tree instance = cp_build_fold_indirect_ref (thisarg);
1140 objfn = lookup_template_function (DECL_NAME (callop),
1141 DECL_TI_ARGS (callop));
1142 objfn = build_min (COMPONENT_REF, NULL_TREE,
1143 instance, objfn, NULL_TREE);
1144 --nargs;
1145 call = prepare_op_call (fn: objfn, nargs);
1146 }
1147 else
1148 objfn = callop;
1149
1150 if (type_uses_auto (fn_result))
1151 decltype_call = prepare_op_call (fn: objfn, nargs);
1152 }
1153 else if (thisarg)
1154 {
1155 direct_argvec = make_tree_vector ();
1156 direct_argvec->quick_push (obj: thisarg);
1157 }
1158
1159 /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in order to
1160 declare the static member function "_FUN" below. For each arg append to
1161 DIRECT_ARGVEC (for the non-template case) or populate the pre-allocated
1162 call args (for the template case). If a parameter pack is found, expand
1163 it, flagging it as PACK_EXPANSION_LOCAL_P for the body call. */
1164
1165 tree fn_args = NULL_TREE;
1166 {
1167 int ix = 0;
1168 tree src = FUNCTION_FIRST_USER_PARM (callop);
1169 tree tgt = NULL;
1170
1171 if (!thisarg && !decltype_call)
1172 src = NULL_TREE;
1173 while (src)
1174 {
1175 tree new_node = copy_node (src);
1176 /* We set DECL_CONTEXT of NEW_NODE to the statfn below.
1177 Notice this is creating a recursive type! */
1178
1179 /* Clear TREE_ADDRESSABLE on thunk arguments. */
1180 TREE_ADDRESSABLE (new_node) = 0;
1181
1182 if (!fn_args)
1183 fn_args = tgt = new_node;
1184 else
1185 {
1186 TREE_CHAIN (tgt) = new_node;
1187 tgt = new_node;
1188 }
1189
1190 mark_exp_read (tgt);
1191
1192 if (generic_lambda_p)
1193 {
1194 tree a = tgt;
1195 if (thisarg)
1196 {
1197 if (DECL_PACK_P (tgt))
1198 {
1199 a = make_pack_expansion (a);
1200 PACK_EXPANSION_LOCAL_P (a) = true;
1201 }
1202 CALL_EXPR_ARG (call, ix) = a;
1203 }
1204
1205 if (decltype_call)
1206 {
1207 /* Avoid capturing variables in this context. */
1208 ++cp_unevaluated_operand;
1209 CALL_EXPR_ARG (decltype_call, ix) = forward_parm (tgt);
1210 --cp_unevaluated_operand;
1211 }
1212
1213 ++ix;
1214 }
1215 else
1216 vec_safe_push (v&: direct_argvec, obj: tgt);
1217
1218 src = TREE_CHAIN (src);
1219 }
1220 }
1221
1222 if (generic_lambda_p)
1223 {
1224 if (decltype_call)
1225 {
1226 fn_result = finish_decltype_type
1227 (decltype_call, /*id_expression_or_member_access_p=*/false,
1228 tf_warning_or_error);
1229 }
1230 }
1231 else if (thisarg)
1232 {
1233 /* Don't warn on deprecated or unavailable lambda declarations, unless
1234 the lambda is actually called. */
1235 auto du = make_temp_override (var&: deprecated_state,
1236 overrider: UNAVAILABLE_DEPRECATED_SUPPRESS);
1237 call = build_call_a (callop, direct_argvec->length (),
1238 direct_argvec->address ());
1239 }
1240
1241 if (thisarg)
1242 {
1243 CALL_FROM_THUNK_P (call) = 1;
1244 SET_EXPR_LOCATION (call, UNKNOWN_LOCATION);
1245 }
1246
1247 tree stattype
1248 = build_function_type (fn_result, FUNCTION_FIRST_USER_PARMTYPE (callop));
1249 stattype = (cp_build_type_attribute_variant
1250 (stattype, TYPE_ATTRIBUTES (optype)));
1251 if (flag_noexcept_type
1252 && TYPE_NOTHROW_P (TREE_TYPE (callop)))
1253 stattype = build_exception_variant (stattype, noexcept_true_spec);
1254
1255 if (generic_lambda_p)
1256 --processing_template_decl;
1257
1258 /* First build up the conversion op. */
1259
1260 tree rettype = build_pointer_type (stattype);
1261 tree name = make_conv_op_name (rettype);
1262 tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST);
1263 tree fntype = build_method_type_directly (thistype, rettype, void_list_node);
1264 /* DR 1722: The conversion function should be noexcept. */
1265 fntype = build_exception_variant (fntype, noexcept_true_spec);
1266 tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
1267 SET_DECL_LANGUAGE (convfn, lang_cplusplus);
1268 tree fn = convfn;
1269 DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
1270 SET_DECL_ALIGN (fn, MINIMUM_METHOD_BOUNDARY);
1271 grokclassfn (type, fn, NO_SPECIAL);
1272 set_linkage_according_to_type (type, fn);
1273 rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
1274 DECL_IN_AGGR_P (fn) = 1;
1275 DECL_ARTIFICIAL (fn) = 1;
1276 DECL_NOT_REALLY_EXTERN (fn) = 1;
1277 DECL_DECLARED_INLINE_P (fn) = 1;
1278 DECL_DECLARED_CONSTEXPR_P (fn) = DECL_DECLARED_CONSTEXPR_P (callop);
1279 if (DECL_IMMEDIATE_FUNCTION_P (callop))
1280 SET_DECL_IMMEDIATE_FUNCTION_P (fn);
1281 DECL_ARGUMENTS (fn) = build_this_parm (fn, fntype, TYPE_QUAL_CONST);
1282
1283 if (nested_def)
1284 DECL_INTERFACE_KNOWN (fn) = 1;
1285
1286 if (generic_lambda_p)
1287 fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));
1288
1289 add_method (type, fn, false);
1290
1291 if (thisarg == NULL_TREE)
1292 {
1293 /* For static lambda, just return operator(). */
1294 if (nested)
1295 push_function_context ();
1296 else
1297 /* Still increment function_depth so that we don't GC in the
1298 middle of an expression. */
1299 ++function_depth;
1300
1301 /* Generate the body of the conversion op. */
1302
1303 start_preparsed_function (convfn, NULL_TREE,
1304 SF_PRE_PARSED | SF_INCLASS_INLINE);
1305 tree body = begin_function_body ();
1306 tree compound_stmt = begin_compound_stmt (0);
1307
1308 /* decl_needed_p needs to see that it's used. */
1309 TREE_USED (callop) = 1;
1310 finish_return_stmt (decay_conversion (callop, tf_warning_or_error));
1311
1312 finish_compound_stmt (compound_stmt);
1313 finish_function_body (body);
1314
1315 fn = finish_function (/*inline_p=*/true);
1316 if (!generic_lambda_p)
1317 expand_or_defer_fn (fn);
1318
1319 if (nested)
1320 pop_function_context ();
1321 else
1322 --function_depth;
1323 return;
1324 }
1325
1326 /* Generic thunk code fails for varargs; we'll complain in mark_used if
1327 the conversion op is used. */
1328 if (varargs_function_p (callop))
1329 {
1330 DECL_DELETED_FN (fn) = 1;
1331 return;
1332 }
1333
1334 /* Now build up the thunk to be returned. */
1335
1336 tree statfn = build_lang_decl (FUNCTION_DECL, fun_identifier, stattype);
1337 SET_DECL_LANGUAGE (statfn, lang_cplusplus);
1338 fn = statfn;
1339 DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
1340 grokclassfn (type, fn, NO_SPECIAL);
1341 set_linkage_according_to_type (type, fn);
1342 rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
1343 DECL_IN_AGGR_P (fn) = 1;
1344 DECL_ARTIFICIAL (fn) = 1;
1345 DECL_NOT_REALLY_EXTERN (fn) = 1;
1346 DECL_DECLARED_INLINE_P (fn) = 1;
1347 DECL_STATIC_FUNCTION_P (fn) = 1;
1348 DECL_DECLARED_CONSTEXPR_P (fn) = DECL_DECLARED_CONSTEXPR_P (callop);
1349 if (DECL_IMMEDIATE_FUNCTION_P (callop))
1350 SET_DECL_IMMEDIATE_FUNCTION_P (fn);
1351 DECL_ARGUMENTS (fn) = fn_args;
1352 for (tree arg = fn_args; arg; arg = DECL_CHAIN (arg))
1353 {
1354 /* Avoid duplicate -Wshadow warnings. */
1355 DECL_NAME (arg) = NULL_TREE;
1356 DECL_CONTEXT (arg) = fn;
1357 }
1358 if (nested_def)
1359 DECL_INTERFACE_KNOWN (fn) = 1;
1360
1361 if (generic_lambda_p)
1362 fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));
1363
1364 if (flag_sanitize & SANITIZE_NULL)
1365 /* Don't UBsan this function; we're deliberately calling op() with a null
1366 object argument. */
1367 add_no_sanitize_value (node: fn, flags: SANITIZE_UNDEFINED);
1368
1369 add_method (type, fn, false);
1370
1371 if (nested)
1372 push_function_context ();
1373 else
1374 /* Still increment function_depth so that we don't GC in the
1375 middle of an expression. */
1376 ++function_depth;
1377
1378 /* Generate the body of the thunk. */
1379
1380 start_preparsed_function (statfn, NULL_TREE,
1381 SF_PRE_PARSED | SF_INCLASS_INLINE);
1382 tree body = begin_function_body ();
1383 tree compound_stmt = begin_compound_stmt (0);
1384 if (!generic_lambda_p)
1385 {
1386 set_flags_from_callee (call);
1387 if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call)))
1388 call = build_cplus_new (TREE_TYPE (call), call, tf_warning_or_error);
1389 }
1390 call = convert_from_reference (call);
1391 finish_return_stmt (call);
1392
1393 finish_compound_stmt (compound_stmt);
1394 finish_function_body (body);
1395
1396 fn = finish_function (/*inline_p=*/true);
1397 if (!generic_lambda_p)
1398 expand_or_defer_fn (fn);
1399
1400 /* Generate the body of the conversion op. */
1401
1402 start_preparsed_function (convfn, NULL_TREE,
1403 SF_PRE_PARSED | SF_INCLASS_INLINE);
1404 body = begin_function_body ();
1405 compound_stmt = begin_compound_stmt (0);
1406
1407 /* decl_needed_p needs to see that it's used. */
1408 TREE_USED (statfn) = 1;
1409 finish_return_stmt (decay_conversion (statfn, tf_warning_or_error));
1410
1411 finish_compound_stmt (compound_stmt);
1412 finish_function_body (body);
1413
1414 fn = finish_function (/*inline_p=*/true);
1415 if (!generic_lambda_p)
1416 expand_or_defer_fn (fn);
1417
1418 if (nested)
1419 pop_function_context ();
1420 else
1421 --function_depth;
1422}
1423
1424/* True if FN is the static function "_FUN" that gets returned from the lambda
1425 conversion operator. */
1426
1427bool
1428lambda_static_thunk_p (tree fn)
1429{
1430 return (fn && TREE_CODE (fn) == FUNCTION_DECL
1431 && DECL_ARTIFICIAL (fn)
1432 && DECL_STATIC_FUNCTION_P (fn)
1433 && LAMBDA_TYPE_P (CP_DECL_CONTEXT (fn)));
1434}
1435
1436bool
1437call_from_lambda_thunk_p (tree call)
1438{
1439 return (CALL_FROM_THUNK_P (call)
1440 && lambda_static_thunk_p (fn: current_function_decl));
1441}
1442
1443/* Returns true iff VAL is a lambda-related declaration which should
1444 be ignored by unqualified lookup. */
1445
1446bool
1447is_lambda_ignored_entity (tree val)
1448{
1449 /* Look past normal, non-VLA capture proxies. */
1450 if (is_normal_capture_proxy (decl: val)
1451 && !variably_modified_type_p (TREE_TYPE (val), NULL_TREE))
1452 return true;
1453
1454 /* Always ignore lambda fields, their names are only for debugging. */
1455 if (TREE_CODE (val) == FIELD_DECL
1456 && CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (val)))
1457 return true;
1458
1459 /* None of the lookups that use qualify_lookup want the op() from the
1460 lambda; they want the one from the enclosing class. */
1461 if (tree fns = maybe_get_fns (val))
1462 if (LAMBDA_FUNCTION_P (OVL_FIRST (fns)))
1463 return true;
1464
1465 return false;
1466}
1467
1468/* Lambdas that appear in variable initializer or default argument
1469 scope get that in their mangling, so we need to record it. Also,
1470 multiple lambdas in the same scope may need a mangling
1471 discriminator. In ABI <= 17, there is a single per-scope sequence
1472 number. In ABI >= 18, there are per-scope per-signature sequence
1473 numbers. */
struct GTY(()) lambda_sig_count
{
  tree fn; // The lambda fn whose sig this is.
  unsigned count; // The next discriminator for this signature.
};
struct GTY(()) lambda_discriminator
{
  tree scope; // The enclosing extra scope (or NULL_TREE).
  unsigned nesting; // Inside a function, VAR_DECLs get the function
		    // as scope.  This counts that nesting.
  unsigned count;  // The per-scope counter.
  vec<lambda_sig_count, va_gc> *discriminators;  // Per-signature counters
};
// The current scope.
static GTY(()) lambda_discriminator lambda_scope;
// Stack of previous scopes.
static GTY(()) vec<lambda_discriminator, va_gc> *lambda_scope_stack;
1491
1492// Push DECL as lambda extra scope, also new discriminator counters.
1493
1494void
1495start_lambda_scope (tree decl)
1496{
1497 gcc_checking_assert (decl);
1498 if (current_function_decl && VAR_P (decl))
1499 // If we're inside a function, we ignore variable scope. Don't push.
1500 lambda_scope.nesting++;
1501 else
1502 {
1503 vec_safe_push (v&: lambda_scope_stack, obj: lambda_scope);
1504 lambda_scope.scope = decl;
1505 lambda_scope.nesting = 0;
1506 lambda_scope.count = 0;
1507 lambda_scope.discriminators = nullptr;
1508 }
1509}
1510
1511// Pop from the current lambda extra scope.
1512
1513void
1514finish_lambda_scope (void)
1515{
1516 if (!lambda_scope.nesting--)
1517 {
1518 lambda_scope = lambda_scope_stack->last ();
1519 lambda_scope_stack->pop ();
1520 }
1521}
1522
1523// Record the current lambda scope into LAMBDA
1524
1525void
1526record_lambda_scope (tree lambda)
1527{
1528 LAMBDA_EXPR_EXTRA_SCOPE (lambda) = lambda_scope.scope;
1529 if (lambda_scope.scope)
1530 {
1531 tree closure = LAMBDA_EXPR_CLOSURE (lambda);
1532 gcc_checking_assert (closure);
1533 maybe_key_decl (ctx: lambda_scope.scope, TYPE_NAME (closure));
1534 }
1535}
1536
1537// Compare lambda template heads TMPL_A and TMPL_B, used for both
1538// templated lambdas, and template template parameters of said lambda.
1539
1540static bool
1541compare_lambda_template_head (tree tmpl_a, tree tmpl_b)
1542{
1543 // We only need one level of template parms
1544 tree inner_a = INNERMOST_TEMPLATE_PARMS (DECL_TEMPLATE_PARMS (tmpl_a));
1545 tree inner_b = INNERMOST_TEMPLATE_PARMS (DECL_TEMPLATE_PARMS (tmpl_b));
1546
1547 // We only compare explicit template parms, ignoring trailing
1548 // synthetic ones.
1549 int len_a = TREE_VEC_LENGTH (inner_a);
1550 int len_b = TREE_VEC_LENGTH (inner_b);
1551
1552 for (int ix = 0, len = MAX (len_a, len_b); ix != len; ix++)
1553 {
1554 tree parm_a = NULL_TREE;
1555 if (ix < len_a)
1556 {
1557 parm_a = TREE_VEC_ELT (inner_a, ix);
1558 if (parm_a == error_mark_node)
1559 return false;
1560 parm_a = TREE_VALUE (parm_a);
1561 if (parm_a == error_mark_node)
1562 return false;
1563 if (DECL_VIRTUAL_P (parm_a))
1564 parm_a = NULL_TREE;
1565 }
1566
1567 tree parm_b = NULL_TREE;
1568 if (ix < len_b)
1569 {
1570 parm_b = TREE_VEC_ELT (inner_b, ix);
1571 if (parm_b == error_mark_node)
1572 return false;
1573 parm_b = TREE_VALUE (parm_b);
1574 if (parm_b == error_mark_node)
1575 return false;
1576 if (DECL_VIRTUAL_P (parm_b))
1577 parm_b = NULL_TREE;
1578 }
1579
1580 if (!parm_a && !parm_b)
1581 // we're done
1582 break;
1583
1584 if (!(parm_a && parm_b))
1585 return false;
1586
1587 if (TREE_CODE (parm_a) != TREE_CODE (parm_b))
1588 return false;
1589
1590 if (TREE_CODE (parm_a) == PARM_DECL)
1591 {
1592 if (TEMPLATE_PARM_PARAMETER_PACK (DECL_INITIAL (parm_a))
1593 != TEMPLATE_PARM_PARAMETER_PACK (DECL_INITIAL (parm_b)))
1594 return false;
1595
1596 if (!same_type_p (TREE_TYPE (parm_a), TREE_TYPE (parm_b)))
1597 return false;
1598 }
1599 else
1600 {
1601 if (TEMPLATE_TYPE_PARAMETER_PACK (TREE_TYPE (parm_a))
1602 != TEMPLATE_TYPE_PARAMETER_PACK (TREE_TYPE (parm_b)))
1603 return false;
1604
1605 if (TREE_CODE (parm_a) != TEMPLATE_DECL)
1606 gcc_checking_assert (TREE_CODE (parm_a) == TYPE_DECL);
1607 else if (!compare_lambda_template_head (tmpl_a: parm_a, tmpl_b: parm_b))
1608 return false;
1609 }
1610 }
1611
1612 return true;
1613}
1614
1615// Compare lambda signatures FN_A and FN_B, they may be TEMPLATE_DECLs too.
1616
1617static bool
1618compare_lambda_sig (tree fn_a, tree fn_b)
1619{
1620 if (TREE_CODE (fn_a) == TEMPLATE_DECL
1621 && TREE_CODE (fn_b) == TEMPLATE_DECL)
1622 {
1623 if (!compare_lambda_template_head (tmpl_a: fn_a, tmpl_b: fn_b))
1624 return false;
1625 fn_a = DECL_TEMPLATE_RESULT (fn_a);
1626 fn_b = DECL_TEMPLATE_RESULT (fn_b);
1627 }
1628 else if (TREE_CODE (fn_a) == TEMPLATE_DECL
1629 || TREE_CODE (fn_b) == TEMPLATE_DECL)
1630 return false;
1631
1632 if (fn_a == error_mark_node
1633 || fn_b == error_mark_node)
1634 return false;
1635
1636 for (tree args_a = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn_a))),
1637 args_b = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn_b)));
1638 args_a || args_b;
1639 args_a = TREE_CHAIN (args_a), args_b = TREE_CHAIN (args_b))
1640 {
1641 if (!args_a || !args_b)
1642 return false;
1643 // This check also deals with differing variadicness
1644 if (!same_type_p (TREE_VALUE (args_a), TREE_VALUE (args_b)))
1645 return false;
1646 }
1647
1648 return true;
1649}
1650
1651// Record the per-scope discriminator of LAMBDA. If the extra scope
1652// is empty, we must use the empty scope counter, which might not be
1653// the live one.
1654
1655void
1656record_lambda_scope_discriminator (tree lambda)
1657{
1658 auto *slot = (vec_safe_is_empty (v: lambda_scope_stack)
1659 || LAMBDA_EXPR_EXTRA_SCOPE (lambda)
1660 ? &lambda_scope : lambda_scope_stack->begin ());
1661 LAMBDA_EXPR_SCOPE_ONLY_DISCRIMINATOR (lambda) = slot->count++;
1662}
1663
1664// Record the per-scope per-signature discriminator of LAMBDA. If the
1665// extra scope is empty, we must use the empty scope counter, which
1666// might not be the live one.
1667
1668void
1669record_lambda_scope_sig_discriminator (tree lambda, tree fn)
1670{
1671 auto *slot = (vec_safe_is_empty (v: lambda_scope_stack)
1672 || LAMBDA_EXPR_EXTRA_SCOPE (lambda)
1673 ? &lambda_scope : lambda_scope_stack->begin ());
1674 gcc_checking_assert (LAMBDA_EXPR_EXTRA_SCOPE (lambda) == slot->scope);
1675
1676 // A linear search, we're not expecting this to be a big list, and
1677 // this avoids needing a signature hash function.
1678 lambda_sig_count *sig;
1679 if (unsigned ix = vec_safe_length (v: slot->discriminators))
1680 for (sig = slot->discriminators->begin (); ix--; sig++)
1681 if (compare_lambda_sig (fn_a: fn, fn_b: sig->fn))
1682 goto found;
1683 {
1684 lambda_sig_count init = {.fn: fn, .count: 0};
1685 sig = vec_safe_push (v&: slot->discriminators, obj: init);
1686 }
1687 found:
1688 LAMBDA_EXPR_SCOPE_SIG_DISCRIMINATOR (lambda) = sig->count++;
1689}
1690
1691tree
1692start_lambda_function (tree fco, tree lambda_expr)
1693{
1694 /* Let the front end know that we are going to be defining this
1695 function. */
1696 start_preparsed_function (fco,
1697 NULL_TREE,
1698 SF_PRE_PARSED | SF_INCLASS_INLINE);
1699
1700 tree body = begin_function_body ();
1701
1702 /* Push the proxies for any explicit captures. */
1703 for (tree cap = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr); cap;
1704 cap = TREE_CHAIN (cap))
1705 build_capture_proxy (TREE_PURPOSE (cap), TREE_VALUE (cap));
1706
1707 return body;
1708}
1709
1710/* Subroutine of prune_lambda_captures: CAP is a node in
1711 LAMBDA_EXPR_CAPTURE_LIST. Return the variable it captures for which we
1712 might optimize away the capture, or NULL_TREE if there is no such
1713 variable. */
1714
1715static tree
1716var_to_maybe_prune (tree cap)
1717{
1718 if (LAMBDA_CAPTURE_EXPLICIT_P (cap))
1719 /* Don't prune explicit captures. */
1720 return NULL_TREE;
1721
1722 tree mem = TREE_PURPOSE (cap);
1723 if (!DECL_P (mem) || !DECL_NORMAL_CAPTURE_P (mem))
1724 /* Packs and init-captures aren't captures of constant vars. */
1725 return NULL_TREE;
1726
1727 tree init = TREE_VALUE (cap);
1728 if (is_normal_capture_proxy (decl: init))
1729 init = DECL_CAPTURED_VARIABLE (init);
1730 if (decl_constant_var_p (init))
1731 return init;
1732
1733 return NULL_TREE;
1734}
1735
1736/* walk_tree helper for prune_lambda_captures: Remember which capture proxies
1737 for constant variables are actually used in the lambda body.
1738
1739 There will always be a DECL_EXPR for the capture proxy; remember it when we
1740 see it, but replace it with any other use. */
1741
1742static tree
1743mark_const_cap_r (tree *t, int *walk_subtrees, void *data)
1744{
1745 hash_map<tree,tree*> &const_vars = *(hash_map<tree,tree*>*)data;
1746
1747 tree var = NULL_TREE;
1748 if (TREE_CODE (*t) == DECL_EXPR)
1749 {
1750 tree decl = DECL_EXPR_DECL (*t);
1751 if (is_constant_capture_proxy (decl))
1752 {
1753 var = DECL_CAPTURED_VARIABLE (decl);
1754 *walk_subtrees = 0;
1755 }
1756 }
1757 else if (!location_wrapper_p (exp: *t) /* is_capture_proxy dislikes them. */
1758 && is_constant_capture_proxy (decl: *t))
1759 var = DECL_CAPTURED_VARIABLE (*t);
1760
1761 if (var)
1762 {
1763 tree *&slot = const_vars.get_or_insert (k: var);
1764 if (!slot || VAR_P (*t))
1765 slot = t;
1766 }
1767
1768 return NULL_TREE;
1769}
1770
1771/* We're at the end of processing a lambda; go back and remove any captures of
1772 constant variables for which we've folded away all uses. */
1773
1774static void
1775prune_lambda_captures (tree body)
1776{
1777 tree lam = current_lambda_expr ();
1778 if (!LAMBDA_EXPR_CAPTURE_OPTIMIZED (lam))
1779 /* No uses were optimized away. */
1780 return;
1781 if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) == CPLD_NONE)
1782 /* No default captures, and we don't prune explicit captures. */
1783 return;
1784 /* Don't bother pruning in a template, we'll prune at instantiation time. */
1785 if (dependent_type_p (TREE_TYPE (lam)))
1786 return;
1787
1788 hash_map<tree,tree*> const_vars;
1789
1790 cp_walk_tree_without_duplicates (&body, mark_const_cap_r, &const_vars);
1791
1792 tree *fieldp = &TYPE_FIELDS (LAMBDA_EXPR_CLOSURE (lam));
1793 for (tree *capp = &LAMBDA_EXPR_CAPTURE_LIST (lam); *capp; )
1794 {
1795 tree cap = *capp;
1796 if (tree var = var_to_maybe_prune (cap))
1797 {
1798 tree **use = const_vars.get (k: var);
1799 if (use && TREE_CODE (**use) == DECL_EXPR)
1800 {
1801 /* All uses of this capture were folded away, leaving only the
1802 proxy declaration. */
1803
1804 /* Splice the capture out of LAMBDA_EXPR_CAPTURE_LIST. */
1805 *capp = TREE_CHAIN (cap);
1806
1807 /* And out of TYPE_FIELDS. */
1808 tree field = TREE_PURPOSE (cap);
1809 while (*fieldp != field)
1810 fieldp = &DECL_CHAIN (*fieldp);
1811 *fieldp = DECL_CHAIN (*fieldp);
1812
1813 /* And remove the capture proxy declaration. */
1814 **use = void_node;
1815 continue;
1816 }
1817 }
1818
1819 capp = &TREE_CHAIN (cap);
1820 }
1821}
1822
// Finish the lambda function BODY: prune any captures whose uses were
// all folded away, then finish the function and generate code for it
// unless the call operator is a template.
1826
1827void
1828finish_lambda_function (tree body)
1829{
1830 finish_function_body (body);
1831
1832 prune_lambda_captures (body);
1833
1834 /* Finish the function and generate code for it if necessary. */
1835 tree fn = finish_function (/*inline_p=*/true);
1836
1837 /* Only expand if the call op is not a template. */
1838 if (!DECL_TEMPLATE_INFO (fn))
1839 expand_or_defer_fn (fn);
1840}
1841
1842#include "gt-cp-lambda.h"
1843

/* Source: gcc/cp/lambda.cc.  */